Imported Upstream version 2.5.1 (tag: upstream/2.5.1)
author    JinWang An <jinwang.an@samsung.com>
Tue, 5 Jan 2021 03:19:58 +0000 (12:19 +0900)
committer JinWang An <jinwang.an@samsung.com>
Tue, 5 Jan 2021 03:19:58 +0000 (12:19 +0900)
709 files changed:
.hgignore [deleted file]
.hgtags [deleted file]
CHANGES
Makefile
PKG-INFO
Pygments.egg-info/PKG-INFO
Pygments.egg-info/SOURCES.txt
README.rst
bitbucket-pipelines.yml [deleted file]
doc/_build/doctrees/docs/api.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/authors.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/changelog.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/cmdline.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/filterdevelopment.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/filters.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/formatterdevelopment.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/formatters.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/index.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/integrate.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/java.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/lexerdevelopment.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/lexers.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/moinmoin.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/plugins.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/quickstart.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/rstdirective.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/styles.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/tokens.doctree [new file with mode: 0644]
doc/_build/doctrees/docs/unicode.doctree [new file with mode: 0644]
doc/_build/doctrees/download.doctree [new file with mode: 0644]
doc/_build/doctrees/environment.pickle [new file with mode: 0644]
doc/_build/doctrees/faq.doctree [new file with mode: 0644]
doc/_build/doctrees/index.doctree [new file with mode: 0644]
doc/_build/doctrees/languages.doctree [new file with mode: 0644]
doc/_build/html/.buildinfo [new file with mode: 0644]
doc/_build/html/_sources/docs/api.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/authors.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/changelog.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/cmdline.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/filterdevelopment.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/filters.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/formatterdevelopment.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/formatters.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/index.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/integrate.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/java.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/lexerdevelopment.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/lexers.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/moinmoin.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/plugins.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/quickstart.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/rstdirective.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/styles.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/tokens.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/docs/unicode.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/download.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/faq.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/index.rst.txt [new file with mode: 0644]
doc/_build/html/_sources/languages.rst.txt [new file with mode: 0644]
doc/_build/html/_static/basic.css [new file with mode: 0644]
doc/_build/html/_static/bodybg.png [new file with mode: 0644]
doc/_build/html/_static/demo.css [new file with mode: 0644]
doc/_build/html/_static/demo.js [new file with mode: 0644]
doc/_build/html/_static/docbg.png [new file with mode: 0644]
doc/_build/html/_static/doctools.js [new file with mode: 0644]
doc/_build/html/_static/documentation_options.js [new file with mode: 0644]
doc/_build/html/_static/favicon.ico [new file with mode: 0644]
doc/_build/html/_static/file.png [new file with mode: 0644]
doc/_build/html/_static/github.png [new file with mode: 0644]
doc/_build/html/_static/jquery-3.4.1.js [new file with mode: 0644]
doc/_build/html/_static/jquery.js [new file with mode: 0644]
doc/_build/html/_static/language_data.js [new file with mode: 0644]
doc/_build/html/_static/listitem.png [new file with mode: 0644]
doc/_build/html/_static/logo.png [new file with mode: 0644]
doc/_build/html/_static/logo_new.png [new file with mode: 0644]
doc/_build/html/_static/logo_only.png [new file with mode: 0644]
doc/_build/html/_static/minus.png [new file with mode: 0644]
doc/_build/html/_static/plus.png [new file with mode: 0644]
doc/_build/html/_static/pocoo.png [new file with mode: 0644]
doc/_build/html/_static/pygments.css [new file with mode: 0644]
doc/_build/html/_static/pygments14.css [new file with mode: 0644]
doc/_build/html/_static/searchtools.js [new file with mode: 0644]
doc/_build/html/_static/spinner.gif [new file with mode: 0644]
doc/_build/html/_static/underscore-1.3.1.js [new file with mode: 0644]
doc/_build/html/_static/underscore.js [new file with mode: 0644]
doc/_build/html/docs/api.html [new file with mode: 0644]
doc/_build/html/docs/authors.html [new file with mode: 0644]
doc/_build/html/docs/changelog.html [new file with mode: 0644]
doc/_build/html/docs/cmdline.html [new file with mode: 0644]
doc/_build/html/docs/filterdevelopment.html [new file with mode: 0644]
doc/_build/html/docs/filters.html [new file with mode: 0644]
doc/_build/html/docs/formatterdevelopment.html [new file with mode: 0644]
doc/_build/html/docs/formatters.html [new file with mode: 0644]
doc/_build/html/docs/index.html [new file with mode: 0644]
doc/_build/html/docs/integrate.html [new file with mode: 0644]
doc/_build/html/docs/java.html [new file with mode: 0644]
doc/_build/html/docs/lexerdevelopment.html [new file with mode: 0644]
doc/_build/html/docs/lexers.html [new file with mode: 0644]
doc/_build/html/docs/moinmoin.html [new file with mode: 0644]
doc/_build/html/docs/plugins.html [new file with mode: 0644]
doc/_build/html/docs/quickstart.html [new file with mode: 0644]
doc/_build/html/docs/rstdirective.html [new file with mode: 0644]
doc/_build/html/docs/styles.html [new file with mode: 0644]
doc/_build/html/docs/tokens.html [new file with mode: 0644]
doc/_build/html/docs/unicode.html [new file with mode: 0644]
doc/_build/html/download.html [new file with mode: 0644]
doc/_build/html/faq.html [new file with mode: 0644]
doc/_build/html/genindex.html [new file with mode: 0644]
doc/_build/html/index.html [new file with mode: 0644]
doc/_build/html/languages.html [new file with mode: 0644]
doc/_build/html/objects.inv [new file with mode: 0644]
doc/_build/html/py-modindex.html [new file with mode: 0644]
doc/_build/html/search.html [new file with mode: 0644]
doc/_build/html/searchindex.js [new file with mode: 0644]
doc/_static/demo.css [new file with mode: 0644]
doc/_static/demo.js [new file with mode: 0644]
doc/_static/github.png [new file with mode: 0644]
doc/_static/spinner.gif [new file with mode: 0644]
doc/_templates/demo.html [new file with mode: 0644]
doc/_templates/demo_sidebar.html [new file with mode: 0644]
doc/_templates/index_with_try.html [new file with mode: 0644]
doc/_templates/indexsidebar.html
doc/conf.py
doc/docs/index.rst
doc/docs/lexerdevelopment.rst
doc/download.rst
doc/faq.rst
doc/index.rst
doc/languages.rst
external/autopygmentize
external/rst-directive.py
pygmentize [deleted file]
pygments/__init__.py
pygments/__main__.py [new file with mode: 0644]
pygments/cmdline.py
pygments/formatters/html.py
pygments/formatters/img.py
pygments/formatters/other.py
pygments/formatters/rtf.py
pygments/lexers/__init__.py
pygments/lexers/_asy_builtins.py
pygments/lexers/_cl_builtins.py
pygments/lexers/_cocoa_builtins.py
pygments/lexers/_csound_builtins.py
pygments/lexers/_lua_builtins.py
pygments/lexers/_mapping.py
pygments/lexers/_mql_builtins.py
pygments/lexers/asm.py
pygments/lexers/bibtex.py
pygments/lexers/c_cpp.py
pygments/lexers/c_like.py
pygments/lexers/configs.py
pygments/lexers/csound.py
pygments/lexers/data.py
pygments/lexers/dsls.py
pygments/lexers/dylan.py
pygments/lexers/elm.py
pygments/lexers/email.py [new file with mode: 0644]
pygments/lexers/erlang.py
pygments/lexers/freefem.py
pygments/lexers/haskell.py
pygments/lexers/haxe.py
pygments/lexers/hdl.py
pygments/lexers/html.py
pygments/lexers/javascript.py
pygments/lexers/jvm.py
pygments/lexers/lisp.py
pygments/lexers/matlab.py
pygments/lexers/mime.py [new file with mode: 0644]
pygments/lexers/ml.py
pygments/lexers/pascal.py
pygments/lexers/pawn.py
pygments/lexers/praat.py
pygments/lexers/prolog.py
pygments/lexers/python.py
pygments/lexers/rdf.py
pygments/lexers/resource.py
pygments/lexers/robotframework.py
pygments/lexers/ruby.py
pygments/lexers/rust.py
pygments/lexers/scdoc.py [new file with mode: 0644]
pygments/lexers/shell.py
pygments/lexers/slash.py
pygments/lexers/solidity.py [new file with mode: 0644]
pygments/lexers/special.py
pygments/lexers/sql.py
pygments/lexers/templates.py
pygments/lexers/teraterm.py
pygments/lexers/textfmts.py
pygments/lexers/typoscript.py
pygments/lexers/zig.py [new file with mode: 0644]
pygments/styles/__init__.py
pygments/styles/inkpot.py [new file with mode: 0644]
pygments/styles/monokai.py
requirements.txt [deleted file]
scripts/.release-checklist.swp [deleted file]
scripts/release-checklist
setup.py
tests/__init__.py [new file with mode: 0644]
tests/examplefiles/MIME_example.eml [new file with mode: 0644]
tests/examplefiles/example.eml [new file with mode: 0644]
tests/examplefiles/example.praat
tests/examplefiles/example.shex [new file with mode: 0644]
tests/examplefiles/example.zig [new file with mode: 0644]
tests/examplefiles/notmuch_example [new file with mode: 0644]
tests/examplefiles/output/99_bottles_of_beer.chpl
tests/examplefiles/output/AcidStateAdvanced.hs
tests/examplefiles/output/AlternatingGroup.mu
tests/examplefiles/output/BOM.js
tests/examplefiles/output/Blink.ino
tests/examplefiles/output/CPDictionary.j
tests/examplefiles/output/Charmci.ci
tests/examplefiles/output/Config.in.cache
tests/examplefiles/output/Constants.mo
tests/examplefiles/output/DancingSudoku.lhs
tests/examplefiles/output/Deflate.fs
tests/examplefiles/output/Error.pmod
tests/examplefiles/output/Errors.scala
tests/examplefiles/output/FakeFile.pike
tests/examplefiles/output/Get-CommandDefinitionHtml.ps1
tests/examplefiles/output/IPDispatchC.nc
tests/examplefiles/output/IPDispatchP.nc
tests/examplefiles/output/Intro.java
tests/examplefiles/output/MIME_example.eml [new file with mode: 0644]
tests/examplefiles/output/Makefile
tests/examplefiles/output/Object.st
tests/examplefiles/output/OrderedMap.hx
tests/examplefiles/output/RoleQ.pm6
tests/examplefiles/output/SmallCheck.hs
tests/examplefiles/output/Sorting.mod
tests/examplefiles/output/StdGeneric.icl
tests/examplefiles/output/Sudoku.lhs
tests/examplefiles/output/abnf_example1.abnf
tests/examplefiles/output/abnf_example2.abnf
tests/examplefiles/output/addressbook.proto
tests/examplefiles/output/ahcon.f
tests/examplefiles/output/all.nit
tests/examplefiles/output/antlr_ANTLRv3.g
tests/examplefiles/output/antlr_throws
tests/examplefiles/output/apache2.conf
tests/examplefiles/output/as3_test.as
tests/examplefiles/output/as3_test2.as
tests/examplefiles/output/as3_test3.as
tests/examplefiles/output/aspx-cs_example
tests/examplefiles/output/autoit_submit.au3
tests/examplefiles/output/automake.mk
tests/examplefiles/output/badcase.java
tests/examplefiles/output/bigtest.nsi
tests/examplefiles/output/bnf_example1.bnf
tests/examplefiles/output/boot-9.scm
tests/examplefiles/output/ca65_example
tests/examplefiles/output/capdl_example.cdl
tests/examplefiles/output/cbmbas_example
tests/examplefiles/output/cells.ps
tests/examplefiles/output/ceval.c
tests/examplefiles/output/char.scala
tests/examplefiles/output/cheetah_example.html
tests/examplefiles/output/classes.dylan
tests/examplefiles/output/clojure-weird-keywords.clj
tests/examplefiles/output/condensed_ruby.rb
tests/examplefiles/output/coq_RelationClasses
tests/examplefiles/output/core.cljs
tests/examplefiles/output/database.pytb
tests/examplefiles/output/de.MoinMoin.po
tests/examplefiles/output/demo.ahk
tests/examplefiles/output/demo.cfm
tests/examplefiles/output/demo.css.in
tests/examplefiles/output/demo.frt
tests/examplefiles/output/demo.hbs
tests/examplefiles/output/demo.js.in
tests/examplefiles/output/demo.thrift
tests/examplefiles/output/demo.xul.in
tests/examplefiles/output/django_sample.html+django
tests/examplefiles/output/docker.docker
tests/examplefiles/output/durexmania.aheui
tests/examplefiles/output/dwarf.cw
tests/examplefiles/output/eg_example1.eg
tests/examplefiles/output/ember.handlebars
tests/examplefiles/output/erl_session
tests/examplefiles/output/es6.js
tests/examplefiles/output/escape_semicolon.clj
tests/examplefiles/output/eval.rs
tests/examplefiles/output/evil_regex.js
tests/examplefiles/output/example.Rd
tests/examplefiles/output/example.als
tests/examplefiles/output/example.bat
tests/examplefiles/output/example.bbc
tests/examplefiles/output/example.bc
tests/examplefiles/output/example.boa
tests/examplefiles/output/example.bug
tests/examplefiles/output/example.c
tests/examplefiles/output/example.ceylon
tests/examplefiles/output/example.chai
tests/examplefiles/output/example.clay
tests/examplefiles/output/example.cls
tests/examplefiles/output/example.cob
tests/examplefiles/output/example.coffee
tests/examplefiles/output/example.cpp
tests/examplefiles/output/example.e
tests/examplefiles/output/example.elm
tests/examplefiles/output/example.eml [new file with mode: 0644]
tests/examplefiles/output/example.ezt
tests/examplefiles/output/example.f90
tests/examplefiles/output/example.feature
tests/examplefiles/output/example.fish
tests/examplefiles/output/example.flo
tests/examplefiles/output/example.gd
tests/examplefiles/output/example.gi
tests/examplefiles/output/example.golo
tests/examplefiles/output/example.groovy
tests/examplefiles/output/example.gs
tests/examplefiles/output/example.gst
tests/examplefiles/output/example.hlsl
tests/examplefiles/output/example.hs
tests/examplefiles/output/example.hx
tests/examplefiles/output/example.i6t
tests/examplefiles/output/example.i7x
tests/examplefiles/output/example.icn
tests/examplefiles/output/example.icon
tests/examplefiles/output/example.j
tests/examplefiles/output/example.jag
tests/examplefiles/output/example.java
tests/examplefiles/output/example.jcl
tests/examplefiles/output/example.jsgf
tests/examplefiles/output/example.jsonld
tests/examplefiles/output/example.juttle
tests/examplefiles/output/example.kal
tests/examplefiles/output/example.kt
tests/examplefiles/output/example.lagda
tests/examplefiles/output/example.liquid
tests/examplefiles/output/example.lua
tests/examplefiles/output/example.ma
tests/examplefiles/output/example.mac
tests/examplefiles/output/example.md
tests/examplefiles/output/example.monkey
tests/examplefiles/output/example.moo
tests/examplefiles/output/example.moon
tests/examplefiles/output/example.mq4
tests/examplefiles/output/example.mqh
tests/examplefiles/output/example.msc
tests/examplefiles/output/example.ng2
tests/examplefiles/output/example.ni
tests/examplefiles/output/example.nim
tests/examplefiles/output/example.nix
tests/examplefiles/output/example.ns2
tests/examplefiles/output/example.pas
tests/examplefiles/output/example.pcmk
tests/examplefiles/output/example.pony
tests/examplefiles/output/example.pp
tests/examplefiles/output/example.praat
tests/examplefiles/output/example.prg
tests/examplefiles/output/example.rb
tests/examplefiles/output/example.red
tests/examplefiles/output/example.reds
tests/examplefiles/output/example.reg
tests/examplefiles/output/example.rexx
tests/examplefiles/output/example.rhtml
tests/examplefiles/output/example.rkt
tests/examplefiles/output/example.rpf
tests/examplefiles/output/example.rts
tests/examplefiles/output/example.sbl
tests/examplefiles/output/example.scd
tests/examplefiles/output/example.sgf
tests/examplefiles/output/example.sh
tests/examplefiles/output/example.sh-session
tests/examplefiles/output/example.shell-session
tests/examplefiles/output/example.shex [new file with mode: 0644]
tests/examplefiles/output/example.sl
tests/examplefiles/output/example.slim
tests/examplefiles/output/example.sls
tests/examplefiles/output/example.sml
tests/examplefiles/output/example.snobol
tests/examplefiles/output/example.stan
tests/examplefiles/output/example.tap
tests/examplefiles/output/example.tasm
tests/examplefiles/output/example.tea
tests/examplefiles/output/example.tf
tests/examplefiles/output/example.thy
tests/examplefiles/output/example.todotxt
tests/examplefiles/output/example.toml
tests/examplefiles/output/example.ttl
tests/examplefiles/output/example.u
tests/examplefiles/output/example.u1
tests/examplefiles/output/example.vbs
tests/examplefiles/output/example.weechatlog
tests/examplefiles/output/example.whiley
tests/examplefiles/output/example.x10
tests/examplefiles/output/example.xhtml
tests/examplefiles/output/example.xtend
tests/examplefiles/output/example.xtm
tests/examplefiles/output/example.yaml
tests/examplefiles/output/example.zig [new file with mode: 0644]
tests/examplefiles/output/example1.cadl
tests/examplefiles/output/example2.aspx
tests/examplefiles/output/example2.cpp
tests/examplefiles/output/example2.msc
tests/examplefiles/output/exampleScript.cfc
tests/examplefiles/output/exampleTag.cfc
tests/examplefiles/output/example_coq.v
tests/examplefiles/output/example_elixir.ex
tests/examplefiles/output/example_file.fy
tests/examplefiles/output/ezhil_primefactors.n
tests/examplefiles/output/fennelview.fnl
tests/examplefiles/output/fibonacci.tokigun.aheui
tests/examplefiles/output/firefox.mak
tests/examplefiles/output/flatline_example
tests/examplefiles/output/flipflop.sv
tests/examplefiles/output/foo.sce
tests/examplefiles/output/format.ml
tests/examplefiles/output/freefem.edp
tests/examplefiles/output/fucked_up.rb
tests/examplefiles/output/function.mu
tests/examplefiles/output/functional.rst
tests/examplefiles/output/garcia-wachs.kk
tests/examplefiles/output/genclass.clj
tests/examplefiles/output/genshi_example.xml+genshi
tests/examplefiles/output/genshitext_example.genshitext
tests/examplefiles/output/glsl.frag
tests/examplefiles/output/glsl.vert
tests/examplefiles/output/grammar-test.p6
tests/examplefiles/output/guidance.smv
tests/examplefiles/output/hash_syntax.rb
tests/examplefiles/output/hello-world.puzzlet.aheui
tests/examplefiles/output/hello.at
tests/examplefiles/output/hello.golo
tests/examplefiles/output/hello.lsl
tests/examplefiles/output/hello.smali
tests/examplefiles/output/hello.sp
tests/examplefiles/output/hexdump_debugexe
tests/examplefiles/output/hexdump_hd
tests/examplefiles/output/hexdump_hexcat
tests/examplefiles/output/hexdump_hexdump
tests/examplefiles/output/hexdump_od
tests/examplefiles/output/hexdump_xxd
tests/examplefiles/output/html+php_faulty.php
tests/examplefiles/output/http_request_example
tests/examplefiles/output/http_response_example
tests/examplefiles/output/hybris_File.hy
tests/examplefiles/output/idl_sample.pro
tests/examplefiles/output/iex_example
tests/examplefiles/output/inet_pton6.dg
tests/examplefiles/output/inform6_example
tests/examplefiles/output/interp.scala
tests/examplefiles/output/intro.ik
tests/examplefiles/output/ints.php
tests/examplefiles/output/intsyn.fun
tests/examplefiles/output/intsyn.sig
tests/examplefiles/output/irb_heredoc
tests/examplefiles/output/irc.lsp
tests/examplefiles/output/java.properties
tests/examplefiles/output/jbst_example1.jbst
tests/examplefiles/output/jbst_example2.jbst
tests/examplefiles/output/jinjadesignerdoc.rst
tests/examplefiles/output/json.lasso
tests/examplefiles/output/json.lasso9
tests/examplefiles/output/language.hy
tests/examplefiles/output/lighttpd_config.conf
tests/examplefiles/output/limbo.b
tests/examplefiles/output/linecontinuation.py
tests/examplefiles/output/livescript-demo.ls
tests/examplefiles/output/logos_example.xm
tests/examplefiles/output/ltmain.sh
tests/examplefiles/output/main.cmake
tests/examplefiles/output/markdown.lsp
tests/examplefiles/output/matlab_noreturn
tests/examplefiles/output/matlab_sample
tests/examplefiles/output/matlabsession_sample.txt
tests/examplefiles/output/metagrammar.treetop
tests/examplefiles/output/minehunt.qml
tests/examplefiles/output/minimal.ns2
tests/examplefiles/output/modula2_test_cases.def
tests/examplefiles/output/moin_SyntaxReference.txt
tests/examplefiles/output/multiline_regexes.rb
tests/examplefiles/output/nanomsg.intr
tests/examplefiles/output/nasm_aoutso.asm
tests/examplefiles/output/nasm_objexe.asm
tests/examplefiles/output/nemerle_sample.n
tests/examplefiles/output/nginx_nginx.conf
tests/examplefiles/output/noexcept.cpp
tests/examplefiles/output/numbers.c
tests/examplefiles/output/objc_example.m
tests/examplefiles/output/openedge_example
tests/examplefiles/output/pacman.conf
tests/examplefiles/output/pacman.ijs
tests/examplefiles/output/pawn_example
tests/examplefiles/output/perl_misc
tests/examplefiles/output/perl_perl5db
tests/examplefiles/output/perl_regex-delims
tests/examplefiles/output/perlfunc.1
tests/examplefiles/output/phpMyAdmin.spec
tests/examplefiles/output/phpcomplete.vim
tests/examplefiles/output/pkgconfig_example.pc
tests/examplefiles/output/plain.bst
tests/examplefiles/output/pleac.in.rb
tests/examplefiles/output/postgresql_test.txt
tests/examplefiles/output/pppoe.applescript
tests/examplefiles/output/psql_session.txt
tests/examplefiles/output/py3_test.txt
tests/examplefiles/output/py3tb_test.py3tb
tests/examplefiles/output/pycon_ctrlc_traceback
tests/examplefiles/output/pycon_test.pycon
tests/examplefiles/output/pytb_test2.pytb
tests/examplefiles/output/pytb_test3.pytb
tests/examplefiles/output/python25-bsd.mak
tests/examplefiles/output/qbasic_example
tests/examplefiles/output/qsort.prolog
tests/examplefiles/output/r-console-transcript.Rout
tests/examplefiles/output/r6rs-comments.scm
tests/examplefiles/output/ragel-cpp_rlscan
tests/examplefiles/output/ragel-cpp_snippet
tests/examplefiles/output/regex.js
tests/examplefiles/output/resourcebundle_demo
tests/examplefiles/output/reversi.lsp
tests/examplefiles/output/rnc_example.rnc
tests/examplefiles/output/roboconf.graph
tests/examplefiles/output/roboconf.instances
tests/examplefiles/output/robotframework_test.txt
tests/examplefiles/output/rql-queries.rql
tests/examplefiles/output/ruby_func_def.rb
tests/examplefiles/output/sample.qvto
tests/examplefiles/output/scilab.sci
tests/examplefiles/output/scope.cirru
tests/examplefiles/output/session.dylan-console
tests/examplefiles/output/sibling.prolog
tests/examplefiles/output/simple.camkes
tests/examplefiles/output/simple.croc
tests/examplefiles/output/smarty_example.html
tests/examplefiles/output/source.lgt
tests/examplefiles/output/sources.list
tests/examplefiles/output/sparql.rq
tests/examplefiles/output/sphere.pov
tests/examplefiles/output/sqlite3.sqlite3-console
tests/examplefiles/output/squid.conf
tests/examplefiles/output/string.jl
tests/examplefiles/output/string_delimiters.d
tests/examplefiles/output/stripheredoc.sh
tests/examplefiles/output/subr.el
tests/examplefiles/output/swig_java.swg
tests/examplefiles/output/swig_std_vector.i
tests/examplefiles/output/tads3_example.t
tests/examplefiles/output/teraterm.ttl
tests/examplefiles/output/termcap
tests/examplefiles/output/terminfo
tests/examplefiles/output/test-3.0.xq
tests/examplefiles/output/test-exist-update.xq
tests/examplefiles/output/test.R
tests/examplefiles/output/test.adb
tests/examplefiles/output/test.adls
tests/examplefiles/output/test.agda
tests/examplefiles/output/test.apl
tests/examplefiles/output/test.asy
tests/examplefiles/output/test.awk
tests/examplefiles/output/test.bb
tests/examplefiles/output/test.bib
tests/examplefiles/output/test.bmx
tests/examplefiles/output/test.boo
tests/examplefiles/output/test.bpl
tests/examplefiles/output/test.bro
tests/examplefiles/output/test.cadl
tests/examplefiles/output/test.cr
tests/examplefiles/output/test.cs
tests/examplefiles/output/test.csd
tests/examplefiles/output/test.css
tests/examplefiles/output/test.cu
tests/examplefiles/output/test.cyp
tests/examplefiles/output/test.d
tests/examplefiles/output/test.dart
tests/examplefiles/output/test.dtd
tests/examplefiles/output/test.ebnf
tests/examplefiles/output/test.ec
tests/examplefiles/output/test.eh
tests/examplefiles/output/test.erl
tests/examplefiles/output/test.escript
tests/examplefiles/output/test.evoque
tests/examplefiles/output/test.fan
tests/examplefiles/output/test.flx
tests/examplefiles/output/test.gdc
tests/examplefiles/output/test.gradle
tests/examplefiles/output/test.groovy
tests/examplefiles/output/test.hsail
tests/examplefiles/output/test.html
tests/examplefiles/output/test.idr
tests/examplefiles/output/test.ini
tests/examplefiles/output/test.java
tests/examplefiles/output/test.jsp
tests/examplefiles/output/test.lean
tests/examplefiles/output/test.maql
tests/examplefiles/output/test.mask
tests/examplefiles/output/test.mod
tests/examplefiles/output/test.moo
tests/examplefiles/output/test.mt
tests/examplefiles/output/test.myt
tests/examplefiles/output/test.ncl
tests/examplefiles/output/test.nim
tests/examplefiles/output/test.odin
tests/examplefiles/output/test.opa
tests/examplefiles/output/test.orc
tests/examplefiles/output/test.p6
tests/examplefiles/output/test.pan
tests/examplefiles/output/test.pas
tests/examplefiles/output/test.php
tests/examplefiles/output/test.pig
tests/examplefiles/output/test.plot
tests/examplefiles/output/test.ps1
tests/examplefiles/output/test.psl
tests/examplefiles/output/test.pwn
tests/examplefiles/output/test.pypylog
tests/examplefiles/output/test.r3
tests/examplefiles/output/test.rb
tests/examplefiles/output/test.rhtml
tests/examplefiles/output/test.rsl
tests/examplefiles/output/test.scaml
tests/examplefiles/output/test.sco
tests/examplefiles/output/test.shen
tests/examplefiles/output/test.sil
tests/examplefiles/output/test.ssp
tests/examplefiles/output/test.swift
tests/examplefiles/output/test.tcsh
tests/examplefiles/output/test.vb
tests/examplefiles/output/test.vhdl
tests/examplefiles/output/test.xqy
tests/examplefiles/output/test.xsl
tests/examplefiles/output/test.zep
tests/examplefiles/output/test2.odin
tests/examplefiles/output/test2.pypylog
tests/examplefiles/output/test_basic.adls
tests/examplefiles/output/truncated.pytb
tests/examplefiles/output/tsql_example.sql
tests/examplefiles/output/twig_test
tests/examplefiles/output/type.lisp
tests/examplefiles/output/typescript_example
tests/examplefiles/output/typoscript_example
tests/examplefiles/output/underscore.coffee
tests/examplefiles/output/unicode.applescript
tests/examplefiles/output/unicode.go
tests/examplefiles/output/unicode.js
tests/examplefiles/output/unicodedoc.py
tests/examplefiles/output/unix-io.lid
tests/examplefiles/output/varnish.vcl
tests/examplefiles/output/vbnet_test.bas
tests/examplefiles/output/vctreestatus_hg
tests/examplefiles/output/vimrc
tests/examplefiles/output/vpath.mk
tests/examplefiles/output/wdiff_example1.wdiff
tests/examplefiles/output/wdiff_example3.wdiff
tests/examplefiles/output/webkit-transition.css
tests/examplefiles/output/while.pov
tests/examplefiles/output/wiki.factor
tests/examplefiles/output/xml_example
tests/examplefiles/output/xorg.conf
tests/examplefiles/output/yahalom.cpsa
tests/examplefiles/output/zmlrpc.f90
tests/examplefiles/scdoc_manual.scd [new file with mode: 0644]
tests/examplefiles/test.orc
tests/examplefiles/test.sco
tests/examplefiles/test.sol [new file with mode: 0644]
tests/examplefiles/test.zeek [new file with mode: 0644]
tests/run.py [deleted file]
tests/string_asserts.py [deleted file]
tests/support.py [deleted file]
tests/test_apache_conf.py [new file with mode: 0644]
tests/test_basic.py
tests/test_basic_api.py
tests/test_bibtex.py
tests/test_cfm.py
tests/test_clexer.py
tests/test_cmdline.py
tests/test_cpp.py
tests/test_crystal.py
tests/test_csound.py
tests/test_data.py
tests/test_examplefiles.py
tests/test_ezhil.py
tests/test_guessing.py [new file with mode: 0644]
tests/test_html_formatter.py
tests/test_inherit.py
tests/test_irc_formatter.py
tests/test_java.py
tests/test_javascript.py
tests/test_julia.py
tests/test_kotlin.py
tests/test_latex_formatter.py
tests/test_lexers_other.py
tests/test_markdown_lexer.py
tests/test_modeline.py
tests/test_objectiveclexer.py
tests/test_perllexer.py
tests/test_php.py
tests/test_praat.py
tests/test_properties.py
tests/test_python.py
tests/test_qbasiclexer.py
tests/test_r.py
tests/test_regexlexer.py
tests/test_regexopt.py
tests/test_rtf_formatter.py
tests/test_ruby.py
tests/test_shell.py
tests/test_smarty.py
tests/test_sql.py
tests/test_string_asserts.py [deleted file]
tests/test_terminal_formatter.py
tests/test_textfmts.py
tests/test_token.py
tests/test_unistring.py
tests/test_using_api.py
tests/test_util.py
tests/test_whiley.py
tox.ini [deleted file]

diff --git a/.hgignore b/.hgignore
deleted file mode 100644 (file)
index b564df8..0000000
--- a/.hgignore
+++ /dev/null
@@ -1,20 +0,0 @@
-syntax: glob
-*.egg
-*.pyc
-*.pyo
-.*.sw[op]
-.idea/
-.ropeproject
-.project
-.tags
-.tox
-.cache/
-Pygments.egg-info/*
-TAGS
-build/*
-dist/*
-doc/_build
-TAGS
-tests/.coverage
-tests/cover
-tests/examplefiles/output
diff --git a/.hgtags b/.hgtags
deleted file mode 100644 (file)
index 27ce430..0000000
--- a/.hgtags
+++ /dev/null
@@ -1,36 +0,0 @@
-634420aa4221cc1eb2b3753bd571166bd9e611d4 0.9
-942ecbb5c84ca5d57ae82f5697775973f4e12717 0.10
-63632d0340958d891176db20fe9a32a56abcd5ea 0.11
-13834ec94d2c5a90a68bc2c2a327abd962c486bc 0.11.1
-a5748745272afffd725570e068a560d46e28dc1f 1.0
-5a794a620dc711a219722a7af94d9d2e95cda26d 1.1
-dd81c35efd95292de4965153c66c8bbfe435f1c4 1.1.1
-e7691aa4f473a2cdaa2e5b7bfed8aec196719aca 0.5.1
-6f53364d63ddb8bd9532bb6ea402e3af05275b03 0.5
-11efe99c11e601071c3a77910b9fca769de66fbf 0.6
-99df0a7404d168b05626ffced6fd16edcf58c145 0.7
-d0b08fd569d3d9dafec4c045a7d8876442b3ef64 0.7.1
-1054522d1dda9c7899516ead3e65e5e363fdf30d 0.8
-066e56d8f5caa31e15386fff6f938bedd85a8732 0.8.1
-bae0833cae75e5a641abe3c4b430fa384cd9d258 1.2
-f6e5acee4f761696676e05a9112c91a5a5670b49 1.2.1
-580c5ce755486bc92c79c50f80cfc79924e15140 1.2.2
-c62867700c9e98cc2988c62f298ec54cee9b6927 1.3
-3a3846c2503db85bb70a243c8bc702629c4bce57 1.3.1
-8ad6d35dd2ab0530a1e2c088ab7fe0e00426b5f9 1.4
-eff3aee4abff2b72564ddfde77fcc82adbba52ad 1.5
-2c262bfc66b05a8aecc1109c3acc5b9447a5213c 1.6rc1
-7c962dcb484cb73394aec7f41709940340dc8a9c 1.6
-da509a68ea620bbb8ee3f5d5cf7761375d8f4451 2.0rc1
-ed3206a773e9cb90a0edeabee8ef6b56b5b9a53c 2.0
-94e1e056c92d97e3a54759f9216e8deff22efbdd 2.0.1
-142a870bf0f1822414649ae26f433b112a5c92d5 2.0.2
-34530db252d35d7ef57a8dbb9fce7bcc46f6ba6b 2.1
-2935c3a59672e8ae74ffb7ea66ea6567f49782f6 2.1.1
-8e7ebc56153cf899067333bff4f15ae98758a2e1 2.1.2
-88527db663dce0729c2cd6e3bc2f3c657ae39254 2.1.3
-cc861d7ba005b8bc91829210e7c4ee0eb6580434 2.2.0
-9c8cab961cde0a179b29e2de90946bf720a463f6 2.3.0
-f3f550c25fe0caba1cb725dd1994363c003d97f2 2.3.1
-88f01f09ab5e2e4689ac0daa287843e3820774fa 2.4.0
-d13e2dc5bb057b45c7998b68188ed57e63e23a73 2.4.1
diff --git a/CHANGES b/CHANGES
index 6822bf91029dcbab309712fb55f890a31fd62e4c..5b19155d0bb83df76b7a1efd4594f7f5716744f1 100644 (file)
--- a/CHANGES
+++ b/CHANGES
@@ -1,10 +1,82 @@
 Pygments changelog
 ==================
 
-Issue numbers refer to the tracker at
-<https://bitbucket.org/birkenfeld/pygments-main/issues>,
+Since 2.5.0, issue numbers refer to the tracker at
+<https://github.com/pygments/pygments/issues>,
 pull request numbers to the requests at
-<https://bitbucket.org/birkenfeld/pygments-main/pull-requests/merged>.
+<https://github.com/pygments/pygments/pulls>.
+
+Version 2.5.1
+-------------
+(released November 26, 2019)
+
+- This release fixes a packaging issue. No functional changes.
+
+Version 2.5.0
+-------------
+(released November 26, 2019)
+
+- Added lexers:
+
+  * Email (PR#1246)
+  * Erlang, Elixir shells (PR#823, #1521)
+  * Notmuch (PR#1264)
+  * `Scdoc <https://git.sr.ht/~sircmpwn/scdoc>`_ (PR#1268)
+  * `Solidity <https://solidity.readthedocs.io/>`_ (#1214)
+  * `Zeek <https://www.zeek.org>`_ (new name for Bro) (PR#1269)
+  * `Zig <https://ziglang.org/>`_ (PR#820)
+
+- Updated lexers:
+
+  * Apache2 Configuration (PR#1251)
+  * Bash sessions (#1253)
+  * CSound (PR#1250)
+  * Dart
+  * Dockerfile
+  * Emacs Lisp
+  * Handlebars (PR#773)
+  * Java (#1101, #987)
+  * Logtalk (PR#1261)
+  * Matlab (PR#1271)
+  * Praat (PR#1277)
+  * Python3 (PR#1255)
+  * Ruby
+  * YAML (#1528)
+  * Velocity
+
+- Added styles:
+
+  * Inkpot (PR#1276)
+
+- The ``PythonLexer`` class is now an alias for the former ``Python3Lexer``.
+  The old ``PythonLexer`` is available as ``Python2Lexer``.  The same change
+  has been made for the ``PythonTracebackLexer``.  The ``python3`` option for
+  the ``PythonConsoleLexer`` is now true by default.
+
+- Bump ``NasmLexer`` priority over ``TasmLexer`` for ``.asm`` files
+  (fixes #1326)
+- Default font in the ``ImageFormatter`` has been updated (#928, PR#1245)
+- Test suite switched to py.test, removed nose dependency (#1490)
+- Reduce ``TeraTerm`` lexer score -- it used to match nearly all languages
+  (#1256)
+- Treat ``Skylark``/``Starlark`` files as Python files (PR#1259)
+- Image formatter: actually respect ``line_number_separator`` option
+
+- Add LICENSE file to wheel builds
+- Agda: fix lambda highlighting
+- Dart: support ``@`` annotations
+- Dockerfile: accept ``FROM ... AS`` syntax
+- Emacs Lisp: add more string functions
+- GAS: accept registers in directive arguments
+- Java: make structural punctuation (braces, parens, colon, comma) ``Punctuation``, not ``Operator`` (#987)
+- Java: support ``var`` contextual keyword (#1101)
+- Matlab: Fix recognition of ``function`` keyword (PR#1271)
+- Python: recognize ``.jy`` filenames (#976)
+- Python: recognize ``f`` string prefix (#1156)
+- Ruby: support squiggly heredocs
+- Shell sessions: recognize Virtualenv prompt (PR#1266)
+- Velocity: support silent reference syntax
+
 
 Version 2.4.2
 -------------
@@ -82,6 +154,7 @@ Version 2.4.0
 - Updated Trove classifiers and ``pip`` requirements (PR#799)
 
 
+
 Version 2.3.1
 -------------
 (released Dec 16, 2018)
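A minimal sketch of the lexer renaming described in the 2.5.0 changelog entry above; it assumes a Pygments 2.5+ installation and uses only class names that appear in that entry::

    from pygments.lexers import PythonLexer, Python2Lexer, PythonConsoleLexer

    # Since 2.5.0, PythonLexer highlights Python 3 syntax (it is an alias for
    # the former Python3Lexer); legacy Python 2 sources use Python2Lexer.
    py3_lexer = PythonLexer()
    py2_lexer = Python2Lexer()

    # PythonConsoleLexer now defaults to python3=True; pass python3=False to
    # get the old Python 2 console behaviour.
    legacy_console = PythonConsoleLexer(python3=False)

    print(py3_lexer.name, "|", py2_lexer.name, "|", legacy_console.name)
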
diff --git a/Makefile b/Makefile
index 2fcb832f0d907d5f025cf7954ad5ad6ff5b92bba..ef0cbfd24876dd08856fd0098bf665daf6dac61e 100644 (file)
--- a/Makefile
+++ b/Makefile
@@ -49,14 +49,16 @@ pylint:
 reindent:
        @$(PYTHON) scripts/reindent.py -r -B .
 
+TEST = tests
+
 test:
-       @$(PYTHON) tests/run.py -d $(TEST)
+       @$(PYTHON) `which py.test` $(TEST)
 
 test-coverage:
-       @$(PYTHON) tests/run.py -d --with-coverage --cover-package=pygments --cover-erase $(TEST)
+       @$(PYTHON) `which py.test` --cov --cov-report=html --cov-report=term $(TEST)
 
 test-examplefiles:
-       nosetests tests/test_examplefiles.py
+       @$(PYTHON) `which py.test` tests.test_examplefiles
 
 tox-test:
        @tox -- $(TEST)
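The Makefile hunk above replaces the old nose-based ``tests/run.py`` runner with py.test. A roughly equivalent programmatic invocation of the new ``test-coverage`` target, assuming ``pytest`` and ``pytest-cov`` are installed, would be::

    import sys

    import pytest

    # Mirror "make test-coverage": run everything under tests/ and emit
    # HTML plus terminal coverage reports via the pytest-cov plugin.
    sys.exit(pytest.main(["--cov", "--cov-report=html", "--cov-report=term", "tests"]))
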
diff --git a/PKG-INFO b/PKG-INFO
index 17dbe8320552679b1ec7f563460861b5a6f00719..620cb365a55fab706017032528d88581fdc5362d 100644 (file)
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.2
 Name: Pygments
-Version: 2.4.2
+Version: 2.5.1
 Summary: Pygments is a syntax highlighting package written in Python.
 Home-page: http://pygments.org/
 Author: Georg Brandl
@@ -39,6 +39,9 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
 Classifier: Operating System :: OS Independent
 Classifier: Topic :: Text Processing :: Filters
 Classifier: Topic :: Utilities
diff --git a/Pygments.egg-info/PKG-INFO b/Pygments.egg-info/PKG-INFO
index 17dbe8320552679b1ec7f563460861b5a6f00719..620cb365a55fab706017032528d88581fdc5362d 100644 (file)
@@ -1,6 +1,6 @@
 Metadata-Version: 1.2
 Name: Pygments
-Version: 2.4.2
+Version: 2.5.1
 Summary: Pygments is a syntax highlighting package written in Python.
 Home-page: http://pygments.org/
 Author: Georg Brandl
@@ -39,6 +39,9 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
 Classifier: Operating System :: OS Independent
 Classifier: Topic :: Text Processing :: Filters
 Classifier: Topic :: Utilities
diff --git a/Pygments.egg-info/SOURCES.txt b/Pygments.egg-info/SOURCES.txt
index 965e419c361e1c8922ae25f7910ab8235f5df14d..a5ffa479675a1fb3b4c0c66bbb99ac8c3a257262 100644 (file)
@@ -1,5 +1,3 @@
-.hgignore
-.hgtags
 AUTHORS
 CHANGES
 LICENSE
@@ -7,12 +5,8 @@ MANIFEST.in
 Makefile
 README.rst
 TODO
-bitbucket-pipelines.yml
-pygmentize
-requirements.txt
 setup.cfg
 setup.py
-tox.ini
 Pygments.egg-info/PKG-INFO
 Pygments.egg-info/SOURCES.txt
 Pygments.egg-info/dependency_links.txt
@@ -27,10 +21,122 @@ doc/index.rst
 doc/languages.rst
 doc/make.bat
 doc/pygmentize.1
+doc/_build/doctrees/download.doctree
+doc/_build/doctrees/environment.pickle
+doc/_build/doctrees/faq.doctree
+doc/_build/doctrees/index.doctree
+doc/_build/doctrees/languages.doctree
+doc/_build/doctrees/docs/api.doctree
+doc/_build/doctrees/docs/authors.doctree
+doc/_build/doctrees/docs/changelog.doctree
+doc/_build/doctrees/docs/cmdline.doctree
+doc/_build/doctrees/docs/filterdevelopment.doctree
+doc/_build/doctrees/docs/filters.doctree
+doc/_build/doctrees/docs/formatterdevelopment.doctree
+doc/_build/doctrees/docs/formatters.doctree
+doc/_build/doctrees/docs/index.doctree
+doc/_build/doctrees/docs/integrate.doctree
+doc/_build/doctrees/docs/java.doctree
+doc/_build/doctrees/docs/lexerdevelopment.doctree
+doc/_build/doctrees/docs/lexers.doctree
+doc/_build/doctrees/docs/moinmoin.doctree
+doc/_build/doctrees/docs/plugins.doctree
+doc/_build/doctrees/docs/quickstart.doctree
+doc/_build/doctrees/docs/rstdirective.doctree
+doc/_build/doctrees/docs/styles.doctree
+doc/_build/doctrees/docs/tokens.doctree
+doc/_build/doctrees/docs/unicode.doctree
+doc/_build/html/.buildinfo
+doc/_build/html/download.html
+doc/_build/html/faq.html
+doc/_build/html/genindex.html
+doc/_build/html/index.html
+doc/_build/html/languages.html
+doc/_build/html/objects.inv
+doc/_build/html/py-modindex.html
+doc/_build/html/search.html
+doc/_build/html/searchindex.js
+doc/_build/html/_sources/download.rst.txt
+doc/_build/html/_sources/faq.rst.txt
+doc/_build/html/_sources/index.rst.txt
+doc/_build/html/_sources/languages.rst.txt
+doc/_build/html/_sources/docs/api.rst.txt
+doc/_build/html/_sources/docs/authors.rst.txt
+doc/_build/html/_sources/docs/changelog.rst.txt
+doc/_build/html/_sources/docs/cmdline.rst.txt
+doc/_build/html/_sources/docs/filterdevelopment.rst.txt
+doc/_build/html/_sources/docs/filters.rst.txt
+doc/_build/html/_sources/docs/formatterdevelopment.rst.txt
+doc/_build/html/_sources/docs/formatters.rst.txt
+doc/_build/html/_sources/docs/index.rst.txt
+doc/_build/html/_sources/docs/integrate.rst.txt
+doc/_build/html/_sources/docs/java.rst.txt
+doc/_build/html/_sources/docs/lexerdevelopment.rst.txt
+doc/_build/html/_sources/docs/lexers.rst.txt
+doc/_build/html/_sources/docs/moinmoin.rst.txt
+doc/_build/html/_sources/docs/plugins.rst.txt
+doc/_build/html/_sources/docs/quickstart.rst.txt
+doc/_build/html/_sources/docs/rstdirective.rst.txt
+doc/_build/html/_sources/docs/styles.rst.txt
+doc/_build/html/_sources/docs/tokens.rst.txt
+doc/_build/html/_sources/docs/unicode.rst.txt
+doc/_build/html/_static/basic.css
+doc/_build/html/_static/bodybg.png
+doc/_build/html/_static/demo.css
+doc/_build/html/_static/demo.js
+doc/_build/html/_static/docbg.png
+doc/_build/html/_static/doctools.js
+doc/_build/html/_static/documentation_options.js
+doc/_build/html/_static/favicon.ico
+doc/_build/html/_static/file.png
+doc/_build/html/_static/github.png
+doc/_build/html/_static/jquery-3.4.1.js
+doc/_build/html/_static/jquery.js
+doc/_build/html/_static/language_data.js
+doc/_build/html/_static/listitem.png
+doc/_build/html/_static/logo.png
+doc/_build/html/_static/logo_new.png
+doc/_build/html/_static/logo_only.png
+doc/_build/html/_static/minus.png
+doc/_build/html/_static/plus.png
+doc/_build/html/_static/pocoo.png
+doc/_build/html/_static/pygments.css
+doc/_build/html/_static/pygments14.css
+doc/_build/html/_static/searchtools.js
+doc/_build/html/_static/spinner.gif
+doc/_build/html/_static/underscore-1.3.1.js
+doc/_build/html/_static/underscore.js
+doc/_build/html/docs/api.html
+doc/_build/html/docs/authors.html
+doc/_build/html/docs/changelog.html
+doc/_build/html/docs/cmdline.html
+doc/_build/html/docs/filterdevelopment.html
+doc/_build/html/docs/filters.html
+doc/_build/html/docs/formatterdevelopment.html
+doc/_build/html/docs/formatters.html
+doc/_build/html/docs/index.html
+doc/_build/html/docs/integrate.html
+doc/_build/html/docs/java.html
+doc/_build/html/docs/lexerdevelopment.html
+doc/_build/html/docs/lexers.html
+doc/_build/html/docs/moinmoin.html
+doc/_build/html/docs/plugins.html
+doc/_build/html/docs/quickstart.html
+doc/_build/html/docs/rstdirective.html
+doc/_build/html/docs/styles.html
+doc/_build/html/docs/tokens.html
+doc/_build/html/docs/unicode.html
+doc/_static/demo.css
+doc/_static/demo.js
 doc/_static/favicon.ico
+doc/_static/github.png
 doc/_static/logo_new.png
 doc/_static/logo_only.png
+doc/_static/spinner.gif
+doc/_templates/demo.html
+doc/_templates/demo_sidebar.html
 doc/_templates/docssidebar.html
+doc/_templates/index_with_try.html
 doc/_templates/indexsidebar.html
 doc/_themes/pygments14/layout.html
 doc/_themes/pygments14/theme.conf
@@ -67,6 +173,7 @@ external/moin-parser.py
 external/pygments.bashcomp
 external/rst-directive.py
 pygments/__init__.py
+pygments/__main__.py
 pygments/cmdline.py
 pygments/console.py
 pygments/filter.py
@@ -147,6 +254,7 @@ pygments/lexers/dylan.py
 pygments/lexers/ecl.py
 pygments/lexers/eiffel.py
 pygments/lexers/elm.py
+pygments/lexers/email.py
 pygments/lexers/erlang.py
 pygments/lexers/esoteric.py
 pygments/lexers/ezhil.py
@@ -183,6 +291,7 @@ pygments/lexers/make.py
 pygments/lexers/markup.py
 pygments/lexers/math.py
 pygments/lexers/matlab.py
+pygments/lexers/mime.py
 pygments/lexers/ml.py
 pygments/lexers/modeling.py
 pygments/lexers/modula2.py
@@ -216,6 +325,7 @@ pygments/lexers/robotframework.py
 pygments/lexers/ruby.py
 pygments/lexers/rust.py
 pygments/lexers/sas.py
+pygments/lexers/scdoc.py
 pygments/lexers/scripting.py
 pygments/lexers/sgf.py
 pygments/lexers/shell.py
@@ -223,6 +333,7 @@ pygments/lexers/slash.py
 pygments/lexers/smalltalk.py
 pygments/lexers/smv.py
 pygments/lexers/snobol.py
+pygments/lexers/solidity.py
 pygments/lexers/special.py
 pygments/lexers/sql.py
 pygments/lexers/stata.py
@@ -246,6 +357,7 @@ pygments/lexers/webmisc.py
 pygments/lexers/whiley.py
 pygments/lexers/x10.py
 pygments/lexers/xorg.py
+pygments/lexers/zig.py
 pygments/styles/__init__.py
 pygments/styles/abap.py
 pygments/styles/algol.py
@@ -260,6 +372,7 @@ pygments/styles/emacs.py
 pygments/styles/friendly.py
 pygments/styles/fruity.py
 pygments/styles/igor.py
+pygments/styles/inkpot.py
 pygments/styles/lovelace.py
 pygments/styles/manni.py
 pygments/styles/monokai.py
@@ -280,7 +393,6 @@ pygments/styles/trac.py
 pygments/styles/vim.py
 pygments/styles/vs.py
 pygments/styles/xcode.py
-scripts/.release-checklist.swp
 scripts/check_sources.py
 scripts/debug_lexer.py
 scripts/detect_missing_analyse_text.py
@@ -290,9 +402,8 @@ scripts/get_vimkw.py
 scripts/pylintrc
 scripts/release-checklist
 scripts/vim2pygments.py
-tests/run.py
-tests/string_asserts.py
-tests/support.py
+tests/__init__.py
+tests/test_apache_conf.py
 tests/test_asm.py
 tests/test_basic.py
 tests/test_basic_api.py
@@ -306,6 +417,7 @@ tests/test_csound.py
 tests/test_data.py
 tests/test_examplefiles.py
 tests/test_ezhil.py
+tests/test_guessing.py
 tests/test_html_formatter.py
 tests/test_inherit.py
 tests/test_irc_formatter.py
@@ -332,7 +444,6 @@ tests/test_ruby.py
 tests/test_shell.py
 tests/test_smarty.py
 tests/test_sql.py
-tests/test_string_asserts.py
 tests/test_terminal_formatter.py
 tests/test_textfmts.py
 tests/test_token.py
@@ -366,6 +477,7 @@ tests/examplefiles/Get-CommandDefinitionHtml.ps1
 tests/examplefiles/IPDispatchC.nc
 tests/examplefiles/IPDispatchP.nc
 tests/examplefiles/Intro.java
+tests/examplefiles/MIME_example.eml
 tests/examplefiles/Makefile
 tests/examplefiles/Object.st
 tests/examplefiles/OrderedMap.hx
@@ -442,6 +554,7 @@ tests/examplefiles/example.coffee
 tests/examplefiles/example.cpp
 tests/examplefiles/example.e
 tests/examplefiles/example.elm
+tests/examplefiles/example.eml
 tests/examplefiles/example.ezt
 tests/examplefiles/example.f90
 tests/examplefiles/example.feature
@@ -507,6 +620,7 @@ tests/examplefiles/example.sgf
 tests/examplefiles/example.sh
 tests/examplefiles/example.sh-session
 tests/examplefiles/example.shell-session
+tests/examplefiles/example.shex
 tests/examplefiles/example.sl
 tests/examplefiles/example.slim
 tests/examplefiles/example.sls
@@ -531,6 +645,7 @@ tests/examplefiles/example.xhtml
 tests/examplefiles/example.xtend
 tests/examplefiles/example.xtm
 tests/examplefiles/example.yaml
+tests/examplefiles/example.zig
 tests/examplefiles/example1.cadl
 tests/examplefiles/example2.aspx
 tests/examplefiles/example2.cpp
@@ -618,6 +733,7 @@ tests/examplefiles/nasm_objexe.asm
 tests/examplefiles/nemerle_sample.n
 tests/examplefiles/nginx_nginx.conf
 tests/examplefiles/noexcept.cpp
+tests/examplefiles/notmuch_example
 tests/examplefiles/numbers.c
 tests/examplefiles/objc_example.m
 tests/examplefiles/openedge_example
@@ -659,6 +775,7 @@ tests/examplefiles/robotframework_test.txt
 tests/examplefiles/rql-queries.rql
 tests/examplefiles/ruby_func_def.rb
 tests/examplefiles/sample.qvto
+tests/examplefiles/scdoc_manual.scd
 tests/examplefiles/scilab.sci
 tests/examplefiles/scope.cirru
 tests/examplefiles/session.dylan-console
@@ -754,6 +871,7 @@ tests/examplefiles/test.scaml
 tests/examplefiles/test.sco
 tests/examplefiles/test.shen
 tests/examplefiles/test.sil
+tests/examplefiles/test.sol
 tests/examplefiles/test.ssp
 tests/examplefiles/test.swift
 tests/examplefiles/test.tcsh
@@ -761,6 +879,7 @@ tests/examplefiles/test.vb
 tests/examplefiles/test.vhdl
 tests/examplefiles/test.xqy
 tests/examplefiles/test.xsl
+tests/examplefiles/test.zeek
 tests/examplefiles/test.zep
 tests/examplefiles/test2.odin
 tests/examplefiles/test2.pypylog
@@ -809,6 +928,7 @@ tests/examplefiles/output/Get-CommandDefinitionHtml.ps1
 tests/examplefiles/output/IPDispatchC.nc
 tests/examplefiles/output/IPDispatchP.nc
 tests/examplefiles/output/Intro.java
+tests/examplefiles/output/MIME_example.eml
 tests/examplefiles/output/Makefile
 tests/examplefiles/output/Object.st
 tests/examplefiles/output/OrderedMap.hx
@@ -885,6 +1005,7 @@ tests/examplefiles/output/example.coffee
 tests/examplefiles/output/example.cpp
 tests/examplefiles/output/example.e
 tests/examplefiles/output/example.elm
+tests/examplefiles/output/example.eml
 tests/examplefiles/output/example.ezt
 tests/examplefiles/output/example.f90
 tests/examplefiles/output/example.feature
@@ -950,6 +1071,7 @@ tests/examplefiles/output/example.sgf
 tests/examplefiles/output/example.sh
 tests/examplefiles/output/example.sh-session
 tests/examplefiles/output/example.shell-session
+tests/examplefiles/output/example.shex
 tests/examplefiles/output/example.sl
 tests/examplefiles/output/example.slim
 tests/examplefiles/output/example.sls
@@ -974,6 +1096,7 @@ tests/examplefiles/output/example.xhtml
 tests/examplefiles/output/example.xtend
 tests/examplefiles/output/example.xtm
 tests/examplefiles/output/example.yaml
+tests/examplefiles/output/example.zig
 tests/examplefiles/output/example1.cadl
 tests/examplefiles/output/example2.aspx
 tests/examplefiles/output/example2.cpp
diff --git a/README.rst b/README.rst
index 350e242ee72b8cb024a1e5e93c57ed0402f0c0e5..ef0cbfd24876dd08856fd0098bf665daf6dac61e 100644 (file)
@@ -1,19 +1,20 @@
-README for Pygments
+Welcome to Pygments
 ===================
 
-This is the source of Pygments.  It is a generic syntax highlighter that
-supports over 300 languages and text formats, for use in code hosting, forums,
-wikis or other applications that need to prettify source code.
+This is the source of Pygments.  It is a **generic syntax highlighter** written
+in Python that supports over 300 languages and text formats, for use in code
+hosting, forums, wikis or other applications that need to prettify source code.
 
 Installing
 ----------
 
-... works as usual, use ``python setup.py install``.
+... works as usual, use ``pip install Pygments`` to get published versions,
+or ``python setup.py install`` to install from a checkout.
 
 Documentation
 -------------
 
-... can be found online at http://pygments.org/ or created by ::
+... can be found online at http://pygments.org/ or created with Sphinx by ::
 
    cd doc
    make html
@@ -21,19 +22,23 @@ Documentation
 Development
 -----------
 
-... takes place on `Bitbucket
-<https://bitbucket.org/birkenfeld/pygments-main>`_, where the Mercurial
-repository, tickets and pull requests can be viewed.
+... takes place on `GitHub <https://github.com/pygments/pygments>`_, where the
+Git repository, tickets and pull requests can be viewed.
 
-Continuous testing runs on drone.io:
+Continuous testing runs on GitHub workflows:
 
-.. image:: https://drone.io/bitbucket.org/birkenfeld/pygments-main/status.png
-   :target: https://drone.io/bitbucket.org/birkenfeld/pygments-main
+.. image:: https://github.com/pygments/pygments/workflows/Pygments/badge.svg
+   :target: https://github.com/pygments/pygments/actions?query=workflow%3APygments
 
 The authors
 -----------
 
-Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python.org*.
+Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python.org*
+and **Matthäus Chajdas**.
 
 Many lexers and fixes have been contributed by **Armin Ronacher**, the rest of
 the `Pocoo <http://dev.pocoo.org/>`_ team and **Tim Hatch**.
+
+The code is distributed under the BSD 2-clause license.  Contributors making pull
+requests must agree that they are able and willing to put their contributions
+under that license.
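The basic workflow that the rewritten README describes -- and that the ``api.rst`` hunk later in this diff documents in detail -- is to combine a lexer with a formatter through the high-level ``highlight()`` call. A short example using only that documented API (``HtmlFormatter`` is just one of the bundled formatters)::

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import get_lexer_for_filename

    code = 'print("Hello from Pygments 2.5.1")\n'

    # Pick a lexer from a filename pattern and render the snippet as HTML
    # with CSS classes; highlight() returns a string when outfile is None.
    lexer = get_lexer_for_filename("example.py")
    print(highlight(code, lexer, HtmlFormatter()))
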
diff --git a/bitbucket-pipelines.yml b/bitbucket-pipelines.yml
deleted file mode 100644 (file)
index 4a9f1b6..0000000
--- a/bitbucket-pipelines.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-pipelines:
-  default:
-    - step:
-        name: Test on Python 2.7
-        image: python:2.7
-        caches:
-          - pip
-        script:
-          - pip install -r requirements.txt
-          - tox -e py27
-    - step:
-        name: Test on Python 3.5
-        image: python:3.5
-        caches:
-          - pip
-        script:
-          - pip install -r requirements.txt
-          - tox -e py35
-    - step:
-        name: Test on Python 3.6
-        image: python:3.6
-        caches:
-          - pip
-        script:
-          - pip install -r requirements.txt
-          - tox -e py36
-    - step:
-        name: Test on Python 3.7
-        image: python:3.7
-        caches:
-          - pip
-        script:
-          - pip install -r requirements.txt
-          - tox -e py37
diff --git a/doc/_build/doctrees/docs/api.doctree b/doc/_build/doctrees/docs/api.doctree
new file mode 100644 (file)
index 0000000..730fb8b
Binary files /dev/null and b/doc/_build/doctrees/docs/api.doctree differ
diff --git a/doc/_build/doctrees/docs/authors.doctree b/doc/_build/doctrees/docs/authors.doctree
new file mode 100644 (file)
index 0000000..0d25fa8
Binary files /dev/null and b/doc/_build/doctrees/docs/authors.doctree differ
diff --git a/doc/_build/doctrees/docs/changelog.doctree b/doc/_build/doctrees/docs/changelog.doctree
new file mode 100644 (file)
index 0000000..6edfce7
Binary files /dev/null and b/doc/_build/doctrees/docs/changelog.doctree differ
diff --git a/doc/_build/doctrees/docs/cmdline.doctree b/doc/_build/doctrees/docs/cmdline.doctree
new file mode 100644 (file)
index 0000000..74544a0
Binary files /dev/null and b/doc/_build/doctrees/docs/cmdline.doctree differ
diff --git a/doc/_build/doctrees/docs/filterdevelopment.doctree b/doc/_build/doctrees/docs/filterdevelopment.doctree
new file mode 100644 (file)
index 0000000..36ac3ef
Binary files /dev/null and b/doc/_build/doctrees/docs/filterdevelopment.doctree differ
diff --git a/doc/_build/doctrees/docs/filters.doctree b/doc/_build/doctrees/docs/filters.doctree
new file mode 100644 (file)
index 0000000..e407baa
Binary files /dev/null and b/doc/_build/doctrees/docs/filters.doctree differ
diff --git a/doc/_build/doctrees/docs/formatterdevelopment.doctree b/doc/_build/doctrees/docs/formatterdevelopment.doctree
new file mode 100644 (file)
index 0000000..9feccf2
Binary files /dev/null and b/doc/_build/doctrees/docs/formatterdevelopment.doctree differ
diff --git a/doc/_build/doctrees/docs/formatters.doctree b/doc/_build/doctrees/docs/formatters.doctree
new file mode 100644 (file)
index 0000000..b128f6a
Binary files /dev/null and b/doc/_build/doctrees/docs/formatters.doctree differ
diff --git a/doc/_build/doctrees/docs/index.doctree b/doc/_build/doctrees/docs/index.doctree
new file mode 100644 (file)
index 0000000..5bf88f3
Binary files /dev/null and b/doc/_build/doctrees/docs/index.doctree differ
diff --git a/doc/_build/doctrees/docs/integrate.doctree b/doc/_build/doctrees/docs/integrate.doctree
new file mode 100644 (file)
index 0000000..b41e758
Binary files /dev/null and b/doc/_build/doctrees/docs/integrate.doctree differ
diff --git a/doc/_build/doctrees/docs/java.doctree b/doc/_build/doctrees/docs/java.doctree
new file mode 100644 (file)
index 0000000..4f61c1c
Binary files /dev/null and b/doc/_build/doctrees/docs/java.doctree differ
diff --git a/doc/_build/doctrees/docs/lexerdevelopment.doctree b/doc/_build/doctrees/docs/lexerdevelopment.doctree
new file mode 100644 (file)
index 0000000..f7d372e
Binary files /dev/null and b/doc/_build/doctrees/docs/lexerdevelopment.doctree differ
diff --git a/doc/_build/doctrees/docs/lexers.doctree b/doc/_build/doctrees/docs/lexers.doctree
new file mode 100644 (file)
index 0000000..ea4ab6d
Binary files /dev/null and b/doc/_build/doctrees/docs/lexers.doctree differ
diff --git a/doc/_build/doctrees/docs/moinmoin.doctree b/doc/_build/doctrees/docs/moinmoin.doctree
new file mode 100644 (file)
index 0000000..ef2922a
Binary files /dev/null and b/doc/_build/doctrees/docs/moinmoin.doctree differ
diff --git a/doc/_build/doctrees/docs/plugins.doctree b/doc/_build/doctrees/docs/plugins.doctree
new file mode 100644 (file)
index 0000000..c8af0ba
Binary files /dev/null and b/doc/_build/doctrees/docs/plugins.doctree differ
diff --git a/doc/_build/doctrees/docs/quickstart.doctree b/doc/_build/doctrees/docs/quickstart.doctree
new file mode 100644 (file)
index 0000000..82ac98c
Binary files /dev/null and b/doc/_build/doctrees/docs/quickstart.doctree differ
diff --git a/doc/_build/doctrees/docs/rstdirective.doctree b/doc/_build/doctrees/docs/rstdirective.doctree
new file mode 100644 (file)
index 0000000..1e80256
Binary files /dev/null and b/doc/_build/doctrees/docs/rstdirective.doctree differ
diff --git a/doc/_build/doctrees/docs/styles.doctree b/doc/_build/doctrees/docs/styles.doctree
new file mode 100644 (file)
index 0000000..3cf3380
Binary files /dev/null and b/doc/_build/doctrees/docs/styles.doctree differ
diff --git a/doc/_build/doctrees/docs/tokens.doctree b/doc/_build/doctrees/docs/tokens.doctree
new file mode 100644 (file)
index 0000000..6f4a1a7
Binary files /dev/null and b/doc/_build/doctrees/docs/tokens.doctree differ
diff --git a/doc/_build/doctrees/docs/unicode.doctree b/doc/_build/doctrees/docs/unicode.doctree
new file mode 100644 (file)
index 0000000..ec129cb
Binary files /dev/null and b/doc/_build/doctrees/docs/unicode.doctree differ
diff --git a/doc/_build/doctrees/download.doctree b/doc/_build/doctrees/download.doctree
new file mode 100644 (file)
index 0000000..6788fc3
Binary files /dev/null and b/doc/_build/doctrees/download.doctree differ
diff --git a/doc/_build/doctrees/environment.pickle b/doc/_build/doctrees/environment.pickle
new file mode 100644 (file)
index 0000000..f0a0d35
Binary files /dev/null and b/doc/_build/doctrees/environment.pickle differ
diff --git a/doc/_build/doctrees/faq.doctree b/doc/_build/doctrees/faq.doctree
new file mode 100644 (file)
index 0000000..6f5324c
Binary files /dev/null and b/doc/_build/doctrees/faq.doctree differ
diff --git a/doc/_build/doctrees/index.doctree b/doc/_build/doctrees/index.doctree
new file mode 100644 (file)
index 0000000..e35a485
Binary files /dev/null and b/doc/_build/doctrees/index.doctree differ
diff --git a/doc/_build/doctrees/languages.doctree b/doc/_build/doctrees/languages.doctree
new file mode 100644 (file)
index 0000000..1ef0561
Binary files /dev/null and b/doc/_build/doctrees/languages.doctree differ
diff --git a/doc/_build/html/.buildinfo b/doc/_build/html/.buildinfo
new file mode 100644 (file)
index 0000000..23b7cb1
--- /dev/null
@@ -0,0 +1,4 @@
+# Sphinx build info version 1
+# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
+config: c79bd2c7f6735b09a44f7dfcaa237099
+tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/doc/_build/html/_sources/docs/api.rst.txt b/doc/_build/html/_sources/docs/api.rst.txt
new file mode 100644 (file)
index 0000000..a6b242d
--- /dev/null
@@ -0,0 +1,354 @@
+.. -*- mode: rst -*-
+
+=====================
+The full Pygments API
+=====================
+
+This page describes the Pygments API.
+
+High-level API
+==============
+
+.. module:: pygments
+
+Functions from the :mod:`pygments` module:
+
+.. function:: lex(code, lexer)
+
+    Lex `code` with the `lexer` (must be a `Lexer` instance)
+    and return an iterable of tokens. Currently, this only calls
+    `lexer.get_tokens()`.
+
+.. function:: format(tokens, formatter, outfile=None)
+
+    Format a token stream (iterable of tokens) `tokens` with the
+    `formatter` (must be a `Formatter` instance). The result is
+    written to `outfile`, or if that is ``None``, returned as a
+    string.
+
+.. function:: highlight(code, lexer, formatter, outfile=None)
+
+    This is the most high-level highlighting function.
+    It combines `lex` and `format` in one function.
+
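+    A minimal usage sketch (the lexer and formatter chosen here are only
+    illustrative):
+
+    .. sourcecode:: python
+
+        from pygments import highlight
+        from pygments.lexers import PythonLexer
+        from pygments.formatters import HtmlFormatter
+
+        # no outfile is given, so the highlighted markup is returned as a string
+        html = highlight('print("Hello World")', PythonLexer(), HtmlFormatter())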
+
+.. module:: pygments.lexers
+
+Functions from :mod:`pygments.lexers`:
+
+.. function:: get_lexer_by_name(alias, **options)
+
+    Return an instance of a `Lexer` subclass that has `alias` in its
+    aliases list. The lexer is given the `options` at its
+    instantiation.
+
+    Will raise :exc:`pygments.util.ClassNotFound` if no lexer with that alias is
+    found.
+
+.. function:: get_lexer_for_filename(fn, **options)
+
+    Return a `Lexer` subclass instance that has a filename pattern
+    matching `fn`. The lexer is given the `options` at its
+    instantiation.
+
+    Will raise :exc:`pygments.util.ClassNotFound` if no lexer for that filename
+    is found.
+
+.. function:: get_lexer_for_mimetype(mime, **options)
+
+    Return a `Lexer` subclass instance that has `mime` in its mimetype
+    list. The lexer is given the `options` at its instantiation.
+
+    Will raise :exc:`pygments.util.ClassNotFound` if no lexer for that mimetype
+    is found.
+
+.. function:: load_lexer_from_file(filename, lexername="CustomLexer", **options)
+
+    Return a `Lexer` subclass instance loaded from the provided file, relative
+    to the current directory. The file is expected to contain a Lexer class
+    named `lexername` (by default, CustomLexer). Users should be very careful with
+    the input, because this method is equivalent to running eval on the input file.
+    The lexer is given the `options` at its instantiation.
+
+    :exc:`ClassNotFound` is raised if there are any errors loading the Lexer
+
+    .. versionadded:: 2.2
+
+.. function:: guess_lexer(text, **options)
+
+    Return a `Lexer` subclass instance that's guessed from the text in
+    `text`. For that, the :meth:`.analyse_text()` method of every known lexer
+    class is called with the text as argument, and the lexer which returned the
+    highest value will be instantiated and returned.
+
+    :exc:`pygments.util.ClassNotFound` is raised if no lexer thinks it can
+    handle the content.
+
+.. function:: guess_lexer_for_filename(filename, text, **options)
+
+    As :func:`guess_lexer()`, but only lexers which have a pattern in `filenames`
+    or `alias_filenames` that matches `filename` are taken into consideration.
+
+    :exc:`pygments.util.ClassNotFound` is raised if no lexer thinks it can
+    handle the content.
+
+.. function:: get_all_lexers()
+
+    Return an iterable over all registered lexers, yielding tuples in the
+    format::
+
+       (longname, tuple of aliases, tuple of filename patterns, tuple of mimetypes)
+
+    .. versionadded:: 0.6
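+
+    A short sketch that prints every registered lexer's long name and aliases:
+
+    .. sourcecode:: python
+
+        from pygments.lexers import get_all_lexers
+
+        for longname, aliases, patterns, mimetypes in get_all_lexers():
+            print(longname, list(aliases))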
+
+.. function:: find_lexer_class_by_name(alias)
+
+    Return the `Lexer` subclass that has `alias` in its aliases list, without
+    instantiating it.
+
+    Will raise :exc:`pygments.util.ClassNotFound` if no lexer with that alias is
+    found.
+
+    .. versionadded:: 2.2
+
+.. function:: find_lexer_class(name)
+
+    Return the `Lexer` subclass whose *name* attribute matches the given
+    *name* argument.
+
+
+.. module:: pygments.formatters
+
+Functions from :mod:`pygments.formatters`:
+
+.. function:: get_formatter_by_name(alias, **options)
+
+    Return an instance of a :class:`.Formatter` subclass that has `alias` in its
+    aliases list. The formatter is given the `options` at its instantiation.
+
+    Will raise :exc:`pygments.util.ClassNotFound` if no formatter with that
+    alias is found.
+
+.. function:: get_formatter_for_filename(fn, **options)
+
+    Return a :class:`.Formatter` subclass instance that has a filename pattern
+    matching `fn`. The formatter is given the `options` at its instantiation.
+
+    Will raise :exc:`pygments.util.ClassNotFound` if no formatter for that filename
+    is found.
+
+.. function:: load_formatter_from_file(filename, formattername="CustomFormatter", **options)
+
+    Return a `Formatter` subclass instance loaded from the provided file, relative
+    to the current directory. The file is expected to contain a Formatter class
+    named ``formattername`` (by default, CustomFormatter). Users should be very
+    careful with the input, because this method is equivalent to running eval
+    on the input file. The formatter is given the `options` at its instantiation.
+
+    :exc:`ClassNotFound` is raised if there are any errors loading the Formatter
+
+    .. versionadded:: 2.2
+
+.. module:: pygments.styles
+
+Functions from :mod:`pygments.styles`:
+
+.. function:: get_style_by_name(name)
+
+    Return a style class by its short name. The names of the builtin styles
+    are listed in :data:`pygments.styles.STYLE_MAP`.
+
+    Will raise :exc:`pygments.util.ClassNotFound` if no style of that name is
+    found.
+
+.. function:: get_all_styles()
+
+    Return an iterable over all registered styles, yielding their names.
+
+    .. versionadded:: 0.6
+
+
+.. module:: pygments.lexer
+
+Lexers
+======
+
+The base lexer class from which all lexers are derived is:
+
+.. class:: Lexer(**options)
+
+    The constructor takes a \*\*keywords dictionary of options.
+    Every subclass must first process its own options and then call
+    the `Lexer` constructor, since it processes the `stripnl`,
+    `stripall` and `tabsize` options.
+
+    An example looks like this:
+
+    .. sourcecode:: python
+
+        def __init__(self, **options):
+            self.compress = options.get('compress', '')
+            Lexer.__init__(self, **options)
+
+    As these options must all be specifiable as strings (due to the
+    command line usage), there are various utility functions
+    available to help with that; see `Option processing`_.
+
+    .. method:: get_tokens(text)
+
+        This method is the basic interface of a lexer. It is called by
+        the `highlight()` function. It must process the text and return an
+        iterable of ``(tokentype, value)`` pairs from `text`.
+
+        Normally, you don't need to override this method. The default
+        implementation processes the `stripnl`, `stripall` and `tabsize`
+        options and then yields all tokens from `get_tokens_unprocessed()`,
+        with the ``index`` dropped.
+
+    .. method:: get_tokens_unprocessed(text)
+
+        This method should process the text and return an iterable of
+        ``(index, tokentype, value)`` tuples where ``index`` is the starting
+        position of the token within the input text.
+
+        This method must be overridden by subclasses.
+
+    .. staticmethod:: analyse_text(text)
+
+        A static method which is called for lexer guessing. It should analyse
+        the text and return a float in the range from ``0.0`` to ``1.0``.
+        If it returns ``0.0``, the lexer will not be selected as the most
+        probable one, if it returns ``1.0``, it will be selected immediately.
+
+        .. note:: You don't have to add ``@staticmethod`` to the definition of
+                  this method, this will be taken care of by the Lexer's metaclass.
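+
+        A minimal sketch of a typical implementation (the heuristic itself is
+        made up for illustration):
+
+        .. sourcecode:: python
+
+            def analyse_text(text):
+                # hypothetical: treat a Python shebang as a strong hint
+                if text.startswith('#!/usr/bin/env python'):
+                    return 1.0
+                return 0.0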
+
+    For a list of known tokens have a look at the :doc:`tokens` page.
+
+    A lexer can also have the following attributes (all of them mandatory
+    except `alias_filenames`); they are used by the builtin lookup mechanism.
+
+    .. attribute:: name
+
+        Full name for the lexer, in human-readable form.
+
+    .. attribute:: aliases
+
+        A list of short, unique identifiers that can be used to look up
+        the lexer from a list, e.g. using `get_lexer_by_name()`.
+
+    .. attribute:: filenames
+
+        A list of `fnmatch` patterns that match filenames which contain
+        content for this lexer. The patterns in this list should be unique among
+        all lexers.
+
+    .. attribute:: alias_filenames
+
+        A list of `fnmatch` patterns that match filenames which may or may not
+        contain content for this lexer. This list is used by the
+        :func:`.guess_lexer_for_filename()` function, to determine which lexers
+        are then included in guessing the correct one. That means that
+        e.g. every lexer for HTML and a template language should include
+        ``*.html`` in this list.
+
+    .. attribute:: mimetypes
+
+        A list of MIME types for content that can be lexed with this
+        lexer.
+
+
+.. module:: pygments.formatter
+
+Formatters
+==========
+
+A formatter is derived from this class:
+
+
+.. class:: Formatter(**options)
+
+    As with lexers, this constructor processes options and then must call the
+    base class :meth:`__init__`.
+
+    The :class:`Formatter` class recognizes the options `style`, `full` and
+    `title`.  It is up to the formatter class whether it uses them.
+
+    .. method:: get_style_defs(arg='')
+
+        This method must return statements or declarations suitable to define
+        the current style for subsequent highlighted text (e.g. CSS classes
+        in the `HTMLFormatter`).
+
+        The optional argument `arg` can be used to modify the generation and
+        is formatter dependent (it is standardized because it can be given on
+        the command line).
+
+        This method is called by the ``-S`` :doc:`command-line option <cmdline>`,
+        the `arg` is then given by the ``-a`` option.
+
+    .. method:: format(tokensource, outfile)
+
+        This method must format the tokens from the `tokensource` iterable and
+        write the formatted version to the file object `outfile`.
+
+        Formatter options can control how exactly the tokens are converted.
+
+    .. versionadded:: 0.7
+       A formatter must have the following attributes that are used by the
+       builtin lookup mechanism.
+
+    .. attribute:: name
+
+        Full name for the formatter, in human-readable form.
+
+    .. attribute:: aliases
+
+        A list of short, unique identifiers that can be used to look up
+        the formatter from a list, e.g. using :func:`.get_formatter_by_name()`.
+
+    .. attribute:: filenames
+
+        A list of :mod:`fnmatch` patterns that match filenames for which this
+        formatter can produce output. The patterns in this list should be unique
+        among all formatters.
+
+
+.. module:: pygments.util
+
+Option processing
+=================
+
+The :mod:`pygments.util` module has some utility functions usable for option
+processing:
+
+.. exception:: OptionError
+
+    This exception will be raised by all option processing functions if
+    the type or value of the argument is not correct.
+
+.. function:: get_bool_opt(options, optname, default=None)
+
+    Interpret the key `optname` from the dictionary `options` as a boolean and
+    return it. Return `default` if `optname` is not in `options`.
+
+    The valid string values for ``True`` are ``1``, ``yes``, ``true`` and
+    ``on``, the ones for ``False`` are ``0``, ``no``, ``false`` and ``off``
+    (matched case-insensitively).
+
+.. function:: get_int_opt(options, optname, default=None)
+
+    As :func:`get_bool_opt`, but interpret the value as an integer.
+
+.. function:: get_list_opt(options, optname, default=None)
+
+    If the key `optname` from the dictionary `options` is a string,
+    split it at whitespace and return it. If it is already a list
+    or a tuple, it is returned as a list.
+
+.. function:: get_choice_opt(options, optname, allowed, default=None)
+
+    If the key `optname` from the dictionary is not in the sequence
+    `allowed`, raise an error, otherwise return it.
+
+    .. versionadded:: 0.8
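+
+A small combined sketch (the option names and values are made up for
+illustration):
+
+.. sourcecode:: python
+
+    from pygments.util import get_bool_opt, get_int_opt, get_list_opt
+
+    options = {'stripnl': 'yes', 'tabsize': '8', 'extra': 'foo bar'}
+    stripnl = get_bool_opt(options, 'stripnl', False)  # True
+    tabsize = get_int_opt(options, 'tabsize', 4)       # 8
+    extra = get_list_opt(options, 'extra', [])         # ['foo', 'bar']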
diff --git a/doc/_build/html/_sources/docs/authors.rst.txt b/doc/_build/html/_sources/docs/authors.rst.txt
new file mode 100644 (file)
index 0000000..f8373f0
--- /dev/null
@@ -0,0 +1,4 @@
+Full contributor list
+=====================
+
+.. include:: ../../AUTHORS
diff --git a/doc/_build/html/_sources/docs/changelog.rst.txt b/doc/_build/html/_sources/docs/changelog.rst.txt
new file mode 100644 (file)
index 0000000..f264cab
--- /dev/null
@@ -0,0 +1 @@
+.. include:: ../../CHANGES
diff --git a/doc/_build/html/_sources/docs/cmdline.rst.txt b/doc/_build/html/_sources/docs/cmdline.rst.txt
new file mode 100644 (file)
index 0000000..e4f94ea
--- /dev/null
@@ -0,0 +1,166 @@
+.. -*- mode: rst -*-
+
+======================
+Command Line Interface
+======================
+
+You can use Pygments from the shell, provided you installed the
+:program:`pygmentize` script::
+
+    $ pygmentize test.py
+    print "Hello World"
+
+will print the file test.py to standard output, using the Python lexer
+(inferred from the file name extension) and the terminal formatter (because
+you didn't give an explicit formatter name).
+
+If you want HTML output::
+
+    $ pygmentize -f html -l python -o test.html test.py
+
+As you can see, the ``-l`` option explicitly selects a lexer. As seen above, if you
+give an input file name and it has an extension that Pygments recognizes, you can
+omit this option.
+
+The ``-o`` option gives an output file name. If it is not given, output is
+written to stdout.
+
+The ``-f`` option selects a formatter (as with ``-l``, it can also be omitted
+if an output file name is given and has a supported extension).
+If no output file name is given and ``-f`` is omitted, the
+:class:`.TerminalFormatter` is used.
+
+The above command could therefore also be given as::
+
+    $ pygmentize -o test.html test.py
+
+To create a full HTML document, including line numbers and stylesheet (using the
+"emacs" style), highlighting the Python file ``test.py`` to ``test.html``::
+
+    $ pygmentize -O full,style=emacs -o test.html test.py
+
+
+Options and filters
+-------------------
+
+Lexer and formatter options can be given using the ``-O`` option::
+
+    $ pygmentize -f html -O style=colorful,linenos=1 -l python test.py
+
+Be sure to enclose the option string in quotes if it contains any special shell
+characters, such as spaces or expansion wildcards like ``*``. If an option
+expects a list value, separate the list entries with spaces (you'll have to
+quote the option value in this case too, so that the shell doesn't split it).
+
+Since the ``-O`` option argument is split at commas and expects the split values
+to be of the form ``name=value``, you can't give an option value that contains
+commas or equals signs.  Therefore, an option ``-P`` is provided (as of Pygments
+0.9) that works like ``-O`` but can only pass one option per ``-P``. Its value
+can then contain all characters::
+
+    $ pygmentize -P "heading=Pygments, the Python highlighter" ...
+
+Filters are added to the token stream using the ``-F`` option::
+
+    $ pygmentize -f html -l pascal -F keywordcase:case=upper main.pas
+
+As you see, options for the filter are given after a colon. As for ``-O``, the
+filter name and options must be one shell word, so there may not be any spaces
+around the colon.
+
+
+Generating styles
+-----------------
+
+Formatters normally don't output full style information.  For example, the HTML
+formatter by default only outputs ``<span>`` tags with ``class`` attributes.
+Therefore, there's a special ``-S`` option for generating style definitions.
+Usage is as follows::
+
+    $ pygmentize -f html -S colorful -a .syntax
+
+generates a CSS style sheet (because you selected the HTML formatter) for
+the "colorful" style prepending a ".syntax" selector to all style rules.
+
+For an explanation of what ``-a`` means for :doc:`a particular formatter
+<formatters>`, look for the `arg` argument for the formatter's
+:meth:`.get_style_defs()` method.
+
+
+Getting lexer names
+-------------------
+
+.. versionadded:: 1.0
+
+The ``-N`` option guesses a lexer name for a given filename, so that ::
+
+    $ pygmentize -N setup.py
+
+will print out ``python``.  It won't highlight anything yet.  If no specific
+lexer is known for that filename, ``text`` is printed.
+
+Custom Lexers and Formatters
+----------------------------
+
+.. versionadded:: 2.2
+
+The ``-x`` flag enables custom lexers and formatters to be loaded
+from files relative to the current directory. Create a file with a class named
+CustomLexer or CustomFormatter, then specify it on the command line::
+
+    $ pygmentize -l your_lexer.py -f your_formatter.py -x
+
+You can also specify the name of your class with a colon::
+
+    $ pygmentize -l your_lexer.py:SomeLexer -x
+
+For more information, see :doc:`the Pygments documentation on Lexer development
+<lexerdevelopment>`.
+
+Getting help
+------------
+
+The ``-L`` option lists lexers, formatters, styles and filters, along with
+their short names and supported file name extensions. If you want to see
+only one category, give it as an argument::
+
+    $ pygmentize -L filters
+
+will list only the installed filters.
+
+The ``-H`` option will give you detailed information (the same that can be found
+in this documentation) about a lexer, formatter or filter. Usage is as follows::
+
+    $ pygmentize -H formatter html
+
+will print the help for the HTML formatter, while ::
+
+    $ pygmentize -H lexer python
+
+will print the help for the Python lexer, etc.
+
+
+A note on encodings
+-------------------
+
+.. versionadded:: 0.9
+
+Pygments tries to be smart regarding encodings in the formatting process:
+
+* If you give an ``encoding`` option, it will be used as the input and
+  output encoding.
+
+* If you give an ``outencoding`` option, it will override ``encoding``
+  as the output encoding.
+
+* If you give an ``inencoding`` option, it will override ``encoding``
+  as the input encoding.
+
+* If you don't give an encoding and have given an output file, the default
+  encoding for lexer and formatter is the terminal encoding or the default
+  locale encoding of the system.  As a last resort, ``latin1`` is used (which
+  will pass through all non-ASCII characters).
+
+* If you don't give an encoding and haven't given an output file (that means
+  output is written to the console), the default encoding for lexer and
+  formatter is the terminal encoding (``sys.stdout.encoding``).
diff --git a/doc/_build/html/_sources/docs/filterdevelopment.rst.txt b/doc/_build/html/_sources/docs/filterdevelopment.rst.txt
new file mode 100644 (file)
index 0000000..fbcd0a0
--- /dev/null
@@ -0,0 +1,71 @@
+.. -*- mode: rst -*-
+
+=====================
+Write your own filter
+=====================
+
+.. versionadded:: 0.7
+
+Writing your own filters is very easy. All you have to do is subclass
+the `Filter` class and override the `filter` method. Additionally, a
+filter is instantiated with some keyword arguments that you can use to
+adjust its behavior.
+
+
+Subclassing Filters
+===================
+
+As an example, we write a filter that converts all `Name.Function` tokens
+to normal `Name` tokens to make the output less colorful.
+
+.. sourcecode:: python
+
+    from pygments.util import get_bool_opt
+    from pygments.token import Name
+    from pygments.filter import Filter
+
+    class UncolorFilter(Filter):
+
+        def __init__(self, **options):
+            Filter.__init__(self, **options)
+            self.class_too = get_bool_opt(options, 'classtoo')
+
+        def filter(self, lexer, stream):
+            for ttype, value in stream:
+                if ttype is Name.Function or (self.class_too and
+                                              ttype is Name.Class):
+                    ttype = Name
+                yield ttype, value
+
+Some notes on the `lexer` argument: it can be quite confusing since it doesn't
+need to be a lexer instance. If a filter was added by using the `add_filter()`
+method of a lexer, that lexer is registered for the filter. In that case
+`lexer` will refer to the lexer that has registered the filter. It *can* be used
+to access options passed to a lexer. Because it could be `None`, you always have
+to check for that case when you access it.
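+
+To use the filter, pass an instance of it to a lexer's `add_filter()` method
+(a minimal sketch, assuming the `UncolorFilter` class from above is in scope):
+
+.. sourcecode:: python
+
+    from pygments.lexers import PythonLexer
+
+    # UncolorFilter is the example class defined above
+    lexer = PythonLexer()
+    lexer.add_filter(UncolorFilter(classtoo=True))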
+
+
+Using a decorator
+=================
+
+You can also use the `simplefilter` decorator from the `pygments.filter` module:
+
+.. sourcecode:: python
+
+    from pygments.util import get_bool_opt
+    from pygments.token import Name
+    from pygments.filter import simplefilter
+
+
+    @simplefilter
+    def uncolor(self, lexer, stream, options):
+        class_too = get_bool_opt(options, 'classtoo')
+        for ttype, value in stream:
+            if ttype is Name.Function or (class_too and
+                                          ttype is Name.Class):
+                ttype = Name
+            yield ttype, value
+
+The decorator automatically subclasses an internal filter class and uses the
+decorated function as a method for filtering.  (That's why there is a `self`
+argument that you probably won't end up using in the method.)
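+
+The decorated `uncolor` object can then be instantiated with options and added
+to a lexer like any other filter (a minimal sketch, assuming the definition
+above is in scope):
+
+.. sourcecode:: python
+
+    from pygments.lexers import PythonLexer
+
+    # uncolor is the decorated filter defined above
+    lexer = PythonLexer()
+    lexer.add_filter(uncolor(classtoo=True))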
diff --git a/doc/_build/html/_sources/docs/filters.rst.txt b/doc/_build/html/_sources/docs/filters.rst.txt
new file mode 100644 (file)
index 0000000..ff2519a
--- /dev/null
@@ -0,0 +1,41 @@
+.. -*- mode: rst -*-
+
+=======
+Filters
+=======
+
+.. versionadded:: 0.7
+
+You can filter token streams coming from lexers to improve or annotate the
+output. For example, you can highlight special words in comments, convert
+keywords to upper or lowercase to enforce a style guide, etc.
+
+To apply a filter, you can use the `add_filter()` method of a lexer:
+
+.. sourcecode:: pycon
+
+    >>> from pygments.lexers import PythonLexer
+    >>> l = PythonLexer()
+    >>> # add a filter given by a string and options
+    >>> l.add_filter('codetagify', case='lower')
+    >>> l.filters
+    [<pygments.filters.CodeTagFilter object at 0xb785decc>]
+    >>> from pygments.filters import KeywordCaseFilter
+    >>> # or give an instance
+    >>> l.add_filter(KeywordCaseFilter(case='lower'))
+
+The `add_filter()` method takes keyword arguments which are forwarded to
+the constructor of the filter.
+
+To get a list of all registered filters by name, you can use the
+`get_all_filters()` function from the `pygments.filters` module that returns an
+iterable for all known filters.
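+
+For example (a minimal sketch; the printed names depend on your installation):
+
+.. sourcecode:: python
+
+    from pygments.filters import get_all_filters
+
+    print(sorted(get_all_filters()))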
+
+If you want to write your own filter, have a look at :doc:`Write your own filter
+<filterdevelopment>`.
+
+
+Builtin Filters
+===============
+
+.. pygmentsdoc:: filters
diff --git a/doc/_build/html/_sources/docs/formatterdevelopment.rst.txt b/doc/_build/html/_sources/docs/formatterdevelopment.rst.txt
new file mode 100644 (file)
index 0000000..2bfac05
--- /dev/null
@@ -0,0 +1,169 @@
+.. -*- mode: rst -*-
+
+========================
+Write your own formatter
+========================
+
+As well as creating :doc:`your own lexer <lexerdevelopment>`, writing a new
+formatter for Pygments is easy and straightforward.
+
+A formatter is a class that is initialized with some keyword arguments (the
+formatter options) and that must provide a `format()` method.
+Additionally, a formatter should provide a `get_style_defs()` method that
+returns the style definitions from the style in a form usable for the
+formatter's output format.
+
+
+Quickstart
+==========
+
+The most basic formatter shipped with Pygments is the `NullFormatter`. It just
+sends the value of a token to the output stream:
+
+.. sourcecode:: python
+
+    from pygments.formatter import Formatter
+
+    class NullFormatter(Formatter):
+        def format(self, tokensource, outfile):
+            for ttype, value in tokensource:
+                outfile.write(value)
+
+As you can see, the `format()` method is passed two parameters: `tokensource`
+and `outfile`. The first is an iterable of ``(token_type, value)`` tuples,
+the latter a file-like object with a `write()` method.
+
+Because this formatter is so basic, it doesn't override the `get_style_defs()`
+method.
+
+
+Styles
+======
+
+Styles aren't instantiated but their metaclass provides some class functions
+so that you can access the style definitions easily.
+
+Styles are iterable and yield tuples in the form ``(ttype, d)`` where `ttype`
+is a token and `d` is a dict with the following keys:
+
+``'color'``
+    Hexadecimal color value (eg: ``'ff0000'`` for red) or `None` if not
+    defined.
+
+``'bold'``
+    `True` if the value should be bold
+
+``'italic'``
+    `True` if the value should be italic
+
+``'underline'``
+    `True` if the value should be underlined
+
+``'bgcolor'``
+    Hexadecimal color value for the background (eg: ``'eeeeee'`` for light
+    gray) or `None` if not defined.
+
+``'border'``
+    Hexadecimal color value for the border (eg: ``'0000aa'`` for a dark
+    blue) or `None` for no border.
+
+Additional keys might appear in the future; formatters should ignore all keys
+they don't support.
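+
+For example, the parsed style data of a builtin style can be inspected like
+this (a minimal sketch using the "default" style):
+
+.. sourcecode:: python
+
+    from pygments.styles import get_style_by_name
+
+    style = get_style_by_name('default')
+    for ttype, d in style:
+        if d['color']:
+            print(ttype, d['color'])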
+
+
+HTML 3.2 Formatter
+==================
+
+For a more complex example, let's implement an HTML 3.2 formatter. We don't
+use CSS but inline markup (``<u>``, ``<font>``, etc.). Because this isn't good
+style, this formatter isn't in the standard library ;-)
+
+.. sourcecode:: python
+
+    from pygments.formatter import Formatter
+
+    class OldHtmlFormatter(Formatter):
+
+        def __init__(self, **options):
+            Formatter.__init__(self, **options)
+
+            # create a dict of (start, end) tuples that wrap the
+            # value of a token so that we can use it in the format
+            # method later
+            self.styles = {}
+
+            # we iterate over the `_styles` attribute of a style item
+            # that contains the parsed style values.
+            for token, style in self.style:
+                start = end = ''
+                # `style` is a dict with the keys described above;
+                # colors are specified in hex: 'RRGGBB'
+                if style['color']:
+                    start += '<font color="#%s">' % style['color']
+                    end = '</font>' + end
+                if style['bold']:
+                    start += '<b>'
+                    end = '</b>' + end
+                if style['italic']:
+                    start += '<i>'
+                    end = '</i>' + end
+                if style['underline']:
+                    start += '<u>'
+                    end = '</u>' + end
+                self.styles[token] = (start, end)
+
+        def format(self, tokensource, outfile):
+            # lastval is a string we use for caching
+            # because it's possible that a lexer yields a number
+            # of consecutive tokens with the same token type.
+            # to minimize the size of the generated html markup we
+            # try to join the values of same-type tokens here
+            lastval = ''
+            lasttype = None
+
+            # wrap the whole output with <pre>
+            outfile.write('<pre>')
+
+            for ttype, value in tokensource:
+                # if the token type doesn't exist in the stylemap
+                # we try it with the parent of the token type
+                # eg: parent of Token.Literal.String.Double is
+                # Token.Literal.String
+                while ttype not in self.styles:
+                    ttype = ttype.parent
+                if ttype == lasttype:
+                    # the current token type is the same as the last
+                    # iteration. cache it
+                    lastval += value
+                else:
+                    # not the same token as last iteration, but we
+                    # have some data in the buffer. wrap it with the
+                    # defined style and write it to the output file
+                    if lastval:
+                        stylebegin, styleend = self.styles[lasttype]
+                        outfile.write(stylebegin + lastval + styleend)
+                    # set lastval/lasttype to current values
+                    lastval = value
+                    lasttype = ttype
+
+            # if something is left in the buffer, write it to the
+            # output file, then close the opened <pre> tag
+            if lastval:
+                stylebegin, styleend = self.styles[lasttype]
+                outfile.write(stylebegin + lastval + styleend)
+            outfile.write('</pre>\n')
+
+The comments should explain it. Again, this formatter doesn't override the
+`get_style_defs()` method. If we had used CSS classes instead of
+inline HTML markup, we would need to generate the CSS first. For that
+purpose the `get_style_defs()` method exists:
+
+
+Generating Style Definitions
+============================
+
+Some formatters like the `LatexFormatter` and the `HtmlFormatter` don't
+output inline markup but reference either macros or CSS classes. Because
+the definitions of those are not part of the output, the `get_style_defs()`
+method exists. It is passed one parameter (whether and how it is used
+is up to the formatter) and has to return a string or ``None``.
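+
+For example, the builtin `HtmlFormatter` returns CSS rules from this method;
+the selector argument below is only an illustration:
+
+.. sourcecode:: python
+
+    from pygments.formatters import HtmlFormatter
+
+    print(HtmlFormatter(style='default').get_style_defs('.highlight'))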
diff --git a/doc/_build/html/_sources/docs/formatters.rst.txt b/doc/_build/html/_sources/docs/formatters.rst.txt
new file mode 100644 (file)
index 0000000..9e7074e
--- /dev/null
@@ -0,0 +1,48 @@
+.. -*- mode: rst -*-
+
+====================
+Available formatters
+====================
+
+This page lists all builtin formatters.
+
+Common options
+==============
+
+All formatters support these options:
+
+`encoding`
+    If given, must be an encoding name (such as ``"utf-8"``). This will
+    be used to convert the token strings (which are Unicode strings)
+    to byte strings in the output (default: ``None``).
+    It will also be written in an encoding declaration suitable for the
+    document format if the `full` option is given (e.g. a ``meta
+    content-type`` directive in HTML or an invocation of the `inputenc`
+    package in LaTeX).
+
+    If this is ``""`` or ``None``, Unicode strings will be written
+    to the output file, which most file-like objects do not support.
+    For example, `pygments.highlight()` will return a Unicode string if
+    called with no `outfile` argument and a formatter that has `encoding`
+    set to ``None`` because it uses a `StringIO.StringIO` object that
+    supports Unicode arguments to `write()`. Using a regular file object
+    wouldn't work.
+
+    .. versionadded:: 0.6
+
+`outencoding`
+    When using Pygments from the command line, any `encoding` option given is
+    passed to the lexer and the formatter. This is sometimes not desirable,
+    for example if you want to set the input encoding to ``"guess"``.
+    Therefore, `outencoding` has been introduced which overrides `encoding`
+    for the formatter if given.
+
+    .. versionadded:: 0.7
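+
+A minimal sketch of the `encoding` option at the API level (the file name is
+only an illustration):
+
+.. sourcecode:: python
+
+    from pygments import highlight
+    from pygments.lexers import PythonLexer
+    from pygments.formatters import HtmlFormatter
+
+    # with `encoding` set, the formatter produces byte strings
+    data = highlight('print("x")', PythonLexer(), HtmlFormatter(encoding='utf-8'))
+    with open('out.html', 'wb') as f:
+        f.write(data)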
+
+
+Formatter classes
+=================
+
+All these classes are importable from :mod:`pygments.formatters`.
+
+.. pygmentsdoc:: formatters
diff --git a/doc/_build/html/_sources/docs/index.rst.txt b/doc/_build/html/_sources/docs/index.rst.txt
new file mode 100644 (file)
index 0000000..4cf710f
--- /dev/null
@@ -0,0 +1,61 @@
+Pygments documentation
+======================
+
+**Starting with Pygments**
+
+.. toctree::
+   :maxdepth: 1
+
+   ../download
+   quickstart
+   cmdline
+
+**Builtin components**
+
+.. toctree::
+   :maxdepth: 1
+
+   lexers
+   filters
+   formatters
+   styles
+
+**Reference**
+
+.. toctree::
+   :maxdepth: 1
+
+   unicode
+   tokens
+   api
+
+**Hacking for Pygments**
+
+.. toctree::
+   :maxdepth: 1
+
+   lexerdevelopment
+   formatterdevelopment
+   filterdevelopment
+   plugins
+
+**Hints and tricks**
+
+.. toctree::
+   :maxdepth: 1
+
+   rstdirective
+   moinmoin
+   java
+   integrate
+
+**About Pygments**
+
+.. toctree::
+   :maxdepth: 1
+
+   changelog
+   authors
+
+If you find bugs or have suggestions for the documentation, please submit them
+on `GitHub <https://github.com/pygments/pygments>`_.
diff --git a/doc/_build/html/_sources/docs/integrate.rst.txt b/doc/_build/html/_sources/docs/integrate.rst.txt
new file mode 100644 (file)
index 0000000..77daaa4
--- /dev/null
@@ -0,0 +1,40 @@
+.. -*- mode: rst -*-
+
+===================================
+Using Pygments in various scenarios
+===================================
+
+Markdown
+--------
+
+Since Pygments 0.9, the distribution ships Markdown_ preprocessor sample code
+that uses Pygments to render source code in
+:file:`external/markdown-processor.py`.  You can copy and adapt it to your
+liking.
+
+.. _Markdown: http://www.freewisdom.org/projects/python-markdown/
+
+TextMate
+--------
+
+Antonio Cangiano has created a Pygments bundle for TextMate that allows you to
+colorize code via a simple menu option.  It can be found here_.
+
+.. _here: http://antoniocangiano.com/2008/10/28/pygments-textmate-bundle/
+
+Bash completion
+---------------
+
+The source distribution contains a file ``external/pygments.bashcomp`` that
+sets up completion for the ``pygmentize`` command in bash.
+
+Wrappers for other languages
+----------------------------
+
+These libraries provide Pygments highlighting for users of languages other
+than Python:
+
+* `pygments.rb <https://github.com/tmm1/pygments.rb>`_, a pygments wrapper for Ruby
+* `Clygments <https://github.com/bfontaine/clygments>`_, a pygments wrapper for
+  Clojure
+* `PHPygments <https://github.com/capynet/PHPygments>`_, a pygments wrapper for PHP
diff --git a/doc/_build/html/_sources/docs/java.rst.txt b/doc/_build/html/_sources/docs/java.rst.txt
new file mode 100644 (file)
index 0000000..f553463
--- /dev/null
@@ -0,0 +1,70 @@
+=====================
+Use Pygments in Java
+=====================
+
+Thanks to `Jython <http://www.jython.org>`_ it is possible to use Pygments in
+Java.
+
+This page is a simple tutorial to get an idea of how this works. You can
+then look at the `Jython documentation <http://www.jython.org/docs/>`_ for more
+advanced uses.
+
+Since version 1.5, Pygments is deployed on `Maven Central
+<http://repo1.maven.org/maven2/org/pygments/pygments/>`_ as a JAR, as is Jython,
+which makes it a lot easier to create a Java project.
+
+Here is an example of a `Maven <http://www.maven.org>`_ ``pom.xml`` file for a
+project running Pygments:
+
+.. sourcecode:: xml
+
+    <?xml version="1.0" encoding="UTF-8"?>
+
+    <project xmlns="http://maven.apache.org/POM/4.0.0"
+             xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+             xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+                                 http://maven.apache.org/maven-v4_0_0.xsd">
+      <modelVersion>4.0.0</modelVersion>
+      <groupId>example</groupId>
+      <artifactId>example</artifactId>
+      <version>1.0-SNAPSHOT</version>
+      <dependencies>
+        <dependency>
+          <groupId>org.python</groupId>
+          <artifactId>jython-standalone</artifactId>
+          <version>2.5.3</version>
+        </dependency>
+        <dependency>
+          <groupId>org.pygments</groupId>
+          <artifactId>pygments</artifactId>
+          <version>1.5</version>
+          <scope>runtime</scope>
+        </dependency>
+      </dependencies>
+    </project>
+
+The following Java example:
+
+.. sourcecode:: java
+
+    PythonInterpreter interpreter = new PythonInterpreter();
+
+    // Set a variable with the content you want to work with
+    interpreter.set("code", code);
+
+    // Simply use Pygments as you would in Python
+    interpreter.exec("from pygments import highlight\n"
+        + "from pygments.lexers import PythonLexer\n"
+        + "from pygments.formatters import HtmlFormatter\n"
+        + "\nresult = highlight(code, PythonLexer(), HtmlFormatter())");
+
+    // Get the result that has been set in a variable
+    System.out.println(interpreter.get("result", String.class));
+
+will print something like:
+
+.. sourcecode:: html
+
+    <div class="highlight">
+    <pre><span class="k">print</span> <span class="s">&quot;Hello World&quot;</span></pre>
+    </div>
diff --git a/doc/_build/html/_sources/docs/lexerdevelopment.rst.txt b/doc/_build/html/_sources/docs/lexerdevelopment.rst.txt
new file mode 100644 (file)
index 0000000..5b6813f
--- /dev/null
@@ -0,0 +1,728 @@
+.. -*- mode: rst -*-
+
+.. highlight:: python
+
+====================
+Write your own lexer
+====================
+
+If a lexer for your favorite language is missing in the Pygments package, you
+can easily write your own and extend Pygments.
+
+All you need can be found inside the :mod:`pygments.lexer` module.  As you can
+read in the :doc:`API documentation <api>`, a lexer is a class that is
+initialized with some keyword arguments (the lexer options) and that provides a
+:meth:`.get_tokens_unprocessed()` method which is given a string or unicode
+object with the data to lex.
+
+The :meth:`.get_tokens_unprocessed()` method must return an iterator or iterable
+containing tuples in the form ``(index, token, value)``.  Normally you don't
+need to do this since there are base lexers that do most of the work and that
+you can subclass.
+
+
+RegexLexer
+==========
+
+The lexer base class used by almost all of Pygments' lexers is the
+:class:`RegexLexer`.  This class allows you to define lexing rules in terms of
+*regular expressions* for different *states*.
+
+States are groups of regular expressions that are matched against the input
+string at the *current position*.  If one of these expressions matches, a
+corresponding action is performed (such as yielding a token with a specific
+type, or changing state), the current position is set to where the last match
+ended and the matching process continues with the first regex of the current
+state.
+
+Lexer states are kept on a stack: each time a new state is entered, the new
+state is pushed onto the stack.  The most basic lexers (like the `DiffLexer`)
+just need one state.
+
+Each state is defined as a list of tuples in the form (`regex`, `action`,
+`new_state`) where the last item is optional.  In the most basic form, `action`
+is a token type (like `Name.Builtin`).  That means: When `regex` matches, emit a
+token with the match text and type `tokentype` and push `new_state` on the state
+stack.  If the new state is ``'#pop'``, the topmost state is popped from the
+stack instead.  To pop more than one state, use ``'#pop:2'`` and so on.
+``'#push'`` is a synonym for pushing the current state on the stack.
+
+The following example shows the `DiffLexer` from the builtin lexers.  Note that
+it contains some additional attributes `name`, `aliases` and `filenames` which
+aren't required for a lexer.  They are used by the builtin lexer lookup
+functions. ::
+
+    from pygments.lexer import RegexLexer
+    from pygments.token import *
+
+    class DiffLexer(RegexLexer):
+        name = 'Diff'
+        aliases = ['diff']
+        filenames = ['*.diff']
+
+        tokens = {
+            'root': [
+                (r' .*\n', Text),
+                (r'\+.*\n', Generic.Inserted),
+                (r'-.*\n', Generic.Deleted),
+                (r'@.*\n', Generic.Subheading),
+                (r'Index.*\n', Generic.Heading),
+                (r'=.*\n', Generic.Heading),
+                (r'.*\n', Text),
+            ]
+        }
+
+As you can see this lexer only uses one state.  When the lexer starts scanning
+the text, it first checks if the current character is a space.  If this is true
+it scans everything until newline and returns the data as a `Text` token (which
+is the "no special highlighting" token).
+
+If this rule doesn't match, it checks if the current char is a plus sign.  And
+so on.
+
+If no rule matches at the current position, the current char is emitted as an
+`Error` token that indicates a lexing error, and the position is increased by
+one.
+
+
+Adding and testing a new lexer
+==============================
+
+The easiest way to use a new lexer is to use Pygments' support for loading
+the lexer from a file relative to your current directory.
+
+First, change the name of your lexer class to CustomLexer:
+
+.. code-block:: python
+
+    from pygments.lexer import RegexLexer
+    from pygments.token import *
+
+    class CustomLexer(RegexLexer):
+        """All your lexer code goes here!"""
+
+Then you can load the lexer from the command line with the additional
+flag ``-x``:
+
+.. code-block:: console
+
+    $ pygmentize -l your_lexer_file.py -x
+
+To specify a class name other than CustomLexer, append it with a colon:
+
+.. code-block:: console
+
+    $ pygmentize -l your_lexer.py:SomeLexer -x
+
+Or, using the Python API:
+
+.. code-block:: python
+
+    # For a lexer named CustomLexer
+    your_lexer = load_lexer_from_file(filename, **options)
+
+    # For a lexer named MyNewLexer
+    your_named_lexer = load_lexer_from_file(filename, "MyNewLexer", **options)
+
+When loading custom lexers and formatters, be extremely careful to use only
+trusted files; Pygments will perform the equivalent of ``eval`` on them.
+
+If you only want to use your lexer with the Pygments API, you can import and
+instantiate the lexer yourself, then pass it to :func:`pygments.highlight`.
+
+To prepare your new lexer for inclusion in the Pygments distribution, so that it
+will be found when passing filenames or lexer aliases from the command line, you
+have to perform the following steps.
+
+First, change to the current directory containing the Pygments source code.  You
+will need to have either an unpacked source tarball, or (preferably) a copy
+cloned from GitHub.
+
+.. code-block:: console
+
+    $ cd .../pygments-main
+
+Select a matching module under ``pygments/lexers``, or create a new module for
+your lexer class.
+
+Next, make sure the lexer is known from outside of the module.  All modules in
+the ``pygments.lexers`` package specify ``__all__``. For example,
+``esoteric.py`` sets::
+
+    __all__ = ['BrainfuckLexer', 'BefungeLexer', ...]
+
+Add the name of your lexer class to this list (or create the list if your lexer
+is the only class in the module).
+
+Finally the lexer can be made publicly known by rebuilding the lexer mapping:
+
+.. code-block:: console
+
+    $ make mapfiles
+
+To test the new lexer, store an example file with the proper extension in
+``tests/examplefiles``.  For example, to test your ``DiffLexer``, add a
+``tests/examplefiles/example.diff`` containing a sample diff output.
+
+Now you can use pygmentize to render your example to HTML:
+
+.. code-block:: console
+
+    $ ./pygmentize -O full -f html -o /tmp/example.html tests/examplefiles/example.diff
+
+Note that this explicitly calls the ``pygmentize`` in the current directory
+by preceding it with ``./``. This ensures your modifications are used.
+Otherwise a possibly already installed, unmodified version without your new
+lexer would have been called from the system search path (``$PATH``).
+
+To view the result, open ``/tmp/example.html`` in your browser.
+
+Once the example renders as expected, you should run the complete test suite:
+
+.. code-block:: console
+
+    $ make test
+
+It also tests that your lexer fulfills the lexer API and certain invariants,
+such as that the concatenation of all token text is the same as the input text.
+
+
+Regex Flags
+===========
+
+You can either define regex flags locally in the regex (``r'(?x)foo bar'``) or
+globally by adding a `flags` attribute to your lexer class.  If no attribute is
+defined, it defaults to `re.MULTILINE`.  For more information about regular
+expression flags see the page about `regular expressions`_ in the Python
+documentation.
+
+.. _regular expressions: http://docs.python.org/library/re.html#regular-expression-syntax
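+
+For example, a lexer can make all of its rules case-insensitive by setting the
+attribute once (a minimal sketch; the lexer itself is made up)::
+
+    import re
+
+    from pygments.lexer import RegexLexer
+    from pygments.token import Keyword, Text
+
+    class CaseInsensitiveLexer(RegexLexer):
+        # IGNORECASE makes r'foo' also match 'FOO' and 'Foo'
+        flags = re.MULTILINE | re.IGNORECASE
+
+        tokens = {
+            'root': [
+                (r'foo', Keyword),
+                (r'\s+', Text),
+                (r'.', Text),
+            ]
+        }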
+
+
+Scanning multiple tokens at once
+================================
+
+So far, the `action` element in the rule tuple of regex, action and state has
+been a single token type.  Now we look at the first of several other possible
+values.
+
+Here is a more complex lexer that highlights INI files.  INI files consist of
+sections, comments and ``key = value`` pairs::
+
+    from pygments.lexer import RegexLexer, bygroups
+    from pygments.token import *
+
+    class IniLexer(RegexLexer):
+        name = 'INI'
+        aliases = ['ini', 'cfg']
+        filenames = ['*.ini', '*.cfg']
+
+        tokens = {
+            'root': [
+                (r'\s+', Text),
+                (r';.*?$', Comment),
+                (r'\[.*?\]$', Keyword),
+                (r'(.*?)(\s*)(=)(\s*)(.*?)$',
+                 bygroups(Name.Attribute, Text, Operator, Text, String))
+            ]
+        }
+
+The lexer first looks for whitespace, comments and section names.  Later it
+looks for a line that looks like a key, value pair, separated by an ``'='``
+sign, and optional whitespace.
+
+The `bygroups` helper yields each capturing group in the regex with a different
+token type.  First the `Name.Attribute` token, then a `Text` token for the
+optional whitespace, after that an `Operator` token for the equals sign. Then a
+`Text` token for the whitespace again.  The rest of the line is returned as
+`String`.
+
+Note that for this to work, every part of the match must be inside a capturing
+group (a ``(...)``), and there must not be any nested capturing groups.  If you
+nevertheless need a group, use a non-capturing group defined using this syntax:
+``(?:some|words|here)`` (note the ``?:`` after the beginning parenthesis).
+
+If you find yourself needing a capturing group inside the regex which shouldn't
+be part of the output but is used in the regular expressions for backreferencing
+(eg: ``r'(<(foo|bar)>)(.*?)(</\2>)'``), you can pass `None` to the bygroups
+function and that group will be skipped in the output.
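+
+A minimal sketch of such a rule (the lexer and token choices are made up)::
+
+    from pygments.lexer import RegexLexer, bygroups
+    from pygments.token import Name, Text
+
+    class TagLexer(RegexLexer):
+        tokens = {
+            'root': [
+                # group 2 exists only for the \2 backreference, so it is
+                # skipped in the output by passing None to bygroups
+                (r'(<(foo|bar)>)(.*?)(</\2>)',
+                 bygroups(Name.Tag, None, Text, Name.Tag)),
+                (r'[^<]+', Text),
+                (r'<', Text),
+            ]
+        }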
+
+
+Changing states
+===============
+
+Many lexers need multiple states to work as expected.  For example, some
+languages allow multiline comments to be nested.  Since this is a recursive
+pattern it's impossible to lex just using regular expressions.
+
+Here is a lexer that recognizes C++ style comments (multi-line with ``/* */``
+and single-line with ``//`` until end of line)::
+
+    from pygments.lexer import RegexLexer
+    from pygments.token import *
+
+    class CppCommentLexer(RegexLexer):
+        name = 'Example Lexer with states'
+
+        tokens = {
+            'root': [
+                (r'[^/]+', Text),
+                (r'/\*', Comment.Multiline, 'comment'),
+                (r'//.*?$', Comment.Singleline),
+                (r'/', Text)
+            ],
+            'comment': [
+                (r'[^*/]', Comment.Multiline),
+                (r'/\*', Comment.Multiline, '#push'),
+                (r'\*/', Comment.Multiline, '#pop'),
+                (r'[*/]', Comment.Multiline)
+            ]
+        }
+
+This lexer starts lexing in the ``'root'`` state. It tries to match as much as
+possible until it finds a slash (``'/'``).  If the next character after the slash
+is an asterisk (``'*'``) the `RegexLexer` sends those two characters to the
+output stream marked as `Comment.Multiline` and continues lexing with the rules
+defined in the ``'comment'`` state.
+
+If there wasn't an asterisk after the slash, the `RegexLexer` checks if it's a
+single-line comment (i.e. followed by a second slash).  If this also wasn't the
+case it must be a single slash, which is not a comment starter (the separate
+regex for a single slash must also be given, else the slash would be marked as
+an error token).
+
+Inside the ``'comment'`` state, we do the same thing again.  Scan until the
+lexer finds a star or slash.  If it's the opening of a multiline comment, push
+the ``'comment'`` state on the stack and continue scanning, again in the
+``'comment'`` state.  Else, check if it's the end of the multiline comment.  If
+yes, pop one state from the stack.
+
+Note: If you pop from an empty stack you'll get an `IndexError`.  (There is an
+easy way to prevent this from happening: don't ``'#pop'`` in the root state).
+
+If the `RegexLexer` encounters a newline that is flagged as an error token, the
+stack is emptied and the lexer continues scanning in the ``'root'`` state.  This
+can help produce error-tolerant highlighting for erroneous input, e.g. when a
+single-line string is not closed.
+
+
+Advanced state tricks
+=====================
+
+There are a few more things you can do with states:
+
+- You can push multiple states onto the stack if you give a tuple instead of a
+  simple string as the third item in a rule tuple.  For example, if you want to
+  match a comment containing a directive, something like:
+
+  .. code-block:: text
+
+      /* <processing directive>    rest of comment */
+
+  you can use this rule::
+
+      tokens = {
+          'root': [
+              (r'/\* <', Comment, ('comment', 'directive')),
+              ...
+          ],
+          'directive': [
+              (r'[^>]*', Comment.Directive),
+              (r'>', Comment, '#pop'),
+          ],
+          'comment': [
+              (r'[^*]+', Comment),
+              (r'\*/', Comment, '#pop'),
+              (r'\*', Comment),
+          ]
+      }
+
+  When this encounters the above sample, first ``'comment'`` and ``'directive'``
+  are pushed onto the stack, then the lexer continues in the directive state
+  until it finds the closing ``>``, then it continues in the comment state until
+  the closing ``*/``.  Then, both states are popped from the stack again and
+  lexing continues in the root state.
+
+  .. versionadded:: 0.9
+     The tuple can contain the special ``'#push'`` and ``'#pop'`` (but not
+     ``'#pop:n'``) directives.
+
+
+- You can include the rules of a state in the definition of another.  This is
+  done by using `include` from `pygments.lexer`::
+
+      from pygments.lexer import RegexLexer, bygroups, include
+      from pygments.token import *
+
+      class ExampleLexer(RegexLexer):
+          tokens = {
+              'comments': [
+                  (r'/\*.*?\*/', Comment),
+                  (r'//.*?\n', Comment),
+              ],
+              'root': [
+                  include('comments'),
+                  (r'(function )(\w+)( {)',
+                   bygroups(Keyword, Name, Keyword), 'function'),
+                  (r'.', Text),
+              ],
+              'function': [
+                  (r'[^}/]+', Text),
+                  include('comments'),
+                  (r'/', Text),
+                  (r'\}', Keyword, '#pop'),
+              ]
+          }
+
+  This is a hypothetical lexer for a language that consists of functions and
+  comments.  Because comments can occur at toplevel and in functions, we need
+  rules for comments in both states.  As you can see, the `include` helper saves
+  repeating rules that occur more than once (in this example, the state
+  ``'comments'`` will never be entered by the lexer, as it's only there to be
+  included in ``'root'`` and ``'function'``).
+
+- Sometimes, you may want to "combine" a state from existing ones.  This is
+  possible with the `combined` helper from `pygments.lexer`.
+
+  If you, instead of a new state, write ``combined('state1', 'state2')`` as the
+  third item of a rule tuple, a new anonymous state will be formed from state1
+  and state2 and if the rule matches, the lexer will enter this state.
+
+  This is not used very often, but can be helpful in some cases, such as the
+  `PythonLexer`'s string literal processing.
+
+- If you want your lexer to start lexing in a different state you can modify the
+  stack by overriding the `get_tokens_unprocessed()` method::
+
+      from pygments.lexer import RegexLexer
+
+      class ExampleLexer(RegexLexer):
+          tokens = {...}
+
+          def get_tokens_unprocessed(self, text, stack=('root', 'otherstate')):
+              for item in RegexLexer.get_tokens_unprocessed(self, text, stack):
+                  yield item
+
+  Some lexers like the `PhpLexer` use this to make the leading ``<?php``
+  preprocessor comments optional.  Note that you can crash the lexer easily by
+  putting values into the stack that don't exist in the token map.  Also
+  removing ``'root'`` from the stack can result in strange errors!
+
+- In some lexers, a state should be popped if anything is encountered that isn't
+  matched by a rule in the state.  You could use an empty regex at the end of
+  the state list, but Pygments provides a more obvious way of spelling that:
+  ``default('#pop')`` is equivalent to ``('', Text, '#pop')``.
+
+  .. versionadded:: 2.0
+
+
+Subclassing lexers derived from RegexLexer
+==========================================
+
+.. versionadded:: 1.6
+
+Sometimes multiple languages are very similar, but should still be lexed by
+different lexer classes.
+
+When subclassing a lexer derived from RegexLexer, the ``tokens`` dictionaries
+defined in the parent and child class are merged.  For example::
+
+      from pygments.lexer import RegexLexer, inherit
+      from pygments.token import *
+
+      class BaseLexer(RegexLexer):
+          tokens = {
+              'root': [
+                  ('[a-z]+', Name),
+                  (r'/\*', Comment, 'comment'),
+                  ('"', String, 'string'),
+                  ('\s+', Text),
+              ],
+              'string': [
+                  ('[^"]+', String),
+                  ('"', String, '#pop'),
+              ],
+              'comment': [
+                  ...
+              ],
+          }
+
+      class DerivedLexer(BaseLexer):
+          tokens = {
+              'root': [
+                  ('[0-9]+', Number),
+                  inherit,
+              ],
+              'string': [
+                  (r'[^"\\]+', String),
+                  (r'\\.', String.Escape),
+                  ('"', String, '#pop'),
+              ],
+          }
+
+The `BaseLexer` defines two states, lexing names and strings.  The
+`DerivedLexer` defines its own tokens dictionary, which extends the definitions
+of the base lexer:
+
+* The "root" state has an additional rule and then the special object `inherit`,
+  which tells Pygments to insert the token definitions of the parent class at
+  that point.
+
+* The "string" state is replaced entirely, since there is not `inherit` rule.
+
+* The "comment" state is inherited entirely.
+
+
+Using multiple lexers
+=====================
+
+Using multiple lexers for the same input can be tricky.  One of the easiest
+combination techniques is shown here: You can replace the action entry in a rule
+tuple with a lexer class.  The matched text will then be lexed with that lexer,
+and the resulting tokens will be yielded.
+
+For example, look at this stripped-down HTML lexer::
+
+    from pygments.lexer import RegexLexer, bygroups, using
+    from pygments.token import *
+    from pygments.lexers.javascript import JavascriptLexer
+
+    class HtmlLexer(RegexLexer):
+        name = 'HTML'
+        aliases = ['html']
+        filenames = ['*.html', '*.htm']
+
+        flags = re.IGNORECASE | re.DOTALL
+        tokens = {
+            'root': [
+                ('[^<&]+', Text),
+                ('&.*?;', Name.Entity),
+                (r'<\s*script\s*', Name.Tag, ('script-content', 'tag')),
+                (r'<\s*[a-zA-Z0-9:]+', Name.Tag, 'tag'),
+                (r'<\s*/\s*[a-zA-Z0-9:]+\s*>', Name.Tag),
+            ],
+            'script-content': [
+                (r'(.+?)(<\s*/\s*script\s*>)',
+                 bygroups(using(JavascriptLexer), Name.Tag),
+                 '#pop'),
+            ]
+        }
+
+Here the content of a ``<script>`` tag is passed to a newly created instance of
+a `JavascriptLexer` and not processed by the `HtmlLexer`.  This is done using
+the `using` helper that takes the other lexer class as its parameter.
+
+Note the combination of `bygroups` and `using`.  This makes sure that the
+content up to the ``</script>`` end tag is processed by the `JavascriptLexer`,
+while the end tag is yielded as a normal token with the `Name.Tag` type.
+
+Also note the ``(r'<\s*script\s*', Name.Tag, ('script-content', 'tag'))`` rule.
+Here, two states are pushed onto the state stack, ``'script-content'`` and
+``'tag'``.  That means that first ``'tag'`` is processed, which will lex
+attributes and the closing ``>``, then the ``'tag'`` state is popped and the
+next state on top of the stack will be ``'script-content'``.
+
+Since you cannot refer to the class currently being defined, use `this`
+(imported from `pygments.lexer`) to refer to the current lexer class, i.e.
+``using(this)``.  This construct may seem unnecessary, but this is often the
+most obvious way of lexing arbitrary syntax between fixed delimiters without
+introducing deeply nested states.
+
+The `using()` helper has a special keyword argument, `state`, which works as
+follows: if given, the lexer to use initially is not in the ``"root"`` state,
+but in the state given by this argument.  This does not work with advanced
+`RegexLexer` subclasses such as `ExtendedRegexLexer` (see below).
+
+Any other keyword arguments passed to `using()` are added to the keyword
+arguments used to create the lexer.
+
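+As an illustration (not taken from a real lexer), a string state might hand
+interpolated expressions back to the current lexer with ``using(this)``; the
+lexer and state names here are invented for this sketch::
+
+    from pygments.lexer import RegexLexer, bygroups, using, this
+    from pygments.token import String, Text, Name, Operator
+
+    class InterpLexer(RegexLexer):
+        """Hypothetical language with #{...} interpolation inside strings."""
+        tokens = {
+            'root': [
+                (r'"', String, 'string'),
+                (r'[a-z_]\w*', Name),
+                (r'[-+*/]', Operator),
+                (r'\s+', Text),
+            ],
+            'string': [
+                # lex the interpolated code with this very lexer, starting
+                # in its 'root' state
+                (r'(#\{)([^}]*)(\})',
+                 bygroups(String.Interpol, using(this, state='root'),
+                          String.Interpol)),
+                (r'[^"#]+', String),
+                (r'#', String),
+                (r'"', String, '#pop'),
+            ],
+        }
+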
+
+Delegating Lexer
+================
+
+Another approach for nested lexers is the `DelegatingLexer` which is for example
+used for the template engine lexers.  It takes two lexers as arguments on
+initialisation: a `root_lexer` and a `language_lexer`.
+
+The input is processed as follows: First, the whole text is lexed with the
+`language_lexer`.  All tokens yielded with the special type of ``Other`` are
+then concatenated and given to the `root_lexer`.  The language tokens of the
+`language_lexer` are then inserted into the `root_lexer`'s token stream at the
+appropriate positions. ::
+
+    from pygments.lexer import DelegatingLexer
+    from pygments.lexers.web import HtmlLexer, PhpLexer
+
+    class HtmlPhpLexer(DelegatingLexer):
+        def __init__(self, **options):
+            super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options)
+
+This procedure ensures that e.g. HTML with template tags in it is highlighted
+correctly even if the template tags are put into HTML tags or attributes.
+
+If you want to change the needle token ``Other`` to something else, you can give
+the lexer another token type as the third parameter::
+
+    DelegatingLexer.__init__(MyLexer, OtherLexer, Text, **options)
+
+
+Callbacks
+=========
+
+Sometimes the grammar of a language is so complex that a lexer would be unable
+to process it just by using regular expressions and stacks.
+
+For this, the `RegexLexer` allows callbacks to be given in rule tuples, instead
+of token types (`bygroups` and `using` are nothing else but preimplemented
+callbacks).  The callback must be a function taking two arguments:
+
+* the lexer itself
+* the match object for the last matched rule
+
+The callback must then return an iterable of (or simply yield) ``(index,
+tokentype, value)`` tuples, which are then just passed through by
+`get_tokens_unprocessed()`.  The ``index`` here is the position of the token in
+the input string, ``tokentype`` is the normal token type (like `Name.Builtin`),
+and ``value`` the associated part of the input string.
+
+You can see an example here::
+
+    from pygments.lexer import RegexLexer
+    from pygments.token import Generic
+
+    class HypotheticLexer(RegexLexer):
+
+        def headline_callback(lexer, match):
+            equal_signs = match.group(1)
+            text = match.group(2)
+            yield match.start(), Generic.Headline, equal_signs + text + equal_signs
+
+        tokens = {
+            'root': [
+                (r'(=+)(.*?)(\1)', headline_callback)
+            ]
+        }
+
+If the regex for the `headline_callback` matches, the function is called with
+the match object.  Note that after the callback is done, processing continues
+normally, that is, after the end of the previous match.  The callback has no
+way to influence the lexing position.
+
+There are not really any simple examples for lexer callbacks, but you can see
+them in action e.g. in the `SMLLexer` class in `ml.py`_.
+
+.. _ml.py: https://github.com/pygments/pygments/blob/master/pygments/lexers/ml.py
+
+
+The ExtendedRegexLexer class
+============================
+
+The `RegexLexer`, even with callbacks, unfortunately isn't powerful enough for
+the funky syntax rules of languages such as Ruby.
+
+But fear not; even then you don't have to abandon the regular expression
+approach: Pygments has a subclass of `RegexLexer`, the `ExtendedRegexLexer`.
+All features known from RegexLexers are available here too, and the tokens are
+specified in exactly the same way, *except* for one detail:
+
+The `get_tokens_unprocessed()` method holds its internal state data not as local
+variables, but in an instance of the `pygments.lexer.LexerContext` class, and
+that instance is passed to callbacks as a third argument. This means that you
+can modify the lexer state in callbacks.
+
+The `LexerContext` class has the following members:
+
+* `text` -- the input text
+* `pos` -- the current starting position that is used for matching regexes
+* `stack` -- a list containing the state stack
+* `end` -- the maximum position to which regexes are matched; this defaults to
+  the length of `text`
+
+Additionally, the `get_tokens_unprocessed()` method can be given a
+`LexerContext` instead of a string and will then process this context instead of
+creating a new one for the string argument.
+
+Note that because you can set the current position to anything in the callback,
+it won't automatically be set by the caller after the callback is finished.
+For example, this is how the hypothetical lexer above would be written with the
+`ExtendedRegexLexer`::
+
+    from pygments.lexer import ExtendedRegexLexer
+    from pygments.token import Generic
+
+    class ExHypotheticLexer(ExtendedRegexLexer):
+
+        def headline_callback(lexer, match, ctx):
+            equal_signs = match.group(1)
+            text = match.group(2)
+            yield match.start(), Generic.Headline, equal_signs + text + equal_signs
+            ctx.pos = match.end()
+
+        tokens = {
+            'root': [
+                (r'(=+)(.*?)(\1)', headline_callback)
+            ]
+        }
+
+This might sound confusing (and it can really be), but it is needed; for an
+example, look at the Ruby lexer in `ruby.py`_.
+
+.. _ruby.py: https://github.com/pygments/pygments/blob/master/pygments/lexers/ruby.py
+
+
+Handling Lists of Keywords
+==========================
+
+For a relatively short list (hundreds of entries) you can construct an optimized
+regular expression directly using ``words()`` (for longer lists, see the next
+section).  This function handles a few things for you automatically, such as
+escaping metacharacters and accounting for Python's first-match (rather than
+longest-match) behavior in alternations.  Feel free to put the lists themselves
+in ``pygments/lexers/_$lang_builtins.py`` (see examples there), generated by
+code if possible.
+
+An example of using ``words()`` is something like::
+
+    from pygments.lexer import RegexLexer, words
+    from pygments.token import Name
+
+    class MyLexer(RegexLexer):
+
+        tokens = {
+            'root': [
+                (words(('else', 'elseif'), suffix=r'\b'), Name.Builtin),
+                (r'\w+', Name),
+            ],
+        }
+
+As you can see, you can add ``prefix`` and ``suffix`` parts to the constructed
+regex.
+
+
+Modifying Token Streams
+=======================
+
+Some languages ship a lot of builtin functions (for example PHP).  The total
+number of those functions differs from system to system because not everybody
+has every extension installed.  In the case of PHP there are over 3000 builtin
+functions.  That's far more than you want to put into a regular expression.
+
+But because only `Name` tokens can be function names, this is solvable by
+overriding the ``get_tokens_unprocessed()`` method.  The following lexer
+subclasses the `PythonLexer` so that it highlights some additional names as
+pseudo keywords::
+
+    from pygments.lexers.python import PythonLexer
+    from pygments.token import Name, Keyword
+
+    class MyPythonLexer(PythonLexer):
+        EXTRA_KEYWORDS = set(('foo', 'bar', 'foobar', 'barfoo', 'spam', 'eggs'))
+
+        def get_tokens_unprocessed(self, text):
+            for index, token, value in PythonLexer.get_tokens_unprocessed(self, text):
+                if token is Name and value in self.EXTRA_KEYWORDS:
+                    yield index, Keyword.Pseudo, value
+                else:
+                    yield index, token, value
+
+The `PhpLexer` and `LuaLexer` use this method to resolve builtin functions.
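+
+As a quick check (a sketch; any formatter would do), the subclass above can be
+used like any other lexer::
+
+    from pygments import highlight
+    from pygments.formatters import HtmlFormatter
+
+    print(highlight('spam(eggs)', MyPythonLexer(), HtmlFormatter()))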
diff --git a/doc/_build/html/_sources/docs/lexers.rst.txt b/doc/_build/html/_sources/docs/lexers.rst.txt
new file mode 100644 (file)
index 0000000..ef40f14
--- /dev/null
@@ -0,0 +1,69 @@
+.. -*- mode: rst -*-
+
+================
+Available lexers
+================
+
+This page lists all available builtin lexers and the options they take.
+
+Currently, **all lexers** support these options (a short usage sketch follows
+the list):
+
+`stripnl`
+    Strip leading and trailing newlines from the input (default: ``True``).
+
+`stripall`
+    Strip all leading and trailing whitespace from the input (default:
+    ``False``).
+
+`ensurenl`
+    Make sure that the input ends with a newline (default: ``True``).  This
+    is required for some lexers that consume input linewise.
+
+    .. versionadded:: 1.3
+
+`tabsize`
+    If given and greater than 0, expand tabs in the input (default: ``0``).
+
+`encoding`
+    If given, must be an encoding name (such as ``"utf-8"``). This encoding
+    will be used to convert the input string to Unicode (if it is not already
+    a Unicode string). The default is ``"guess"``.
+
+    If this option is set to ``"guess"``, a simple UTF-8 vs. Latin-1
+    detection is used, if it is set to ``"chardet"``, the
+    `chardet library <https://chardet.github.io/>`_ is used to
+    guess the encoding of the input.
+
+    .. versionadded:: 0.6
+
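+All of these options are passed as keyword arguments when a lexer is
+instantiated or looked up, for example:
+
+.. sourcecode:: python
+
+    from pygments.lexers import get_lexer_by_name
+
+    lexer = get_lexer_by_name('python', stripall=True, tabsize=4)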
+
+The "Short Names" field lists the identifiers that can be used with the
+`get_lexer_by_name()` function.
+
+These lexers are builtin and can be imported from `pygments.lexers`:
+
+.. pygmentsdoc:: lexers
+
+
+Iterating over all lexers
+-------------------------
+
+.. versionadded:: 0.6
+
+To get all lexers (both the builtin and the plugin ones), you can
+use the `get_all_lexers()` function from the `pygments.lexers`
+module:
+
+.. sourcecode:: pycon
+
+    >>> from pygments.lexers import get_all_lexers
+    >>> i = get_all_lexers()
+    >>> next(i)
+    ('Diff', ('diff',), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch'))
+    >>> next(i)
+    ('Delphi', ('delphi', 'objectpascal', 'pas', 'pascal'), ('*.pas',), ('text/x-pascal',))
+    >>> next(i)
+    ('XML+Ruby', ('xml+erb', 'xml+ruby'), (), ())
+
+As you can see, the return value is an iterator which yields tuples
+in the form ``(name, aliases, filetypes, mimetypes)``.
diff --git a/doc/_build/html/_sources/docs/moinmoin.rst.txt b/doc/_build/html/_sources/docs/moinmoin.rst.txt
new file mode 100644 (file)
index 0000000..8b2216b
--- /dev/null
@@ -0,0 +1,39 @@
+.. -*- mode: rst -*-
+
+============================
+Using Pygments with MoinMoin
+============================
+
+From Pygments 0.7, the source distribution ships a `Moin`_ parser plugin that
+can be used to get Pygments highlighting in Moin wiki pages.
+
+To use it, copy the file `external/moin-parser.py` from the Pygments
+distribution to the `data/plugin/parser` subdirectory of your Moin instance.
+Edit the options at the top of the file (currently ``ATTACHMENTS`` and
+``INLINESTYLES``) and rename the file to the name that the parser directive
+should have. For example, if you name the file ``code.py``, you can get a
+highlighted Python code sample with this Wiki markup::
+
+    {{{
+    #!code python
+    [...]
+    }}}
+
+where ``python`` is the Pygments name of the lexer to use.
+
+Additionally, if you set the ``ATTACHMENTS`` option to True, Pygments will also
+be called for all attachments whose filenames have no other parser registered.
+
+You are responsible for including CSS rules that will map the Pygments CSS
+classes to colors. You can output a stylesheet file with `pygmentize`, put it
+into the `htdocs` directory of your Moin instance and then include it in the
+`stylesheets` configuration option in the Moin config, e.g.::
+
+    stylesheets = [('screen', '/htdocs/pygments.css')]
+
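+The stylesheet itself can be generated with `pygmentize`, for example (using
+the ``default`` style; adjust the path to your Moin instance)::
+
+    $ pygmentize -S default -f html > htdocs/pygments.css
+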
+If you do not want to do that and are willing to accept larger HTML output, you
+can set the ``INLINESTYLES`` option to True.
+
+
+.. _Moin: http://moinmoin.wikiwikiweb.de/
diff --git a/doc/_build/html/_sources/docs/plugins.rst.txt b/doc/_build/html/_sources/docs/plugins.rst.txt
new file mode 100644 (file)
index 0000000..a6f8d7b
--- /dev/null
@@ -0,0 +1,93 @@
+================
+Register Plugins
+================
+
+If you want to extend Pygments without hacking the sources, but want to
+use the lexer/formatter/style/filter lookup functions (`lexers.get_lexer_by_name`
+et al.), you can use `setuptools`_ entrypoints to add new lexers, formatters,
+styles or filters as if they were in the Pygments core.
+
+.. _setuptools: http://peak.telecommunity.com/DevCenter/setuptools
+
+That means you can use your highlighter modules with the `pygmentize` script,
+which relies on the mentioned functions.
+
+
+Entrypoints
+===========
+
+Here is a list of setuptools entrypoints that Pygments understands:
+
+`pygments.lexers`
+
+    This entrypoint is used for adding new lexers to the Pygments core.
+    The name of the entrypoint value doesn't really matter; Pygments extracts
+    the required metadata from the class definition:
+
+    .. sourcecode:: ini
+
+        [pygments.lexers]
+        yourlexer = yourmodule:YourLexer
+
+    Note that you have to define ``name``, ``aliases`` and ``filenames``
+    attributes so that you can use the highlighter from the command line:
+
+    .. sourcecode:: python
+
+        class YourLexer(...):
+            name = 'Name Of Your Lexer'
+            aliases = ['alias']
+            filenames = ['*.ext']
+
+
+`pygments.formatters`
+
+    You can use this entrypoint to add new formatters to Pygments. The
+    name of an entrypoint item is the name of the formatter. If you
+    prefix the name with a slash it's used as a filename pattern:
+
+    .. sourcecode:: ini
+
+        [pygments.formatters]
+        yourformatter = yourmodule:YourFormatter
+        /.ext = yourmodule:YourFormatter
+
+
+`pygments.styles`
+
+    To add a new style you can use this entrypoint. The name of the entrypoint
+    is the name of the style:
+
+    .. sourcecode:: ini
+
+        [pygments.styles]
+        yourstyle = yourmodule:YourStyle
+
+
+`pygments.filters`
+
+    Use this entrypoint to register a new filter. The name of the
+    entrypoint is the name of the filter:
+
+    .. sourcecode:: ini
+
+        [pygments.filters]
+        yourfilter = yourmodule:YourFilter
+
+
+How To Use Entrypoints
+======================
+
+This documentation doesn't explain how to use those entrypoints because this is
+covered in the `setuptools documentation`_. That page should cover everything
+you need to write a plugin.
+
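+For orientation only, the packaging metadata of a plugin might look roughly
+like this sketch (the project and module names are invented; see the
+setuptools documentation for the authoritative syntax):
+
+.. sourcecode:: python
+
+    from setuptools import setup
+
+    setup(
+        name='pygments-yourlexer',
+        py_modules=['yourmodule'],
+        entry_points={
+            'pygments.lexers': ['yourlexer = yourmodule:YourLexer'],
+        },
+    )
+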
+.. _setuptools documentation: http://peak.telecommunity.com/DevCenter/setuptools
+
+
+Extending The Core
+==================
+
+If you have written a Pygments plugin that is open source, please let us know.
+There is a good chance that we'll add it to the Pygments distribution.
diff --git a/doc/_build/html/_sources/docs/quickstart.rst.txt b/doc/_build/html/_sources/docs/quickstart.rst.txt
new file mode 100644 (file)
index 0000000..3a823e7
--- /dev/null
@@ -0,0 +1,205 @@
+.. -*- mode: rst -*-
+
+===========================
+Introduction and Quickstart
+===========================
+
+
+Welcome to Pygments! This document explains the basic concepts and terms and
+gives a few examples of how to use the library.
+
+
+Architecture
+============
+
+There are four types of components that work together to highlight a piece of
+code:
+
+* A **lexer** splits the source into tokens, fragments of the source that
+  have a token type that determines what the text represents semantically
+  (e.g., keyword, string, or comment). There is a lexer for every language
+  or markup format that Pygments supports.
+* The token stream can be piped through **filters**, which usually modify
+  the token types or text fragments, e.g. uppercasing all keywords.
+* A **formatter** then takes the token stream and writes it to an output
+  file, in a format such as HTML, LaTeX or RTF.
+* While writing the output, a **style** determines how to highlight all the
+  different token types. It maps them to attributes like "red and bold".
+
+
+Example
+=======
+
+Here is a small example for highlighting Python code:
+
+.. sourcecode:: python
+
+    from pygments import highlight
+    from pygments.lexers import PythonLexer
+    from pygments.formatters import HtmlFormatter
+
+    code = 'print "Hello World"'
+    print(highlight(code, PythonLexer(), HtmlFormatter()))
+
+which prints something like this:
+
+.. sourcecode:: html
+
+    <div class="highlight">
+    <pre><span class="k">print</span> <span class="s">&quot;Hello World&quot;</span></pre>
+    </div>
+
+As you can see, Pygments uses CSS classes (by default, but you can change that)
+instead of inline styles in order to avoid outputting redundant style information over
+and over. A CSS stylesheet that contains all CSS classes possibly used in the output
+can be produced by:
+
+.. sourcecode:: python
+
+    print(HtmlFormatter().get_style_defs('.highlight'))
+
+The argument to :func:`get_style_defs` is used as an additional CSS selector:
+the output may look like this:
+
+.. sourcecode:: css
+
+    .highlight .k { color: #AA22FF; font-weight: bold }
+    .highlight .s { color: #BB4444 }
+    ...
+
+
+Options
+=======
+
+The :func:`highlight()` function supports a fourth argument called *outfile*;
+if given, it must be a file object.  The formatted output will then be written
+to this file instead of being returned as a string.
+
+Lexers and formatters both support options. They are given to them as keyword
+arguments either to the class or to the lookup method:
+
+.. sourcecode:: python
+
+    from pygments import highlight
+    from pygments.lexers import get_lexer_by_name
+    from pygments.formatters import HtmlFormatter
+
+    lexer = get_lexer_by_name("python", stripall=True)
+    formatter = HtmlFormatter(linenos=True, cssclass="source")
+    result = highlight(code, lexer, formatter)
+
+This makes the lexer strip all leading and trailing whitespace from the input
+(`stripall` option), lets the formatter output line numbers (`linenos` option),
+and sets the wrapping ``<div>``'s class to ``source`` (instead of
+``highlight``).
+
+Important options include (a short example follows the list):
+
+`encoding` : for lexers and formatters
+   Since Pygments uses Unicode strings internally, this determines which
+   encoding will be used to convert to or from byte strings.
+`style` : for formatters
+   The name of the style to use when writing the output.
+
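+For example, both can be set when creating a formatter (a small sketch; the
+style name must be one of the available styles):
+
+.. sourcecode:: python
+
+    from pygments.formatters import HtmlFormatter
+
+    formatter = HtmlFormatter(style='colorful', encoding='utf-8')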
+
+For an overview of builtin lexers and formatters and their options, visit the
+:doc:`lexer <lexers>` and :doc:`formatters <formatters>` lists.
+
+For documentation on filters, see :doc:`this page <filters>`.
+
+
+Lexer and formatter lookup
+==========================
+
+If you want to look up a built-in lexer by its alias or a filename, you can use
+one of the following methods:
+
+.. sourcecode:: pycon
+
+    >>> from pygments.lexers import (get_lexer_by_name,
+    ...     get_lexer_for_filename, get_lexer_for_mimetype)
+
+    >>> get_lexer_by_name('python')
+    <pygments.lexers.PythonLexer>
+
+    >>> get_lexer_for_filename('spam.rb')
+    <pygments.lexers.RubyLexer>
+
+    >>> get_lexer_for_mimetype('text/x-perl')
+    <pygments.lexers.PerlLexer>
+
+All these functions accept keyword arguments; they will be passed to the lexer
+as options.
+
+A similar API is available for formatters: use :func:`.get_formatter_by_name()`
+and :func:`.get_formatter_for_filename()` from the :mod:`pygments.formatters`
+module for this purpose.
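+
+For example (a sketch; object addresses in the output are abbreviated):
+
+.. sourcecode:: pycon
+
+    >>> from pygments.formatters import (get_formatter_by_name,
+    ...     get_formatter_for_filename)
+
+    >>> get_formatter_by_name('html', linenos=True)
+    <pygments.formatters.html.HtmlFormatter object at 0x...>
+
+    >>> get_formatter_for_filename('out.tex')
+    <pygments.formatters.latex.LatexFormatter object at 0x...>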
+
+
+Guessing lexers
+===============
+
+If you don't know the content of the file, or you want to highlight a file
+whose extension is ambiguous, such as ``.html`` (which could contain plain HTML
+or some template tags), use these functions:
+
+.. sourcecode:: pycon
+
+    >>> from pygments.lexers import guess_lexer, guess_lexer_for_filename
+
+    >>> guess_lexer('#!/usr/bin/python\nprint "Hello World!"')
+    <pygments.lexers.PythonLexer>
+
+    >>> guess_lexer_for_filename('test.py', 'print "Hello World!"')
+    <pygments.lexers.PythonLexer>
+
+:func:`.guess_lexer()` passes the given content to the lexer classes'
+:meth:`analyse_text()` method and returns the one for which it returns the
+highest number.
+
+All lexers have two different filename pattern lists: the primary and the
+secondary one.  The :func:`.get_lexer_for_filename()` function only uses the
+primary list, whose entries are supposed to be unique among all lexers.
+:func:`.guess_lexer_for_filename()`, however, will first loop through all lexers
+and check whether the filename matches any of their primary or secondary
+filename patterns.  If only one lexer matches, it is returned; otherwise, the
+guessing mechanism of :func:`.guess_lexer()` is used with the matching lexers.
+
+As usual, keyword arguments to these functions are given to the created lexer
+as options.
+
+
+Command line usage
+==================
+
+You can use Pygments from the command line, using the :program:`pygmentize`
+script::
+
+    $ pygmentize test.py
+
+will highlight the Python file test.py using ANSI escape sequences
+(a.k.a. terminal colors) and print the result to standard output.
+
+To output HTML, use the ``-f`` option::
+
+    $ pygmentize -f html -o test.html test.py
+
+to write an HTML-highlighted version of test.py to the file test.html.
+Note that it will only be a snippet of HTML; if you want a full HTML document,
+use the "full" option::
+
+    $ pygmentize -f html -O full -o test.html test.py
+
+This will produce a full HTML document with included stylesheet.
+
+A style can be selected with ``-O style=<name>``.
+
+If you need a stylesheet for an existing HTML file using Pygments CSS classes,
+it can be created with::
+
+    $ pygmentize -S default -f html > style.css
+
+where ``default`` is the style name.
+
+More options and tricks can be found in the :doc:`command line reference
+<cmdline>`.
diff --git a/doc/_build/html/_sources/docs/rstdirective.rst.txt b/doc/_build/html/_sources/docs/rstdirective.rst.txt
new file mode 100644 (file)
index 0000000..c0d503b
--- /dev/null
@@ -0,0 +1,22 @@
+.. -*- mode: rst -*-
+
+================================
+Using Pygments in ReST documents
+================================
+
+Many Python people use `ReST`_ for documenting their source code, programs,
+scripts, et cetera.  This also means that documentation often includes source
+code samples or snippets.
+
+You can easily enable Pygments support for your ReST texts using a custom
+directive -- this is also how this documentation displays source code.
+
+From Pygments 0.9, the directive is shipped in the distribution as
+`external/rst-directive.py`.  You can copy and adapt this code to your liking.
+
+.. removed -- too confusing
+   *Loosely related note:* The ReST lexer now recognizes ``.. sourcecode::`` and
+   ``.. code::`` directives and highlights the contents in the specified language
+   if the `handlecodeblocks` option is true.
+
+.. _ReST: http://docutils.sf.net/rst.html
diff --git a/doc/_build/html/_sources/docs/styles.rst.txt b/doc/_build/html/_sources/docs/styles.rst.txt
new file mode 100644 (file)
index 0000000..570293a
--- /dev/null
@@ -0,0 +1,232 @@
+.. -*- mode: rst -*-
+
+======
+Styles
+======
+
+Pygments comes with some builtin styles that work for both the HTML and
+LaTeX formatter.
+
+The builtin styles can be looked up with the `get_style_by_name` function:
+
+.. sourcecode:: pycon
+
+    >>> from pygments.styles import get_style_by_name
+    >>> get_style_by_name('colorful')
+    <class 'pygments.styles.colorful.ColorfulStyle'>
+
+You can pass the name of a style to a formatter as the `style` option, in the
+form of a string:
+
+.. sourcecode:: pycon
+
+    >>> from pygments.styles import get_style_by_name
+    >>> from pygments.formatters import HtmlFormatter
+    >>> HtmlFormatter(style='colorful').style
+    <class 'pygments.styles.colorful.ColorfulStyle'>
+
+You can also import your own style (which must be a subclass of
+`pygments.style.Style`) and pass it to the formatter:
+
+.. sourcecode:: pycon
+
+    >>> from yourapp.yourmodule import YourStyle
+    >>> from pygments.formatters import HtmlFormatter
+    >>> HtmlFormatter(style=YourStyle).style
+    <class 'yourapp.yourmodule.YourStyle'>
+
+
+Creating Own Styles
+===================
+
+So, how do you create a style?  All you have to do is subclass `Style` and
+define some styles:
+
+.. sourcecode:: python
+
+    from pygments.style import Style
+    from pygments.token import Keyword, Name, Comment, String, Error, \
+         Number, Operator, Generic
+
+    class YourStyle(Style):
+        default_style = ""
+        styles = {
+            Comment:                'italic #888',
+            Keyword:                'bold #005',
+            Name:                   '#f00',
+            Name.Function:          '#0f0',
+            Name.Class:             'bold #0f0',
+            String:                 'bg:#eee #111'
+        }
+
+That's it.  There are just a few rules.  When you define a style for `Name`,
+the style automatically also affects `Name.Function` and so on.  If you
+defined ``'bold'`` and don't want boldface for a subtoken, use ``'nobold'``.
+
+(Philosophy: the styles aren't written in CSS syntax since this way
+they can be used for a variety of formatters.)
+
+`default_style` is the style inherited by all token types.
+
+To make the style usable for Pygments, you must
+
+* either register it as a plugin (see :doc:`the plugin docs <plugins>`)
+* or drop it into the `styles` subpackage of your Pygments distribution, one
+  style class per style, where the file name is the style name and the class
+  name is the capitalized style name followed by ``Style``.  For example, if
+  your style should be called ``"mondrian"``, name the class `MondrianStyle`,
+  put it into the file ``mondrian.py`` and put this file into the
+  ``pygments.styles`` subpackage directory.
+
+
+Style Rules
+===========
+
+Here is a small overview of all allowed style rules:
+
+``bold``
+    render text as bold
+``nobold``
+    don't render text as bold (to prevent subtokens being highlighted bold)
+``italic``
+    render text italic
+``noitalic``
+    don't render text as italic
+``underline``
+    render text underlined
+``nounderline``
+    don't render text underlined
+``bg:``
+    transparent background
+``bg:#000000``
+    background color (black)
+``border:``
+    no border
+``border:#ffffff``
+    border color (white)
+``#ff0000``
+    text color (red)
+``noinherit``
+    don't inherit styles from supertoken
+
+Note that there must not be a space between ``bg:`` and the color value,
+since the style definition string is split at whitespace.
+Also, using named colors is not allowed since the supported color names
+vary for different formatters.
+
+Furthermore, not every formatter might support every style rule.
+
+
+Builtin Styles
+==============
+
+Pygments ships some builtin styles which are maintained by the Pygments team.
+
+To get a list of known styles you can use this snippet:
+
+.. sourcecode:: pycon
+
+    >>> from pygments.styles import STYLE_MAP
+    >>> STYLE_MAP.keys()
+    ['default', 'emacs', 'friendly', 'colorful']
+
+
+Getting a list of available styles
+==================================
+
+.. versionadded:: 0.6
+
+Because plugins can register additional styles, there is
+a way to iterate over all styles:
+
+.. sourcecode:: pycon
+
+    >>> from pygments.styles import get_all_styles
+    >>> styles = list(get_all_styles())
+
+
+.. _AnsiTerminalStyle:
+
+Terminal Styles
+===============
+
+.. versionadded:: 2.2
+
+Custom styles used with the 256-color terminal formatter can also map colors to
+use the 8 default ANSI colors.  To do so, use ``ansigreen``, ``ansibrightred`` or
+any other colors defined in :attr:`pygments.style.ansicolors`.  Foreground ANSI
+colors will be mapped to the corresponding `escape codes 30 to 37
+<https://en.wikipedia.org/wiki/ANSI_escape_code#Colors>`_ thus respecting any
+custom color mapping and themes provided by many terminal emulators.  Light
+variants are treated as foreground color with an added bold flag.
+``bg:ansi<color>`` will also be respected, except that light variants will be
+rendered in the same shade as their dark counterparts.
+
+See the following example, where the color of the string ``"Hello World"`` is
+governed by the escape sequence ``\x1b[34;41;01m`` (ANSI bright blue foreground,
+bold, 41 being the red background) instead of an extended foreground &
+background color.
+
+.. sourcecode:: pycon
+
+    >>> from pygments import highlight
+    >>> from pygments.style import Style
+    >>> from pygments.token import Token
+    >>> from pygments.lexers import Python3Lexer
+    >>> from pygments.formatters import Terminal256Formatter
+
+    >>> class MyStyle(Style):
+            styles = {
+                Token.String:     'ansibrightblue bg:ansibrightred',
+            }
+
+    >>> code = 'print("Hello World")'
+    >>> result = highlight(code, Python3Lexer(), Terminal256Formatter(style=MyStyle))
+    >>> print(result.encode())
+    b'\x1b[34;41;01m"\x1b[39;49;00m\x1b[34;41;01mHello World\x1b[39;49;00m\x1b[34;41;01m"\x1b[39;49;00m'
+
+Colors specified using ``ansi*`` are converted to a default set of RGB colors
+when used with formatters other than the terminal-256 formatter.
+
+By definition of ANSI, the following colors are considered "light" colors, and
+will be rendered by most terminals as bold:
+
+- "brightblack" (darkgrey), "brightred", "brightgreen", "brightyellow", "brightblue",
+  "brightmagenta", "brightcyan", "white"
+
+The following are considered "dark" colors and will be rendered as non-bold:
+
+- "black", "red", "green", "yellow", "blue", "magenta", "cyan",
+  "gray"
+
+Exact behavior might depend on the terminal emulator you are using, and its
+settings.
+
+.. _new-ansi-color-names:
+
+.. versionchanged:: 2.4
+
+The definition of the ANSI color names has changed.
+New names are easier to understand and align to the colors used in other projects.
+
+===================== ====================
+New names             Pygments up to 2.3
+===================== ====================
+``ansiblack``         ``#ansiblack``
+``ansired``           ``#ansidarkred``
+``ansigreen``         ``#ansidarkgreen``
+``ansiyellow``        ``#ansibrown``
+``ansiblue``          ``#ansidarkblue``
+``ansimagenta``       ``#ansipurple``
+``ansicyan``          ``#ansiteal``
+``ansigray``          ``#ansilightgray``
+``ansibrightblack``   ``#ansidarkgray``
+``ansibrightred``     ``#ansired``
+``ansibrightgreen``   ``#ansigreen``
+``ansibrightyellow``  ``#ansiyellow``
+``ansibrightblue``    ``#ansiblue``
+``ansibrightmagenta`` ``#ansifuchsia``
+``ansibrightcyan``    ``#ansiturquoise``
+``ansiwhite``         ``#ansiwhite``
+===================== ====================
+
+Old ANSI color names are deprecated but will still work.
diff --git a/doc/_build/html/_sources/docs/tokens.rst.txt b/doc/_build/html/_sources/docs/tokens.rst.txt
new file mode 100644 (file)
index 0000000..801fc63
--- /dev/null
@@ -0,0 +1,372 @@
+.. -*- mode: rst -*-
+
+==============
+Builtin Tokens
+==============
+
+.. module:: pygments.token
+
+In the :mod:`pygments.token` module, there is a special object called `Token`
+that is used to create token types.
+
+You can create a new token type by accessing an attribute of `Token`:
+
+.. sourcecode:: pycon
+
+    >>> from pygments.token import Token
+    >>> Token.String
+    Token.String
+    >>> Token.String is Token.String
+    True
+
+Note that tokens are singletons so you can use the ``is`` operator for comparing
+token types.
+
+As of Pygments 0.7 you can also use the ``in`` operator to perform set tests:
+
+.. sourcecode:: pycon
+
+    >>> from pygments.token import Comment
+    >>> Comment.Single in Comment
+    True
+    >>> Comment in Comment.Multi
+    False
+
+This can be useful in :doc:`filters <filters>` and if you write lexers on your
+own without using the base lexers.
+
+You can also split a token type into a hierarchy, and get the parent of it:
+
+.. sourcecode:: pycon
+
+    >>> String.split()
+    [Token, Token.Literal, Token.Literal.String]
+    >>> String.parent
+    Token.Literal
+
+In principle, you can create an unlimited number of token types, but nobody can
+guarantee that a style will define style rules for every token type.  Because of
+that, Pygments proposes some global token types defined in the
+`pygments.token.STANDARD_TYPES` dict.
+
+For some tokens aliases are already defined:
+
+.. sourcecode:: pycon
+
+    >>> from pygments.token import String
+    >>> String
+    Token.Literal.String
+
+Inside the :mod:`pygments.token` module the following aliases are defined:
+
+============= ============================ ====================================
+`Text`        `Token.Text`                 for any type of text data
+`Whitespace`  `Token.Text.Whitespace`      for specially highlighted whitespace
+`Error`       `Token.Error`                represents lexer errors
+`Other`       `Token.Other`                special token for data not
+                                           matched by a parser (e.g. HTML
+                                           markup in PHP code)
+`Keyword`     `Token.Keyword`              any kind of keywords
+`Name`        `Token.Name`                 variable/function names
+`Literal`     `Token.Literal`              Any literals
+`String`      `Token.Literal.String`       string literals
+`Number`      `Token.Literal.Number`       number literals
+`Operator`    `Token.Operator`             operators (``+``, ``not``...)
+`Punctuation` `Token.Punctuation`          punctuation (``[``, ``(``...)
+`Comment`     `Token.Comment`              any kind of comments
+`Generic`     `Token.Generic`              generic tokens (have a look at
+                                           the explanation below)
+============= ============================ ====================================
+
+The `Whitespace` token type is new in Pygments 0.8. It is used only by the
+`VisibleWhitespaceFilter` currently.
+
+Normally you just create token types using the already defined aliases.  For each
+of those token aliases, a number of subtypes exist (excluding the special tokens
+`Token.Text`, `Token.Error` and `Token.Other`).
+
+The `is_token_subtype()` function in the `pygments.token` module can be used to
+test if a token type is a subtype of another (such as `Name.Tag` and `Name`).
+(This is the same as ``Name.Tag in Name``.  The overloaded `in` operator was
+introduced in Pygments 0.7; the function still exists for backwards
+compatibility.)
+
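+For example:
+
+.. sourcecode:: pycon
+
+    >>> from pygments.token import Name, is_token_subtype
+    >>> is_token_subtype(Name.Tag, Name)
+    True
+    >>> is_token_subtype(Name, Name.Tag)
+    False
+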
+With Pygments 0.7, it's also possible to convert strings to token types (for example
+if you want to supply a token from the command line):
+
+.. sourcecode:: pycon
+
+    >>> from pygments.token import String, string_to_tokentype
+    >>> string_to_tokentype("String")
+    Token.Literal.String
+    >>> string_to_tokentype("Token.Literal.String")
+    Token.Literal.String
+    >>> string_to_tokentype(String)
+    Token.Literal.String
+
+
+Keyword Tokens
+==============
+
+`Keyword`
+    For any kind of keyword (especially if it doesn't match any of the
+    subtypes of course).
+
+`Keyword.Constant`
+    For keywords that are constants (e.g. ``None`` in Python 3).
+
+`Keyword.Declaration`
+    For keywords used for variable declaration (e.g. ``var`` in some programming
+    languages like JavaScript).
+
+`Keyword.Namespace`
+    For keywords used for namespace declarations (e.g. ``import`` in Python and
+    Java and ``package`` in Java).
+
+`Keyword.Pseudo`
+    For keywords that aren't really keywords (e.g. ``None`` in Python 2).
+
+`Keyword.Reserved`
+    For reserved keywords.
+
+`Keyword.Type`
+    For builtin types that can't be used as identifiers (e.g. ``int``,
+    ``char`` etc. in C).
+
+
+Name Tokens
+===========
+
+`Name`
+    For any name (variable names, function names, classes).
+
+`Name.Attribute`
+    For all attributes (e.g. in HTML tags).
+
+`Name.Builtin`
+    Builtin names; names that are available in the global namespace.
+
+`Name.Builtin.Pseudo`
+    Builtin names that are implicit (e.g. ``self`` in Ruby, ``this`` in Java).
+
+`Name.Class`
+    Class names.  Because no lexer can know if a name is a class or a function
+    or something else, this token is meant for class declarations.
+
+`Name.Constant`
+    Token type for constants. In some languages you can recognise a token by the
+    way it's defined (the value after a ``const`` keyword for example). In
+    other languages constants are uppercase by definition (Ruby).
+
+`Name.Decorator`
+    Token type for decorators. Decorators are syntactic elements in the Python
+    language. Similar syntax elements exist in C# and Java.
+
+`Name.Entity`
+    Token type for special entities (e.g. ``&nbsp;`` in HTML).
+
+`Name.Exception`
+    Token type for exception names (e.g. ``RuntimeError`` in Python).  Some
+    languages define exceptions in the function signature (Java).  You can then
+    highlight the name of that exception using this token.
+
+`Name.Function`
+    Token type for function names.
+
+`Name.Function.Magic`
+    same as `Name.Function` but for special function names that have an implicit use
+    in a language (e.g. ``__init__`` method in Python).
+
+`Name.Label`
+    Token type for label names (e.g. in languages that support ``goto``).
+
+`Name.Namespace`
+    Token type for namespaces (e.g. import paths in Java/Python), and for names
+    following the ``module``/``namespace`` keyword in other languages.
+
+`Name.Other`
+    Other names. Normally unused.
+
+`Name.Tag`
+    Tag names (in HTML/XML markup or configuration files).
+
+`Name.Variable`
+    Token type for variables. Some languages have prefixes for variable names
+    (PHP, Ruby, Perl). You can highlight them using this token.
+
+`Name.Variable.Class`
+    same as `Name.Variable` but for class variables (also static variables).
+
+`Name.Variable.Global`
+    same as `Name.Variable` but for global variables (used in Ruby, for
+    example).
+
+`Name.Variable.Instance`
+    same as `Name.Variable` but for instance variables.
+
+`Name.Variable.Magic`
+    same as `Name.Variable` but for special variable names that have an implicit use
+    in a language (e.g. ``__doc__`` in Python).
+
+
+Literals
+========
+
+`Literal`
+    For any literal (if not further defined).
+
+`Literal.Date`
+    for date literals (e.g. ``42d`` in Boo).
+
+
+`String`
+    For any string literal.
+
+`String.Affix`
+    Token type for affixes that further specify the type of the string they're
+    attached to (e.g. the prefixes ``r`` and ``u8`` in ``r"foo"`` and ``u8"foo"``). 
+
+`String.Backtick`
+    Token type for strings enclosed in backticks.
+
+`String.Char`
+    Token type for single characters (e.g. Java, C).
+
+`String.Delimiter`
+    Token type for delimiting identifiers in "heredoc", raw and other similar
+    strings (e.g. the word ``END`` in Perl code ``print <<'END';``).
+
+`String.Doc`
+    Token type for documentation strings (for example Python).
+
+`String.Double`
+    Double quoted strings.
+
+`String.Escape`
+    Token type for escape sequences in strings.
+
+`String.Heredoc`
+    Token type for "heredoc" strings (e.g. in Ruby or Perl).
+
+`String.Interpol`
+    Token type for interpolated parts in strings (e.g. ``#{foo}`` in Ruby).
+
+`String.Other`
+    Token type for any other strings (for example ``%q{foo}`` string constructs
+    in Ruby).
+
+`String.Regex`
+    Token type for regular expression literals (e.g. ``/foo/`` in JavaScript).
+
+`String.Single`
+    Token type for single quoted strings.
+
+`String.Symbol`
+    Token type for symbols (e.g. ``:foo`` in LISP or Ruby).
+
+
+`Number`
+    Token type for any number literal.
+
+`Number.Bin`
+    Token type for binary literals (e.g. ``0b101010``).
+
+`Number.Float`
+    Token type for float literals (e.g. ``42.0``).
+
+`Number.Hex`
+    Token type for hexadecimal number literals (e.g. ``0xdeadbeef``).
+
+`Number.Integer`
+    Token type for integer literals (e.g. ``42``).
+
+`Number.Integer.Long`
+    Token type for long integer literals (e.g. ``42L`` in Python).
+
+`Number.Oct`
+    Token type for octal literals.
+
+
+Operators
+=========
+
+`Operator`
+    For any punctuation operator (e.g. ``+``, ``-``).
+
+`Operator.Word`
+    For any operator that is a word (e.g. ``not``).
+
+
+Punctuation
+===========
+
+.. versionadded:: 0.7
+
+`Punctuation`
+    For any punctuation which is not an operator (e.g. ``[``, ``(``...)
+
+
+Comments
+========
+
+`Comment`
+    Token type for any comment.
+
+`Comment.Hashbang`
+    Token type for hashbang comments (i.e. first lines of files that start with
+    ``#!``).
+
+`Comment.Multiline`
+    Token type for multiline comments.
+
+`Comment.Preproc`
+    Token type for preprocessor comments (also ``<?php``/``<%`` constructs).
+
+`Comment.Single`
+    Token type for comments that end at the end of a line (e.g. ``# foo``).
+
+`Comment.Special`
+    Special data in comments. For example code tags, author and license
+    information, etc.
+
+
+Generic Tokens
+==============
+
+Generic tokens are for special lexers like the `DiffLexer` that don't really
+highlight a programming language but some other kind of text, such as a patch
+file.
+
+
+`Generic`
+    A generic, unstyled token. Normally you don't use this token type.
+
+`Generic.Deleted`
+    Marks the token value as deleted.
+
+`Generic.Emph`
+    Marks the token value as emphasized.
+
+`Generic.Error`
+    Marks the token value as an error message.
+
+`Generic.Heading`
+    Marks the token value as headline.
+
+`Generic.Inserted`
+    Marks the token value as inserted.
+
+`Generic.Output`
+    Marks the token value as program output (e.g. for the Python console lexer).
+
+`Generic.Prompt`
+    Marks the token value as a command prompt (e.g. for the Bash lexer).
+
+`Generic.Strong`
+    Marks the token value as bold (e.g. for the reST lexer).
+
+`Generic.Subheading`
+    Marks the token value as subheadline.
+
+`Generic.Traceback`
+    Marks the token value as a part of an error traceback.
diff --git a/doc/_build/html/_sources/docs/unicode.rst.txt b/doc/_build/html/_sources/docs/unicode.rst.txt
new file mode 100644 (file)
index 0000000..dca9111
--- /dev/null
@@ -0,0 +1,58 @@
+=====================
+Unicode and Encodings
+=====================
+
+Since Pygments 0.6, all lexers use unicode strings internally. Because of that
+you might encounter the occasional :exc:`UnicodeDecodeError` if you pass strings
+with the wrong encoding.
+
+By default, all lexers have their input encoding set to `guess`.  This means
+that the following encodings are tried:
+
+* UTF-8 (including BOM handling)
+* The locale encoding (i.e. the result of `locale.getpreferredencoding()`)
+* As a last resort, `latin1`
+
+If you pass a lexer a byte string object (not unicode), it tries to decode the
+data using this encoding.
+
+You can override the encoding using the `encoding` or `inencoding` lexer
+options.  If you have the `chardet`_ library installed and set the encoding to
+``chardet`` if will analyse the text and use the encoding it thinks is the
+right one automatically:
+
+.. sourcecode:: python
+
+    from pygments.lexers import PythonLexer
+    lexer = PythonLexer(encoding='chardet')
+
+The best way is to pass Pygments unicode objects. In that case you can't get
+unexpected output.
+
+The formatters now send Unicode objects to the stream if you don't set the
+output encoding. You can do so by passing the formatters an `encoding` option:
+
+.. sourcecode:: python
+
+    from pygments.formatters import HtmlFormatter
+    f = HtmlFormatter(encoding='utf-8')
+
+**You will have to set this option if you have non-ASCII characters in the
+source and the output stream does not accept Unicode written to it!**
+This is the case for all regular files and for terminals.
+
+Note: The Terminal formatter tries to be smart: if its output stream has an
+`encoding` attribute, and you haven't set the option, it will encode any
+Unicode string with this encoding before writing it. This is the case for
+`sys.stdout`, for example. The other formatters don't have that behavior.
+
+Another note: If you call Pygments via the command line (`pygmentize`),
+encoding is handled differently, see :doc:`the command line docs <cmdline>`.
+
+.. versionadded:: 0.7
+   The formatters now also accept an `outencoding` option which will override
+   the `encoding` option if given. This makes it possible to use a single
+   options dict with lexers and formatters, and still have different input and
+   output encodings.
+
+.. _chardet: https://chardet.github.io/
diff --git a/doc/_build/html/_sources/download.rst.txt b/doc/_build/html/_sources/download.rst.txt
new file mode 100644 (file)
index 0000000..975c41b
--- /dev/null
@@ -0,0 +1,39 @@
+Download and installation
+=========================
+
+The current release is version |version|.
+
+Packaged versions
+-----------------
+
+You can download it `from the Python Package Index
+<http://pypi.python.org/pypi/Pygments>`_.  For installation of packages from
+PyPI, we recommend `Pip <http://www.pip-installer.org>`_, which works on all
+major platforms.
+
+Under Linux, most distributions include a package for Pygments, usually called
+``pygments`` or ``python-pygments``.  You can install it with the package
+manager as usual.
+
+Development sources
+-------------------
+
+We're using the Git version control system.  You can get the development source
+using this command::
+
+    git clone https://github.com/pygments/pygments
+
+Development takes place at `GitHub <https://github.com/pygments/pygments>`_.
+
+The latest changes in the development source code are listed in the `changelog
+<https://github.com/pygments/pygments/blob/master/CHANGES>`_.
+
+.. Documentation
+   -------------
+
+.. XXX todo
+
+   You can download the <a href="/docs/">documentation</a> either as
+      a bunch of rst files from the Git repository, see above, or
+      as a tar.gz containing rendered HTML files:</p>
+      <p><a href="/docs/download/pygmentsdocs.tar.gz">pygmentsdocs.tar.gz</a></p>
diff --git a/doc/_build/html/_sources/faq.rst.txt b/doc/_build/html/_sources/faq.rst.txt
new file mode 100644 (file)
index 0000000..108cef4
--- /dev/null
@@ -0,0 +1,140 @@
+:orphan:
+
+Pygments FAQ
+=============
+
+What is Pygments?
+-----------------
+
+Pygments is a syntax highlighting engine written in Python.  That means it will
+take source code (or other markup) in a supported language and output a
+processed version (in different formats) containing syntax highlighting markup.
+
+Its features include:
+
+* a wide range of common :doc:`languages and markup formats <languages>` is supported
+* new languages and formats are added easily
+* a number of output formats is available, including:
+
+  - HTML
+  - ANSI sequences (console output)
+  - LaTeX
+  - RTF
+
+* it is usable as a command-line tool and as a library
+* parsing and formatting is fast
+
+Pygments is licensed under the BSD license.
+
+Where does the name Pygments come from?
+---------------------------------------
+
+*Py* of course stands for Python, while *pigments* are used for coloring paint,
+and in this case, source code!
+
+What are the system requirements?
+---------------------------------
+
+Pygments only needs a standard Python install: version 2.7 or higher for
+Python 2, or version 3.5 or higher for Python 3.  No additional libraries are
+needed.
+
+How can I use Pygments?
+-----------------------
+
+Pygments is usable as a command-line tool as well as a library.
+
+From the command-line, usage looks like this (assuming the pygmentize script is
+properly installed)::
+
+    pygmentize -f html /path/to/file.py
+
+This will print an HTML-highlighted version of /path/to/file.py to standard output.
+
+For a complete help, please run ``pygmentize -h``.
+
+Usage as a library is thoroughly demonstrated in the Documentation section.
+
+How do I make a new style?
+--------------------------
+
+Please see the :doc:`documentation on styles <docs/styles>`.
+
+How can I report a bug or suggest a feature?
+--------------------------------------------
+
+Please report bugs and feature wishes in the tracker at GitHub.
+
+You can also e-mail the authors, see the contact details.
+
+I want support for this language!
+--------------------------------------
+
+Instead of waiting for others to include language support, why not write it
+yourself? All you have to know is :doc:`outlined in the docs
+<docs/lexerdevelopment>`.
+
+Can I use Pygments for programming language processing?
+-------------------------------------------------------
+
+The Pygments lexing machinery is quite powerful and can be used to build lexers
+for basically all languages.  However, parsing them is not possible, though some
+lexers go a few steps in this direction in order to e.g. highlight function names
+differently.
+
+Also, error reporting is not within the scope of Pygments.  It focuses on
+correctly highlighting syntactically valid documents, not on finding and
+compensating for errors.
+
+Who uses Pygments?
+------------------
+
+This is an (incomplete) list of projects and sites known to use the Pygments highlighter.
+
+* `Wikipedia <http://en.wikipedia.org>`_
+* `BitBucket <http://bitbucket.org/>`_, a Mercurial and Git hosting site
+* `The Sphinx documentation builder <http://sphinx.pocoo.org/>`_, for embedded source examples
+* `rst2pdf <http://code.google.com/p/rst2pdf/>`_, a reStructuredText to PDF converter
+* `Codecov <http://codecov.io/>`_, a code coverage CI service
+* `Trac <http://trac.edgewall.org/>`_, the universal project management tool
+* `AsciiDoc <http://www.methods.co.nz/asciidoc/>`_, a text-based documentation generator
+* `ActiveState Code <http://code.activestate.com/>`_, the Python Cookbook successor
+* `ViewVC <http://viewvc.org/>`_, a web-based version control repository browser
+* `BzrFruit <http://repo.or.cz/w/bzrfruit.git>`_, a Bazaar branch viewer
+* `QBzr <http://bazaar-vcs.org/QBzr>`_, a cross-platform Qt-based GUI front end for Bazaar
+* `Review Board <http://www.review-board.org/>`_, a collaborative code reviewing tool
+* `Diamanda <http://code.google.com/p/diamanda/>`_, a Django powered wiki system with support for Pygments
+* `Progopedia <http://progopedia.ru/>`_ (`English <http://progopedia.com/>`_),
+  an encyclopedia of programming languages
+* `Bruce <http://r1chardj0n3s.googlepages.com/bruce>`_, a reStructuredText presentation tool
+* `PIDA <http://pida.co.uk/>`_, a universal IDE written in Python
+* `BPython <http://www.noiseforfree.com/bpython/>`_, a curses-based intelligent Python shell
+* `PuDB <http://pypi.python.org/pypi/pudb>`_, a console Python debugger
+* `XWiki <http://www.xwiki.org/>`_, a wiki-based development framework in Java, using Jython
+* `roux <http://ananelson.com/software/roux/>`_, a script for running R scripts
+  and creating beautiful output including graphs
+* `hurl <http://hurl.it/>`_, a web service for making HTTP requests
+* `wxHTMLPygmentizer <http://colinbarnette.net/projects/wxHTMLPygmentizer>`_ is
+  a GUI utility, used to make code-colorization easier
+* `Postmarkup <http://code.google.com/p/postmarkup/>`_, a BBCode to XHTML generator
+* `WpPygments <http://blog.mirotin.net/?page_id=49>`_, and `WPygments
+  <https://github.com/capynet/WPygments>`_, highlighter plugins for WordPress
+* `Siafoo <http://siafoo.net>`_, a tool for sharing and storing useful code and programming experience
+* `D source <http://www.dsource.org/>`_, a community for the D programming language
+* `dpaste.com <http://dpaste.com/>`_, another Django pastebin
+* `Django snippets <http://www.djangosnippets.org/>`_, a pastebin for Django code
+* `Fayaa <http://www.fayaa.com/code/>`_, a Chinese pastebin
+* `Incollo.com <http://incollo.com>`_, a free collaborative debugging tool
+* `PasteBox <http://p.boxnet.eu/>`_, a pastebin focused on privacy
+* `hilite.me <http://www.hilite.me/>`_, a site to highlight code snippets
+* `patx.me <http://patx.me/paste>`_, a pastebin
+* `Fluidic <https://github.com/richsmith/fluidic>`_, an experiment in
+  integrating shells with a GUI
+* `pygments.rb <https://github.com/tmm1/pygments.rb>`_, a pygments wrapper for Ruby
+* `Clygments <https://github.com/bfontaine/clygments>`_, a pygments wrapper for
+  Clojure
+* `PHPygments <https://github.com/capynet/PHPygments>`_, a pygments wrapper for PHP
+* `Spyder <https://www.spyder-ide.org/>`_, the Scientific Python Development
+  Environment, uses pygments for the multi-language syntax highlighting in its
+  `editor <https://docs.spyder-ide.org/editor.html>`_.
+
+If you have a project or web site using Pygments, drop me a line, and I'll add a
+link here.
diff --git a/doc/_build/html/_sources/index.rst.txt b/doc/_build/html/_sources/index.rst.txt
new file mode 100644 (file)
index 0000000..d89277e
--- /dev/null
@@ -0,0 +1,49 @@
+Welcome!
+========
+
+This is the home of Pygments.  It is a generic syntax highlighter suitable for
+use in code hosting, forums, wikis or other applications that need to prettify
+source code.  Highlights are:
+
+* a wide range of over 300 languages and other text formats is supported
+* special attention is paid to details that increase highlighting quality
+* support for new languages and formats is added easily; most languages use a
+  simple regex-based lexing mechanism
+* a number of output formats is available, among them HTML, RTF, LaTeX and ANSI
+  sequences
+* it is usable as a command-line tool and as a library
+* ... and it highlights even Perl 6!
+
+Read more in the :doc:`FAQ list <faq>` or the :doc:`documentation <docs/index>`,
+or `download the latest release <http://pypi.python.org/pypi/Pygments>`_.
+
+.. _contribute:
+
+Contribute
+----------
+
+Like every open-source project, we are always looking for volunteers to help us
+with programming. Python knowledge is required, but don't fear: Python is a very
+clear and easy to learn language.
+
+Development takes place on `GitHub <https://github.com/pygments/pygments>`_.
+
+If you found a bug, just open a ticket in the GitHub tracker. Be sure to log
+in to be notified when the issue is fixed -- development is not fast-paced as
+the library is quite stable.  You can also send an e-mail to the developers, see
+below.
+
+The authors
+-----------
+
+Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python.org*
+and **Matthäus Chajdas**.
+
+Many lexers and fixes have been contributed by **Armin Ronacher**, the rest of
+the `Pocoo <http://dev.pocoo.org/>`_ team and **Tim Hatch**.
+
+.. toctree::
+   :maxdepth: 1
+   :hidden:
+
+   docs/index
diff --git a/doc/_build/html/_sources/languages.rst.txt b/doc/_build/html/_sources/languages.rst.txt
new file mode 100644 (file)
index 0000000..a91664c
--- /dev/null
@@ -0,0 +1,176 @@
+:orphan:
+
+Supported languages
+===================
+
+Pygments supports an ever-growing range of languages. Watch this space...
+
+Programming languages
+---------------------
+
+* ActionScript
+* Ada
+* ANTLR
+* AppleScript
+* Assembly (various)
+* Asymptote
+* `Augeas <http://augeas.net>`_
+* Awk
+* BBC Basic
+* Befunge
+* `Boa <http://boa.cs.iastate.edu/docs/index.php>`_
+* Boo
+* BrainFuck
+* C, C++
+* C#
+* `Charm++ CI <http://charmplusplus.org/>`_
+* Clojure
+* CoffeeScript
+* ColdFusion
+* Common Lisp
+* Coq
+* Cryptol (incl. Literate Cryptol)
+* `Crystal <http://crystal-lang.org>`_
+* `Cython <http://cython.org>`_
+* `D <http://dlang.org>`_
+* Dart
+* DCPU-16
+* Delphi
+* Dylan
+* `Elm <http://elm-lang.org/>`_
+* Email
+* Erlang
+* `Ezhil <http://ezhillang.org>`_, a Tamil programming language
+* Factor
+* Fancy
+* `Fennel <https://fennel-lang.org/>`_
+* `FloScript <http://ioflo.com/>`_
+* Fortran
+* `FreeFEM++ <https://freefem.org/>`_
+* F#
+* GAP
+* Gherkin (Cucumber)
+* GL shaders
+* Groovy
+* `Haskell <http://www.haskell.org>`_ (incl. Literate Haskell)
+* HLSL
+* `HSpec <http://hackage.haskell.org/package/hspec>`_
+* IDL
+* Io
+* Java
+* JavaScript
+* Lasso
+* LLVM
+* Logtalk
+* `Lua <http://www.lua.org>`_
+* Matlab
+* MiniD
+* Modelica
+* Modula-2
+* MuPad
+* Nemerle
+* Nimrod
+* Notmuch
+* Objective-C
+* Objective-J
+* Octave
+* OCaml
+* PHP
+* `Perl 5 <http://perl.org>`_ and `Perl 6 <https://perl6.org>`_
+* `Pony <https://www.ponylang.io/>`_
+* PovRay
+* PostScript
+* PowerShell
+* Prolog
+* `Python <http://www.python.org>`_ 2.x and 3.x (incl. console sessions and tracebacks)
+* `REBOL <http://www.rebol.com>`_
+* `Red <http://www.red-lang.org>`_
+* Redcode
+* `Ruby <http://www.ruby-lang.org>`_ (incl. irb sessions)
+* Rust
+* S, S-Plus, R
+* Scala
+* `Scdoc <https://git.sr.ht/~sircmpwn/scdoc>`_
+* Scheme
+* Scilab
+* `SGF <https://www.red-bean.com/sgf/>`_
+* `Slash <https://github.com/arturadib/Slash-A>`_
+* `Slurm <https://slurm.schedmd.com/overview.html>`_
+* Smalltalk
+* SNOBOL
+* `Solidity <https://solidity.readthedocs.io/>`_
+* Tcl
+* `Tera Term language <https://ttssh2.osdn.jp/>`_
+* `TOML <https://github.com/toml-lang/toml>`_
+* Vala
+* Verilog
+* VHDL
+* Visual Basic.NET
+* Visual FoxPro
+* XQuery
+* `Zeek <https://www.zeek.org>`_
+* Zephir
+* `Zig <https://ziglang.org/>`_
+
+Template languages
+------------------
+
+* Cheetah templates
+* `Django <http://www.djangoproject.com>`_ / `Jinja
+  <http://jinja.pocoo.org/jinja>`_ templates
+* ERB (Ruby templating)
+* `Genshi <http://genshi.edgewall.org>`_ (the Trac template language)
+* JSP (Java Server Pages)
+* `Myghty <http://www.myghty.org>`_ (the HTML::Mason based framework)
+* `Mako <http://www.makotemplates.org>`_ (the Myghty successor)
+* `Smarty <http://www.smarty.net>`_ templates (PHP templating)
+* Tea
+
+Other markup
+------------
+
+* Apache config files
+* Bash shell scripts
+* BBCode
+* CMake
+* CSS
+* Debian control files
+* Diff files
+* DTD
+* Gettext catalogs
+* Gnuplot script
+* Groff markup
+* HTML
+* HTTP sessions
+* INI-style config files
+* IRC logs (irssi style)
+* Lighttpd config files
+* Makefiles
+* MoinMoin/Trac Wiki markup
+* MySQL
+* Nginx config files
+* POV-Ray scenes
+* Ragel
+* Redcode
+* ReST
+* Robot Framework
+* RPM spec files
+* SQL, also MySQL, SQLite
+* Squid configuration
+* TeX
+* tcsh
+* Vim Script
+* Windows batch files
+* XML
+* XSLT
+* YAML
+
+... that's all?
+---------------
+
+Well, why not write your own? Contributing to Pygments is easy and fun.  Take a
+look at the :doc:`docs on lexer development <docs/lexerdevelopment>` and the
+rough sketch below.  Pull requests are welcome on
+`GitHub <https://github.com/pygments/pygments>`_.
+
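+As a hypothetical sketch (the language and class name below are made up), a new
+lexer is usually just a small ``RegexLexer`` subclass:
+
+.. sourcecode:: python
+
+    from pygments.lexer import RegexLexer
+    from pygments.token import Comment, Keyword, Name, Text
+
+    class MyLangLexer(RegexLexer):
+        """Skeleton lexer for a made-up language."""
+        name = 'MyLang'
+        aliases = ['mylang']
+        filenames = ['*.mylang']
+
+        tokens = {
+            'root': [
+                (r'\s+', Text),
+                (r'#.*$', Comment.Single),
+                (r'\b(if|else|while|return)\b', Keyword),
+                (r'[A-Za-z_]\w*', Name),
+            ],
+        }
+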
+Note: the languages listed here are supported in the development version. The
+latest release may lack a few of them.
diff --git a/doc/_build/html/_static/basic.css b/doc/_build/html/_static/basic.css
new file mode 100644 (file)
index 0000000..ea6972d
--- /dev/null
@@ -0,0 +1,764 @@
+/*
+ * basic.css
+ * ~~~~~~~~~
+ *
+ * Sphinx stylesheet -- basic theme.
+ *
+ * :copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/* -- main layout ----------------------------------------------------------- */
+
+div.clearer {
+    clear: both;
+}
+
+/* -- relbar ---------------------------------------------------------------- */
+
+div.related {
+    width: 100%;
+    font-size: 90%;
+}
+
+div.related h3 {
+    display: none;
+}
+
+div.related ul {
+    margin: 0;
+    padding: 0 0 0 10px;
+    list-style: none;
+}
+
+div.related li {
+    display: inline;
+}
+
+div.related li.right {
+    float: right;
+    margin-right: 5px;
+}
+
+/* -- sidebar --------------------------------------------------------------- */
+
+div.sphinxsidebarwrapper {
+    padding: 10px 5px 0 10px;
+}
+
+div.sphinxsidebar {
+    float: left;
+    width: 230px;
+    margin-left: -100%;
+    font-size: 90%;
+    word-wrap: break-word;
+    overflow-wrap : break-word;
+}
+
+div.sphinxsidebar ul {
+    list-style: none;
+}
+
+div.sphinxsidebar ul ul,
+div.sphinxsidebar ul.want-points {
+    margin-left: 20px;
+    list-style: square;
+}
+
+div.sphinxsidebar ul ul {
+    margin-top: 0;
+    margin-bottom: 0;
+}
+
+div.sphinxsidebar form {
+    margin-top: 10px;
+}
+
+div.sphinxsidebar input {
+    border: 1px solid #98dbcc;
+    font-family: sans-serif;
+    font-size: 1em;
+}
+
+div.sphinxsidebar #searchbox form.search {
+    overflow: hidden;
+}
+
+div.sphinxsidebar #searchbox input[type="text"] {
+    float: left;
+    width: 80%;
+    padding: 0.25em;
+    box-sizing: border-box;
+}
+
+div.sphinxsidebar #searchbox input[type="submit"] {
+    float: left;
+    width: 20%;
+    border-left: none;
+    padding: 0.25em;
+    box-sizing: border-box;
+}
+
+
+img {
+    border: 0;
+    max-width: 100%;
+}
+
+/* -- search page ----------------------------------------------------------- */
+
+ul.search {
+    margin: 10px 0 0 20px;
+    padding: 0;
+}
+
+ul.search li {
+    padding: 5px 0 5px 20px;
+    background-image: url(file.png);
+    background-repeat: no-repeat;
+    background-position: 0 7px;
+}
+
+ul.search li a {
+    font-weight: bold;
+}
+
+ul.search li div.context {
+    color: #888;
+    margin: 2px 0 0 30px;
+    text-align: left;
+}
+
+ul.keywordmatches li.goodmatch a {
+    font-weight: bold;
+}
+
+/* -- index page ------------------------------------------------------------ */
+
+table.contentstable {
+    width: 90%;
+    margin-left: auto;
+    margin-right: auto;
+}
+
+table.contentstable p.biglink {
+    line-height: 150%;
+}
+
+a.biglink {
+    font-size: 1.3em;
+}
+
+span.linkdescr {
+    font-style: italic;
+    padding-top: 5px;
+    font-size: 90%;
+}
+
+/* -- general index --------------------------------------------------------- */
+
+table.indextable {
+    width: 100%;
+}
+
+table.indextable td {
+    text-align: left;
+    vertical-align: top;
+}
+
+table.indextable ul {
+    margin-top: 0;
+    margin-bottom: 0;
+    list-style-type: none;
+}
+
+table.indextable > tbody > tr > td > ul {
+    padding-left: 0em;
+}
+
+table.indextable tr.pcap {
+    height: 10px;
+}
+
+table.indextable tr.cap {
+    margin-top: 10px;
+    background-color: #f2f2f2;
+}
+
+img.toggler {
+    margin-right: 3px;
+    margin-top: 3px;
+    cursor: pointer;
+}
+
+div.modindex-jumpbox {
+    border-top: 1px solid #ddd;
+    border-bottom: 1px solid #ddd;
+    margin: 1em 0 1em 0;
+    padding: 0.4em;
+}
+
+div.genindex-jumpbox {
+    border-top: 1px solid #ddd;
+    border-bottom: 1px solid #ddd;
+    margin: 1em 0 1em 0;
+    padding: 0.4em;
+}
+
+/* -- domain module index --------------------------------------------------- */
+
+table.modindextable td {
+    padding: 2px;
+    border-collapse: collapse;
+}
+
+/* -- general body styles --------------------------------------------------- */
+
+div.body {
+    min-width: 450px;
+    max-width: 800px;
+}
+
+div.body p, div.body dd, div.body li, div.body blockquote {
+    -moz-hyphens: auto;
+    -ms-hyphens: auto;
+    -webkit-hyphens: auto;
+    hyphens: auto;
+}
+
+a.headerlink {
+    visibility: hidden;
+}
+
+a.brackets:before,
+span.brackets > a:before{
+    content: "[";
+}
+
+a.brackets:after,
+span.brackets > a:after {
+    content: "]";
+}
+
+h1:hover > a.headerlink,
+h2:hover > a.headerlink,
+h3:hover > a.headerlink,
+h4:hover > a.headerlink,
+h5:hover > a.headerlink,
+h6:hover > a.headerlink,
+dt:hover > a.headerlink,
+caption:hover > a.headerlink,
+p.caption:hover > a.headerlink,
+div.code-block-caption:hover > a.headerlink {
+    visibility: visible;
+}
+
+div.body p.caption {
+    text-align: inherit;
+}
+
+div.body td {
+    text-align: left;
+}
+
+.first {
+    margin-top: 0 !important;
+}
+
+p.rubric {
+    margin-top: 30px;
+    font-weight: bold;
+}
+
+img.align-left, .figure.align-left, object.align-left {
+    clear: left;
+    float: left;
+    margin-right: 1em;
+}
+
+img.align-right, .figure.align-right, object.align-right {
+    clear: right;
+    float: right;
+    margin-left: 1em;
+}
+
+img.align-center, .figure.align-center, object.align-center {
+  display: block;
+  margin-left: auto;
+  margin-right: auto;
+}
+
+img.align-default, .figure.align-default {
+  display: block;
+  margin-left: auto;
+  margin-right: auto;
+}
+
+.align-left {
+    text-align: left;
+}
+
+.align-center {
+    text-align: center;
+}
+
+.align-default {
+    text-align: center;
+}
+
+.align-right {
+    text-align: right;
+}
+
+/* -- sidebars -------------------------------------------------------------- */
+
+div.sidebar {
+    margin: 0 0 0.5em 1em;
+    border: 1px solid #ddb;
+    padding: 7px 7px 0 7px;
+    background-color: #ffe;
+    width: 40%;
+    float: right;
+}
+
+p.sidebar-title {
+    font-weight: bold;
+}
+
+/* -- topics ---------------------------------------------------------------- */
+
+div.topic {
+    border: 1px solid #ccc;
+    padding: 7px 7px 0 7px;
+    margin: 10px 0 10px 0;
+}
+
+p.topic-title {
+    font-size: 1.1em;
+    font-weight: bold;
+    margin-top: 10px;
+}
+
+/* -- admonitions ----------------------------------------------------------- */
+
+div.admonition {
+    margin-top: 10px;
+    margin-bottom: 10px;
+    padding: 7px;
+}
+
+div.admonition dt {
+    font-weight: bold;
+}
+
+div.admonition dl {
+    margin-bottom: 0;
+}
+
+p.admonition-title {
+    margin: 0px 10px 5px 0px;
+    font-weight: bold;
+}
+
+div.body p.centered {
+    text-align: center;
+    margin-top: 25px;
+}
+
+/* -- tables ---------------------------------------------------------------- */
+
+table.docutils {
+    border: 0;
+    border-collapse: collapse;
+}
+
+table.align-center {
+    margin-left: auto;
+    margin-right: auto;
+}
+
+table.align-default {
+    margin-left: auto;
+    margin-right: auto;
+}
+
+table caption span.caption-number {
+    font-style: italic;
+}
+
+table caption span.caption-text {
+}
+
+table.docutils td, table.docutils th {
+    padding: 1px 8px 1px 5px;
+    border-top: 0;
+    border-left: 0;
+    border-right: 0;
+    border-bottom: 1px solid #aaa;
+}
+
+table.footnote td, table.footnote th {
+    border: 0 !important;
+}
+
+th {
+    text-align: left;
+    padding-right: 5px;
+}
+
+table.citation {
+    border-left: solid 1px gray;
+    margin-left: 1px;
+}
+
+table.citation td {
+    border-bottom: none;
+}
+
+th > p:first-child,
+td > p:first-child {
+    margin-top: 0px;
+}
+
+th > p:last-child,
+td > p:last-child {
+    margin-bottom: 0px;
+}
+
+/* -- figures --------------------------------------------------------------- */
+
+div.figure {
+    margin: 0.5em;
+    padding: 0.5em;
+}
+
+div.figure p.caption {
+    padding: 0.3em;
+}
+
+div.figure p.caption span.caption-number {
+    font-style: italic;
+}
+
+div.figure p.caption span.caption-text {
+}
+
+/* -- field list styles ----------------------------------------------------- */
+
+table.field-list td, table.field-list th {
+    border: 0 !important;
+}
+
+.field-list ul {
+    margin: 0;
+    padding-left: 1em;
+}
+
+.field-list p {
+    margin: 0;
+}
+
+.field-name {
+    -moz-hyphens: manual;
+    -ms-hyphens: manual;
+    -webkit-hyphens: manual;
+    hyphens: manual;
+}
+
+/* -- hlist styles ---------------------------------------------------------- */
+
+table.hlist td {
+    vertical-align: top;
+}
+
+
+/* -- other body styles ----------------------------------------------------- */
+
+ol.arabic {
+    list-style: decimal;
+}
+
+ol.loweralpha {
+    list-style: lower-alpha;
+}
+
+ol.upperalpha {
+    list-style: upper-alpha;
+}
+
+ol.lowerroman {
+    list-style: lower-roman;
+}
+
+ol.upperroman {
+    list-style: upper-roman;
+}
+
+li > p:first-child {
+    margin-top: 0px;
+}
+
+li > p:last-child {
+    margin-bottom: 0px;
+}
+
+dl.footnote > dt,
+dl.citation > dt {
+    float: left;
+}
+
+dl.footnote > dd,
+dl.citation > dd {
+    margin-bottom: 0em;
+}
+
+dl.footnote > dd:after,
+dl.citation > dd:after {
+    content: "";
+    clear: both;
+}
+
+dl.field-list {
+    display: grid;
+    grid-template-columns: fit-content(30%) auto;
+}
+
+dl.field-list > dt {
+    font-weight: bold;
+    word-break: break-word;
+    padding-left: 0.5em;
+    padding-right: 5px;
+}
+
+dl.field-list > dt:after {
+    content: ":";
+}
+
+dl.field-list > dd {
+    padding-left: 0.5em;
+    margin-top: 0em;
+    margin-left: 0em;
+    margin-bottom: 0em;
+}
+
+dl {
+    margin-bottom: 15px;
+}
+
+dd > p:first-child {
+    margin-top: 0px;
+}
+
+dd ul, dd table {
+    margin-bottom: 10px;
+}
+
+dd {
+    margin-top: 3px;
+    margin-bottom: 10px;
+    margin-left: 30px;
+}
+
+dt:target, span.highlighted {
+    background-color: #fbe54e;
+}
+
+rect.highlighted {
+    fill: #fbe54e;
+}
+
+dl.glossary dt {
+    font-weight: bold;
+    font-size: 1.1em;
+}
+
+.optional {
+    font-size: 1.3em;
+}
+
+.sig-paren {
+    font-size: larger;
+}
+
+.versionmodified {
+    font-style: italic;
+}
+
+.system-message {
+    background-color: #fda;
+    padding: 5px;
+    border: 3px solid red;
+}
+
+.footnote:target  {
+    background-color: #ffa;
+}
+
+.line-block {
+    display: block;
+    margin-top: 1em;
+    margin-bottom: 1em;
+}
+
+.line-block .line-block {
+    margin-top: 0;
+    margin-bottom: 0;
+    margin-left: 1.5em;
+}
+
+.guilabel, .menuselection {
+    font-family: sans-serif;
+}
+
+.accelerator {
+    text-decoration: underline;
+}
+
+.classifier {
+    font-style: oblique;
+}
+
+.classifier:before {
+    font-style: normal;
+    margin: 0.5em;
+    content: ":";
+}
+
+abbr, acronym {
+    border-bottom: dotted 1px;
+    cursor: help;
+}
+
+/* -- code displays --------------------------------------------------------- */
+
+pre {
+    overflow: auto;
+    overflow-y: hidden;  /* fixes display issues on Chrome browsers */
+}
+
+span.pre {
+    -moz-hyphens: none;
+    -ms-hyphens: none;
+    -webkit-hyphens: none;
+    hyphens: none;
+}
+
+td.linenos pre {
+    padding: 5px 0px;
+    border: 0;
+    background-color: transparent;
+    color: #aaa;
+}
+
+table.highlighttable {
+    margin-left: 0.5em;
+}
+
+table.highlighttable td {
+    padding: 0 0.5em 0 0.5em;
+}
+
+div.code-block-caption {
+    padding: 2px 5px;
+    font-size: small;
+}
+
+div.code-block-caption code {
+    background-color: transparent;
+}
+
+div.code-block-caption + div > div.highlight > pre {
+    margin-top: 0;
+}
+
+div.code-block-caption span.caption-number {
+    padding: 0.1em 0.3em;
+    font-style: italic;
+}
+
+div.code-block-caption span.caption-text {
+}
+
+div.literal-block-wrapper {
+    padding: 1em 1em 0;
+}
+
+div.literal-block-wrapper div.highlight {
+    margin: 0;
+}
+
+code.descname {
+    background-color: transparent;
+    font-weight: bold;
+    font-size: 1.2em;
+}
+
+code.descclassname {
+    background-color: transparent;
+}
+
+code.xref, a code {
+    background-color: transparent;
+    font-weight: bold;
+}
+
+h1 code, h2 code, h3 code, h4 code, h5 code, h6 code {
+    background-color: transparent;
+}
+
+.viewcode-link {
+    float: right;
+}
+
+.viewcode-back {
+    float: right;
+    font-family: sans-serif;
+}
+
+div.viewcode-block:target {
+    margin: -1px -10px;
+    padding: 0 10px;
+}
+
+/* -- math display ---------------------------------------------------------- */
+
+img.math {
+    vertical-align: middle;
+}
+
+div.body div.math p {
+    text-align: center;
+}
+
+span.eqno {
+    float: right;
+}
+
+span.eqno a.headerlink {
+    position: relative;
+    left: 0px;
+    z-index: 1;
+}
+
+div.math:hover a.headerlink {
+    visibility: visible;
+}
+
+/* -- printout stylesheet --------------------------------------------------- */
+
+@media print {
+    div.document,
+    div.documentwrapper,
+    div.bodywrapper {
+        margin: 0 !important;
+        width: 100%;
+    }
+
+    div.sphinxsidebar,
+    div.related,
+    div.footer,
+    #top-link {
+        display: none;
+    }
+}
\ No newline at end of file
diff --git a/doc/_build/html/_static/bodybg.png b/doc/_build/html/_static/bodybg.png
new file mode 100644 (file)
index 0000000..46892b8
Binary files /dev/null and b/doc/_build/html/_static/bodybg.png differ
diff --git a/doc/_build/html/_static/demo.css b/doc/_build/html/_static/demo.css
new file mode 100644 (file)
index 0000000..9344291
--- /dev/null
@@ -0,0 +1,38 @@
+#try {
+    background-color: #f6f6f6;
+    border-radius: 0;
+    border: 1px solid #ccc;
+    margin-top: 15px;
+    padding: 10px 15px 5px 10px;
+    position: relative;
+}
+
+#try h2 {
+    margin-top: 0;
+}
+
+#try textarea {
+    border: 1px solid #999;
+    padding: 2px;
+    width: 100%;
+    min-height: 150px;
+}
+
+#hlcode pre {
+    background-color: transparent;
+    border-radius: 0;
+}
+
+#loading {
+    position: absolute;
+    top: 0;
+    left: 0;
+    width: 100%;
+    height: 100%;
+    margin: auto auto;
+    background-color: #cccccccc;
+    display: flex;
+    flex-direction: column;
+    justify-content: center;
+    text-align: center;
+}
diff --git a/doc/_build/html/_static/demo.js b/doc/_build/html/_static/demo.js
new file mode 100644 (file)
index 0000000..f538492
--- /dev/null
@@ -0,0 +1,100 @@
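+// Pygments online demo: loads Pygments into the browser via pyodide, fills the
+// language and style <select> elements, and highlights pasted or uploaded code
+// entirely client-side (see highlight() and highlight_now() below).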
+languagePluginLoader.then(() => {
+    // pyodide is now ready to use...
+    pyodide.loadPackage('Pygments').then(() => {
+        pyodide.runPython('import pygments.lexers, pygments.formatters.html, pygments.styles');
+
+        var lexerlist = pyodide.runPython('list(pygments.lexers.get_all_lexers())');
+        var sel = document.getElementById("lang");
+        for (const lex of lexerlist) {
+            var opt = document.createElement("option");
+            opt.text = lex[0];
+            opt.value = lex[1][0];
+            sel.add(opt);
+        }
+
+        var stylelist = pyodide.runPython('list(pygments.styles.get_all_styles())');
+        var sel = document.getElementById("style");
+        for (const sty of stylelist) {
+            if (sty != "default") {
+                var opt = document.createElement("option");
+                opt.text = sty;
+                opt.value = sty;
+                sel.add(opt);
+            }
+        }
+
+        document.getElementById("hlbtn").disabled = false;
+        document.getElementById("loading").style.display = "none";
+    });
+});
+
+function new_file() {
+    pyodide.globals['fname'] = document.getElementById("file").files[0].name;
+    var alias = pyodide.runPython('pygments.lexers.find_lexer_class_for_filename(fname).aliases[0]');
+    var sel = document.getElementById("lang");
+    for (var i = 0; i < sel.length; i++) {
+        if (sel.options[i].value == alias) {
+            sel.selectedIndex = i;
+            reset_err_hl();
+            break;
+        }
+    }
+}
+
+function reset_err_hl() {
+    document.getElementById("aroundlang").style.backgroundColor = null;
+}
+
+function highlight() {
+    var select = document.getElementById("lang");
+    var alias = select.options.item(select.selectedIndex).value;
+
+    if (alias == "") {
+        document.getElementById("aroundlang").style.backgroundColor = "#ffcccc";
+        return;
+    }
+    pyodide.globals['alias'] = alias;
+
+    var select = document.getElementById("style");
+    pyodide.globals['style'] = select.options.item(select.selectedIndex).value;
+
+    pyodide.runPython('lexer = pygments.lexers.get_lexer_by_name(alias)');
+    pyodide.runPython('fmter = pygments.formatters.html.HtmlFormatter(noclasses=True, style=style)');
+
+    var file = document.getElementById("file").files[0];
+    if (file) {
+        file.arrayBuffer().then(function(buf) {
+            pyodide.globals['code_mem'] = buf;
+            pyodide.runPython('code = bytes(code_mem)');
+            highlight_now();
+        });
+    } else {
+        pyodide.globals['code'] = document.getElementById("code").value;
+        highlight_now();
+    }
+}
+
+function highlight_now() {
+    var out = document.getElementById("hlcode");
+    out.innerHTML = pyodide.runPython('pygments.highlight(code, lexer, fmter)');
+    document.location.hash = "#try";
+    document.getElementById("hlcodedl").style.display = "block";
+}
+
+function download_code() {
+    var filename = "highlighted.html";
+    var hlcode = document.getElementById("hlcode").innerHTML;
+    var blob = new Blob([hlcode], {type: 'text/html'});
+    if (window.navigator.msSaveOrOpenBlob) {
+        window.navigator.msSaveBlob(blob, filename);
+    }
+    else{
+        var elem = window.document.createElement('a');
+        elem.href = window.URL.createObjectURL(blob);
+        elem.download = filename;
+        document.body.appendChild(elem);
+        elem.click();
+        document.body.removeChild(elem);
+        window.URL.revokeObjectURL(elem.href);
+    }
+}
diff --git a/doc/_build/html/_static/docbg.png b/doc/_build/html/_static/docbg.png
new file mode 100644 (file)
index 0000000..13e61f3
Binary files /dev/null and b/doc/_build/html/_static/docbg.png differ
diff --git a/doc/_build/html/_static/doctools.js b/doc/_build/html/_static/doctools.js
new file mode 100644 (file)
index 0000000..b33f87f
--- /dev/null
@@ -0,0 +1,314 @@
+/*
+ * doctools.js
+ * ~~~~~~~~~~~
+ *
+ * Sphinx JavaScript utilities for all documentation.
+ *
+ * :copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/**
+ * select a different prefix for underscore
+ */
+$u = _.noConflict();
+
+/**
+ * make the code below compatible with browsers without
+ * an installed firebug like debugger
+if (!window.console || !console.firebug) {
+  var names = ["log", "debug", "info", "warn", "error", "assert", "dir",
+    "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace",
+    "profile", "profileEnd"];
+  window.console = {};
+  for (var i = 0; i < names.length; ++i)
+    window.console[names[i]] = function() {};
+}
+ */
+
+/**
+ * small helper function to urldecode strings
+ */
+jQuery.urldecode = function(x) {
+  return decodeURIComponent(x).replace(/\+/g, ' ');
+};
+
+/**
+ * small helper function to urlencode strings
+ */
+jQuery.urlencode = encodeURIComponent;
+
+/**
+ * This function returns the parsed url parameters of the
+ * current request. Multiple values per key are supported,
+ * it will always return arrays of strings for the value parts.
+ */
+jQuery.getQueryParameters = function(s) {
+  if (typeof s === 'undefined')
+    s = document.location.search;
+  var parts = s.substr(s.indexOf('?') + 1).split('&');
+  var result = {};
+  for (var i = 0; i < parts.length; i++) {
+    var tmp = parts[i].split('=', 2);
+    var key = jQuery.urldecode(tmp[0]);
+    var value = jQuery.urldecode(tmp[1]);
+    if (key in result)
+      result[key].push(value);
+    else
+      result[key] = [value];
+  }
+  return result;
+};
+
+/**
+ * highlight a given string on a jquery object by wrapping it in
+ * span elements with the given class name.
+ */
+jQuery.fn.highlightText = function(text, className) {
+  function highlight(node, addItems) {
+    if (node.nodeType === 3) {
+      var val = node.nodeValue;
+      var pos = val.toLowerCase().indexOf(text);
+      if (pos >= 0 &&
+          !jQuery(node.parentNode).hasClass(className) &&
+          !jQuery(node.parentNode).hasClass("nohighlight")) {
+        var span;
+        var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg");
+        if (isInSVG) {
+          span = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
+        } else {
+          span = document.createElement("span");
+          span.className = className;
+        }
+        span.appendChild(document.createTextNode(val.substr(pos, text.length)));
+        node.parentNode.insertBefore(span, node.parentNode.insertBefore(
+          document.createTextNode(val.substr(pos + text.length)),
+          node.nextSibling));
+        node.nodeValue = val.substr(0, pos);
+        if (isInSVG) {
+          var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect");
+          var bbox = node.parentElement.getBBox();
+          rect.x.baseVal.value = bbox.x;
+          rect.y.baseVal.value = bbox.y;
+          rect.width.baseVal.value = bbox.width;
+          rect.height.baseVal.value = bbox.height;
+          rect.setAttribute('class', className);
+          addItems.push({
+              "parent": node.parentNode,
+              "target": rect});
+        }
+      }
+    }
+    else if (!jQuery(node).is("button, select, textarea")) {
+      jQuery.each(node.childNodes, function() {
+        highlight(this, addItems);
+      });
+    }
+  }
+  var addItems = [];
+  var result = this.each(function() {
+    highlight(this, addItems);
+  });
+  for (var i = 0; i < addItems.length; ++i) {
+    jQuery(addItems[i].parent).before(addItems[i].target);
+  }
+  return result;
+};
+
+/*
+ * backward compatibility for jQuery.browser
+ * This will be supported until firefox bug is fixed.
+ */
+if (!jQuery.browser) {
+  jQuery.uaMatch = function(ua) {
+    ua = ua.toLowerCase();
+
+    var match = /(chrome)[ \/]([\w.]+)/.exec(ua) ||
+      /(webkit)[ \/]([\w.]+)/.exec(ua) ||
+      /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) ||
+      /(msie) ([\w.]+)/.exec(ua) ||
+      ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) ||
+      [];
+
+    return {
+      browser: match[ 1 ] || "",
+      version: match[ 2 ] || "0"
+    };
+  };
+  jQuery.browser = {};
+  jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true;
+}
+
+/**
+ * Small JavaScript module for the documentation.
+ */
+var Documentation = {
+
+  init : function() {
+    this.fixFirefoxAnchorBug();
+    this.highlightSearchWords();
+    this.initIndexTable();
+    if (DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) {
+      this.initOnKeyListeners();
+    }
+  },
+
+  /**
+   * i18n support
+   */
+  TRANSLATIONS : {},
+  PLURAL_EXPR : function(n) { return n === 1 ? 0 : 1; },
+  LOCALE : 'unknown',
+
+  // gettext and ngettext don't access this so that the functions
+  // can safely bound to a different name (_ = Documentation.gettext)
+  gettext : function(string) {
+    var translated = Documentation.TRANSLATIONS[string];
+    if (typeof translated === 'undefined')
+      return string;
+    return (typeof translated === 'string') ? translated : translated[0];
+  },
+
+  ngettext : function(singular, plural, n) {
+    var translated = Documentation.TRANSLATIONS[singular];
+    if (typeof translated === 'undefined')
+      return (n == 1) ? singular : plural;
+    return translated[Documentation.PLURAL_EXPR(n)];
+  },
+
+  addTranslations : function(catalog) {
+    for (var key in catalog.messages)
+      this.TRANSLATIONS[key] = catalog.messages[key];
+    this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')');
+    this.LOCALE = catalog.locale;
+  },
+
+  /**
+   * add context elements like header anchor links
+   */
+  addContextElements : function() {
+    $('div[id] > :header:first').each(function() {
+      $('<a class="headerlink">\u00B6</a>').
+      attr('href', '#' + this.id).
+      attr('title', _('Permalink to this headline')).
+      appendTo(this);
+    });
+    $('dt[id]').each(function() {
+      $('<a class="headerlink">\u00B6</a>').
+      attr('href', '#' + this.id).
+      attr('title', _('Permalink to this definition')).
+      appendTo(this);
+    });
+  },
+
+  /**
+   * workaround a firefox stupidity
+   * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075
+   */
+  fixFirefoxAnchorBug : function() {
+    if (document.location.hash && $.browser.mozilla)
+      window.setTimeout(function() {
+        document.location.href += '';
+      }, 10);
+  },
+
+  /**
+   * highlight the search words provided in the url in the text
+   */
+  highlightSearchWords : function() {
+    var params = $.getQueryParameters();
+    var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : [];
+    if (terms.length) {
+      var body = $('div.body');
+      if (!body.length) {
+        body = $('body');
+      }
+      window.setTimeout(function() {
+        $.each(terms, function() {
+          body.highlightText(this.toLowerCase(), 'highlighted');
+        });
+      }, 10);
+      $('<p class="highlight-link"><a href="javascript:Documentation.' +
+        'hideSearchWords()">' + _('Hide Search Matches') + '</a></p>')
+          .appendTo($('#searchbox'));
+    }
+  },
+
+  /**
+   * init the domain index toggle buttons
+   */
+  initIndexTable : function() {
+    var togglers = $('img.toggler').click(function() {
+      var src = $(this).attr('src');
+      var idnum = $(this).attr('id').substr(7);
+      $('tr.cg-' + idnum).toggle();
+      if (src.substr(-9) === 'minus.png')
+        $(this).attr('src', src.substr(0, src.length-9) + 'plus.png');
+      else
+        $(this).attr('src', src.substr(0, src.length-8) + 'minus.png');
+    }).css('display', '');
+    if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) {
+        togglers.click();
+    }
+  },
+
+  /**
+   * helper function to hide the search marks again
+   */
+  hideSearchWords : function() {
+    $('#searchbox .highlight-link').fadeOut(300);
+    $('span.highlighted').removeClass('highlighted');
+  },
+
+  /**
+   * make the url absolute
+   */
+  makeURL : function(relativeURL) {
+    return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL;
+  },
+
+  /**
+   * get the current relative url
+   */
+  getCurrentURL : function() {
+    var path = document.location.pathname;
+    var parts = path.split(/\//);
+    $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() {
+      if (this === '..')
+        parts.pop();
+    });
+    var url = parts.join('/');
+    return path.substring(url.lastIndexOf('/') + 1, path.length - 1);
+  },
+
+  initOnKeyListeners: function() {
+    $(document).keyup(function(event) {
+      var activeElementType = document.activeElement.tagName;
+      // don't navigate when in search box or textarea
+      if (activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT') {
+        switch (event.keyCode) {
+          case 37: // left
+            var prevHref = $('link[rel="prev"]').prop('href');
+            if (prevHref) {
+              window.location.href = prevHref;
+              return false;
+            }
+            break;
+          case 39: // right
+            var nextHref = $('link[rel="next"]').prop('href');
+            if (nextHref) {
+              window.location.href = nextHref;
+              return false;
+            }
+        }
+      }
+    });
+  }
+};
+
+// quick alias for translations
+_ = Documentation.gettext;
+
+$(document).ready(function() {
+  Documentation.init();
+});
diff --git a/doc/_build/html/_static/documentation_options.js b/doc/_build/html/_static/documentation_options.js
new file mode 100644 (file)
index 0000000..6ab1660
--- /dev/null
@@ -0,0 +1,10 @@
+var DOCUMENTATION_OPTIONS = {
+    URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'),
+    VERSION: '2.4.2',
+    LANGUAGE: 'None',
+    COLLAPSE_INDEX: false,
+    FILE_SUFFIX: '.html',
+    HAS_SOURCE: true,
+    SOURCELINK_SUFFIX: '.txt',
+    NAVIGATION_WITH_KEYS: false
+};
\ No newline at end of file
diff --git a/doc/_build/html/_static/favicon.ico b/doc/_build/html/_static/favicon.ico
new file mode 100644 (file)
index 0000000..777f617
Binary files /dev/null and b/doc/_build/html/_static/favicon.ico differ
diff --git a/doc/_build/html/_static/file.png b/doc/_build/html/_static/file.png
new file mode 100644 (file)
index 0000000..a858a41
Binary files /dev/null and b/doc/_build/html/_static/file.png differ
diff --git a/doc/_build/html/_static/github.png b/doc/_build/html/_static/github.png
new file mode 100644 (file)
index 0000000..5d146ad
Binary files /dev/null and b/doc/_build/html/_static/github.png differ
diff --git a/doc/_build/html/_static/jquery-3.4.1.js b/doc/_build/html/_static/jquery-3.4.1.js
new file mode 100644 (file)
index 0000000..773ad95
--- /dev/null
@@ -0,0 +1,10598 @@
+/*!
+ * jQuery JavaScript Library v3.4.1
+ * https://jquery.com/
+ *
+ * Includes Sizzle.js
+ * https://sizzlejs.com/
+ *
+ * Copyright JS Foundation and other contributors
+ * Released under the MIT license
+ * https://jquery.org/license
+ *
+ * Date: 2019-05-01T21:04Z
+ */
+( function( global, factory ) {
+
+       "use strict";
+
+       if ( typeof module === "object" && typeof module.exports === "object" ) {
+
+               // For CommonJS and CommonJS-like environments where a proper `window`
+               // is present, execute the factory and get jQuery.
+               // For environments that do not have a `window` with a `document`
+               // (such as Node.js), expose a factory as module.exports.
+               // This accentuates the need for the creation of a real `window`.
+               // e.g. var jQuery = require("jquery")(window);
+               // See ticket #14549 for more info.
+               module.exports = global.document ?
+                       factory( global, true ) :
+                       function( w ) {
+                               if ( !w.document ) {
+                                       throw new Error( "jQuery requires a window with a document" );
+                               }
+                               return factory( w );
+                       };
+       } else {
+               factory( global );
+       }
+
+// Pass this if window is not defined yet
+} )( typeof window !== "undefined" ? window : this, function( window, noGlobal ) {
+
+// Edge <= 12 - 13+, Firefox <=18 - 45+, IE 10 - 11, Safari 5.1 - 9+, iOS 6 - 9.1
+// throw exceptions when non-strict code (e.g., ASP.NET 4.5) accesses strict mode
+// arguments.callee.caller (trac-13335). But as of jQuery 3.0 (2016), strict mode should be common
+// enough that all such attempts are guarded in a try block.
+"use strict";
+
+var arr = [];
+
+var document = window.document;
+
+var getProto = Object.getPrototypeOf;
+
+var slice = arr.slice;
+
+var concat = arr.concat;
+
+var push = arr.push;
+
+var indexOf = arr.indexOf;
+
+var class2type = {};
+
+var toString = class2type.toString;
+
+var hasOwn = class2type.hasOwnProperty;
+
+var fnToString = hasOwn.toString;
+
+var ObjectFunctionString = fnToString.call( Object );
+
+var support = {};
+
+var isFunction = function isFunction( obj ) {
+
+      // Support: Chrome <=57, Firefox <=52
+      // In some browsers, typeof returns "function" for HTML <object> elements
+      // (i.e., `typeof document.createElement( "object" ) === "function"`).
+      // We don't want to classify *any* DOM node as a function.
+      return typeof obj === "function" && typeof obj.nodeType !== "number";
+  };
+
+
+var isWindow = function isWindow( obj ) {
+               return obj != null && obj === obj.window;
+       };
+
+
+
+
+       var preservedScriptAttributes = {
+               type: true,
+               src: true,
+               nonce: true,
+               noModule: true
+       };
+
+       function DOMEval( code, node, doc ) {
+               doc = doc || document;
+
+               var i, val,
+                       script = doc.createElement( "script" );
+
+               script.text = code;
+               if ( node ) {
+                       for ( i in preservedScriptAttributes ) {
+
+                               // Support: Firefox 64+, Edge 18+
+                               // Some browsers don't support the "nonce" property on scripts.
+                               // On the other hand, just using `getAttribute` is not enough as
+                               // the `nonce` attribute is reset to an empty string whenever it
+                               // becomes browsing-context connected.
+                               // See https://github.com/whatwg/html/issues/2369
+                               // See https://html.spec.whatwg.org/#nonce-attributes
+                               // The `node.getAttribute` check was added for the sake of
+                               // `jQuery.globalEval` so that it can fake a nonce-containing node
+                               // via an object.
+                               val = node[ i ] || node.getAttribute && node.getAttribute( i );
+                               if ( val ) {
+                                       script.setAttribute( i, val );
+                               }
+                       }
+               }
+               doc.head.appendChild( script ).parentNode.removeChild( script );
+       }
+
+
+function toType( obj ) {
+       if ( obj == null ) {
+               return obj + "";
+       }
+
+       // Support: Android <=2.3 only (functionish RegExp)
+       return typeof obj === "object" || typeof obj === "function" ?
+               class2type[ toString.call( obj ) ] || "object" :
+               typeof obj;
+}
+/* global Symbol */
+// Defining this global in .eslintrc.json would create a danger of using the global
+// unguarded in another place, it seems safer to define global only for this module
+
+
+
+var
+       version = "3.4.1",
+
+       // Define a local copy of jQuery
+       jQuery = function( selector, context ) {
+
+               // The jQuery object is actually just the init constructor 'enhanced'
+               // Need init if jQuery is called (just allow error to be thrown if not included)
+               return new jQuery.fn.init( selector, context );
+       },
+
+       // Support: Android <=4.0 only
+       // Make sure we trim BOM and NBSP
+       rtrim = /^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g;
+
+jQuery.fn = jQuery.prototype = {
+
+       // The current version of jQuery being used
+       jquery: version,
+
+       constructor: jQuery,
+
+       // The default length of a jQuery object is 0
+       length: 0,
+
+       toArray: function() {
+               return slice.call( this );
+       },
+
+       // Get the Nth element in the matched element set OR
+       // Get the whole matched element set as a clean array
+       get: function( num ) {
+
+               // Return all the elements in a clean array
+               if ( num == null ) {
+                       return slice.call( this );
+               }
+
+               // Return just the one element from the set
+               return num < 0 ? this[ num + this.length ] : this[ num ];
+       },
+
+       // Take an array of elements and push it onto the stack
+       // (returning the new matched element set)
+       pushStack: function( elems ) {
+
+               // Build a new jQuery matched element set
+               var ret = jQuery.merge( this.constructor(), elems );
+
+               // Add the old object onto the stack (as a reference)
+               ret.prevObject = this;
+
+               // Return the newly-formed element set
+               return ret;
+       },
+
+       // Execute a callback for every element in the matched set.
+       each: function( callback ) {
+               return jQuery.each( this, callback );
+       },
+
+       map: function( callback ) {
+               return this.pushStack( jQuery.map( this, function( elem, i ) {
+                       return callback.call( elem, i, elem );
+               } ) );
+       },
+
+       slice: function() {
+               return this.pushStack( slice.apply( this, arguments ) );
+       },
+
+       first: function() {
+               return this.eq( 0 );
+       },
+
+       last: function() {
+               return this.eq( -1 );
+       },
+
+       eq: function( i ) {
+               var len = this.length,
+                       j = +i + ( i < 0 ? len : 0 );
+               return this.pushStack( j >= 0 && j < len ? [ this[ j ] ] : [] );
+       },
+
+       end: function() {
+               return this.prevObject || this.constructor();
+       },
+
+       // For internal use only.
+       // Behaves like an Array's method, not like a jQuery method.
+       push: push,
+       sort: arr.sort,
+       splice: arr.splice
+};
+
+jQuery.extend = jQuery.fn.extend = function() {
+       var options, name, src, copy, copyIsArray, clone,
+               target = arguments[ 0 ] || {},
+               i = 1,
+               length = arguments.length,
+               deep = false;
+
+       // Handle a deep copy situation
+       if ( typeof target === "boolean" ) {
+               deep = target;
+
+               // Skip the boolean and the target
+               target = arguments[ i ] || {};
+               i++;
+       }
+
+       // Handle case when target is a string or something (possible in deep copy)
+       if ( typeof target !== "object" && !isFunction( target ) ) {
+               target = {};
+       }
+
+       // Extend jQuery itself if only one argument is passed
+       if ( i === length ) {
+               target = this;
+               i--;
+       }
+
+       for ( ; i < length; i++ ) {
+
+               // Only deal with non-null/undefined values
+               if ( ( options = arguments[ i ] ) != null ) {
+
+                       // Extend the base object
+                       for ( name in options ) {
+                               copy = options[ name ];
+
+                               // Prevent Object.prototype pollution
+                               // Prevent never-ending loop
+                               if ( name === "__proto__" || target === copy ) {
+                                       continue;
+                               }
+
+                               // Recurse if we're merging plain objects or arrays
+                               if ( deep && copy && ( jQuery.isPlainObject( copy ) ||
+                                       ( copyIsArray = Array.isArray( copy ) ) ) ) {
+                                       src = target[ name ];
+
+                                       // Ensure proper type for the source value
+                                       if ( copyIsArray && !Array.isArray( src ) ) {
+                                               clone = [];
+                                       } else if ( !copyIsArray && !jQuery.isPlainObject( src ) ) {
+                                               clone = {};
+                                       } else {
+                                               clone = src;
+                                       }
+                                       copyIsArray = false;
+
+                                       // Never move original objects, clone them
+                                       target[ name ] = jQuery.extend( deep, clone, copy );
+
+                               // Don't bring in undefined values
+                               } else if ( copy !== undefined ) {
+                                       target[ name ] = copy;
+                               }
+                       }
+               }
+       }
+
+       // Return the modified object
+       return target;
+};
+
+jQuery.extend( {
+
+       // Unique for each copy of jQuery on the page
+       expando: "jQuery" + ( version + Math.random() ).replace( /\D/g, "" ),
+
+       // Assume jQuery is ready without the ready module
+       isReady: true,
+
+       error: function( msg ) {
+               throw new Error( msg );
+       },
+
+       noop: function() {},
+
+       isPlainObject: function( obj ) {
+               var proto, Ctor;
+
+               // Detect obvious negatives
+               // Use toString instead of jQuery.type to catch host objects
+               if ( !obj || toString.call( obj ) !== "[object Object]" ) {
+                       return false;
+               }
+
+               proto = getProto( obj );
+
+               // Objects with no prototype (e.g., `Object.create( null )`) are plain
+               if ( !proto ) {
+                       return true;
+               }
+
+               // Objects with prototype are plain iff they were constructed by a global Object function
+               Ctor = hasOwn.call( proto, "constructor" ) && proto.constructor;
+               return typeof Ctor === "function" && fnToString.call( Ctor ) === ObjectFunctionString;
+       },
+
+       isEmptyObject: function( obj ) {
+               var name;
+
+               for ( name in obj ) {
+                       return false;
+               }
+               return true;
+       },
+
+       // Evaluates a script in a global context
+       globalEval: function( code, options ) {
+               DOMEval( code, { nonce: options && options.nonce } );
+       },
+
+       each: function( obj, callback ) {
+               var length, i = 0;
+
+               if ( isArrayLike( obj ) ) {
+                       length = obj.length;
+                       for ( ; i < length; i++ ) {
+                               if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) {
+                                       break;
+                               }
+                       }
+               } else {
+                       for ( i in obj ) {
+                               if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) {
+                                       break;
+                               }
+                       }
+               }
+
+               return obj;
+       },
+
+       // Support: Android <=4.0 only
+       trim: function( text ) {
+               return text == null ?
+                       "" :
+                       ( text + "" ).replace( rtrim, "" );
+       },
+
+       // results is for internal usage only
+       makeArray: function( arr, results ) {
+               var ret = results || [];
+
+               if ( arr != null ) {
+                       if ( isArrayLike( Object( arr ) ) ) {
+                               jQuery.merge( ret,
+                                       typeof arr === "string" ?
+                                       [ arr ] : arr
+                               );
+                       } else {
+                               push.call( ret, arr );
+                       }
+               }
+
+               return ret;
+       },
+
+       inArray: function( elem, arr, i ) {
+               return arr == null ? -1 : indexOf.call( arr, elem, i );
+       },
+
+       // Support: Android <=4.0 only, PhantomJS 1 only
+       // push.apply(_, arraylike) throws on ancient WebKit
+       merge: function( first, second ) {
+               var len = +second.length,
+                       j = 0,
+                       i = first.length;
+
+               for ( ; j < len; j++ ) {
+                       first[ i++ ] = second[ j ];
+               }
+
+               first.length = i;
+
+               return first;
+       },
+
+       grep: function( elems, callback, invert ) {
+               var callbackInverse,
+                       matches = [],
+                       i = 0,
+                       length = elems.length,
+                       callbackExpect = !invert;
+
+               // Go through the array, only saving the items
+               // that pass the validator function
+               for ( ; i < length; i++ ) {
+                       callbackInverse = !callback( elems[ i ], i );
+                       if ( callbackInverse !== callbackExpect ) {
+                               matches.push( elems[ i ] );
+                       }
+               }
+
+               return matches;
+       },
+
+       // arg is for internal usage only
+       map: function( elems, callback, arg ) {
+               var length, value,
+                       i = 0,
+                       ret = [];
+
+               // Go through the array, translating each of the items to their new values
+               if ( isArrayLike( elems ) ) {
+                       length = elems.length;
+                       for ( ; i < length; i++ ) {
+                               value = callback( elems[ i ], i, arg );
+
+                               if ( value != null ) {
+                                       ret.push( value );
+                               }
+                       }
+
+               // Go through every key on the object,
+               } else {
+                       for ( i in elems ) {
+                               value = callback( elems[ i ], i, arg );
+
+                               if ( value != null ) {
+                                       ret.push( value );
+                               }
+                       }
+               }
+
+               // Flatten any nested arrays
+               return concat.apply( [], ret );
+       },
+
+       // A global GUID counter for objects
+       guid: 1,
+
+       // jQuery.support is not used in Core but other projects attach their
+       // properties to it so it needs to exist.
+       support: support
+} );
+
+if ( typeof Symbol === "function" ) {
+       jQuery.fn[ Symbol.iterator ] = arr[ Symbol.iterator ];
+}
+
+// Populate the class2type map
+jQuery.each( "Boolean Number String Function Array Date RegExp Object Error Symbol".split( " " ),
+function( i, name ) {
+       class2type[ "[object " + name + "]" ] = name.toLowerCase();
+} );
+
+function isArrayLike( obj ) {
+
+       // Support: real iOS 8.2 only (not reproducible in simulator)
+       // `in` check used to prevent JIT error (gh-2145)
+       // hasOwn isn't used here due to false negatives
+       // regarding Nodelist length in IE
+       var length = !!obj && "length" in obj && obj.length,
+               type = toType( obj );
+
+       if ( isFunction( obj ) || isWindow( obj ) ) {
+               return false;
+       }
+
+       return type === "array" || length === 0 ||
+               typeof length === "number" && length > 0 && ( length - 1 ) in obj;
+}
+var Sizzle =
+/*!
+ * Sizzle CSS Selector Engine v2.3.4
+ * https://sizzlejs.com/
+ *
+ * Copyright JS Foundation and other contributors
+ * Released under the MIT license
+ * https://js.foundation/
+ *
+ * Date: 2019-04-08
+ */
+(function( window ) {
+
+var i,
+       support,
+       Expr,
+       getText,
+       isXML,
+       tokenize,
+       compile,
+       select,
+       outermostContext,
+       sortInput,
+       hasDuplicate,
+
+       // Local document vars
+       setDocument,
+       document,
+       docElem,
+       documentIsHTML,
+       rbuggyQSA,
+       rbuggyMatches,
+       matches,
+       contains,
+
+       // Instance-specific data
+       expando = "sizzle" + 1 * new Date(),
+       preferredDoc = window.document,
+       dirruns = 0,
+       done = 0,
+       classCache = createCache(),
+       tokenCache = createCache(),
+       compilerCache = createCache(),
+       nonnativeSelectorCache = createCache(),
+       sortOrder = function( a, b ) {
+               if ( a === b ) {
+                       hasDuplicate = true;
+               }
+               return 0;
+       },
+
+       // Instance methods
+       hasOwn = ({}).hasOwnProperty,
+       arr = [],
+       pop = arr.pop,
+       push_native = arr.push,
+       push = arr.push,
+       slice = arr.slice,
+       // Use a stripped-down indexOf as it's faster than native
+       // https://jsperf.com/thor-indexof-vs-for/5
+       indexOf = function( list, elem ) {
+               var i = 0,
+                       len = list.length;
+               for ( ; i < len; i++ ) {
+                       if ( list[i] === elem ) {
+                               return i;
+                       }
+               }
+               return -1;
+       },
+
+       booleans = "checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",
+
+       // Regular expressions
+
+       // http://www.w3.org/TR/css3-selectors/#whitespace
+       whitespace = "[\\x20\\t\\r\\n\\f]",
+
+       // http://www.w3.org/TR/CSS21/syndata.html#value-def-identifier
+       identifier = "(?:\\\\.|[\\w-]|[^\0-\\xa0])+",
+
+       // Attribute selectors: http://www.w3.org/TR/selectors/#attribute-selectors
+       attributes = "\\[" + whitespace + "*(" + identifier + ")(?:" + whitespace +
+               // Operator (capture 2)
+               "*([*^$|!~]?=)" + whitespace +
+               // "Attribute values must be CSS identifiers [capture 5] or strings [capture 3 or capture 4]"
+               "*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|(" + identifier + "))|)" + whitespace +
+               "*\\]",
+
+       pseudos = ":(" + identifier + ")(?:\\((" +
+               // To reduce the number of selectors needing tokenize in the preFilter, prefer arguments:
+               // 1. quoted (capture 3; capture 4 or capture 5)
+               "('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|" +
+               // 2. simple (capture 6)
+               "((?:\\\\.|[^\\\\()[\\]]|" + attributes + ")*)|" +
+               // 3. anything else (capture 2)
+               ".*" +
+               ")\\)|)",
+
+       // Leading and non-escaped trailing whitespace, capturing some non-whitespace characters preceding the latter
+       rwhitespace = new RegExp( whitespace + "+", "g" ),
+       rtrim = new RegExp( "^" + whitespace + "+|((?:^|[^\\\\])(?:\\\\.)*)" + whitespace + "+$", "g" ),
+
+       rcomma = new RegExp( "^" + whitespace + "*," + whitespace + "*" ),
+       rcombinators = new RegExp( "^" + whitespace + "*([>+~]|" + whitespace + ")" + whitespace + "*" ),
+       rdescend = new RegExp( whitespace + "|>" ),
+
+       rpseudo = new RegExp( pseudos ),
+       ridentifier = new RegExp( "^" + identifier + "$" ),
+
+       matchExpr = {
+               "ID": new RegExp( "^#(" + identifier + ")" ),
+               "CLASS": new RegExp( "^\\.(" + identifier + ")" ),
+               "TAG": new RegExp( "^(" + identifier + "|[*])" ),
+               "ATTR": new RegExp( "^" + attributes ),
+               "PSEUDO": new RegExp( "^" + pseudos ),
+               "CHILD": new RegExp( "^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\(" + whitespace +
+                       "*(even|odd|(([+-]|)(\\d*)n|)" + whitespace + "*(?:([+-]|)" + whitespace +
+                       "*(\\d+)|))" + whitespace + "*\\)|)", "i" ),
+               "bool": new RegExp( "^(?:" + booleans + ")$", "i" ),
+               // For use in libraries implementing .is()
+               // We use this for POS matching in `select`
+               "needsContext": new RegExp( "^" + whitespace + "*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\(" +
+                       whitespace + "*((?:-\\d)?\\d*)" + whitespace + "*\\)|)(?=[^-]|$)", "i" )
+       },
+
+       rhtml = /HTML$/i,
+       rinputs = /^(?:input|select|textarea|button)$/i,
+       rheader = /^h\d$/i,
+
+       rnative = /^[^{]+\{\s*\[native \w/,
+
+       // Easily-parseable/retrievable ID or TAG or CLASS selectors
+       rquickExpr = /^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,
+
+       rsibling = /[+~]/,
+
+       // CSS escapes
+       // http://www.w3.org/TR/CSS21/syndata.html#escaped-characters
+       runescape = new RegExp( "\\\\([\\da-f]{1,6}" + whitespace + "?|(" + whitespace + ")|.)", "ig" ),
+       funescape = function( _, escaped, escapedWhitespace ) {
+               var high = "0x" + escaped - 0x10000;
+               // NaN means non-codepoint
+               // Support: Firefox<24
+               // Workaround erroneous numeric interpretation of +"0x"
+               return high !== high || escapedWhitespace ?
+                       escaped :
+                       high < 0 ?
+                               // BMP codepoint
+                               String.fromCharCode( high + 0x10000 ) :
+                               // Supplemental Plane codepoint (surrogate pair)
+                               String.fromCharCode( high >> 10 | 0xD800, high & 0x3FF | 0xDC00 );
+       },
+
+       // CSS string/identifier serialization
+       // https://drafts.csswg.org/cssom/#common-serializing-idioms
+       rcssescape = /([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,
+       fcssescape = function( ch, asCodePoint ) {
+               if ( asCodePoint ) {
+
+                       // U+0000 NULL becomes U+FFFD REPLACEMENT CHARACTER
+                       if ( ch === "\0" ) {
+                               return "\uFFFD";
+                       }
+
+                       // Control characters and (dependent upon position) numbers get escaped as code points
+                       return ch.slice( 0, -1 ) + "\\" + ch.charCodeAt( ch.length - 1 ).toString( 16 ) + " ";
+               }
+
+               // Other potentially-special ASCII characters get backslash-escaped
+               return "\\" + ch;
+       },
+
+       // Used for iframes
+       // See setDocument()
+       // Removing the function wrapper causes a "Permission Denied"
+       // error in IE
+       unloadHandler = function() {
+               setDocument();
+       },
+
+       inDisabledFieldset = addCombinator(
+               function( elem ) {
+                       return elem.disabled === true && elem.nodeName.toLowerCase() === "fieldset";
+               },
+               { dir: "parentNode", next: "legend" }
+       );
+
+// Optimize for push.apply( _, NodeList )
+try {
+       push.apply(
+               (arr = slice.call( preferredDoc.childNodes )),
+               preferredDoc.childNodes
+       );
+       // Support: Android<4.0
+       // Detect silently failing push.apply
+       arr[ preferredDoc.childNodes.length ].nodeType;
+} catch ( e ) {
+       push = { apply: arr.length ?
+
+               // Leverage slice if possible
+               function( target, els ) {
+                       push_native.apply( target, slice.call(els) );
+               } :
+
+               // Support: IE<9
+               // Otherwise append directly
+               function( target, els ) {
+                       var j = target.length,
+                               i = 0;
+                       // Can't trust NodeList.length
+                       while ( (target[j++] = els[i++]) ) {}
+                       target.length = j - 1;
+               }
+       };
+}
+
+function Sizzle( selector, context, results, seed ) {
+       var m, i, elem, nid, match, groups, newSelector,
+               newContext = context && context.ownerDocument,
+
+               // nodeType defaults to 9, since context defaults to document
+               nodeType = context ? context.nodeType : 9;
+
+       results = results || [];
+
+       // Return early from calls with invalid selector or context
+       if ( typeof selector !== "string" || !selector ||
+               nodeType !== 1 && nodeType !== 9 && nodeType !== 11 ) {
+
+               return results;
+       }
+
+       // Try to shortcut find operations (as opposed to filters) in HTML documents
+       if ( !seed ) {
+
+               if ( ( context ? context.ownerDocument || context : preferredDoc ) !== document ) {
+                       setDocument( context );
+               }
+               context = context || document;
+
+               if ( documentIsHTML ) {
+
+                       // If the selector is sufficiently simple, try using a "get*By*" DOM method
+                       // (excepting DocumentFragment context, where the methods don't exist)
+                       if ( nodeType !== 11 && (match = rquickExpr.exec( selector )) ) {
+
+                               // ID selector
+                               if ( (m = match[1]) ) {
+
+                                       // Document context
+                                       if ( nodeType === 9 ) {
+                                               if ( (elem = context.getElementById( m )) ) {
+
+                                                       // Support: IE, Opera, Webkit
+                                                       // TODO: identify versions
+                                                       // getElementById can match elements by name instead of ID
+                                                       if ( elem.id === m ) {
+                                                               results.push( elem );
+                                                               return results;
+                                                       }
+                                               } else {
+                                                       return results;
+                                               }
+
+                                       // Element context
+                                       } else {
+
+                                               // Support: IE, Opera, Webkit
+                                               // TODO: identify versions
+                                               // getElementById can match elements by name instead of ID
+                                               if ( newContext && (elem = newContext.getElementById( m )) &&
+                                                       contains( context, elem ) &&
+                                                       elem.id === m ) {
+
+                                                       results.push( elem );
+                                                       return results;
+                                               }
+                                       }
+
+                               // Type selector
+                               } else if ( match[2] ) {
+                                       push.apply( results, context.getElementsByTagName( selector ) );
+                                       return results;
+
+                               // Class selector
+                               } else if ( (m = match[3]) && support.getElementsByClassName &&
+                                       context.getElementsByClassName ) {
+
+                                       push.apply( results, context.getElementsByClassName( m ) );
+                                       return results;
+                               }
+                       }
+
+                       // Take advantage of querySelectorAll
+                       if ( support.qsa &&
+                               !nonnativeSelectorCache[ selector + " " ] &&
+                               (!rbuggyQSA || !rbuggyQSA.test( selector )) &&
+
+                               // Support: IE 8 only
+                               // Exclude object elements
+                               (nodeType !== 1 || context.nodeName.toLowerCase() !== "object") ) {
+
+                               newSelector = selector;
+                               newContext = context;
+
+                               // qSA considers elements outside a scoping root when evaluating child or
+                               // descendant combinators, which is not what we want.
+                               // In such cases, we work around the behavior by prefixing every selector in the
+                               // list with an ID selector referencing the scope context.
+                               // Thanks to Andrew Dupont for this technique.
+                               if ( nodeType === 1 && rdescend.test( selector ) ) {
+
+                                       // Capture the context ID, setting it first if necessary
+                                       if ( (nid = context.getAttribute( "id" )) ) {
+                                               nid = nid.replace( rcssescape, fcssescape );
+                                       } else {
+                                               context.setAttribute( "id", (nid = expando) );
+                                       }
+
+                                       // Prefix every selector in the list
+                                       groups = tokenize( selector );
+                                       i = groups.length;
+                                       while ( i-- ) {
+                                               groups[i] = "#" + nid + " " + toSelector( groups[i] );
+                                       }
+                                       newSelector = groups.join( "," );
+
+                                       // Expand context for sibling selectors
+                                       newContext = rsibling.test( selector ) && testContext( context.parentNode ) ||
+                                               context;
+                               }
+
+                               try {
+                                       push.apply( results,
+                                               newContext.querySelectorAll( newSelector )
+                                       );
+                                       return results;
+                               } catch ( qsaError ) {
+                                       nonnativeSelectorCache( selector, true );
+                               } finally {
+                                       if ( nid === expando ) {
+                                               context.removeAttribute( "id" );
+                                       }
+                               }
+                       }
+               }
+       }
+
+       // All others
+       return select( selector.replace( rtrim, "$1" ), context, results, seed );
+}
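+
+// Usage sketch (illustrative only; `candidates` below is a hypothetical element array):
+// Sizzle appends matches to `results` and returns it; passing a `seed` array switches it
+// from find mode to filter mode.
+//
+//     var hits = Sizzle( "div.note > p", document, [] );       // find within the document
+//     var kept = Sizzle( ".active", null, [], candidates );    // filter an existing set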
+
+/**
+ * Create key-value caches of limited size
+ * @returns {function(string, object)} Returns the Object data after storing it on itself with
+ *     property name the (space-suffixed) string and (if the cache is larger than Expr.cacheLength)
+ *     deleting the oldest entry
+ */
+function createCache() {
+       var keys = [];
+
+       function cache( key, value ) {
+               // Use (key + " ") to avoid collision with native prototype properties (see Issue #157)
+               if ( keys.push( key + " " ) > Expr.cacheLength ) {
+                       // Only keep the most recent entries
+                       delete cache[ keys.shift() ];
+               }
+               return (cache[ key + " " ] = value);
+       }
+       return cache;
+}
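+
+// Usage sketch (illustrative only; `compiledMatcher` is a hypothetical value): the returned
+// function memoizes under the space-suffixed key and evicts the oldest entry once
+// Expr.cacheLength is exceeded.
+//
+//     var compilerCache = createCache();
+//     compilerCache( "div > p", compiledMatcher );    // stores and returns compiledMatcher
+//     compilerCache[ "div > p" + " " ];               // later reads use the space-suffixed key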
+
+/**
+ * Mark a function for special use by Sizzle
+ * @param {Function} fn The function to mark
+ */
+function markFunction( fn ) {
+       fn[ expando ] = true;
+       return fn;
+}
+
+/**
+ * Support testing using an element
+ * @param {Function} fn Passed the created element and returns a boolean result
+ */
+function assert( fn ) {
+       var el = document.createElement("fieldset");
+
+       try {
+               return !!fn( el );
+       } catch (e) {
+               return false;
+       } finally {
+               // Remove from its parent by default
+               if ( el.parentNode ) {
+                       el.parentNode.removeChild( el );
+               }
+               // release memory in IE
+               el = null;
+       }
+}
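+
+// Usage sketch (illustrative only; `support.example` is a hypothetical flag): the test runs
+// against a detached <fieldset> and yields false if it throws.
+//
+//     support.example = assert(function( el ) {
+//         el.innerHTML = "<a href='#'></a>";
+//         return !!el.firstChild;
+//     });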
+
+/**
+ * Adds the same handler for all of the specified attrs
+ * @param {String} attrs Pipe-separated list of attributes
+ * @param {Function} handler The method that will be applied
+ */
+function addHandle( attrs, handler ) {
+       var arr = attrs.split("|"),
+               i = arr.length;
+
+       while ( i-- ) {
+               Expr.attrHandle[ arr[i] ] = handler;
+       }
+}
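+
+// Usage sketch (illustrative only; the handler body is hypothetical): each attribute in the
+// pipe-separated list shares the same entry in Expr.attrHandle.
+//
+//     addHandle( "checked|selected", function( elem, name, isXML ) {
+//         if ( !isXML ) {
+//             return elem[ name ] === true ? name.toLowerCase() : null;
+//         }
+//     });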
+
+/**
+ * Checks document order of two siblings
+ * @param {Element} a
+ * @param {Element} b
+ * @returns {Number} Returns less than 0 if a precedes b, greater than 0 if a follows b
+ */
+function siblingCheck( a, b ) {
+       var cur = b && a,
+               diff = cur && a.nodeType === 1 && b.nodeType === 1 &&
+                       a.sourceIndex - b.sourceIndex;
+
+       // Use IE sourceIndex if available on both nodes
+       if ( diff ) {
+               return diff;
+       }
+
+       // Check if b follows a
+       if ( cur ) {
+               while ( (cur = cur.nextSibling) ) {
+                       if ( cur === b ) {
+                               return -1;
+                       }
+               }
+       }
+
+       return a ? 1 : -1;
+}
+
+/**
+ * Returns a function to use in pseudos for input types
+ * @param {String} type
+ */
+function createInputPseudo( type ) {
+       return function( elem ) {
+               var name = elem.nodeName.toLowerCase();
+               return name === "input" && elem.type === type;
+       };
+}
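+
+// Usage sketch (illustrative only): a pseudo built this way matches one input type, e.g.
+//
+//     Expr.pseudos[ "checkbox" ] = createInputPseudo( "checkbox" );   // matches <input type="checkbox">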
+
+/**
+ * Returns a function to use in pseudos for buttons
+ * @param {String} type
+ */
+function createButtonPseudo( type ) {
+       return function( elem ) {
+               var name = elem.nodeName.toLowerCase();
+               return (name === "input" || name === "button") && elem.type === type;
+       };
+}
+
+/**
+ * Returns a function to use in pseudos for :enabled/:disabled
+ * @param {Boolean} disabled true for :disabled; false for :enabled
+ */
+function createDisabledPseudo( disabled ) {
+
+       // Known :disabled false positives: fieldset[disabled] > legend:nth-of-type(n+2) :can-disable
+       return function( elem ) {
+
+               // Only certain elements can match :enabled or :disabled
+               // https://html.spec.whatwg.org/multipage/scripting.html#selector-enabled
+               // https://html.spec.whatwg.org/multipage/scripting.html#selector-disabled
+               if ( "form" in elem ) {
+
+                       // Check for inherited disabledness on relevant non-disabled elements:
+                       // * listed form-associated elements in a disabled fieldset
+                       //   https://html.spec.whatwg.org/multipage/forms.html#category-listed
+                       //   https://html.spec.whatwg.org/multipage/forms.html#concept-fe-disabled
+                       // * option elements in a disabled optgroup
+                       //   https://html.spec.whatwg.org/multipage/forms.html#concept-option-disabled
+                       // All such elements have a "form" property.
+                       if ( elem.parentNode && elem.disabled === false ) {
+
+                               // Option elements defer to a parent optgroup if present
+                               if ( "label" in elem ) {
+                                       if ( "label" in elem.parentNode ) {
+                                               return elem.parentNode.disabled === disabled;
+                                       } else {
+                                               return elem.disabled === disabled;
+                                       }
+                               }
+
+                               // Support: IE 6 - 11
+                               // Use the isDisabled shortcut property to check for disabled fieldset ancestors
+                               return elem.isDisabled === disabled ||
+
+                                       // Where there is no isDisabled, check manually
+                                       /* jshint -W018 */
+                                       elem.isDisabled !== !disabled &&
+                                               inDisabledFieldset( elem ) === disabled;
+                       }
+
+                       return elem.disabled === disabled;
+
+               // Try to winnow out elements that can't be disabled before trusting the disabled property.
+               // Some victims get caught in our net (label, legend, menu, track), but it shouldn't
+               // even exist on them, let alone have a boolean value.
+               } else if ( "label" in elem ) {
+                       return elem.disabled === disabled;
+               }
+
+               // Remaining elements are neither :enabled nor :disabled
+               return false;
+       };
+}
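+
+// Usage sketch (illustrative only): both pseudos can be built from this one factory, e.g.
+//
+//     Expr.pseudos[ "enabled" ] = createDisabledPseudo( false );
+//     Expr.pseudos[ "disabled" ] = createDisabledPseudo( true );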
+
+/**
+ * Returns a function to use in pseudos for positionals
+ * @param {Function} fn
+ */
+function createPositionalPseudo( fn ) {
+       return markFunction(function( argument ) {
+               argument = +argument;
+               return markFunction(function( seed, matches ) {
+                       var j,
+                               matchIndexes = fn( [], seed.length, argument ),
+                               i = matchIndexes.length;
+
+                       // Match elements found at the specified indexes
+                       while ( i-- ) {
+                               if ( seed[ (j = matchIndexes[i]) ] ) {
+                                       seed[j] = !(matches[j] = seed[j]);
+                               }
+                       }
+               });
+       });
+}
+
+/**
+ * Checks a node for validity as a Sizzle context
+ * @param {Element|Object=} context
+ * @returns {Element|Object|Boolean} The input node if acceptable, otherwise a falsy value
+ */
+function testContext( context ) {
+       return context && typeof context.getElementsByTagName !== "undefined" && context;
+}
+
+// Expose support vars for convenience
+support = Sizzle.support = {};
+
+/**
+ * Detects XML nodes
+ * @param {Element|Object} elem An element or a document
+ * @returns {Boolean} True iff elem is a non-HTML XML node
+ */
+isXML = Sizzle.isXML = function( elem ) {
+       var namespace = elem.namespaceURI,
+               docElem = (elem.ownerDocument || elem).documentElement;
+
+       // Support: IE <=8
+       // Assume HTML when documentElement doesn't yet exist, such as inside loading iframes
+       // https://bugs.jquery.com/ticket/4833
+       return !rhtml.test( namespace || docElem && docElem.nodeName || "HTML" );
+};
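+
+// Usage sketch (illustrative only):
+//
+//     Sizzle.isXML( document );                                                // false in an HTML page
+//     Sizzle.isXML( new DOMParser().parseFromString( "<a/>", "text/xml" ) );   // true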
+
+/**
+ * Sets document-related variables once based on the current document
+ * @param {Element|Object} [doc] An element or document object to use to set the document
+ * @returns {Object} Returns the current document
+ */
+setDocument = Sizzle.setDocument = function( node ) {
+       var hasCompare, subWindow,
+               doc = node ? node.ownerDocument || node : preferredDoc;
+
+       // Return early if doc is invalid or already selected
+       if ( doc === document || doc.nodeType !== 9 || !doc.documentElement ) {
+               return document;
+       }
+
+       // Update global variables
+       document = doc;
+       docElem = document.documentElement;
+       documentIsHTML = !isXML( document );
+
+       // Support: IE 9-11, Edge
+       // Accessing iframe documents after unload throws "permission denied" errors (jQuery #13936)
+       if ( preferredDoc !== document &&
+               (subWindow = document.defaultView) && subWindow.top !== subWindow ) {
+
+               // Support: IE 11, Edge
+               if ( subWindow.addEventListener ) {
+                       subWindow.addEventListener( "unload", unloadHandler, false );
+
+               // Support: IE 9 - 10 only
+               } else if ( subWindow.attachEvent ) {
+                       subWindow.attachEvent( "onunload", unloadHandler );
+               }
+       }
+
+       /* Attributes
+       ---------------------------------------------------------------------- */
+
+       // Support: IE<8
+       // Verify that getAttribute really returns attributes and not properties
+       // (excepting IE8 booleans)
+       support.attributes = assert(function( el ) {
+               el.className = "i";
+               return !el.getAttribute("className");
+       });
+
+       /* getElement(s)By*
+       ---------------------------------------------------------------------- */
+
+       // Check if getElementsByTagName("*") returns only elements
+       support.getElementsByTagName = assert(function( el ) {
+               el.appendChild( document.createComment("") );
+               return !el.getElementsByTagName("*").length;
+       });
+
+       // Support: IE<9
+       support.getElementsByClassName = rnative.test( document.getElementsByClassName );
+
+       // Support: IE<10
+       // Check if getElementById returns elements by name
+       // The broken getElementById methods don't pick up programmatically-set names,
+       // so use a roundabout getElementsByName test
+       support.getById = assert(function( el ) {
+               docElem.appendChild( el ).id = expando;
+               return !document.getElementsByName || !document.getElementsByName( expando ).length;
+       });
+
+       // ID filter and find
+       if ( support.getById ) {
+               Expr.filter["ID"] = function( id ) {
+                       var attrId = id.replace( runescape, funescape );
+                       return function( elem ) {
+                               return elem.getAttribute("id") === attrId;
+                       };
+               };
+               Expr.find["ID"] = function( id, context ) {
+                       if ( typeof context.getElementById !== "undefined" && documentIsHTML ) {
+                               var elem = context.getElementById( id );
+                               return elem ? [ elem ] : [];
+                       }
+               };
+       } else {
+               Expr.filter["ID"] =  function( id ) {
+                       var attrId = id.replace( runescape, funescape );
+                       return function( elem ) {
+                               var node = typeof elem.getAttributeNode !== "undefined" &&
+                                       elem.getAttributeNode("id");
+                               return node && node.value === attrId;
+                       };
+               };
+
+               // Support: IE 6 - 7 only
+               // getElementById is not reliable as a find shortcut
+               Expr.find["ID"] = function( id, context ) {
+                       if ( typeof context.getElementById !== "undefined" && documentIsHTML ) {
+                               var node, i, elems,
+                                       elem = context.getElementById( id );
+
+                               if ( elem ) {
+
+                                       // Verify the id attribute
+                                       node = elem.getAttributeNode("id");
+                                       if ( node && node.value === id ) {
+                                               return [ elem ];
+                                       }
+
+                                       // Fall back on getElementsByName
+                                       elems = context.getElementsByName( id );
+                                       i = 0;
+                                       while ( (elem = elems[i++]) ) {
+                                               node = elem.getAttributeNode("id");
+                                               if ( node && node.value === id ) {
+                                                       return [ elem ];
+                                               }
+                                       }
+                               }
+
+                               return [];
+                       }
+               };
+       }
+
+       // Tag
+       Expr.find["TAG"] = support.getElementsByTagName ?
+               function( tag, context ) {
+                       if ( typeof context.getElementsByTagName !== "undefined" ) {
+                               return context.getElementsByTagName( tag );
+
+                       // DocumentFragment nodes don't have gEBTN
+                       } else if ( support.qsa ) {
+                               return context.querySelectorAll( tag );
+                       }
+               } :
+
+               function( tag, context ) {
+                       var elem,
+                               tmp = [],
+                               i = 0,
+                               // By happy coincidence, a (broken) gEBTN appears on DocumentFragment nodes too
+                               results = context.getElementsByTagName( tag );
+
+                       // Filter out possible comments
+                       if ( tag === "*" ) {
+                               while ( (elem = results[i++]) ) {
+                                       if ( elem.nodeType === 1 ) {
+                                               tmp.push( elem );
+                                       }
+                               }
+
+                               return tmp;
+                       }
+                       return results;
+               };
+
+       // Class
+       Expr.find["CLASS"] = support.getElementsByClassName && function( className, context ) {
+               if ( typeof context.getElementsByClassName !== "undefined" && documentIsHTML ) {
+                       return context.getElementsByClassName( className );
+               }
+       };
+
+       /* QSA/matchesSelector
+       ---------------------------------------------------------------------- */
+
+       // QSA and matchesSelector support
+
+       // matchesSelector(:active) reports false when true (IE9/Opera 11.5)
+       rbuggyMatches = [];
+
+       // qSa(:focus) reports false when true (Chrome 21)
+       // We allow this because of a bug in IE8/9 that throws an error
+       // whenever `document.activeElement` is accessed on an iframe
+       // So, we allow :focus to pass through QSA all the time to avoid the IE error
+       // See https://bugs.jquery.com/ticket/13378
+       rbuggyQSA = [];
+
+       if ( (support.qsa = rnative.test( document.querySelectorAll )) ) {
+               // Build QSA regex
+               // Regex strategy adopted from Diego Perini
+               assert(function( el ) {
+                       // Select is set to empty string on purpose
+                       // This is to test IE's treatment of not explicitly
+                       // setting a boolean content attribute,
+                       // since its presence should be enough
+                       // https://bugs.jquery.com/ticket/12359
+                       docElem.appendChild( el ).innerHTML = "<a id='" + expando + "'></a>" +
+                               "<select id='" + expando + "-\r\\' msallowcapture=''>" +
+                               "<option selected=''></option></select>";
+
+                       // Support: IE8, Opera 11-12.16
+                       // Nothing should be selected when empty strings follow ^= or $= or *=
+                       // The test attribute must be unknown in Opera but "safe" for WinRT
+                       // https://msdn.microsoft.com/en-us/library/ie/hh465388.aspx#attribute_section
+                       if ( el.querySelectorAll("[msallowcapture^='']").length ) {
+                               rbuggyQSA.push( "[*^$]=" + whitespace + "*(?:''|\"\")" );
+                       }
+
+                       // Support: IE8
+                       // Boolean attributes and "value" are not treated correctly
+                       if ( !el.querySelectorAll("[selected]").length ) {
+                               rbuggyQSA.push( "\\[" + whitespace + "*(?:value|" + booleans + ")" );
+                       }
+
+                       // Support: Chrome<29, Android<4.4, Safari<7.0+, iOS<7.0+, PhantomJS<1.9.8+
+                       if ( !el.querySelectorAll( "[id~=" + expando + "-]" ).length ) {
+                               rbuggyQSA.push("~=");
+                       }
+
+                       // Webkit/Opera - :checked should return selected option elements
+                       // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked
+                       // IE8 throws error here and will not see later tests
+                       if ( !el.querySelectorAll(":checked").length ) {
+                               rbuggyQSA.push(":checked");
+                       }
+
+                       // Support: Safari 8+, iOS 8+
+                       // https://bugs.webkit.org/show_bug.cgi?id=136851
+                       // In-page `selector#id sibling-combinator selector` fails
+                       if ( !el.querySelectorAll( "a#" + expando + "+*" ).length ) {
+                               rbuggyQSA.push(".#.+[+~]");
+                       }
+               });
+
+               assert(function( el ) {
+                       el.innerHTML = "<a href='' disabled='disabled'></a>" +
+                               "<select disabled='disabled'><option/></select>";
+
+                       // Support: Windows 8 Native Apps
+                       // The type and name attributes are restricted during .innerHTML assignment
+                       var input = document.createElement("input");
+                       input.setAttribute( "type", "hidden" );
+                       el.appendChild( input ).setAttribute( "name", "D" );
+
+                       // Support: IE8
+                       // Enforce case-sensitivity of name attribute
+                       if ( el.querySelectorAll("[name=d]").length ) {
+                               rbuggyQSA.push( "name" + whitespace + "*[*^$|!~]?=" );
+                       }
+
+                       // FF 3.5 - :enabled/:disabled and hidden elements (hidden elements are still enabled)
+                       // IE8 throws error here and will not see later tests
+                       if ( el.querySelectorAll(":enabled").length !== 2 ) {
+                               rbuggyQSA.push( ":enabled", ":disabled" );
+                       }
+
+                       // Support: IE9-11+
+                       // IE's :disabled selector does not pick up the children of disabled fieldsets
+                       docElem.appendChild( el ).disabled = true;
+                       if ( el.querySelectorAll(":disabled").length !== 2 ) {
+                               rbuggyQSA.push( ":enabled", ":disabled" );
+                       }
+
+                       // Opera 10-11 does not throw on post-comma invalid pseudos
+                       el.querySelectorAll("*,:x");
+                       rbuggyQSA.push(",.*:");
+               });
+       }
+
+       if ( (support.matchesSelector = rnative.test( (matches = docElem.matches ||
+               docElem.webkitMatchesSelector ||
+               docElem.mozMatchesSelector ||
+               docElem.oMatchesSelector ||
+               docElem.msMatchesSelector) )) ) {
+
+               assert(function( el ) {
+                       // Check to see if it's possible to do matchesSelector
+                       // on a disconnected node (IE 9)
+                       support.disconnectedMatch = matches.call( el, "*" );
+
+                       // This should fail with an exception
+                       // Gecko does not error, returns false instead
+                       matches.call( el, "[s!='']:x" );
+                       rbuggyMatches.push( "!=", pseudos );
+               });
+       }
+
+       rbuggyQSA = rbuggyQSA.length && new RegExp( rbuggyQSA.join("|") );
+       rbuggyMatches = rbuggyMatches.length && new RegExp( rbuggyMatches.join("|") );
+
+       /* Contains
+       ---------------------------------------------------------------------- */
+       hasCompare = rnative.test( docElem.compareDocumentPosition );
+
+       // Element contains another
+       // Purposefully self-exclusive
+       // As in, an element does not contain itself
+       contains = hasCompare || rnative.test( docElem.contains ) ?
+               function( a, b ) {
+                       var adown = a.nodeType === 9 ? a.documentElement : a,
+                               bup = b && b.parentNode;
+                       return a === bup || !!( bup && bup.nodeType === 1 && (
+                               adown.contains ?
+                                       adown.contains( bup ) :
+                                       a.compareDocumentPosition && a.compareDocumentPosition( bup ) & 16
+                       ));
+               } :
+               function( a, b ) {
+                       if ( b ) {
+                               while ( (b = b.parentNode) ) {
+                                       if ( b === a ) {
+                                               return true;
+                                       }
+                               }
+                       }
+                       return false;
+               };
+
+       /* Sorting
+       ---------------------------------------------------------------------- */
+
+       // Document order sorting
+       sortOrder = hasCompare ?
+       function( a, b ) {
+
+               // Flag for duplicate removal
+               if ( a === b ) {
+                       hasDuplicate = true;
+                       return 0;
+               }
+
+               // Sort on method existence if only one input has compareDocumentPosition
+               var compare = !a.compareDocumentPosition - !b.compareDocumentPosition;
+               if ( compare ) {
+                       return compare;
+               }
+
+               // Calculate position if both inputs belong to the same document
+               compare = ( a.ownerDocument || a ) === ( b.ownerDocument || b ) ?
+                       a.compareDocumentPosition( b ) :
+
+                       // Otherwise we know they are disconnected
+                       1;
+
+               // Disconnected nodes
+               if ( compare & 1 ||
+                       (!support.sortDetached && b.compareDocumentPosition( a ) === compare) ) {
+
+                       // Choose the first element that is related to our preferred document
+                       if ( a === document || a.ownerDocument === preferredDoc && contains(preferredDoc, a) ) {
+                               return -1;
+                       }
+                       if ( b === document || b.ownerDocument === preferredDoc && contains(preferredDoc, b) ) {
+                               return 1;
+                       }
+
+                       // Maintain original order
+                       return sortInput ?
+                               ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) :
+                               0;
+               }
+
+               return compare & 4 ? -1 : 1;
+       } :
+       function( a, b ) {
+               // Exit early if the nodes are identical
+               if ( a === b ) {
+                       hasDuplicate = true;
+                       return 0;
+               }
+
+               var cur,
+                       i = 0,
+                       aup = a.parentNode,
+                       bup = b.parentNode,
+                       ap = [ a ],
+                       bp = [ b ];
+
+               // Parentless nodes are either documents or disconnected
+               if ( !aup || !bup ) {
+                       return a === document ? -1 :
+                               b === document ? 1 :
+                               aup ? -1 :
+                               bup ? 1 :
+                               sortInput ?
+                               ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) :
+                               0;
+
+               // If the nodes are siblings, we can do a quick check
+               } else if ( aup === bup ) {
+                       return siblingCheck( a, b );
+               }
+
+               // Otherwise we need full lists of their ancestors for comparison
+               cur = a;
+               while ( (cur = cur.parentNode) ) {
+                       ap.unshift( cur );
+               }
+               cur = b;
+               while ( (cur = cur.parentNode) ) {
+                       bp.unshift( cur );
+               }
+
+               // Walk down the tree looking for a discrepancy
+               while ( ap[i] === bp[i] ) {
+                       i++;
+               }
+
+               return i ?
+                       // Do a sibling check if the nodes have a common ancestor
+                       siblingCheck( ap[i], bp[i] ) :
+
+                       // Otherwise nodes in our document sort first
+                       ap[i] === preferredDoc ? -1 :
+                       bp[i] === preferredDoc ? 1 :
+                       0;
+       };
+
+       return document;
+};
+
+Sizzle.matches = function( expr, elements ) {
+       return Sizzle( expr, null, null, elements );
+};
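+
+// Usage sketch (illustrative only; `inputs` is a hypothetical element array): keeps only the
+// elements that match the expression.
+//
+//     var enabledInputs = Sizzle.matches( ":enabled", inputs );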
+
+Sizzle.matchesSelector = function( elem, expr ) {
+       // Set document vars if needed
+       if ( ( elem.ownerDocument || elem ) !== document ) {
+               setDocument( elem );
+       }
+
+       if ( support.matchesSelector && documentIsHTML &&
+               !nonnativeSelectorCache[ expr + " " ] &&
+               ( !rbuggyMatches || !rbuggyMatches.test( expr ) ) &&
+               ( !rbuggyQSA     || !rbuggyQSA.test( expr ) ) ) {
+
+               try {
+                       var ret = matches.call( elem, expr );
+
+                       // IE 9's matchesSelector returns false on disconnected nodes
+                       if ( ret || support.disconnectedMatch ||
+                                       // As well, disconnected nodes are said to be in a document
+                                       // fragment in IE 9
+                                       elem.document && elem.document.nodeType !== 11 ) {
+                               return ret;
+                       }
+               } catch (e) {
+                       nonnativeSelectorCache( expr, true );
+               }
+       }
+
+       return Sizzle( expr, document, null, [ elem ] ).length > 0;
+};
+
+Sizzle.contains = function( context, elem ) {
+       // Set document vars if needed
+       if ( ( context.ownerDocument || context ) !== document ) {
+               setDocument( context );
+       }
+       return contains( context, elem );
+};
+
+Sizzle.attr = function( elem, name ) {
+       // Set document vars if needed
+       if ( ( elem.ownerDocument || elem ) !== document ) {
+               setDocument( elem );
+       }
+
+       var fn = Expr.attrHandle[ name.toLowerCase() ],
+               // Don't get fooled by Object.prototype properties (jQuery #13807)
+               val = fn && hasOwn.call( Expr.attrHandle, name.toLowerCase() ) ?
+                       fn( elem, name, !documentIsHTML ) :
+                       undefined;
+
+       return val !== undefined ?
+               val :
+               support.attributes || !documentIsHTML ?
+                       elem.getAttribute( name ) :
+                       (val = elem.getAttributeNode(name)) && val.specified ?
+                               val.value :
+                               null;
+};
+
+Sizzle.escape = function( sel ) {
+       return (sel + "").replace( rcssescape, fcssescape );
+};
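+
+// Usage sketch (illustrative only): escapes a string for safe use inside a selector, per the
+// CSSOM serialization rules referenced above.
+//
+//     Sizzle.escape( "foo.bar" );   // "foo\\.bar"
+//     Sizzle.escape( "1st" );       // "\\31 st" (leading digit escaped as a code point)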
+
+Sizzle.error = function( msg ) {
+       throw new Error( "Syntax error, unrecognized expression: " + msg );
+};
+
+/**
+ * Document sorting and removing duplicates
+ * @param {ArrayLike} results
+ */
+Sizzle.uniqueSort = function( results ) {
+       var elem,
+               duplicates = [],
+               j = 0,
+               i = 0;
+
+       // Unless we *know* we can detect duplicates, assume their presence
+       hasDuplicate = !support.detectDuplicates;
+       sortInput = !support.sortStable && results.slice( 0 );
+       results.sort( sortOrder );
+
+       if ( hasDuplicate ) {
+               while ( (elem = results[i++]) ) {
+                       if ( elem === results[ i ] ) {
+                               j = duplicates.push( i );
+                       }
+               }
+               while ( j-- ) {
+                       results.splice( duplicates[ j ], 1 );
+               }
+       }
+
+       // Clear input after sorting to release objects
+       // See https://github.com/jquery/sizzle/pull/225
+       sortInput = null;
+
+       return results;
+};
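+
+// Usage sketch (illustrative only; `nodes` is a hypothetical element array): sorts in document
+// order and removes duplicates in place, returning the same array.
+//
+//     Sizzle.uniqueSort( nodes );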
+
+/**
+ * Utility function for retrieving the text value of an array of DOM nodes
+ * @param {Array|Element} elem
+ */
+getText = Sizzle.getText = function( elem ) {
+       var node,
+               ret = "",
+               i = 0,
+               nodeType = elem.nodeType;
+
+       if ( !nodeType ) {
+               // If no nodeType, this is expected to be an array
+               while ( (node = elem[i++]) ) {
+                       // Do not traverse comment nodes
+                       ret += getText( node );
+               }
+       } else if ( nodeType === 1 || nodeType === 9 || nodeType === 11 ) {
+               // Use textContent for elements
+               // innerText usage removed for consistency of new lines (jQuery #11153)
+               if ( typeof elem.textContent === "string" ) {
+                       return elem.textContent;
+               } else {
+                       // Traverse its children
+                       for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) {
+                               ret += getText( elem );
+                       }
+               }
+       } else if ( nodeType === 3 || nodeType === 4 ) {
+               return elem.nodeValue;
+       }
+       // Do not include comment or processing instruction nodes
+
+       return ret;
+};
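+
+// Usage sketch (illustrative only; `element`, `elemA`, and `elemB` are hypothetical nodes):
+//
+//     Sizzle.getText( element );             // text content of a single element
+//     Sizzle.getText( [ elemA, elemB ] );    // concatenation across an array of nodes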
+
+Expr = Sizzle.selectors = {
+
+       // Can be adjusted by the user
+       cacheLength: 50,
+
+       createPseudo: markFunction,
+
+       match: matchExpr,
+
+       attrHandle: {},
+
+       find: {},
+
+       relative: {
+               ">": { dir: "parentNode", first: true },
+               " ": { dir: "parentNode" },
+               "+": { dir: "previousSibling", first: true },
+               "~": { dir: "previousSibling" }
+       },
+
+       preFilter: {
+               "ATTR": function( match ) {
+                       match[1] = match[1].replace( runescape, funescape );
+
+                       // Move the given value to match[3] whether quoted or unquoted
+                       match[3] = ( match[3] || match[4] || match[5] || "" ).replace( runescape, funescape );
+
+                       if ( match[2] === "~=" ) {
+                               match[3] = " " + match[3] + " ";
+                       }
+
+                       return match.slice( 0, 4 );
+               },
+
+               "CHILD": function( match ) {
+                       /* matches from matchExpr["CHILD"]
+                               1 type (only|nth|...)
+                               2 what (child|of-type)
+                               3 argument (even|odd|\d*|\d*n([+-]\d+)?|...)
+                               4 xn-component of xn+y argument ([+-]?\d*n|)
+                               5 sign of xn-component
+                               6 x of xn-component
+                               7 sign of y-component
+                               8 y of y-component
+                       */
+                       match[1] = match[1].toLowerCase();
+
+                       if ( match[1].slice( 0, 3 ) === "nth" ) {
+                               // nth-* requires argument
+                               if ( !match[3] ) {
+                                       Sizzle.error( match[0] );
+                               }
+
+                               // numeric x and y parameters for Expr.filter.CHILD
+                               // remember that false/true cast respectively to 0/1
+                               match[4] = +( match[4] ? match[5] + (match[6] || 1) : 2 * ( match[3] === "even" || match[3] === "odd" ) );
+                               match[5] = +( ( match[7] + match[8] ) || match[3] === "odd" );
+
+                       // other types prohibit arguments
+                       } else if ( match[3] ) {
+                               Sizzle.error( match[0] );
+                       }
+
+                       return match;
+               },
+
+               "PSEUDO": function( match ) {
+                       var excess,
+                               unquoted = !match[6] && match[2];
+
+                       if ( matchExpr["CHILD"].test( match[0] ) ) {
+                               return null;
+                       }
+
+                       // Accept quoted arguments as-is
+                       if ( match[3] ) {
+                               match[2] = match[4] || match[5] || "";
+
+                       // Strip excess characters from unquoted arguments
+                       } else if ( unquoted && rpseudo.test( unquoted ) &&
+                               // Get excess from tokenize (recursively)
+                               (excess = tokenize( unquoted, true )) &&
+                               // advance to the next closing parenthesis
+                               (excess = unquoted.indexOf( ")", unquoted.length - excess ) - unquoted.length) ) {
+
+                               // excess is a negative index
+                               match[0] = match[0].slice( 0, excess );
+                               match[2] = unquoted.slice( 0, excess );
+                       }
+
+                       // Return only captures needed by the pseudo filter method (type and argument)
+                       return match.slice( 0, 3 );
+               }
+       },
+
+       filter: {
+
+               "TAG": function( nodeNameSelector ) {
+                       var nodeName = nodeNameSelector.replace( runescape, funescape ).toLowerCase();
+                       return nodeNameSelector === "*" ?
+                               function() { return true; } :
+                               function( elem ) {
+                                       return elem.nodeName && elem.nodeName.toLowerCase() === nodeName;
+                               };
+               },
+
+               "CLASS": function( className ) {
+                       var pattern = classCache[ className + " " ];
+
+                       return pattern ||
+                               (pattern = new RegExp( "(^|" + whitespace + ")" + className + "(" + whitespace + "|$)" )) &&
+                               classCache( className, function( elem ) {
+                                       return pattern.test( typeof elem.className === "string" && elem.className || typeof elem.getAttribute !== "undefined" && elem.getAttribute("class") || "" );
+                               });
+               },
+
+               "ATTR": function( name, operator, check ) {
+                       return function( elem ) {
+                               var result = Sizzle.attr( elem, name );
+
+                               if ( result == null ) {
+                                       return operator === "!=";
+                               }
+                               if ( !operator ) {
+                                       return true;
+                               }
+
+                               result += "";
+
+                               return operator === "=" ? result === check :
+                                       operator === "!=" ? result !== check :
+                                       operator === "^=" ? check && result.indexOf( check ) === 0 :
+                                       operator === "*=" ? check && result.indexOf( check ) > -1 :
+                                       operator === "$=" ? check && result.slice( -check.length ) === check :
+                                       operator === "~=" ? ( " " + result.replace( rwhitespace, " " ) + " " ).indexOf( check ) > -1 :
+                                       operator === "|=" ? result === check || result.slice( 0, check.length + 1 ) === check + "-" :
+                                       false;
+                       };
+               },
+
+               "CHILD": function( type, what, argument, first, last ) {
+                       var simple = type.slice( 0, 3 ) !== "nth",
+                               forward = type.slice( -4 ) !== "last",
+                               ofType = what === "of-type";
+
+                       return first === 1 && last === 0 ?
+
+                               // Shortcut for :nth-*(n)
+                               function( elem ) {
+                                       return !!elem.parentNode;
+                               } :
+
+                               function( elem, context, xml ) {
+                                       var cache, uniqueCache, outerCache, node, nodeIndex, start,
+                                               dir = simple !== forward ? "nextSibling" : "previousSibling",
+                                               parent = elem.parentNode,
+                                               name = ofType && elem.nodeName.toLowerCase(),
+                                               useCache = !xml && !ofType,
+                                               diff = false;
+
+                                       if ( parent ) {
+
+                                               // :(first|last|only)-(child|of-type)
+                                               if ( simple ) {
+                                                       while ( dir ) {
+                                                               node = elem;
+                                                               while ( (node = node[ dir ]) ) {
+                                                                       if ( ofType ?
+                                                                               node.nodeName.toLowerCase() === name :
+                                                                               node.nodeType === 1 ) {
+
+                                                                               return false;
+                                                                       }
+                                                               }
+                                                               // Reverse direction for :only-* (if we haven't yet done so)
+                                                               start = dir = type === "only" && !start && "nextSibling";
+                                                       }
+                                                       return true;
+                                               }
+
+                                               start = [ forward ? parent.firstChild : parent.lastChild ];
+
+                                               // non-xml :nth-child(...) stores cache data on `parent`
+                                               if ( forward && useCache ) {
+
+                                                       // Seek `elem` from a previously-cached index
+
+                                                       // ...in a gzip-friendly way
+                                                       node = parent;
+                                                       outerCache = node[ expando ] || (node[ expando ] = {});
+
+                                                       // Support: IE <9 only
+                                                       // Defend against cloned attroperties (jQuery gh-1709)
+                                                       uniqueCache = outerCache[ node.uniqueID ] ||
+                                                               (outerCache[ node.uniqueID ] = {});
+
+                                                       cache = uniqueCache[ type ] || [];
+                                                       nodeIndex = cache[ 0 ] === dirruns && cache[ 1 ];
+                                                       diff = nodeIndex && cache[ 2 ];
+                                                       node = nodeIndex && parent.childNodes[ nodeIndex ];
+
+                                                       while ( (node = ++nodeIndex && node && node[ dir ] ||
+
+                                                               // Fallback to seeking `elem` from the start
+                                                               (diff = nodeIndex = 0) || start.pop()) ) {
+
+                                                               // When found, cache indexes on `parent` and break
+                                                               if ( node.nodeType === 1 && ++diff && node === elem ) {
+                                                                       uniqueCache[ type ] = [ dirruns, nodeIndex, diff ];
+                                                                       break;
+                                                               }
+                                                       }
+
+                                               } else {
+                                                       // Use previously-cached element index if available
+                                                       if ( useCache ) {
+                                                               // ...in a gzip-friendly way
+                                                               node = elem;
+                                                               outerCache = node[ expando ] || (node[ expando ] = {});
+
+                                                               // Support: IE <9 only
+                                                               // Defend against cloned attroperties (jQuery gh-1709)
+                                                               uniqueCache = outerCache[ node.uniqueID ] ||
+                                                                       (outerCache[ node.uniqueID ] = {});
+
+                                                               cache = uniqueCache[ type ] || [];
+                                                               nodeIndex = cache[ 0 ] === dirruns && cache[ 1 ];
+                                                               diff = nodeIndex;
+                                                       }
+
+                                                       // xml :nth-child(...)
+                                                       // or :nth-last-child(...) or :nth(-last)?-of-type(...)
+                                                       if ( diff === false ) {
+                                                               // Use the same loop as above to seek `elem` from the start
+                                                               while ( (node = ++nodeIndex && node && node[ dir ] ||
+                                                                       (diff = nodeIndex = 0) || start.pop()) ) {
+
+                                                                       if ( ( ofType ?
+                                                                               node.nodeName.toLowerCase() === name :
+                                                                               node.nodeType === 1 ) &&
+                                                                               ++diff ) {
+
+                                                                               // Cache the index of each encountered element
+                                                                               if ( useCache ) {
+                                                                                       outerCache = node[ expando ] || (node[ expando ] = {});
+
+                                                                                       // Support: IE <9 only
+                                                                                       // Defend against cloned attroperties (jQuery gh-1709)
+                                                                                       uniqueCache = outerCache[ node.uniqueID ] ||
+                                                                                               (outerCache[ node.uniqueID ] = {});
+
+                                                                                       uniqueCache[ type ] = [ dirruns, diff ];
+                                                                               }
+
+                                                                               if ( node === elem ) {
+                                                                                       break;
+                                                                               }
+                                                                       }
+                                                               }
+                                                       }
+                                               }
+
+                                               // Incorporate the offset, then check against cycle size
+                                               diff -= last;
+                                               return diff === first || ( diff % first === 0 && diff / first >= 0 );
+                                       }
+                               };
+               },
+
+               "PSEUDO": function( pseudo, argument ) {
+                       // pseudo-class names are case-insensitive
+                       // http://www.w3.org/TR/selectors/#pseudo-classes
+                       // Prioritize by case sensitivity in case custom pseudos are added with uppercase letters
+                       // Remember that setFilters inherits from pseudos
+                       var args,
+                               fn = Expr.pseudos[ pseudo ] || Expr.setFilters[ pseudo.toLowerCase() ] ||
+                                       Sizzle.error( "unsupported pseudo: " + pseudo );
+
+                       // The user may use createPseudo to indicate that
+                       // arguments are needed to create the filter function
+                       // just as Sizzle does
+                       if ( fn[ expando ] ) {
+                               return fn( argument );
+                       }
+
+                       // But maintain support for old signatures
+                       if ( fn.length > 1 ) {
+                               args = [ pseudo, pseudo, "", argument ];
+                               return Expr.setFilters.hasOwnProperty( pseudo.toLowerCase() ) ?
+                                       markFunction(function( seed, matches ) {
+                                               var idx,
+                                                       matched = fn( seed, argument ),
+                                                       i = matched.length;
+                                               while ( i-- ) {
+                                                       idx = indexOf( seed, matched[i] );
+                                                       seed[ idx ] = !( matches[ idx ] = matched[i] );
+                                               }
+                                       }) :
+                                       function( elem ) {
+                                               return fn( elem, 0, args );
+                                       };
+                       }
+
+                       return fn;
+               }
+       },
+
+       pseudos: {
+               // Potentially complex pseudos
+               "not": markFunction(function( selector ) {
+                       // Trim the selector passed to compile
+                       // to avoid treating leading and trailing
+                       // spaces as combinators
+                       var input = [],
+                               results = [],
+                               matcher = compile( selector.replace( rtrim, "$1" ) );
+
+                       return matcher[ expando ] ?
+                               markFunction(function( seed, matches, context, xml ) {
+                                       var elem,
+                                               unmatched = matcher( seed, null, xml, [] ),
+                                               i = seed.length;
+
+                                       // Match elements unmatched by `matcher`
+                                       while ( i-- ) {
+                                               if ( (elem = unmatched[i]) ) {
+                                                       seed[i] = !(matches[i] = elem);
+                                               }
+                                       }
+                               }) :
+                               function( elem, context, xml ) {
+                                       input[0] = elem;
+                                       matcher( input, null, xml, results );
+                                       // Don't keep the element (issue #299)
+                                       input[0] = null;
+                                       return !results.pop();
+                               };
+               }),
+
+               "has": markFunction(function( selector ) {
+                       return function( elem ) {
+                               return Sizzle( selector, elem ).length > 0;
+                       };
+               }),
+
+               "contains": markFunction(function( text ) {
+                       text = text.replace( runescape, funescape );
+                       return function( elem ) {
+                               return ( elem.textContent || getText( elem ) ).indexOf( text ) > -1;
+                       };
+               }),
+
+               // "Whether an element is represented by a :lang() selector
+               // is based solely on the element's language value
+               // being equal to the identifier C,
+               // or beginning with the identifier C immediately followed by "-".
+               // The matching of C against the element's language value is performed case-insensitively.
+               // The identifier C does not have to be a valid language name."
+               // http://www.w3.org/TR/selectors/#lang-pseudo
+               "lang": markFunction( function( lang ) {
+                       // lang value must be a valid identifier
+                       if ( !ridentifier.test(lang || "") ) {
+                               Sizzle.error( "unsupported lang: " + lang );
+                       }
+                       lang = lang.replace( runescape, funescape ).toLowerCase();
+                       return function( elem ) {
+                               var elemLang;
+                               do {
+                                       if ( (elemLang = documentIsHTML ?
+                                               elem.lang :
+                                               elem.getAttribute("xml:lang") || elem.getAttribute("lang")) ) {
+
+                                               elemLang = elemLang.toLowerCase();
+                                               return elemLang === lang || elemLang.indexOf( lang + "-" ) === 0;
+                                       }
+                               } while ( (elem = elem.parentNode) && elem.nodeType === 1 );
+                               return false;
+                       };
+               }),
+
+               // Miscellaneous
+               "target": function( elem ) {
+                       var hash = window.location && window.location.hash;
+                       return hash && hash.slice( 1 ) === elem.id;
+               },
+
+               "root": function( elem ) {
+                       return elem === docElem;
+               },
+
+               "focus": function( elem ) {
+                       return elem === document.activeElement && (!document.hasFocus || document.hasFocus()) && !!(elem.type || elem.href || ~elem.tabIndex);
+               },
+
+               // Boolean properties
+               "enabled": createDisabledPseudo( false ),
+               "disabled": createDisabledPseudo( true ),
+
+               "checked": function( elem ) {
+                       // In CSS3, :checked should return both checked and selected elements
+                       // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked
+                       var nodeName = elem.nodeName.toLowerCase();
+                       return (nodeName === "input" && !!elem.checked) || (nodeName === "option" && !!elem.selected);
+               },
+
+               "selected": function( elem ) {
+                       // Accessing this property makes selected-by-default
+                       // options in Safari work properly
+                       if ( elem.parentNode ) {
+                               elem.parentNode.selectedIndex;
+                       }
+
+                       return elem.selected === true;
+               },
+
+               // Contents
+               "empty": function( elem ) {
+                       // http://www.w3.org/TR/selectors/#empty-pseudo
+                       // :empty is negated by element (1) or content nodes (text: 3; cdata: 4; entity ref: 5),
+                       //   but not by others (comment: 8; processing instruction: 7; etc.)
+                       // nodeType < 6 works because attributes (2) do not appear as children
+                       for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) {
+                               if ( elem.nodeType < 6 ) {
+                                       return false;
+                               }
+                       }
+                       return true;
+               },
+
+               "parent": function( elem ) {
+                       return !Expr.pseudos["empty"]( elem );
+               },
+
+               // Element/input types
+               "header": function( elem ) {
+                       return rheader.test( elem.nodeName );
+               },
+
+               "input": function( elem ) {
+                       return rinputs.test( elem.nodeName );
+               },
+
+               "button": function( elem ) {
+                       var name = elem.nodeName.toLowerCase();
+                       return name === "input" && elem.type === "button" || name === "button";
+               },
+
+               "text": function( elem ) {
+                       var attr;
+                       return elem.nodeName.toLowerCase() === "input" &&
+                               elem.type === "text" &&
+
+                               // Support: IE<8
+                               // New HTML5 attribute values (e.g., "search") appear with elem.type === "text"
+                               ( (attr = elem.getAttribute("type")) == null || attr.toLowerCase() === "text" );
+               },
+
+               // Position-in-collection
+               "first": createPositionalPseudo(function() {
+                       return [ 0 ];
+               }),
+
+               "last": createPositionalPseudo(function( matchIndexes, length ) {
+                       return [ length - 1 ];
+               }),
+
+               "eq": createPositionalPseudo(function( matchIndexes, length, argument ) {
+                       return [ argument < 0 ? argument + length : argument ];
+               }),
+
+               "even": createPositionalPseudo(function( matchIndexes, length ) {
+                       var i = 0;
+                       for ( ; i < length; i += 2 ) {
+                               matchIndexes.push( i );
+                       }
+                       return matchIndexes;
+               }),
+
+               "odd": createPositionalPseudo(function( matchIndexes, length ) {
+                       var i = 1;
+                       for ( ; i < length; i += 2 ) {
+                               matchIndexes.push( i );
+                       }
+                       return matchIndexes;
+               }),
+
+               "lt": createPositionalPseudo(function( matchIndexes, length, argument ) {
+                       var i = argument < 0 ?
+                               argument + length :
+                               argument > length ?
+                                       length :
+                                       argument;
+                       for ( ; --i >= 0; ) {
+                               matchIndexes.push( i );
+                       }
+                       return matchIndexes;
+               }),
+
+               "gt": createPositionalPseudo(function( matchIndexes, length, argument ) {
+                       var i = argument < 0 ? argument + length : argument;
+                       for ( ; ++i < length; ) {
+                               matchIndexes.push( i );
+                       }
+                       return matchIndexes;
+               })
+       }
+};
+
+Expr.pseudos["nth"] = Expr.pseudos["eq"];
+
+// Add button/input type pseudos
+for ( i in { radio: true, checkbox: true, file: true, password: true, image: true } ) {
+       Expr.pseudos[ i ] = createInputPseudo( i );
+}
+for ( i in { submit: true, reset: true } ) {
+       Expr.pseudos[ i ] = createButtonPseudo( i );
+}
+
+// Easy API for creating new setFilters
+function setFilters() {}
+setFilters.prototype = Expr.filters = Expr.pseudos;
+Expr.setFilters = new setFilters();
+
+tokenize = Sizzle.tokenize = function( selector, parseOnly ) {
+       var matched, match, tokens, type,
+               soFar, groups, preFilters,
+               cached = tokenCache[ selector + " " ];
+
+       if ( cached ) {
+               return parseOnly ? 0 : cached.slice( 0 );
+       }
+
+       soFar = selector;
+       groups = [];
+       preFilters = Expr.preFilter;
+
+       while ( soFar ) {
+
+               // Comma and first run
+               if ( !matched || (match = rcomma.exec( soFar )) ) {
+                       if ( match ) {
+                               // Don't consume trailing commas as valid
+                               soFar = soFar.slice( match[0].length ) || soFar;
+                       }
+                       groups.push( (tokens = []) );
+               }
+
+               matched = false;
+
+               // Combinators
+               if ( (match = rcombinators.exec( soFar )) ) {
+                       matched = match.shift();
+                       tokens.push({
+                               value: matched,
+                               // Cast descendant combinators to space
+                               type: match[0].replace( rtrim, " " )
+                       });
+                       soFar = soFar.slice( matched.length );
+               }
+
+               // Filters
+               for ( type in Expr.filter ) {
+                       if ( (match = matchExpr[ type ].exec( soFar )) && (!preFilters[ type ] ||
+                               (match = preFilters[ type ]( match ))) ) {
+                               matched = match.shift();
+                               tokens.push({
+                                       value: matched,
+                                       type: type,
+                                       matches: match
+                               });
+                               soFar = soFar.slice( matched.length );
+                       }
+               }
+
+               if ( !matched ) {
+                       break;
+               }
+       }
+
+       // Return the length of the invalid excess
+       // if we're just parsing
+       // Otherwise, throw an error or return tokens
+       return parseOnly ?
+               soFar.length :
+               soFar ?
+                       Sizzle.error( selector ) :
+                       // Cache the tokens
+                       tokenCache( selector, groups ).slice( 0 );
+};
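+
+// Illustrative sketch (not part of the upstream source) of what tokenize() produces.
+// For a selector such as "div > p.intro, span", the cached result is one token list per
+// comma-separated group; each token carries the matched text plus its type ("TAG",
+// "CLASS", a combinator type, etc.). The exact token fields shown here are read off the
+// code above and are only indicative:
+//
+//   var groups = Sizzle.tokenize( "div > p.intro, span" );
+//   // groups[ 0 ] -> [ { value: "div", type: "TAG", matches: [...] },
+//   //                  { value: " > ", type: ">" },
+//   //                  { value: "p", type: "TAG", matches: [...] },
+//   //                  { value: ".intro", type: "CLASS", matches: [...] } ]
+//   // groups[ 1 ] -> [ { value: "span", type: "TAG", matches: [...] } ]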
+
+function toSelector( tokens ) {
+       var i = 0,
+               len = tokens.length,
+               selector = "";
+       for ( ; i < len; i++ ) {
+               selector += tokens[i].value;
+       }
+       return selector;
+}
+
+function addCombinator( matcher, combinator, base ) {
+       var dir = combinator.dir,
+               skip = combinator.next,
+               key = skip || dir,
+               checkNonElements = base && key === "parentNode",
+               doneName = done++;
+
+       return combinator.first ?
+               // Check against closest ancestor/preceding element
+               function( elem, context, xml ) {
+                       while ( (elem = elem[ dir ]) ) {
+                               if ( elem.nodeType === 1 || checkNonElements ) {
+                                       return matcher( elem, context, xml );
+                               }
+                       }
+                       return false;
+               } :
+
+               // Check against all ancestor/preceding elements
+               function( elem, context, xml ) {
+                       var oldCache, uniqueCache, outerCache,
+                               newCache = [ dirruns, doneName ];
+
+                       // We can't set arbitrary data on XML nodes, so they don't benefit from combinator caching
+                       if ( xml ) {
+                               while ( (elem = elem[ dir ]) ) {
+                                       if ( elem.nodeType === 1 || checkNonElements ) {
+                                               if ( matcher( elem, context, xml ) ) {
+                                                       return true;
+                                               }
+                                       }
+                               }
+                       } else {
+                               while ( (elem = elem[ dir ]) ) {
+                                       if ( elem.nodeType === 1 || checkNonElements ) {
+                                               outerCache = elem[ expando ] || (elem[ expando ] = {});
+
+                                               // Support: IE <9 only
+                                               // Defend against cloned attroperties (jQuery gh-1709)
+                                               uniqueCache = outerCache[ elem.uniqueID ] || (outerCache[ elem.uniqueID ] = {});
+
+                                               if ( skip && skip === elem.nodeName.toLowerCase() ) {
+                                                       elem = elem[ dir ] || elem;
+                                               } else if ( (oldCache = uniqueCache[ key ]) &&
+                                                       oldCache[ 0 ] === dirruns && oldCache[ 1 ] === doneName ) {
+
+                                                       // Assign to newCache so results back-propagate to previous elements
+                                                       return (newCache[ 2 ] = oldCache[ 2 ]);
+                                               } else {
+							// Reuse newCache so results back-propagate to previous elements
+                                                       uniqueCache[ key ] = newCache;
+
+                                                       // A match means we're done; a fail means we have to keep checking
+                                                       if ( (newCache[ 2 ] = matcher( elem, context, xml )) ) {
+                                                               return true;
+                                                       }
+                                               }
+                                       }
+                               }
+                       }
+                       return false;
+               };
+}
+
+function elementMatcher( matchers ) {
+       return matchers.length > 1 ?
+               function( elem, context, xml ) {
+                       var i = matchers.length;
+                       while ( i-- ) {
+                               if ( !matchers[i]( elem, context, xml ) ) {
+                                       return false;
+                               }
+                       }
+                       return true;
+               } :
+               matchers[0];
+}
+
+function multipleContexts( selector, contexts, results ) {
+       var i = 0,
+               len = contexts.length;
+       for ( ; i < len; i++ ) {
+               Sizzle( selector, contexts[i], results );
+       }
+       return results;
+}
+
+function condense( unmatched, map, filter, context, xml ) {
+       var elem,
+               newUnmatched = [],
+               i = 0,
+               len = unmatched.length,
+               mapped = map != null;
+
+       for ( ; i < len; i++ ) {
+               if ( (elem = unmatched[i]) ) {
+                       if ( !filter || filter( elem, context, xml ) ) {
+                               newUnmatched.push( elem );
+                               if ( mapped ) {
+                                       map.push( i );
+                               }
+                       }
+               }
+       }
+
+       return newUnmatched;
+}
+
+function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) {
+       if ( postFilter && !postFilter[ expando ] ) {
+               postFilter = setMatcher( postFilter );
+       }
+       if ( postFinder && !postFinder[ expando ] ) {
+               postFinder = setMatcher( postFinder, postSelector );
+       }
+       return markFunction(function( seed, results, context, xml ) {
+               var temp, i, elem,
+                       preMap = [],
+                       postMap = [],
+                       preexisting = results.length,
+
+                       // Get initial elements from seed or context
+                       elems = seed || multipleContexts( selector || "*", context.nodeType ? [ context ] : context, [] ),
+
+                       // Prefilter to get matcher input, preserving a map for seed-results synchronization
+                       matcherIn = preFilter && ( seed || !selector ) ?
+                               condense( elems, preMap, preFilter, context, xml ) :
+                               elems,
+
+                       matcherOut = matcher ?
+                               // If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results,
+                               postFinder || ( seed ? preFilter : preexisting || postFilter ) ?
+
+                                       // ...intermediate processing is necessary
+                                       [] :
+
+                                       // ...otherwise use results directly
+                                       results :
+                               matcherIn;
+
+               // Find primary matches
+               if ( matcher ) {
+                       matcher( matcherIn, matcherOut, context, xml );
+               }
+
+               // Apply postFilter
+               if ( postFilter ) {
+                       temp = condense( matcherOut, postMap );
+                       postFilter( temp, [], context, xml );
+
+                       // Un-match failing elements by moving them back to matcherIn
+                       i = temp.length;
+                       while ( i-- ) {
+                               if ( (elem = temp[i]) ) {
+                                       matcherOut[ postMap[i] ] = !(matcherIn[ postMap[i] ] = elem);
+                               }
+                       }
+               }
+
+               if ( seed ) {
+                       if ( postFinder || preFilter ) {
+                               if ( postFinder ) {
+                                       // Get the final matcherOut by condensing this intermediate into postFinder contexts
+                                       temp = [];
+                                       i = matcherOut.length;
+                                       while ( i-- ) {
+                                               if ( (elem = matcherOut[i]) ) {
+                                                       // Restore matcherIn since elem is not yet a final match
+                                                       temp.push( (matcherIn[i] = elem) );
+                                               }
+                                       }
+                                       postFinder( null, (matcherOut = []), temp, xml );
+                               }
+
+                               // Move matched elements from seed to results to keep them synchronized
+                               i = matcherOut.length;
+                               while ( i-- ) {
+                                       if ( (elem = matcherOut[i]) &&
+                                               (temp = postFinder ? indexOf( seed, elem ) : preMap[i]) > -1 ) {
+
+                                               seed[temp] = !(results[temp] = elem);
+                                       }
+                               }
+                       }
+
+               // Add elements to results, through postFinder if defined
+               } else {
+                       matcherOut = condense(
+                               matcherOut === results ?
+                                       matcherOut.splice( preexisting, matcherOut.length ) :
+                                       matcherOut
+                       );
+                       if ( postFinder ) {
+                               postFinder( null, results, matcherOut, xml );
+                       } else {
+                               push.apply( results, matcherOut );
+                       }
+               }
+       });
+}
+
+function matcherFromTokens( tokens ) {
+       var checkContext, matcher, j,
+               len = tokens.length,
+               leadingRelative = Expr.relative[ tokens[0].type ],
+               implicitRelative = leadingRelative || Expr.relative[" "],
+               i = leadingRelative ? 1 : 0,
+
+               // The foundational matcher ensures that elements are reachable from top-level context(s)
+               matchContext = addCombinator( function( elem ) {
+                       return elem === checkContext;
+               }, implicitRelative, true ),
+               matchAnyContext = addCombinator( function( elem ) {
+                       return indexOf( checkContext, elem ) > -1;
+               }, implicitRelative, true ),
+               matchers = [ function( elem, context, xml ) {
+                       var ret = ( !leadingRelative && ( xml || context !== outermostContext ) ) || (
+                               (checkContext = context).nodeType ?
+                                       matchContext( elem, context, xml ) :
+                                       matchAnyContext( elem, context, xml ) );
+                       // Avoid hanging onto element (issue #299)
+                       checkContext = null;
+                       return ret;
+               } ];
+
+       for ( ; i < len; i++ ) {
+               if ( (matcher = Expr.relative[ tokens[i].type ]) ) {
+                       matchers = [ addCombinator(elementMatcher( matchers ), matcher) ];
+               } else {
+                       matcher = Expr.filter[ tokens[i].type ].apply( null, tokens[i].matches );
+
+                       // Return special upon seeing a positional matcher
+                       if ( matcher[ expando ] ) {
+                               // Find the next relative operator (if any) for proper handling
+                               j = ++i;
+                               for ( ; j < len; j++ ) {
+                                       if ( Expr.relative[ tokens[j].type ] ) {
+                                               break;
+                                       }
+                               }
+                               return setMatcher(
+                                       i > 1 && elementMatcher( matchers ),
+                                       i > 1 && toSelector(
+                                               // If the preceding token was a descendant combinator, insert an implicit any-element `*`
+                                               tokens.slice( 0, i - 1 ).concat({ value: tokens[ i - 2 ].type === " " ? "*" : "" })
+                                       ).replace( rtrim, "$1" ),
+                                       matcher,
+                                       i < j && matcherFromTokens( tokens.slice( i, j ) ),
+                                       j < len && matcherFromTokens( (tokens = tokens.slice( j )) ),
+                                       j < len && toSelector( tokens )
+                               );
+                       }
+                       matchers.push( matcher );
+               }
+       }
+
+       return elementMatcher( matchers );
+}
+
+function matcherFromGroupMatchers( elementMatchers, setMatchers ) {
+       var bySet = setMatchers.length > 0,
+               byElement = elementMatchers.length > 0,
+               superMatcher = function( seed, context, xml, results, outermost ) {
+                       var elem, j, matcher,
+                               matchedCount = 0,
+                               i = "0",
+                               unmatched = seed && [],
+                               setMatched = [],
+                               contextBackup = outermostContext,
+                               // We must always have either seed elements or outermost context
+                               elems = seed || byElement && Expr.find["TAG"]( "*", outermost ),
+                               // Use integer dirruns iff this is the outermost matcher
+                               dirrunsUnique = (dirruns += contextBackup == null ? 1 : Math.random() || 0.1),
+                               len = elems.length;
+
+                       if ( outermost ) {
+                               outermostContext = context === document || context || outermost;
+                       }
+
+                       // Add elements passing elementMatchers directly to results
+                       // Support: IE<9, Safari
+                       // Tolerate NodeList properties (IE: "length"; Safari: <number>) matching elements by id
+                       for ( ; i !== len && (elem = elems[i]) != null; i++ ) {
+                               if ( byElement && elem ) {
+                                       j = 0;
+                                       if ( !context && elem.ownerDocument !== document ) {
+                                               setDocument( elem );
+                                               xml = !documentIsHTML;
+                                       }
+                                       while ( (matcher = elementMatchers[j++]) ) {
+                                               if ( matcher( elem, context || document, xml) ) {
+                                                       results.push( elem );
+                                                       break;
+                                               }
+                                       }
+                                       if ( outermost ) {
+                                               dirruns = dirrunsUnique;
+                                       }
+                               }
+
+                               // Track unmatched elements for set filters
+                               if ( bySet ) {
+                                       // They will have gone through all possible matchers
+                                       if ( (elem = !matcher && elem) ) {
+                                               matchedCount--;
+                                       }
+
+                                       // Lengthen the array for every element, matched or not
+                                       if ( seed ) {
+                                               unmatched.push( elem );
+                                       }
+                               }
+                       }
+
+                       // `i` is now the count of elements visited above, and adding it to `matchedCount`
+                       // makes the latter nonnegative.
+                       matchedCount += i;
+
+                       // Apply set filters to unmatched elements
+                       // NOTE: This can be skipped if there are no unmatched elements (i.e., `matchedCount`
+                       // equals `i`), unless we didn't visit _any_ elements in the above loop because we have
+                       // no element matchers and no seed.
+                       // Incrementing an initially-string "0" `i` allows `i` to remain a string only in that
+                       // case, which will result in a "00" `matchedCount` that differs from `i` but is also
+                       // numerically zero.
+                       if ( bySet && i !== matchedCount ) {
+                               j = 0;
+                               while ( (matcher = setMatchers[j++]) ) {
+                                       matcher( unmatched, setMatched, context, xml );
+                               }
+
+                               if ( seed ) {
+                                       // Reintegrate element matches to eliminate the need for sorting
+                                       if ( matchedCount > 0 ) {
+                                               while ( i-- ) {
+                                                       if ( !(unmatched[i] || setMatched[i]) ) {
+                                                               setMatched[i] = pop.call( results );
+                                                       }
+                                               }
+                                       }
+
+                                       // Discard index placeholder values to get only actual matches
+                                       setMatched = condense( setMatched );
+                               }
+
+                               // Add matches to results
+                               push.apply( results, setMatched );
+
+				// Without a seed, set matches that follow multiple successful matchers require sorting
+                               if ( outermost && !seed && setMatched.length > 0 &&
+                                       ( matchedCount + setMatchers.length ) > 1 ) {
+
+                                       Sizzle.uniqueSort( results );
+                               }
+                       }
+
+                       // Override manipulation of globals by nested matchers
+                       if ( outermost ) {
+                               dirruns = dirrunsUnique;
+                               outermostContext = contextBackup;
+                       }
+
+                       return unmatched;
+               };
+
+       return bySet ?
+               markFunction( superMatcher ) :
+               superMatcher;
+}
+
+compile = Sizzle.compile = function( selector, match /* Internal Use Only */ ) {
+       var i,
+               setMatchers = [],
+               elementMatchers = [],
+               cached = compilerCache[ selector + " " ];
+
+       if ( !cached ) {
+               // Generate a function of recursive functions that can be used to check each element
+               if ( !match ) {
+                       match = tokenize( selector );
+               }
+               i = match.length;
+               while ( i-- ) {
+                       cached = matcherFromTokens( match[i] );
+                       if ( cached[ expando ] ) {
+                               setMatchers.push( cached );
+                       } else {
+                               elementMatchers.push( cached );
+                       }
+               }
+
+               // Cache the compiled function
+               cached = compilerCache( selector, matcherFromGroupMatchers( elementMatchers, setMatchers ) );
+
+               // Save selector and tokenization
+               cached.selector = selector;
+       }
+       return cached;
+};
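+
+// Illustrative usage (a sketch, not part of the upstream source): compile() memoizes one
+// "super matcher" function per selector string, and the compiled function can be passed
+// back to Sizzle.select() so later runs skip re-tokenizing:
+//
+//   var matcher = Sizzle.compile( "ul.menu > li.active" );
+//   Sizzle.select( matcher, document, [] );   // select() reads matcher.selector internally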
+
+/**
+ * A low-level selection function that works with Sizzle's compiled
+ *  selector functions
+ * @param {String|Function} selector A selector or a pre-compiled
+ *  selector function built with Sizzle.compile
+ * @param {Element} context
+ * @param {Array} [results]
+ * @param {Array} [seed] A set of elements to match against
+ */
+select = Sizzle.select = function( selector, context, results, seed ) {
+       var i, tokens, token, type, find,
+               compiled = typeof selector === "function" && selector,
+               match = !seed && tokenize( (selector = compiled.selector || selector) );
+
+       results = results || [];
+
+       // Try to minimize operations if there is only one selector in the list and no seed
+       // (the latter of which guarantees us context)
+       if ( match.length === 1 ) {
+
+               // Reduce context if the leading compound selector is an ID
+               tokens = match[0] = match[0].slice( 0 );
+               if ( tokens.length > 2 && (token = tokens[0]).type === "ID" &&
+                               context.nodeType === 9 && documentIsHTML && Expr.relative[ tokens[1].type ] ) {
+
+                       context = ( Expr.find["ID"]( token.matches[0].replace(runescape, funescape), context ) || [] )[0];
+                       if ( !context ) {
+                               return results;
+
+                       // Precompiled matchers will still verify ancestry, so step up a level
+                       } else if ( compiled ) {
+                               context = context.parentNode;
+                       }
+
+                       selector = selector.slice( tokens.shift().value.length );
+               }
+
+               // Fetch a seed set for right-to-left matching
+               i = matchExpr["needsContext"].test( selector ) ? 0 : tokens.length;
+               while ( i-- ) {
+                       token = tokens[i];
+
+                       // Abort if we hit a combinator
+                       if ( Expr.relative[ (type = token.type) ] ) {
+                               break;
+                       }
+                       if ( (find = Expr.find[ type ]) ) {
+                               // Search, expanding context for leading sibling combinators
+                               if ( (seed = find(
+                                       token.matches[0].replace( runescape, funescape ),
+                                       rsibling.test( tokens[0].type ) && testContext( context.parentNode ) || context
+                               )) ) {
+
+                                       // If seed is empty or no tokens remain, we can return early
+                                       tokens.splice( i, 1 );
+                                       selector = seed.length && toSelector( tokens );
+                                       if ( !selector ) {
+                                               push.apply( results, seed );
+                                               return results;
+                                       }
+
+                                       break;
+                               }
+                       }
+               }
+       }
+
+       // Compile and execute a filtering function if one is not provided
+       // Provide `match` to avoid retokenization if we modified the selector above
+       ( compiled || compile( selector, match ) )(
+               seed,
+               context,
+               !documentIsHTML,
+               results,
+               !context || rsibling.test( selector ) && testContext( context.parentNode ) || context
+       );
+       return results;
+};
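+
+// Illustrative calls (a sketch, not part of the upstream source), matching the signature
+// documented above -- `results` and `seed` are plain arrays; `someElem` below is a
+// placeholder element, not a name defined in this file:
+//
+//   var results = [];
+//   Sizzle.select( "div > p", document, results );     // fills `results` with matches
+//   Sizzle.select( "p", document, [], [ someElem ] );   // filters the provided seed set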
+
+// One-time assignments
+
+// Sort stability
+support.sortStable = expando.split("").sort( sortOrder ).join("") === expando;
+
+// Support: Chrome 14-35+
+// Always assume duplicates if they aren't passed to the comparison function
+support.detectDuplicates = !!hasDuplicate;
+
+// Initialize against the default document
+setDocument();
+
+// Support: Webkit<537.32 - Safari 6.0.3/Chrome 25 (fixed in Chrome 27)
+// Detached nodes confoundingly follow *each other*
+support.sortDetached = assert(function( el ) {
+       // Should return 1, but returns 4 (following)
+       return el.compareDocumentPosition( document.createElement("fieldset") ) & 1;
+});
+
+// Support: IE<8
+// Prevent attribute/property "interpolation"
+// https://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx
+if ( !assert(function( el ) {
+       el.innerHTML = "<a href='#'></a>";
+	return el.firstChild.getAttribute("href") === "#";
+}) ) {
+       addHandle( "type|href|height|width", function( elem, name, isXML ) {
+               if ( !isXML ) {
+                       return elem.getAttribute( name, name.toLowerCase() === "type" ? 1 : 2 );
+               }
+       });
+}
+
+// Support: IE<9
+// Use defaultValue in place of getAttribute("value")
+if ( !support.attributes || !assert(function( el ) {
+       el.innerHTML = "<input/>";
+       el.firstChild.setAttribute( "value", "" );
+       return el.firstChild.getAttribute( "value" ) === "";
+}) ) {
+       addHandle( "value", function( elem, name, isXML ) {
+               if ( !isXML && elem.nodeName.toLowerCase() === "input" ) {
+                       return elem.defaultValue;
+               }
+       });
+}
+
+// Support: IE<9
+// Use getAttributeNode to fetch booleans when getAttribute lies
+if ( !assert(function( el ) {
+       return el.getAttribute("disabled") == null;
+}) ) {
+       addHandle( booleans, function( elem, name, isXML ) {
+               var val;
+               if ( !isXML ) {
+                       return elem[ name ] === true ? name.toLowerCase() :
+                                       (val = elem.getAttributeNode( name )) && val.specified ?
+                                       val.value :
+                               null;
+               }
+       });
+}
+
+return Sizzle;
+
+})( window );
+
+
+
+jQuery.find = Sizzle;
+jQuery.expr = Sizzle.selectors;
+
+// Deprecated
+jQuery.expr[ ":" ] = jQuery.expr.pseudos;
+jQuery.uniqueSort = jQuery.unique = Sizzle.uniqueSort;
+jQuery.text = Sizzle.getText;
+jQuery.isXMLDoc = Sizzle.isXML;
+jQuery.contains = Sizzle.contains;
+jQuery.escapeSelector = Sizzle.escape;
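+
+// A hedged usage sketch (not part of the upstream source). The assignments above are
+// jQuery's public surface for Sizzle; for example, custom pseudo-selectors can be
+// registered through jQuery.expr.pseudos, and jQuery.contains / jQuery.uniqueSort are
+// the aliases for Sizzle.contains / Sizzle.uniqueSort:
+//
+//   jQuery.expr.pseudos.external = function( elem ) {
+//       return !!elem.hostname && elem.hostname !== window.location.hostname;
+//   };
+//   // jQuery( "a:external" ) would then match off-site links.
+//   // jQuery.contains( container, node );   jQuery.uniqueSort( arrayOfElements );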
+
+
+
+
+var dir = function( elem, dir, until ) {
+       var matched = [],
+               truncate = until !== undefined;
+
+       while ( ( elem = elem[ dir ] ) && elem.nodeType !== 9 ) {
+               if ( elem.nodeType === 1 ) {
+                       if ( truncate && jQuery( elem ).is( until ) ) {
+                               break;
+                       }
+                       matched.push( elem );
+               }
+       }
+       return matched;
+};
+
+
+var siblings = function( n, elem ) {
+       var matched = [];
+
+       for ( ; n; n = n.nextSibling ) {
+               if ( n.nodeType === 1 && n !== elem ) {
+                       matched.push( n );
+               }
+       }
+
+       return matched;
+};
+
+
+var rneedsContext = jQuery.expr.match.needsContext;
+
+
+
+function nodeName( elem, name ) {
+	return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase();
+}
+// Matches a single tag with no attributes, e.g. "<p></p>" or "<p/>" (captures the tag name)
+var rsingleTag = ( /^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i );
+
+
+
+// Implement the identical functionality for filter and not
+function winnow( elements, qualifier, not ) {
+       if ( isFunction( qualifier ) ) {
+               return jQuery.grep( elements, function( elem, i ) {
+                       return !!qualifier.call( elem, i, elem ) !== not;
+               } );
+       }
+
+       // Single element
+       if ( qualifier.nodeType ) {
+               return jQuery.grep( elements, function( elem ) {
+                       return ( elem === qualifier ) !== not;
+               } );
+       }
+
+       // Arraylike of elements (jQuery, arguments, Array)
+       if ( typeof qualifier !== "string" ) {
+               return jQuery.grep( elements, function( elem ) {
+                       return ( indexOf.call( qualifier, elem ) > -1 ) !== not;
+               } );
+       }
+
+       // Filtered directly for both simple and complex selectors
+       return jQuery.filter( qualifier, elements, not );
+}
+
+jQuery.filter = function( expr, elems, not ) {
+       var elem = elems[ 0 ];
+
+       if ( not ) {
+               expr = ":not(" + expr + ")";
+       }
+
+       if ( elems.length === 1 && elem.nodeType === 1 ) {
+               return jQuery.find.matchesSelector( elem, expr ) ? [ elem ] : [];
+       }
+
+       return jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) {
+               return elem.nodeType === 1;
+       } ) );
+};
+
+jQuery.fn.extend( {
+       find: function( selector ) {
+               var i, ret,
+                       len = this.length,
+                       self = this;
+
+               if ( typeof selector !== "string" ) {
+                       return this.pushStack( jQuery( selector ).filter( function() {
+                               for ( i = 0; i < len; i++ ) {
+                                       if ( jQuery.contains( self[ i ], this ) ) {
+                                               return true;
+                                       }
+                               }
+                       } ) );
+               }
+
+               ret = this.pushStack( [] );
+
+               for ( i = 0; i < len; i++ ) {
+                       jQuery.find( selector, self[ i ], ret );
+               }
+
+               return len > 1 ? jQuery.uniqueSort( ret ) : ret;
+       },
+       filter: function( selector ) {
+               return this.pushStack( winnow( this, selector || [], false ) );
+       },
+       not: function( selector ) {
+               return this.pushStack( winnow( this, selector || [], true ) );
+       },
+       is: function( selector ) {
+               return !!winnow(
+                       this,
+
+                       // If this is a positional/relative selector, check membership in the returned set
+                       // so $("p:first").is("p:last") won't return true for a doc with two "p".
+                       typeof selector === "string" && rneedsContext.test( selector ) ?
+                               jQuery( selector ) :
+                               selector || [],
+                       false
+               ).length;
+       }
+} );
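+
+// Illustrative usage of the traversal methods defined above (a sketch, not part of the
+// upstream source):
+//
+//   jQuery( "ul" ).find( "li.active" );                     // descendants matching a selector
+//   jQuery( "li" ).filter( ":even" ).not( ".disabled" );    // winnow by selector
+//   jQuery( "#nav" ).is( function( i, elem ) {              // winnow by function
+//       return elem.children.length > 0;
+//   } );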
+
+
+// Initialize a jQuery object
+
+
+// A central reference to the root jQuery(document)
+var rootjQuery,
+
+       // A simple way to check for HTML strings
+       // Prioritize #id over <tag> to avoid XSS via location.hash (#9521)
+       // Strict HTML recognition (#11290: must start with <)
+       // Shortcut simple #id case for speed
+       rquickExpr = /^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/,
+
+       init = jQuery.fn.init = function( selector, context, root ) {
+               var match, elem;
+
+               // HANDLE: $(""), $(null), $(undefined), $(false)
+               if ( !selector ) {
+                       return this;
+               }
+
+               // Method init() accepts an alternate rootjQuery
+               // so migrate can support jQuery.sub (gh-2101)
+               root = root || rootjQuery;
+
+               // Handle HTML strings
+               if ( typeof selector === "string" ) {
+                       if ( selector[ 0 ] === "<" &&
+                               selector[ selector.length - 1 ] === ">" &&
+                               selector.length >= 3 ) {
+
+                               // Assume that strings that start and end with <> are HTML and skip the regex check
+                               match = [ null, selector, null ];
+
+                       } else {
+                               match = rquickExpr.exec( selector );
+                       }
+
+                       // Match html or make sure no context is specified for #id
+                       if ( match && ( match[ 1 ] || !context ) ) {
+
+                               // HANDLE: $(html) -> $(array)
+                               if ( match[ 1 ] ) {
+                                       context = context instanceof jQuery ? context[ 0 ] : context;
+
+                                       // Option to run scripts is true for back-compat
+                                       // Intentionally let the error be thrown if parseHTML is not present
+                                       jQuery.merge( this, jQuery.parseHTML(
+                                               match[ 1 ],
+                                               context && context.nodeType ? context.ownerDocument || context : document,
+                                               true
+                                       ) );
+
+                                       // HANDLE: $(html, props)
+                                       if ( rsingleTag.test( match[ 1 ] ) && jQuery.isPlainObject( context ) ) {
+                                               for ( match in context ) {
+
+                                                       // Properties of context are called as methods if possible
+                                                       if ( isFunction( this[ match ] ) ) {
+                                                               this[ match ]( context[ match ] );
+
+                                                       // ...and otherwise set as attributes
+                                                       } else {
+                                                               this.attr( match, context[ match ] );
+                                                       }
+                                               }
+                                       }
+
+                                       return this;
+
+                               // HANDLE: $(#id)
+                               } else {
+                                       elem = document.getElementById( match[ 2 ] );
+
+                                       if ( elem ) {
+
+                                               // Inject the element directly into the jQuery object
+                                               this[ 0 ] = elem;
+                                               this.length = 1;
+                                       }
+                                       return this;
+                               }
+
+                       // HANDLE: $(expr, $(...))
+                       } else if ( !context || context.jquery ) {
+                               return ( context || root ).find( selector );
+
+                       // HANDLE: $(expr, context)
+			// (which is just equivalent to: $(context).find(expr))
+                       } else {
+                               return this.constructor( context ).find( selector );
+                       }
+
+               // HANDLE: $(DOMElement)
+               } else if ( selector.nodeType ) {
+                       this[ 0 ] = selector;
+                       this.length = 1;
+                       return this;
+
+               // HANDLE: $(function)
+               // Shortcut for document ready
+               } else if ( isFunction( selector ) ) {
+                       return root.ready !== undefined ?
+                               root.ready( selector ) :
+
+                               // Execute immediately if ready is not present
+                               selector( jQuery );
+               }
+
+               return jQuery.makeArray( selector, this );
+       };
+
+// Give the init function the jQuery prototype for later instantiation
+init.prototype = jQuery.fn;
+
+// Initialize central reference
+rootjQuery = jQuery( document );
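+
+// A sketch of the input forms handled by init() above (not part of the upstream source;
+// `someContainer` is a placeholder element or selector used only for illustration):
+//
+//   jQuery( "#main" );                                    // fast path: getElementById
+//   jQuery( "<p>hi</p>" );                                // HTML string -> parseHTML
+//   jQuery( "<p/>", { "class": "note", text: "hi" } );    // $(html, props)
+//   jQuery( ".item", someContainer );                     // same as $(someContainer).find(".item")
+//   jQuery( function() { /* runs on DOM ready */ } );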
+
+
+var rparentsprev = /^(?:parents|prev(?:Until|All))/,
+
+       // Methods guaranteed to produce a unique set when starting from a unique set
+       guaranteedUnique = {
+               children: true,
+               contents: true,
+               next: true,
+               prev: true
+       };
+
+jQuery.fn.extend( {
+       has: function( target ) {
+               var targets = jQuery( target, this ),
+                       l = targets.length;
+
+               return this.filter( function() {
+                       var i = 0;
+                       for ( ; i < l; i++ ) {
+                               if ( jQuery.contains( this, targets[ i ] ) ) {
+                                       return true;
+                               }
+                       }
+               } );
+       },
+
+       closest: function( selectors, context ) {
+               var cur,
+                       i = 0,
+                       l = this.length,
+                       matched = [],
+                       targets = typeof selectors !== "string" && jQuery( selectors );
+
+               // Positional selectors never match, since there's no _selection_ context
+               if ( !rneedsContext.test( selectors ) ) {
+                       for ( ; i < l; i++ ) {
+                               for ( cur = this[ i ]; cur && cur !== context; cur = cur.parentNode ) {
+
+                                       // Always skip document fragments
+                                       if ( cur.nodeType < 11 && ( targets ?
+                                               targets.index( cur ) > -1 :
+
+                                               // Don't pass non-elements to Sizzle
+                                               cur.nodeType === 1 &&
+                                                       jQuery.find.matchesSelector( cur, selectors ) ) ) {
+
+                                               matched.push( cur );
+                                               break;
+                                       }
+                               }
+                       }
+               }
+
+               return this.pushStack( matched.length > 1 ? jQuery.uniqueSort( matched ) : matched );
+       },
+
+       // Determine the position of an element within the set
+       index: function( elem ) {
+
+               // No argument, return index in parent
+               if ( !elem ) {
+                       return ( this[ 0 ] && this[ 0 ].parentNode ) ? this.first().prevAll().length : -1;
+               }
+
+               // Index in selector
+               if ( typeof elem === "string" ) {
+                       return indexOf.call( jQuery( elem ), this[ 0 ] );
+               }
+
+               // Locate the position of the desired element
+               return indexOf.call( this,
+
+                       // If it receives a jQuery object, the first element is used
+                       elem.jquery ? elem[ 0 ] : elem
+               );
+       },
+
+       add: function( selector, context ) {
+               return this.pushStack(
+                       jQuery.uniqueSort(
+                               jQuery.merge( this.get(), jQuery( selector, context ) )
+                       )
+               );
+       },
+
+       addBack: function( selector ) {
+               return this.add( selector == null ?
+                       this.prevObject : this.prevObject.filter( selector )
+               );
+       }
+} );
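+
+// Illustrative usage of the set methods defined above (a sketch, not part of the
+// upstream source):
+//
+//   jQuery( "td" ).closest( "table" );        // nearest matching ancestor per element
+//   jQuery( "li.active" ).index();            // index among element siblings within its parent
+//   jQuery( "p" ).find( "em" ).addBack();     // matched <em>s plus the original <p>s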
+
+function sibling( cur, dir ) {
+       while ( ( cur = cur[ dir ] ) && cur.nodeType !== 1 ) {}
+       return cur;
+}
+
+jQuery.each( {
+       parent: function( elem ) {
+               var parent = elem.parentNode;
+               return parent && parent.nodeType !== 11 ? parent : null;
+       },
+       parents: function( elem ) {
+               return dir( elem, "parentNode" );
+       },
+       parentsUntil: function( elem, i, until ) {
+               return dir( elem, "parentNode", until );
+       },
+       next: function( elem ) {
+               return sibling( elem, "nextSibling" );
+       },
+       prev: function( elem ) {
+               return sibling( elem, "previousSibling" );
+       },
+       nextAll: function( elem ) {
+               return dir( elem, "nextSibling" );
+       },
+       prevAll: function( elem ) {
+               return dir( elem, "previousSibling" );
+       },
+       nextUntil: function( elem, i, until ) {
+               return dir( elem, "nextSibling", until );
+       },
+       prevUntil: function( elem, i, until ) {
+               return dir( elem, "previousSibling", until );
+       },
+       siblings: function( elem ) {
+               return siblings( ( elem.parentNode || {} ).firstChild, elem );
+       },
+       children: function( elem ) {
+               return siblings( elem.firstChild );
+       },
+       contents: function( elem ) {
+               if ( typeof elem.contentDocument !== "undefined" ) {
+                       return elem.contentDocument;
+               }
+
+               // Support: IE 9 - 11 only, iOS 7 only, Android Browser <=4.3 only
+               // Treat the template element as a regular one in browsers that
+               // don't support it.
+               if ( nodeName( elem, "template" ) ) {
+                       elem = elem.content || elem;
+               }
+
+               return jQuery.merge( [], elem.childNodes );
+       }
+}, function( name, fn ) {
+       jQuery.fn[ name ] = function( until, selector ) {
+               var matched = jQuery.map( this, fn, until );
+
+               if ( name.slice( -5 ) !== "Until" ) {
+                       selector = until;
+               }
+
+               if ( selector && typeof selector === "string" ) {
+                       matched = jQuery.filter( selector, matched );
+               }
+
+               if ( this.length > 1 ) {
+
+                       // Remove duplicates
+                       if ( !guaranteedUnique[ name ] ) {
+                               jQuery.uniqueSort( matched );
+                       }
+
+                       // Reverse order for parents* and prev-derivatives
+                       if ( rparentsprev.test( name ) ) {
+                               matched.reverse();
+                       }
+               }
+
+               return this.pushStack( matched );
+       };
+} );
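+
+// The loop above generates the standard traversal methods; an illustrative sample of the
+// resulting API (a sketch, not part of the upstream source):
+//
+//   jQuery( "span" ).parents( "div" );          // <div> ancestors only
+//   jQuery( "h2" ).nextUntil( "h2", "p" );      // following <p> siblings up to the next <h2>
+//   jQuery( "iframe" ).contents();              // the frame's contentDocument
+//   jQuery( "li" ).siblings( ".selected" );     // matching siblings of each <li>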
+// Matches runs of characters that are not HTML whitespace (space, tab, CR, LF, FF)
+var rnothtmlwhite = ( /[^\x20\t\r\n\f]+/g );
+
+
+
+// Convert String-formatted options into Object-formatted ones
+function createOptions( options ) {
+       var object = {};
+       jQuery.each( options.match( rnothtmlwhite ) || [], function( _, flag ) {
+               object[ flag ] = true;
+       } );
+       return object;
+}
+
+/*
+ * Create a callback list using the following parameters:
+ *
+ *     options: an optional list of space-separated options that will change how
+ *                     the callback list behaves or a more traditional option object
+ *
+ * By default a callback list will act like an event callback list and can be
+ * "fired" multiple times.
+ *
+ * Possible options:
+ *
+ *     once:                   will ensure the callback list can only be fired once (like a Deferred)
+ *
+ *     memory:                 will keep track of previous values and will call any callback added
+ *                                     after the list has been fired right away with the latest "memorized"
+ *                                     values (like a Deferred)
+ *
+ *     unique:                 will ensure a callback can only be added once (no duplicate in the list)
+ *
+ *     stopOnFalse:    stop calling the remaining callbacks as soon as one returns false
+ *
+ */
+jQuery.Callbacks = function( options ) {
+
+       // Convert options from String-formatted to Object-formatted if needed
+       // (we check in cache first)
+       options = typeof options === "string" ?
+               createOptions( options ) :
+               jQuery.extend( {}, options );
+
+       var // Flag to know if list is currently firing
+               firing,
+
+               // Last fire value for non-forgettable lists
+               memory,
+
+               // Flag to know if list was already fired
+               fired,
+
+               // Flag to prevent firing
+               locked,
+
+               // Actual callback list
+               list = [],
+
+               // Queue of execution data for repeatable lists
+               queue = [],
+
+               // Index of currently firing callback (modified by add/remove as needed)
+               firingIndex = -1,
+
+               // Fire callbacks
+               fire = function() {
+
+                       // Enforce single-firing
+                       locked = locked || options.once;
+
+                       // Execute callbacks for all pending executions,
+                       // respecting firingIndex overrides and runtime changes
+                       fired = firing = true;
+                       for ( ; queue.length; firingIndex = -1 ) {
+                               memory = queue.shift();
+                               while ( ++firingIndex < list.length ) {
+
+                                       // Run callback and check for early termination
+                                       if ( list[ firingIndex ].apply( memory[ 0 ], memory[ 1 ] ) === false &&
+                                               options.stopOnFalse ) {
+
+                                               // Jump to end and forget the data so .add doesn't re-fire
+                                               firingIndex = list.length;
+                                               memory = false;
+                                       }
+                               }
+                       }
+
+                       // Forget the data if we're done with it
+                       if ( !options.memory ) {
+                               memory = false;
+                       }
+
+                       firing = false;
+
+                       // Clean up if we're done firing for good
+                       if ( locked ) {
+
+                               // Keep an empty list if we have data for future add calls
+                               if ( memory ) {
+                                       list = [];
+
+                               // Otherwise, this object is spent
+                               } else {
+                                       list = "";
+                               }
+                       }
+               },
+
+               // Actual Callbacks object
+               self = {
+
+                       // Add a callback or a collection of callbacks to the list
+                       add: function() {
+                               if ( list ) {
+
+                                       // If we have memory from a past run, we should fire after adding
+                                       if ( memory && !firing ) {
+                                               firingIndex = list.length - 1;
+                                               queue.push( memory );
+                                       }
+
+                                       ( function add( args ) {
+                                               jQuery.each( args, function( _, arg ) {
+                                                       if ( isFunction( arg ) ) {
+                                                               if ( !options.unique || !self.has( arg ) ) {
+                                                                       list.push( arg );
+                                                               }
+                                                       } else if ( arg && arg.length && toType( arg ) !== "string" ) {
+
+                                                               // Inspect recursively
+                                                               add( arg );
+                                                       }
+                                               } );
+                                       } )( arguments );
+
+                                       if ( memory && !firing ) {
+                                               fire();
+                                       }
+                               }
+                               return this;
+                       },
+
+                       // Remove a callback from the list
+                       remove: function() {
+                               jQuery.each( arguments, function( _, arg ) {
+                                       var index;
+                                       while ( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) {
+                                               list.splice( index, 1 );
+
+                                               // Handle firing indexes
+                                               if ( index <= firingIndex ) {
+                                                       firingIndex--;
+                                               }
+                                       }
+                               } );
+                               return this;
+                       },
+
+                       // Check if a given callback is in the list.
+                       // If no argument is given, return whether or not the list has callbacks attached.
+                       has: function( fn ) {
+                               return fn ?
+                                       jQuery.inArray( fn, list ) > -1 :
+                                       list.length > 0;
+                       },
+
+                       // Remove all callbacks from the list
+                       empty: function() {
+                               if ( list ) {
+                                       list = [];
+                               }
+                               return this;
+                       },
+
+                       // Disable .fire and .add
+                       // Abort any current/pending executions
+                       // Clear all callbacks and values
+                       disable: function() {
+                               locked = queue = [];
+                               list = memory = "";
+                               return this;
+                       },
+                       disabled: function() {
+                               return !list;
+                       },
+
+                       // Disable .fire
+                       // Also disable .add unless we have memory (since it would have no effect)
+                       // Abort any pending executions
+                       lock: function() {
+                               locked = queue = [];
+                               if ( !memory && !firing ) {
+                                       list = memory = "";
+                               }
+                               return this;
+                       },
+                       locked: function() {
+                               return !!locked;
+                       },
+
+                       // Call all callbacks with the given context and arguments
+                       fireWith: function( context, args ) {
+                               if ( !locked ) {
+                                       args = args || [];
+                                       args = [ context, args.slice ? args.slice() : args ];
+                                       queue.push( args );
+                                       if ( !firing ) {
+                                               fire();
+                                       }
+                               }
+                               return this;
+                       },
+
+                       // Call all the callbacks with the given arguments
+                       fire: function() {
+                               self.fireWith( this, arguments );
+                               return this;
+                       },
+
+                       // To know if the callbacks have already been called at least once
+                       fired: function() {
+                               return !!fired;
+                       }
+               };
+
+       return self;
+};
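+
+// Illustrative sketch of typical usage (example only, not used by the library itself):
+// a "once memory" list fires a single time, remembers the arguments, and replays
+// them for callbacks added after the fact.
+//
+//     var callbacks = jQuery.Callbacks( "once memory" );
+//     callbacks.add( function( msg ) { console.log( "first: " + msg ); } );
+//     callbacks.fire( "ready" );                          // logs "first: ready"
+//     callbacks.add( function( msg ) { console.log( "late: " + msg ); } );
+//                                                         // logs "late: ready" immediately
+//     callbacks.fire( "again" );                          // ignored: "once" has locked the list
+//     callbacks.fired();                                  // true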
+
+
+function Identity( v ) {
+       return v;
+}
+function Thrower( ex ) {
+       throw ex;
+}
+
+function adoptValue( value, resolve, reject, noValue ) {
+       var method;
+
+       try {
+
+               // Check for promise aspect first to privilege synchronous behavior
+               if ( value && isFunction( ( method = value.promise ) ) ) {
+                       method.call( value ).done( resolve ).fail( reject );
+
+               // Other thenables
+               } else if ( value && isFunction( ( method = value.then ) ) ) {
+                       method.call( value, resolve, reject );
+
+               // Other non-thenables
+               } else {
+
+                       // Control `resolve` arguments by letting Array#slice cast boolean `noValue` to integer:
+                       // * false: [ value ].slice( 0 ) => resolve( value )
+                       // * true: [ value ].slice( 1 ) => resolve()
+                       resolve.apply( undefined, [ value ].slice( noValue ) );
+               }
+
+       // For Promises/A+, convert exceptions into rejections
+       // Since jQuery.when doesn't unwrap thenables, we can skip the extra checks appearing in
+       // Deferred#then to conditionally suppress rejection.
+       } catch ( value ) {
+
+               // Support: Android 4.0 only
+               // Strict mode functions invoked without .call/.apply get global-object context
+               reject.apply( undefined, [ value ] );
+       }
+}
+
+jQuery.extend( {
+
+       Deferred: function( func ) {
+               var tuples = [
+
+                               // action, add listener, callbacks,
+                               // ... .then handlers, argument index, [final state]
+                               [ "notify", "progress", jQuery.Callbacks( "memory" ),
+                                       jQuery.Callbacks( "memory" ), 2 ],
+                               [ "resolve", "done", jQuery.Callbacks( "once memory" ),
+                                       jQuery.Callbacks( "once memory" ), 0, "resolved" ],
+                               [ "reject", "fail", jQuery.Callbacks( "once memory" ),
+                                       jQuery.Callbacks( "once memory" ), 1, "rejected" ]
+                       ],
+                       state = "pending",
+                       promise = {
+                               state: function() {
+                                       return state;
+                               },
+                               always: function() {
+                                       deferred.done( arguments ).fail( arguments );
+                                       return this;
+                               },
+                               "catch": function( fn ) {
+                                       return promise.then( null, fn );
+                               },
+
+                               // Keep pipe for back-compat
+                               pipe: function( /* fnDone, fnFail, fnProgress */ ) {
+                                       var fns = arguments;
+
+                                       return jQuery.Deferred( function( newDefer ) {
+                                               jQuery.each( tuples, function( i, tuple ) {
+
+                                                       // Map tuples (progress, done, fail) to arguments (done, fail, progress)
+                                                       var fn = isFunction( fns[ tuple[ 4 ] ] ) && fns[ tuple[ 4 ] ];
+
+                                                       // deferred.progress(function() { bind to newDefer or newDefer.notify })
+                                                       // deferred.done(function() { bind to newDefer or newDefer.resolve })
+                                                       // deferred.fail(function() { bind to newDefer or newDefer.reject })
+                                                       deferred[ tuple[ 1 ] ]( function() {
+                                                               var returned = fn && fn.apply( this, arguments );
+                                                               if ( returned && isFunction( returned.promise ) ) {
+                                                                       returned.promise()
+                                                                               .progress( newDefer.notify )
+                                                                               .done( newDefer.resolve )
+                                                                               .fail( newDefer.reject );
+                                                               } else {
+                                                                       newDefer[ tuple[ 0 ] + "With" ](
+                                                                               this,
+                                                                               fn ? [ returned ] : arguments
+                                                                       );
+                                                               }
+                                                       } );
+                                               } );
+                                               fns = null;
+                                       } ).promise();
+                               },
+                               then: function( onFulfilled, onRejected, onProgress ) {
+                                       var maxDepth = 0;
+                                       function resolve( depth, deferred, handler, special ) {
+                                               return function() {
+                                                       var that = this,
+                                                               args = arguments,
+                                                               mightThrow = function() {
+                                                                       var returned, then;
+
+                                                                       // Support: Promises/A+ section 2.3.3.3.3
+                                                                       // https://promisesaplus.com/#point-59
+                                                                       // Ignore double-resolution attempts
+                                                                       if ( depth < maxDepth ) {
+                                                                               return;
+                                                                       }
+
+                                                                       returned = handler.apply( that, args );
+
+                                                                       // Support: Promises/A+ section 2.3.1
+                                                                       // https://promisesaplus.com/#point-48
+                                                                       if ( returned === deferred.promise() ) {
+                                                                               throw new TypeError( "Thenable self-resolution" );
+                                                                       }
+
+                                                                       // Support: Promises/A+ sections 2.3.3.1, 3.5
+                                                                       // https://promisesaplus.com/#point-54
+                                                                       // https://promisesaplus.com/#point-75
+                                                                       // Retrieve `then` only once
+                                                                       then = returned &&
+
+                                                                               // Support: Promises/A+ section 2.3.4
+                                                                               // https://promisesaplus.com/#point-64
+                                                                               // Only check objects and functions for thenability
+                                                                               ( typeof returned === "object" ||
+                                                                                       typeof returned === "function" ) &&
+                                                                               returned.then;
+
+                                                                       // Handle a returned thenable
+                                                                       if ( isFunction( then ) ) {
+
+                                                                               // Special processors (notify) just wait for resolution
+                                                                               if ( special ) {
+                                                                                       then.call(
+                                                                                               returned,
+                                                                                               resolve( maxDepth, deferred, Identity, special ),
+                                                                                               resolve( maxDepth, deferred, Thrower, special )
+                                                                                       );
+
+                                                                               // Normal processors (resolve) also hook into progress
+                                                                               } else {
+
+                                                                                       // ...and disregard older resolution values
+                                                                                       maxDepth++;
+
+                                                                                       then.call(
+                                                                                               returned,
+                                                                                               resolve( maxDepth, deferred, Identity, special ),
+                                                                                               resolve( maxDepth, deferred, Thrower, special ),
+                                                                                               resolve( maxDepth, deferred, Identity,
+                                                                                                       deferred.notifyWith )
+                                                                                       );
+                                                                               }
+
+                                                                       // Handle all other returned values
+                                                                       } else {
+
+                                                                               // Only substitute handlers pass on context
+                                                                               // and multiple values (non-spec behavior)
+                                                                               if ( handler !== Identity ) {
+                                                                                       that = undefined;
+                                                                                       args = [ returned ];
+                                                                               }
+
+                                                                               // Process the value(s)
+                                                                               // Default process is resolve
+                                                                               ( special || deferred.resolveWith )( that, args );
+                                                                       }
+                                                               },
+
+                                                               // Only normal processors (resolve) catch and reject exceptions
+                                                               process = special ?
+                                                                       mightThrow :
+                                                                       function() {
+                                                                               try {
+                                                                                       mightThrow();
+                                                                               } catch ( e ) {
+
+                                                                                       if ( jQuery.Deferred.exceptionHook ) {
+                                                                                               jQuery.Deferred.exceptionHook( e,
+                                                                                                       process.stackTrace );
+                                                                                       }
+
+                                                                                       // Support: Promises/A+ section 2.3.3.3.4.1
+                                                                                       // https://promisesaplus.com/#point-61
+                                                                                       // Ignore post-resolution exceptions
+                                                                                       if ( depth + 1 >= maxDepth ) {
+
+                                                                                               // Only substitute handlers pass on context
+                                                                                               // and multiple values (non-spec behavior)
+                                                                                               if ( handler !== Thrower ) {
+                                                                                                       that = undefined;
+                                                                                                       args = [ e ];
+                                                                                               }
+
+                                                                                               deferred.rejectWith( that, args );
+                                                                                       }
+                                                                               }
+                                                                       };
+
+                                                       // Support: Promises/A+ section 2.3.3.3.1
+                                                       // https://promisesaplus.com/#point-57
+                                                       // Re-resolve promises immediately to dodge false rejection from
+                                                       // subsequent errors
+                                                       if ( depth ) {
+                                                               process();
+                                                       } else {
+
+                                                               // Call an optional hook to record the stack, in case of exception
+                                                               // since it's otherwise lost when execution goes async
+                                                               if ( jQuery.Deferred.getStackHook ) {
+                                                                       process.stackTrace = jQuery.Deferred.getStackHook();
+                                                               }
+                                                               window.setTimeout( process );
+                                                       }
+                                               };
+                                       }
+
+                                       return jQuery.Deferred( function( newDefer ) {
+
+                                               // progress_handlers.add( ... )
+                                               tuples[ 0 ][ 3 ].add(
+                                                       resolve(
+                                                               0,
+                                                               newDefer,
+                                                               isFunction( onProgress ) ?
+                                                                       onProgress :
+                                                                       Identity,
+                                                               newDefer.notifyWith
+                                                       )
+                                               );
+
+                                               // fulfilled_handlers.add( ... )
+                                               tuples[ 1 ][ 3 ].add(
+                                                       resolve(
+                                                               0,
+                                                               newDefer,
+                                                               isFunction( onFulfilled ) ?
+                                                                       onFulfilled :
+                                                                       Identity
+                                                       )
+                                               );
+
+                                               // rejected_handlers.add( ... )
+                                               tuples[ 2 ][ 3 ].add(
+                                                       resolve(
+                                                               0,
+                                                               newDefer,
+                                                               isFunction( onRejected ) ?
+                                                                       onRejected :
+                                                                       Thrower
+                                                       )
+                                               );
+                                       } ).promise();
+                               },
+
+                               // Get a promise for this deferred
+                               // If obj is provided, the promise aspect is added to the object
+                               promise: function( obj ) {
+                                       return obj != null ? jQuery.extend( obj, promise ) : promise;
+                               }
+                       },
+                       deferred = {};
+
+               // Add list-specific methods
+               jQuery.each( tuples, function( i, tuple ) {
+                       var list = tuple[ 2 ],
+                               stateString = tuple[ 5 ];
+
+                       // promise.progress = list.add
+                       // promise.done = list.add
+                       // promise.fail = list.add
+                       promise[ tuple[ 1 ] ] = list.add;
+
+                       // Handle state
+                       if ( stateString ) {
+                               list.add(
+                                       function() {
+
+                                               // state = "resolved" (i.e., fulfilled)
+                                               // state = "rejected"
+                                               state = stateString;
+                                       },
+
+                                       // rejected_callbacks.disable
+                                       // fulfilled_callbacks.disable
+                                       tuples[ 3 - i ][ 2 ].disable,
+
+                                       // rejected_handlers.disable
+                                       // fulfilled_handlers.disable
+                                       tuples[ 3 - i ][ 3 ].disable,
+
+                                       // progress_callbacks.lock
+                                       tuples[ 0 ][ 2 ].lock,
+
+                                       // progress_handlers.lock
+                                       tuples[ 0 ][ 3 ].lock
+                               );
+                       }
+
+                       // progress_handlers.fire
+                       // fulfilled_handlers.fire
+                       // rejected_handlers.fire
+                       list.add( tuple[ 3 ].fire );
+
+                       // deferred.notify = function() { deferred.notifyWith(...) }
+                       // deferred.resolve = function() { deferred.resolveWith(...) }
+                       // deferred.reject = function() { deferred.rejectWith(...) }
+                       deferred[ tuple[ 0 ] ] = function() {
+                               deferred[ tuple[ 0 ] + "With" ]( this === deferred ? undefined : this, arguments );
+                               return this;
+                       };
+
+                       // deferred.notifyWith = list.fireWith
+                       // deferred.resolveWith = list.fireWith
+                       // deferred.rejectWith = list.fireWith
+                       deferred[ tuple[ 0 ] + "With" ] = list.fireWith;
+               } );
+
+               // Make the deferred a promise
+               promise.promise( deferred );
+
+               // Call given func if any
+               if ( func ) {
+                       func.call( deferred, deferred );
+               }
+
+               // All done!
+               return deferred;
+       },
+
+       // Deferred helper
+       when: function( singleValue ) {
+               var
+
+                       // count of uncompleted subordinates
+                       remaining = arguments.length,
+
+                       // count of unprocessed arguments
+                       i = remaining,
+
+                       // subordinate fulfillment data
+                       resolveContexts = Array( i ),
+                       resolveValues = slice.call( arguments ),
+
+                       // the master Deferred
+                       master = jQuery.Deferred(),
+
+                       // subordinate callback factory
+                       updateFunc = function( i ) {
+                               return function( value ) {
+                                       resolveContexts[ i ] = this;
+                                       resolveValues[ i ] = arguments.length > 1 ? slice.call( arguments ) : value;
+                                       if ( !( --remaining ) ) {
+                                               master.resolveWith( resolveContexts, resolveValues );
+                                       }
+                               };
+                       };
+
+               // Single- and empty arguments are adopted like Promise.resolve
+               if ( remaining <= 1 ) {
+                       adoptValue( singleValue, master.done( updateFunc( i ) ).resolve, master.reject,
+                               !remaining );
+
+                       // Use .then() to unwrap secondary thenables (cf. gh-3000)
+                       if ( master.state() === "pending" ||
+                               isFunction( resolveValues[ i ] && resolveValues[ i ].then ) ) {
+
+                               return master.then();
+                       }
+               }
+
+               // Multiple arguments are aggregated like Promise.all array elements
+               while ( i-- ) {
+                       adoptValue( resolveValues[ i ], updateFunc( i ), master.reject );
+               }
+
+               return master.promise();
+       }
+} );
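+
+// Illustrative sketch of typical usage (example only, not used by the library itself):
+// a Deferred exposes resolve/reject/notify on the deferred object and a read-only
+// promise for consumers; jQuery.when aggregates several of them.
+//
+//     var loading = jQuery.Deferred();
+//     loading.promise()
+//         .progress( function( pct ) { console.log( pct + "%" ); } )
+//         .done( function( result ) { console.log( "done:", result ); } )
+//         .fail( function( err ) { console.log( "failed:", err ); } );
+//     loading.notify( 50 );
+//     loading.resolve( "payload" );
+//
+//     jQuery.when( loading, jQuery.Deferred().resolve( 2 ) )
+//         .done( function( a, b ) { /* a === "payload", b === 2 */ } );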
+
+
+// These usually indicate a programmer mistake during development,
+// warn about them ASAP rather than swallowing them by default.
+var rerrorNames = /^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/;
+
+jQuery.Deferred.exceptionHook = function( error, stack ) {
+
+       // Support: IE 8 - 9 only
+       // Console exists when dev tools are open, which can happen at any time
+       if ( window.console && window.console.warn && error && rerrorNames.test( error.name ) ) {
+               window.console.warn( "jQuery.Deferred exception: " + error.message, error.stack, stack );
+       }
+};
+
+
+
+
+jQuery.readyException = function( error ) {
+       window.setTimeout( function() {
+               throw error;
+       } );
+};
+
+
+
+
+// The deferred used on DOM ready
+var readyList = jQuery.Deferred();
+
+jQuery.fn.ready = function( fn ) {
+
+       readyList
+               .then( fn )
+
+               // Wrap jQuery.readyException in a function so that the lookup
+               // happens at the time of error handling instead of callback
+               // registration.
+               .catch( function( error ) {
+                       jQuery.readyException( error );
+               } );
+
+       return this;
+};
+
+jQuery.extend( {
+
+       // Is the DOM ready to be used? Set to true once it occurs.
+       isReady: false,
+
+       // A counter to track how many items to wait for before
+       // the ready event fires. See #6781
+       readyWait: 1,
+
+       // Handle when the DOM is ready
+       ready: function( wait ) {
+
+               // Abort if there are pending holds or we're already ready
+               if ( wait === true ? --jQuery.readyWait : jQuery.isReady ) {
+                       return;
+               }
+
+               // Remember that the DOM is ready
+               jQuery.isReady = true;
+
+               // If a normal DOM Ready event fired, decrement, and wait if need be
+               if ( wait !== true && --jQuery.readyWait > 0 ) {
+                       return;
+               }
+
+               // If there are functions bound, execute them
+               readyList.resolveWith( document, [ jQuery ] );
+       }
+} );
+
+jQuery.ready.then = readyList.then;
+
+// The ready event handler and self cleanup method
+function completed() {
+       document.removeEventListener( "DOMContentLoaded", completed );
+       window.removeEventListener( "load", completed );
+       jQuery.ready();
+}
+
+// Catch cases where $(document).ready() is called
+// after the browser event has already occurred.
+// Support: IE <=9 - 10 only
+// Older IE sometimes signals "interactive" too soon
+if ( document.readyState === "complete" ||
+       ( document.readyState !== "loading" && !document.documentElement.doScroll ) ) {
+
+       // Handle it asynchronously to allow scripts the opportunity to delay ready
+       window.setTimeout( jQuery.ready );
+
+} else {
+
+       // Use the handy event callback
+       document.addEventListener( "DOMContentLoaded", completed );
+
+       // A fallback to window.onload, that will always work
+       window.addEventListener( "load", completed );
+}
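+
+// Illustrative sketch (example only): handlers passed to jQuery( fn ) or
+// jQuery( document ).ready( fn ) are routed through the readyList Deferred above,
+// so they run once the DOM is ready even if registered afterwards, and any
+// exception they throw is re-thrown asynchronously via jQuery.readyException
+// rather than being swallowed.
+//
+//     jQuery( function( $ ) {
+//         $( "body" ).addClass( "dom-ready" );
+//     } );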
+
+
+
+
+// Multifunctional method to get and set values of a collection
+// The value(s) can optionally be executed if they are functions
+var access = function( elems, fn, key, value, chainable, emptyGet, raw ) {
+       var i = 0,
+               len = elems.length,
+               bulk = key == null;
+
+       // Sets many values
+       if ( toType( key ) === "object" ) {
+               chainable = true;
+               for ( i in key ) {
+                       access( elems, fn, i, key[ i ], true, emptyGet, raw );
+               }
+
+       // Sets one value
+       } else if ( value !== undefined ) {
+               chainable = true;
+
+               if ( !isFunction( value ) ) {
+                       raw = true;
+               }
+
+               if ( bulk ) {
+
+                       // Bulk operations run against the entire set
+                       if ( raw ) {
+                               fn.call( elems, value );
+                               fn = null;
+
+                       // ...except when executing function values
+                       } else {
+                               bulk = fn;
+                               fn = function( elem, key, value ) {
+                                       return bulk.call( jQuery( elem ), value );
+                               };
+                       }
+               }
+
+               if ( fn ) {
+                       for ( ; i < len; i++ ) {
+                               fn(
+                                       elems[ i ], key, raw ?
+                                       value :
+                                       value.call( elems[ i ], i, fn( elems[ i ], key ) )
+                               );
+                       }
+               }
+       }
+
+       if ( chainable ) {
+               return elems;
+       }
+
+       // Gets
+       if ( bulk ) {
+               return fn.call( elems );
+       }
+
+       return len ? fn( elems[ 0 ], key ) : emptyGet;
+};
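+
+// Illustrative sketch (example only): access() is the shared getter/setter
+// multiplexer behind methods such as .css, .attr and .data -- one key reads from
+// the first element, key+value (or an object of keys) sets on every element and
+// keeps the chain.
+//
+//     jQuery( "div" ).css( "color" );                      // get from this[ 0 ]
+//     jQuery( "div" ).css( "color", "red" );               // set on all, returns the set
+//     jQuery( "div" ).css( { color: "red", opacity: 1 } ); // bulk set via the object branch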
+
+
+// Matches dashed string for camelizing
+var rmsPrefix = /^-ms-/,
+       rdashAlpha = /-([a-z])/g;
+
+// Used by camelCase as callback to replace()
+function fcamelCase( all, letter ) {
+       return letter.toUpperCase();
+}
+
+// Convert dashed to camelCase; used by the css and data modules
+// Support: IE <=9 - 11, Edge 12 - 15
+// Microsoft forgot to hump their vendor prefix (#9572)
+function camelCase( string ) {
+       return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase );
+}
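+
+// For illustration (example only): dashed CSS/data names become camelCase, and
+// only the "-ms-" prefix is special-cased to stay lowercase.
+//
+//     camelCase( "background-color" );   // "backgroundColor"
+//     camelCase( "-webkit-transform" );  // "WebkitTransform"
+//     camelCase( "-ms-transform" );      // "msTransform" (#9572)
+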
+var acceptData = function( owner ) {
+
+       // Accepts only:
+       //  - Node
+       //    - Node.ELEMENT_NODE
+       //    - Node.DOCUMENT_NODE
+       //  - Object
+       //    - Any
+       return owner.nodeType === 1 || owner.nodeType === 9 || !( +owner.nodeType );
+};
+
+
+
+
+function Data() {
+       this.expando = jQuery.expando + Data.uid++;
+}
+
+Data.uid = 1;
+
+Data.prototype = {
+
+       cache: function( owner ) {
+
+               // Check if the owner object already has a cache
+               var value = owner[ this.expando ];
+
+               // If not, create one
+               if ( !value ) {
+                       value = {};
+
+                       // We can accept data for non-element nodes in modern browsers,
+                       // but we should not, see #8335.
+                       // Always return an empty object.
+                       if ( acceptData( owner ) ) {
+
+                               // If it is a node unlikely to be stringify-ed or looped over
+                               // use plain assignment
+                               if ( owner.nodeType ) {
+                                       owner[ this.expando ] = value;
+
+                               // Otherwise secure it in a non-enumerable property
+                               // configurable must be true to allow the property to be
+                               // deleted when data is removed
+                               } else {
+                                       Object.defineProperty( owner, this.expando, {
+                                               value: value,
+                                               configurable: true
+                                       } );
+                               }
+                       }
+               }
+
+               return value;
+       },
+       set: function( owner, data, value ) {
+               var prop,
+                       cache = this.cache( owner );
+
+               // Handle: [ owner, key, value ] args
+               // Always use camelCase key (gh-2257)
+               if ( typeof data === "string" ) {
+                       cache[ camelCase( data ) ] = value;
+
+               // Handle: [ owner, { properties } ] args
+               } else {
+
+                       // Copy the properties one-by-one to the cache object
+                       for ( prop in data ) {
+                               cache[ camelCase( prop ) ] = data[ prop ];
+                       }
+               }
+               return cache;
+       },
+       get: function( owner, key ) {
+               return key === undefined ?
+                       this.cache( owner ) :
+
+                       // Always use camelCase key (gh-2257)
+                       owner[ this.expando ] && owner[ this.expando ][ camelCase( key ) ];
+       },
+       access: function( owner, key, value ) {
+
+               // In cases where either:
+               //
+               //   1. No key was specified
+               //   2. A string key was specified, but no value provided
+               //
+               // Take the "read" path and allow the get method to determine
+               // which value to return, respectively either:
+               //
+               //   1. The entire cache object
+               //   2. The data stored at the key
+               //
+               if ( key === undefined ||
+                               ( ( key && typeof key === "string" ) && value === undefined ) ) {
+
+                       return this.get( owner, key );
+               }
+
+               // When the key is not a string, or both a key and value
+               // are specified, set or extend (existing objects) with either:
+               //
+               //   1. An object of properties
+               //   2. A key and value
+               //
+               this.set( owner, key, value );
+
+               // Since the "set" path can have two possible entry points
+               // return the expected data based on which path was taken[*]
+               return value !== undefined ? value : key;
+       },
+       remove: function( owner, key ) {
+               var i,
+                       cache = owner[ this.expando ];
+
+               if ( cache === undefined ) {
+                       return;
+               }
+
+               if ( key !== undefined ) {
+
+                       // Support array or space separated string of keys
+                       if ( Array.isArray( key ) ) {
+
+                               // If key is an array of keys...
+                               // We always set camelCase keys, so remove that.
+                               key = key.map( camelCase );
+                       } else {
+                               key = camelCase( key );
+
+                               // If the key exists as given (even with spaces), use it.
+                               // Otherwise, split it into an array of non-whitespace keys
+                               key = key in cache ?
+                                       [ key ] :
+                                       ( key.match( rnothtmlwhite ) || [] );
+                       }
+
+                       i = key.length;
+
+                       while ( i-- ) {
+                               delete cache[ key[ i ] ];
+                       }
+               }
+
+               // Remove the expando if there's no more data
+               if ( key === undefined || jQuery.isEmptyObject( cache ) ) {
+
+                       // Support: Chrome <=35 - 45
+                       // Webkit & Blink performance suffers when deleting properties
+                       // from DOM nodes, so set to undefined instead
+                       // https://bugs.chromium.org/p/chromium/issues/detail?id=378607 (bug restricted)
+                       if ( owner.nodeType ) {
+                               owner[ this.expando ] = undefined;
+                       } else {
+                               delete owner[ this.expando ];
+                       }
+               }
+       },
+       hasData: function( owner ) {
+               var cache = owner[ this.expando ];
+               return cache !== undefined && !jQuery.isEmptyObject( cache );
+       }
+};
+var dataPriv = new Data();
+
+var dataUser = new Data();
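+
+// Illustrative sketch (example only): both stores attach a single expando
+// property per object; dataPriv holds internal state (events, queues) while
+// dataUser backs the public .data() API. Keys are camelCased on the way in.
+//
+//     var div = document.createElement( "div" );
+//     dataUser.set( div, "original-title", "hello" );
+//     dataUser.get( div, "originalTitle" );   // "hello"
+//     dataUser.hasData( div );                // true
+//     dataPriv.hasData( div );                // false -- separate cache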
+
+
+
+//     Implementation Summary
+//
+//     1. Enforce API surface and semantic compatibility with 1.9.x branch
+//     2. Improve the module's maintainability by reducing the storage
+//             paths to a single mechanism.
+//     3. Use the same single mechanism to support "private" and "user" data.
+//     4. _Never_ expose "private" data to user code (TODO: Drop _data, _removeData)
+//     5. Avoid exposing implementation details on user objects (eg. expando properties)
+//     6. Provide a clear path for implementation upgrade to WeakMap in 2014
+
+var rbrace = /^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,
+       rmultiDash = /[A-Z]/g;
+
+function getData( data ) {
+       if ( data === "true" ) {
+               return true;
+       }
+
+       if ( data === "false" ) {
+               return false;
+       }
+
+       if ( data === "null" ) {
+               return null;
+       }
+
+       // Only convert to a number if it doesn't change the string
+       if ( data === +data + "" ) {
+               return +data;
+       }
+
+       if ( rbrace.test( data ) ) {
+               return JSON.parse( data );
+       }
+
+       return data;
+}
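+
+// For illustration (example only): how data-* attribute strings are coerced.
+// The "doesn't change the string" rule keeps zero-padded numbers intact.
+//
+//     getData( "true" );       // true
+//     getData( "42" );         // 42 (number)
+//     getData( "007" );        // "007" (stays a string: +"007" + "" !== "007")
+//     getData( '{"a": 1}' );   // { a: 1 } via JSON.parse
+//     getData( "hello" );      // "hello"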
+
+function dataAttr( elem, key, data ) {
+       var name;
+
+       // If nothing was found internally, try to fetch any
+       // data from the HTML5 data-* attribute
+       if ( data === undefined && elem.nodeType === 1 ) {
+               name = "data-" + key.replace( rmultiDash, "-$&" ).toLowerCase();
+               data = elem.getAttribute( name );
+
+               if ( typeof data === "string" ) {
+                       try {
+                               data = getData( data );
+                       } catch ( e ) {}
+
+                       // Make sure we set the data so it isn't changed later
+                       dataUser.set( elem, key, data );
+               } else {
+                       data = undefined;
+               }
+       }
+       return data;
+}
+
+jQuery.extend( {
+       hasData: function( elem ) {
+               return dataUser.hasData( elem ) || dataPriv.hasData( elem );
+       },
+
+       data: function( elem, name, data ) {
+               return dataUser.access( elem, name, data );
+       },
+
+       removeData: function( elem, name ) {
+               dataUser.remove( elem, name );
+       },
+
+       // TODO: Now that all calls to _data and _removeData have been replaced
+       // with direct calls to dataPriv methods, these can be deprecated.
+       _data: function( elem, name, data ) {
+               return dataPriv.access( elem, name, data );
+       },
+
+       _removeData: function( elem, name ) {
+               dataPriv.remove( elem, name );
+       }
+} );
+
+jQuery.fn.extend( {
+       data: function( key, value ) {
+               var i, name, data,
+                       elem = this[ 0 ],
+                       attrs = elem && elem.attributes;
+
+               // Gets all values
+               if ( key === undefined ) {
+                       if ( this.length ) {
+                               data = dataUser.get( elem );
+
+                               if ( elem.nodeType === 1 && !dataPriv.get( elem, "hasDataAttrs" ) ) {
+                                       i = attrs.length;
+                                       while ( i-- ) {
+
+                                               // Support: IE 11 only
+                                               // The attrs elements can be null (#14894)
+                                               if ( attrs[ i ] ) {
+                                                       name = attrs[ i ].name;
+                                                       if ( name.indexOf( "data-" ) === 0 ) {
+                                                               name = camelCase( name.slice( 5 ) );
+                                                               dataAttr( elem, name, data[ name ] );
+                                                       }
+                                               }
+                                       }
+                                       dataPriv.set( elem, "hasDataAttrs", true );
+                               }
+                       }
+
+                       return data;
+               }
+
+               // Sets multiple values
+               if ( typeof key === "object" ) {
+                       return this.each( function() {
+                               dataUser.set( this, key );
+                       } );
+               }
+
+               return access( this, function( value ) {
+                       var data;
+
+                       // The calling jQuery object (element matches) is not empty
+                       // (and therefore has an element at this[ 0 ]) and the
+                       // `value` parameter was not undefined. An empty jQuery object
+                       // will result in `undefined` for elem = this[ 0 ] which will
+                       // throw an exception if an attempt to read a data cache is made.
+                       if ( elem && value === undefined ) {
+
+                               // Attempt to get data from the cache
+                               // The key will always be camelCased in Data
+                               data = dataUser.get( elem, key );
+                               if ( data !== undefined ) {
+                                       return data;
+                               }
+
+                               // Attempt to "discover" the data in
+                               // HTML5 custom data-* attrs
+                               data = dataAttr( elem, key );
+                               if ( data !== undefined ) {
+                                       return data;
+                               }
+
+                               // We tried really hard, but the data doesn't exist.
+                               return;
+                       }
+
+                       // Set the data...
+                       this.each( function() {
+
+                               // We always store the camelCased key
+                               dataUser.set( this, key, value );
+                       } );
+               }, null, value, arguments.length > 1, null, true );
+       },
+
+       removeData: function( key ) {
+               return this.each( function() {
+                       dataUser.remove( this, key );
+               } );
+       }
+} );
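+
+// Illustrative sketch (example only): .data() reads from the dataUser cache first
+// and falls back to HTML5 data-* attributes; writes go to the cache only and never
+// touch the attribute.
+//
+//     // <div id="box" data-max-items="5"></div>
+//     var $box = jQuery( "#box" );
+//     $box.data( "maxItems" );        // 5 (number, discovered from data-max-items)
+//     $box.data( "maxItems", 10 );    // cached; the attribute still reads "5"
+//     $box.attr( "data-max-items" );  // "5"
+//     $box.removeData( "maxItems" );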
+
+
+jQuery.extend( {
+       queue: function( elem, type, data ) {
+               var queue;
+
+               if ( elem ) {
+                       type = ( type || "fx" ) + "queue";
+                       queue = dataPriv.get( elem, type );
+
+                       // Speed up dequeue by getting out quickly if this is just a lookup
+                       if ( data ) {
+                               if ( !queue || Array.isArray( data ) ) {
+                                       queue = dataPriv.access( elem, type, jQuery.makeArray( data ) );
+                               } else {
+                                       queue.push( data );
+                               }
+                       }
+                       return queue || [];
+               }
+       },
+
+       dequeue: function( elem, type ) {
+               type = type || "fx";
+
+               var queue = jQuery.queue( elem, type ),
+                       startLength = queue.length,
+                       fn = queue.shift(),
+                       hooks = jQuery._queueHooks( elem, type ),
+                       next = function() {
+                               jQuery.dequeue( elem, type );
+                       };
+
+               // If the fx queue is dequeued, always remove the progress sentinel
+               if ( fn === "inprogress" ) {
+                       fn = queue.shift();
+                       startLength--;
+               }
+
+               if ( fn ) {
+
+                       // Add a progress sentinel to prevent the fx queue from being
+                       // automatically dequeued
+                       if ( type === "fx" ) {
+                               queue.unshift( "inprogress" );
+                       }
+
+                       // Clear up the last queue stop function
+                       delete hooks.stop;
+                       fn.call( elem, next, hooks );
+               }
+
+               if ( !startLength && hooks ) {
+                       hooks.empty.fire();
+               }
+       },
+
+       // Not public - generate a queueHooks object, or return the current one
+       _queueHooks: function( elem, type ) {
+               var key = type + "queueHooks";
+               return dataPriv.get( elem, key ) || dataPriv.access( elem, key, {
+                       empty: jQuery.Callbacks( "once memory" ).add( function() {
+                               dataPriv.remove( elem, [ type + "queue", key ] );
+                       } )
+               } );
+       }
+} );
+
+jQuery.fn.extend( {
+       queue: function( type, data ) {
+               var setter = 2;
+
+               if ( typeof type !== "string" ) {
+                       data = type;
+                       type = "fx";
+                       setter--;
+               }
+
+               if ( arguments.length < setter ) {
+                       return jQuery.queue( this[ 0 ], type );
+               }
+
+               return data === undefined ?
+                       this :
+                       this.each( function() {
+                               var queue = jQuery.queue( this, type, data );
+
+                               // Ensure a queueHooks object exists for this queue
+                               jQuery._queueHooks( this, type );
+
+                               if ( type === "fx" && queue[ 0 ] !== "inprogress" ) {
+                                       jQuery.dequeue( this, type );
+                               }
+                       } );
+       },
+       dequeue: function( type ) {
+               return this.each( function() {
+                       jQuery.dequeue( this, type );
+               } );
+       },
+       clearQueue: function( type ) {
+               return this.queue( type || "fx", [] );
+       },
+
+       // Get a promise resolved when queues of a certain type
+       // are emptied (fx is the type by default)
+       promise: function( type, obj ) {
+               var tmp,
+                       count = 1,
+                       defer = jQuery.Deferred(),
+                       elements = this,
+                       i = this.length,
+                       resolve = function() {
+                               if ( !( --count ) ) {
+                                       defer.resolveWith( elements, [ elements ] );
+                               }
+                       };
+
+               if ( typeof type !== "string" ) {
+                       obj = type;
+                       type = undefined;
+               }
+               type = type || "fx";
+
+               while ( i-- ) {
+                       tmp = dataPriv.get( elements[ i ], type + "queueHooks" );
+                       if ( tmp && tmp.empty ) {
+                               count++;
+                               tmp.empty.add( resolve );
+                       }
+               }
+               resolve();
+               return defer.promise( obj );
+       }
+} );
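+
+// Usage sketch for the queue API above (the "#box" selector and the queued
+// function are illustrative assumptions, not taken from this file):
+//
+//   jQuery( "#box" ).queue( "fx", function( next ) {
+//       // do some work, then hand control to the next queued function
+//       next();
+//   } );
+//
+//   // Resolves once every "fx" queue in the collection has emptied
+//   jQuery( "#box" ).promise( "fx" ).done( function() {} );
+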
+var pnum = ( /[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/ ).source;
+
+var rcssNum = new RegExp( "^(?:([+-])=|)(" + pnum + ")([a-z%]*)$", "i" );
+
+
+var cssExpand = [ "Top", "Right", "Bottom", "Left" ];
+
+var documentElement = document.documentElement;
+
+
+
+       var isAttached = function( elem ) {
+                       return jQuery.contains( elem.ownerDocument, elem );
+               },
+               composed = { composed: true };
+
+       // Support: IE 9 - 11+, Edge 12 - 18+, iOS 10.0 - 10.2 only
+       // Check attachment across shadow DOM boundaries when possible (gh-3504)
+       // Support: iOS 10.0-10.2 only
+       // Early iOS 10 versions support `attachShadow` but not `getRootNode`,
+       // leading to errors. We need to check for `getRootNode`.
+       if ( documentElement.getRootNode ) {
+               isAttached = function( elem ) {
+                       return jQuery.contains( elem.ownerDocument, elem ) ||
+                               elem.getRootNode( composed ) === elem.ownerDocument;
+               };
+       }
+var isHiddenWithinTree = function( elem, el ) {
+
+               // isHiddenWithinTree might be called from jQuery#filter function;
+               // in that case, element will be second argument
+               elem = el || elem;
+
+               // Inline style trumps all
+               return elem.style.display === "none" ||
+                       elem.style.display === "" &&
+
+                       // Otherwise, check computed style
+                       // Support: Firefox <=43 - 45
+                       // Disconnected elements can have computed display: none, so first confirm that elem is
+                       // in the document.
+                       isAttached( elem ) &&
+
+                       jQuery.css( elem, "display" ) === "none";
+       };
+
+var swap = function( elem, options, callback, args ) {
+       var ret, name,
+               old = {};
+
+       // Remember the old values, and insert the new ones
+       for ( name in options ) {
+               old[ name ] = elem.style[ name ];
+               elem.style[ name ] = options[ name ];
+       }
+
+       ret = callback.apply( elem, args || [] );
+
+       // Revert the old values
+       for ( name in options ) {
+               elem.style[ name ] = old[ name ];
+       }
+
+       return ret;
+};
+
+
+
+
+function adjustCSS( elem, prop, valueParts, tween ) {
+       var adjusted, scale,
+               maxIterations = 20,
+               currentValue = tween ?
+                       function() {
+                               return tween.cur();
+                       } :
+                       function() {
+                               return jQuery.css( elem, prop, "" );
+                       },
+               initial = currentValue(),
+               unit = valueParts && valueParts[ 3 ] || ( jQuery.cssNumber[ prop ] ? "" : "px" ),
+
+               // Starting value computation is required for potential unit mismatches
+               initialInUnit = elem.nodeType &&
+                       ( jQuery.cssNumber[ prop ] || unit !== "px" && +initial ) &&
+                       rcssNum.exec( jQuery.css( elem, prop ) );
+
+       if ( initialInUnit && initialInUnit[ 3 ] !== unit ) {
+
+               // Support: Firefox <=54
+               // Halve the iteration target value to prevent interference from CSS upper bounds (gh-2144)
+               initial = initial / 2;
+
+               // Trust units reported by jQuery.css
+               unit = unit || initialInUnit[ 3 ];
+
+               // Iteratively approximate from a nonzero starting point
+               initialInUnit = +initial || 1;
+
+               while ( maxIterations-- ) {
+
+                       // Evaluate and update our best guess (doubling guesses that zero out).
+                       // Finish if the scale equals or crosses 1 (making the old*new product non-positive).
+                       jQuery.style( elem, prop, initialInUnit + unit );
+                       if ( ( 1 - scale ) * ( 1 - ( scale = currentValue() / initial || 0.5 ) ) <= 0 ) {
+                               maxIterations = 0;
+                       }
+                       initialInUnit = initialInUnit / scale;
+
+               }
+
+               initialInUnit = initialInUnit * 2;
+               jQuery.style( elem, prop, initialInUnit + unit );
+
+               // Make sure we update the tween properties later on
+               valueParts = valueParts || [];
+       }
+
+       if ( valueParts ) {
+               initialInUnit = +initialInUnit || +initial || 0;
+
+               // Apply relative offset (+=/-=) if specified
+               adjusted = valueParts[ 1 ] ?
+                       initialInUnit + ( valueParts[ 1 ] + 1 ) * valueParts[ 2 ] :
+                       +valueParts[ 2 ];
+               if ( tween ) {
+                       tween.unit = unit;
+                       tween.start = initialInUnit;
+                       tween.end = adjusted;
+               }
+       }
+       return adjusted;
+}
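+
+// adjustCSS() resolves relative ("+="/"-=") and cross-unit values: when the
+// requested unit differs from the px value reported by jQuery.css(), it
+// iteratively scales a trial value until the computed result matches, then
+// applies the relative offset. Illustrative call (elem, the property and the
+// value are assumptions):
+//
+//   adjustCSS( elem, "width", rcssNum.exec( "+=2em" ) );
+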
+
+
+var defaultDisplayMap = {};
+
+function getDefaultDisplay( elem ) {
+       var temp,
+               doc = elem.ownerDocument,
+               nodeName = elem.nodeName,
+               display = defaultDisplayMap[ nodeName ];
+
+       if ( display ) {
+               return display;
+       }
+
+       temp = doc.body.appendChild( doc.createElement( nodeName ) );
+       display = jQuery.css( temp, "display" );
+
+       temp.parentNode.removeChild( temp );
+
+       if ( display === "none" ) {
+               display = "block";
+       }
+       defaultDisplayMap[ nodeName ] = display;
+
+       return display;
+}
+
+function showHide( elements, show ) {
+       var display, elem,
+               values = [],
+               index = 0,
+               length = elements.length;
+
+       // Determine new display value for elements that need to change
+       for ( ; index < length; index++ ) {
+               elem = elements[ index ];
+               if ( !elem.style ) {
+                       continue;
+               }
+
+               display = elem.style.display;
+               if ( show ) {
+
+                       // Since we force visibility upon cascade-hidden elements, an immediate (and slow)
+                       // check is required in this first loop unless we have a nonempty display value (either
+                       // inline or about-to-be-restored)
+                       if ( display === "none" ) {
+                               values[ index ] = dataPriv.get( elem, "display" ) || null;
+                               if ( !values[ index ] ) {
+                                       elem.style.display = "";
+                               }
+                       }
+                       if ( elem.style.display === "" && isHiddenWithinTree( elem ) ) {
+                               values[ index ] = getDefaultDisplay( elem );
+                       }
+               } else {
+                       if ( display !== "none" ) {
+                               values[ index ] = "none";
+
+                               // Remember what we're overwriting
+                               dataPriv.set( elem, "display", display );
+                       }
+               }
+       }
+
+       // Set the display of the elements in a second loop to avoid constant reflow
+       for ( index = 0; index < length; index++ ) {
+               if ( values[ index ] != null ) {
+                       elements[ index ].style.display = values[ index ];
+               }
+       }
+
+       return elements;
+}
+
+jQuery.fn.extend( {
+       show: function() {
+               return showHide( this, true );
+       },
+       hide: function() {
+               return showHide( this );
+       },
+       toggle: function( state ) {
+               if ( typeof state === "boolean" ) {
+                       return state ? this.show() : this.hide();
+               }
+
+               return this.each( function() {
+                       if ( isHiddenWithinTree( this ) ) {
+                               jQuery( this ).show();
+                       } else {
+                               jQuery( this ).hide();
+                       }
+               } );
+       }
+} );
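+
+// Usage sketch for show()/hide()/toggle() above (the "#panel" selector is an
+// illustrative assumption):
+//
+//   jQuery( "#panel" ).hide();    // remembers the current display value, then sets "none"
+//   jQuery( "#panel" ).show();    // restores the remembered value or the tag's default display
+//   jQuery( "#panel" ).toggle();  // chooses show or hide via isHiddenWithinTree()
+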
+var rcheckableType = ( /^(?:checkbox|radio)$/i );
+
+var rtagName = ( /<([a-z][^\/\0>\x20\t\r\n\f]*)/i );
+
+var rscriptType = ( /^$|^module$|\/(?:java|ecma)script/i );
+
+
+
+// We have to close these tags to support XHTML (#13200)
+var wrapMap = {
+
+       // Support: IE <=9 only
+       option: [ 1, "<select multiple='multiple'>", "</select>" ],
+
+       // XHTML parsers do not magically insert elements in the
+       // same way that tag soup parsers do. So we cannot shorten
+       // this by omitting <tbody> or other required elements.
+       thead: [ 1, "<table>", "</table>" ],
+       col: [ 2, "<table><colgroup>", "</colgroup></table>" ],
+       tr: [ 2, "<table><tbody>", "</tbody></table>" ],
+       td: [ 3, "<table><tbody><tr>", "</tr></tbody></table>" ],
+
+       _default: [ 0, "", "" ]
+};
+
+// Support: IE <=9 only
+wrapMap.optgroup = wrapMap.option;
+
+wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead;
+wrapMap.th = wrapMap.td;
+
+
+function getAll( context, tag ) {
+
+       // Support: IE <=9 - 11 only
+       // Use typeof to avoid zero-argument method invocation on host objects (#15151)
+       var ret;
+
+       if ( typeof context.getElementsByTagName !== "undefined" ) {
+               ret = context.getElementsByTagName( tag || "*" );
+
+       } else if ( typeof context.querySelectorAll !== "undefined" ) {
+               ret = context.querySelectorAll( tag || "*" );
+
+       } else {
+               ret = [];
+       }
+
+       if ( tag === undefined || tag && nodeName( context, tag ) ) {
+               return jQuery.merge( [ context ], ret );
+       }
+
+       return ret;
+}
+
+
+// Mark scripts as having already been evaluated
+function setGlobalEval( elems, refElements ) {
+       var i = 0,
+               l = elems.length;
+
+       for ( ; i < l; i++ ) {
+               dataPriv.set(
+                       elems[ i ],
+                       "globalEval",
+                       !refElements || dataPriv.get( refElements[ i ], "globalEval" )
+               );
+       }
+}
+
+
+var rhtml = /<|&#?\w+;/;
+
+function buildFragment( elems, context, scripts, selection, ignored ) {
+       var elem, tmp, tag, wrap, attached, j,
+               fragment = context.createDocumentFragment(),
+               nodes = [],
+               i = 0,
+               l = elems.length;
+
+       for ( ; i < l; i++ ) {
+               elem = elems[ i ];
+
+               if ( elem || elem === 0 ) {
+
+                       // Add nodes directly
+                       if ( toType( elem ) === "object" ) {
+
+                               // Support: Android <=4.0 only, PhantomJS 1 only
+                               // push.apply(_, arraylike) throws on ancient WebKit
+                               jQuery.merge( nodes, elem.nodeType ? [ elem ] : elem );
+
+                       // Convert non-html into a text node
+                       } else if ( !rhtml.test( elem ) ) {
+                               nodes.push( context.createTextNode( elem ) );
+
+                       // Convert html into DOM nodes
+                       } else {
+                               tmp = tmp || fragment.appendChild( context.createElement( "div" ) );
+
+                               // Deserialize a standard representation
+                               tag = ( rtagName.exec( elem ) || [ "", "" ] )[ 1 ].toLowerCase();
+                               wrap = wrapMap[ tag ] || wrapMap._default;
+                               tmp.innerHTML = wrap[ 1 ] + jQuery.htmlPrefilter( elem ) + wrap[ 2 ];
+
+                               // Descend through wrappers to the right content
+                               j = wrap[ 0 ];
+                               while ( j-- ) {
+                                       tmp = tmp.lastChild;
+                               }
+
+                               // Support: Android <=4.0 only, PhantomJS 1 only
+                               // push.apply(_, arraylike) throws on ancient WebKit
+                               jQuery.merge( nodes, tmp.childNodes );
+
+                               // Remember the top-level container
+                               tmp = fragment.firstChild;
+
+                               // Ensure the created nodes are orphaned (#12392)
+                               tmp.textContent = "";
+                       }
+               }
+       }
+
+       // Remove wrapper from fragment
+       fragment.textContent = "";
+
+       i = 0;
+       while ( ( elem = nodes[ i++ ] ) ) {
+
+               // Skip elements already in the context collection (trac-4087)
+               if ( selection && jQuery.inArray( elem, selection ) > -1 ) {
+                       if ( ignored ) {
+                               ignored.push( elem );
+                       }
+                       continue;
+               }
+
+               attached = isAttached( elem );
+
+               // Append to fragment
+               tmp = getAll( fragment.appendChild( elem ), "script" );
+
+               // Preserve script evaluation history
+               if ( attached ) {
+                       setGlobalEval( tmp );
+               }
+
+               // Capture executables
+               if ( scripts ) {
+                       j = 0;
+                       while ( ( elem = tmp[ j++ ] ) ) {
+                               if ( rscriptType.test( elem.type || "" ) ) {
+                                       scripts.push( elem );
+                               }
+                       }
+               }
+       }
+
+       return fragment;
+}
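+
+// buildFragment() depends on wrapMap above so that context-sensitive markup
+// parses correctly: a bare "<tr>" is first wrapped in
+// "<table><tbody>...</tbody></table>" and then unwrapped again. Illustrative
+// call (the HTML string is an assumption):
+//
+//   var frag = buildFragment( [ "<tr><td>cell</td></tr>" ], document, null );
+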
+
+
+( function() {
+       var fragment = document.createDocumentFragment(),
+               div = fragment.appendChild( document.createElement( "div" ) ),
+               input = document.createElement( "input" );
+
+       // Support: Android 4.0 - 4.3 only
+       // Checked state is lost if the name is set (#11217)
+       // Support: Windows Web Apps (WWA)
+       // `name` and `type` must use .setAttribute for WWA (#14901)
+       input.setAttribute( "type", "radio" );
+       input.setAttribute( "checked", "checked" );
+       input.setAttribute( "name", "t" );
+
+       div.appendChild( input );
+
+       // Support: Android <=4.1 only
+       // Older WebKit doesn't clone checked state correctly in fragments
+       support.checkClone = div.cloneNode( true ).cloneNode( true ).lastChild.checked;
+
+       // Support: IE <=11 only
+       // Make sure textarea (and checkbox) defaultValue is properly cloned
+       div.innerHTML = "<textarea>x</textarea>";
+       support.noCloneChecked = !!div.cloneNode( true ).lastChild.defaultValue;
+} )();
+
+
+var
+       rkeyEvent = /^key/,
+       rmouseEvent = /^(?:mouse|pointer|contextmenu|drag|drop)|click/,
+       rtypenamespace = /^([^.]*)(?:\.(.+)|)/;
+
+function returnTrue() {
+       return true;
+}
+
+function returnFalse() {
+       return false;
+}
+
+// Support: IE <=9 - 11+
+// focus() and blur() are asynchronous, except when they are no-ops.
+// So expect focus to be synchronous when the element is already active,
+// and blur to be synchronous when the element is not already active.
+// (focus and blur are always synchronous in other supported browsers,
+// this just defines when we can count on it).
+function expectSync( elem, type ) {
+       return ( elem === safeActiveElement() ) === ( type === "focus" );
+}
+
+// Support: IE <=9 only
+// Accessing document.activeElement can throw unexpectedly
+// https://bugs.jquery.com/ticket/13393
+function safeActiveElement() {
+       try {
+               return document.activeElement;
+       } catch ( err ) { }
+}
+
+function on( elem, types, selector, data, fn, one ) {
+       var origFn, type;
+
+       // Types can be a map of types/handlers
+       if ( typeof types === "object" ) {
+
+               // ( types-Object, selector, data )
+               if ( typeof selector !== "string" ) {
+
+                       // ( types-Object, data )
+                       data = data || selector;
+                       selector = undefined;
+               }
+               for ( type in types ) {
+                       on( elem, type, selector, data, types[ type ], one );
+               }
+               return elem;
+       }
+
+       if ( data == null && fn == null ) {
+
+               // ( types, fn )
+               fn = selector;
+               data = selector = undefined;
+       } else if ( fn == null ) {
+               if ( typeof selector === "string" ) {
+
+                       // ( types, selector, fn )
+                       fn = data;
+                       data = undefined;
+               } else {
+
+                       // ( types, data, fn )
+                       fn = data;
+                       data = selector;
+                       selector = undefined;
+               }
+       }
+       if ( fn === false ) {
+               fn = returnFalse;
+       } else if ( !fn ) {
+               return elem;
+       }
+
+       if ( one === 1 ) {
+               origFn = fn;
+               fn = function( event ) {
+
+                       // Can use an empty set, since event contains the info
+                       jQuery().off( event );
+                       return origFn.apply( this, arguments );
+               };
+
+               // Use same guid so caller can remove using origFn
+               fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ );
+       }
+       return elem.each( function() {
+               jQuery.event.add( this, types, fn, data, selector );
+       } );
+}
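+
+// The on() helper above normalizes the overloaded public signatures, e.g.
+// (the selectors and the handler are illustrative assumptions):
+//
+//   jQuery( document ).on( "click", ".item", handler );            // delegated
+//   jQuery( document ).on( "click", ".item", { id: 1 }, handler ); // with data
+//   jQuery( document ).on( { click: handler, focus: handler } );   // types map
+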
+
+/*
+ * Helper functions for managing events -- not part of the public interface.
+ * Props to Dean Edwards' addEvent library for many of the ideas.
+ */
+jQuery.event = {
+
+       global: {},
+
+       add: function( elem, types, handler, data, selector ) {
+
+               var handleObjIn, eventHandle, tmp,
+                       events, t, handleObj,
+                       special, handlers, type, namespaces, origType,
+                       elemData = dataPriv.get( elem );
+
+               // Don't attach events to noData or text/comment nodes (but allow plain objects)
+               if ( !elemData ) {
+                       return;
+               }
+
+               // Caller can pass in an object of custom data in lieu of the handler
+               if ( handler.handler ) {
+                       handleObjIn = handler;
+                       handler = handleObjIn.handler;
+                       selector = handleObjIn.selector;
+               }
+
+               // Ensure that invalid selectors throw exceptions at attach time
+               // Evaluate against documentElement in case elem is a non-element node (e.g., document)
+               if ( selector ) {
+                       jQuery.find.matchesSelector( documentElement, selector );
+               }
+
+               // Make sure that the handler has a unique ID, used to find/remove it later
+               if ( !handler.guid ) {
+                       handler.guid = jQuery.guid++;
+               }
+
+               // Init the element's event structure and main handler, if this is the first
+               if ( !( events = elemData.events ) ) {
+                       events = elemData.events = {};
+               }
+               if ( !( eventHandle = elemData.handle ) ) {
+                       eventHandle = elemData.handle = function( e ) {
+
+                               // Discard the second event of a jQuery.event.trigger() and
+                               // when an event is called after a page has unloaded
+                               return typeof jQuery !== "undefined" && jQuery.event.triggered !== e.type ?
+                                       jQuery.event.dispatch.apply( elem, arguments ) : undefined;
+                       };
+               }
+
+               // Handle multiple events separated by a space
+               types = ( types || "" ).match( rnothtmlwhite ) || [ "" ];
+               t = types.length;
+               while ( t-- ) {
+                       tmp = rtypenamespace.exec( types[ t ] ) || [];
+                       type = origType = tmp[ 1 ];
+                       namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort();
+
+                       // There *must* be a type, no attaching namespace-only handlers
+                       if ( !type ) {
+                               continue;
+                       }
+
+                       // If event changes its type, use the special event handlers for the changed type
+                       special = jQuery.event.special[ type ] || {};
+
+                       // If selector defined, determine special event api type, otherwise given type
+                       type = ( selector ? special.delegateType : special.bindType ) || type;
+
+                       // Update special based on newly reset type
+                       special = jQuery.event.special[ type ] || {};
+
+                       // handleObj is passed to all event handlers
+                       handleObj = jQuery.extend( {
+                               type: type,
+                               origType: origType,
+                               data: data,
+                               handler: handler,
+                               guid: handler.guid,
+                               selector: selector,
+                               needsContext: selector && jQuery.expr.match.needsContext.test( selector ),
+                               namespace: namespaces.join( "." )
+                       }, handleObjIn );
+
+                       // Init the event handler queue if we're the first
+                       if ( !( handlers = events[ type ] ) ) {
+                               handlers = events[ type ] = [];
+                               handlers.delegateCount = 0;
+
+                               // Only use addEventListener if the special events handler returns false
+                               if ( !special.setup ||
+                                       special.setup.call( elem, data, namespaces, eventHandle ) === false ) {
+
+                                       if ( elem.addEventListener ) {
+                                               elem.addEventListener( type, eventHandle );
+                                       }
+                               }
+                       }
+
+                       if ( special.add ) {
+                               special.add.call( elem, handleObj );
+
+                               if ( !handleObj.handler.guid ) {
+                                       handleObj.handler.guid = handler.guid;
+                               }
+                       }
+
+                       // Add to the element's handler list, delegates in front
+                       if ( selector ) {
+                               handlers.splice( handlers.delegateCount++, 0, handleObj );
+                       } else {
+                               handlers.push( handleObj );
+                       }
+
+                       // Keep track of which events have ever been used, for event optimization
+                       jQuery.event.global[ type ] = true;
+               }
+
+       },
+
+       // Detach an event or set of events from an element
+       remove: function( elem, types, handler, selector, mappedTypes ) {
+
+               var j, origCount, tmp,
+                       events, t, handleObj,
+                       special, handlers, type, namespaces, origType,
+                       elemData = dataPriv.hasData( elem ) && dataPriv.get( elem );
+
+               if ( !elemData || !( events = elemData.events ) ) {
+                       return;
+               }
+
+               // Once for each type.namespace in types; type may be omitted
+               types = ( types || "" ).match( rnothtmlwhite ) || [ "" ];
+               t = types.length;
+               while ( t-- ) {
+                       tmp = rtypenamespace.exec( types[ t ] ) || [];
+                       type = origType = tmp[ 1 ];
+                       namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort();
+
+                       // Unbind all events (on this namespace, if provided) for the element
+                       if ( !type ) {
+                               for ( type in events ) {
+                                       jQuery.event.remove( elem, type + types[ t ], handler, selector, true );
+                               }
+                               continue;
+                       }
+
+                       special = jQuery.event.special[ type ] || {};
+                       type = ( selector ? special.delegateType : special.bindType ) || type;
+                       handlers = events[ type ] || [];
+                       tmp = tmp[ 2 ] &&
+                               new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" );
+
+                       // Remove matching events
+                       origCount = j = handlers.length;
+                       while ( j-- ) {
+                               handleObj = handlers[ j ];
+
+                               if ( ( mappedTypes || origType === handleObj.origType ) &&
+                                       ( !handler || handler.guid === handleObj.guid ) &&
+                                       ( !tmp || tmp.test( handleObj.namespace ) ) &&
+                                       ( !selector || selector === handleObj.selector ||
+                                               selector === "**" && handleObj.selector ) ) {
+                                       handlers.splice( j, 1 );
+
+                                       if ( handleObj.selector ) {
+                                               handlers.delegateCount--;
+                                       }
+                                       if ( special.remove ) {
+                                               special.remove.call( elem, handleObj );
+                                       }
+                               }
+                       }
+
+                       // Remove generic event handler if we removed something and no more handlers exist
+                       // (avoids potential for endless recursion during removal of special event handlers)
+                       if ( origCount && !handlers.length ) {
+                               if ( !special.teardown ||
+                                       special.teardown.call( elem, namespaces, elemData.handle ) === false ) {
+
+                                       jQuery.removeEvent( elem, type, elemData.handle );
+                               }
+
+                               delete events[ type ];
+                       }
+               }
+
+               // Remove data and the expando if it's no longer used
+               if ( jQuery.isEmptyObject( events ) ) {
+                       dataPriv.remove( elem, "handle events" );
+               }
+       },
+
+       dispatch: function( nativeEvent ) {
+
+               // Make a writable jQuery.Event from the native event object
+               var event = jQuery.event.fix( nativeEvent );
+
+               var i, j, ret, matched, handleObj, handlerQueue,
+                       args = new Array( arguments.length ),
+                       handlers = ( dataPriv.get( this, "events" ) || {} )[ event.type ] || [],
+                       special = jQuery.event.special[ event.type ] || {};
+
+               // Use the fix-ed jQuery.Event rather than the (read-only) native event
+               args[ 0 ] = event;
+
+               for ( i = 1; i < arguments.length; i++ ) {
+                       args[ i ] = arguments[ i ];
+               }
+
+               event.delegateTarget = this;
+
+               // Call the preDispatch hook for the mapped type, and let it bail if desired
+               if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) {
+                       return;
+               }
+
+               // Determine handlers
+               handlerQueue = jQuery.event.handlers.call( this, event, handlers );
+
+               // Run delegates first; they may want to stop propagation beneath us
+               i = 0;
+               while ( ( matched = handlerQueue[ i++ ] ) && !event.isPropagationStopped() ) {
+                       event.currentTarget = matched.elem;
+
+                       j = 0;
+                       while ( ( handleObj = matched.handlers[ j++ ] ) &&
+                               !event.isImmediatePropagationStopped() ) {
+
+                               // If the event is namespaced, then each handler is only invoked if it is
+                               // specially universal or its namespaces are a superset of the event's.
+                               if ( !event.rnamespace || handleObj.namespace === false ||
+                                       event.rnamespace.test( handleObj.namespace ) ) {
+
+                                       event.handleObj = handleObj;
+                                       event.data = handleObj.data;
+
+                                       ret = ( ( jQuery.event.special[ handleObj.origType ] || {} ).handle ||
+                                               handleObj.handler ).apply( matched.elem, args );
+
+                                       if ( ret !== undefined ) {
+                                               if ( ( event.result = ret ) === false ) {
+                                                       event.preventDefault();
+                                                       event.stopPropagation();
+                                               }
+                                       }
+                               }
+                       }
+               }
+
+               // Call the postDispatch hook for the mapped type
+               if ( special.postDispatch ) {
+                       special.postDispatch.call( this, event );
+               }
+
+               return event.result;
+       },
+
+       handlers: function( event, handlers ) {
+               var i, handleObj, sel, matchedHandlers, matchedSelectors,
+                       handlerQueue = [],
+                       delegateCount = handlers.delegateCount,
+                       cur = event.target;
+
+               // Find delegate handlers
+               if ( delegateCount &&
+
+                       // Support: IE <=9
+                       // Black-hole SVG <use> instance trees (trac-13180)
+                       cur.nodeType &&
+
+                       // Support: Firefox <=42
+                       // Suppress spec-violating clicks indicating a non-primary pointer button (trac-3861)
+                       // https://www.w3.org/TR/DOM-Level-3-Events/#event-type-click
+                       // Support: IE 11 only
+                       // ...but not arrow key "clicks" of radio inputs, which can have `button` -1 (gh-2343)
+                       !( event.type === "click" && event.button >= 1 ) ) {
+
+                       for ( ; cur !== this; cur = cur.parentNode || this ) {
+
+                               // Don't check non-elements (#13208)
+                               // Don't process clicks on disabled elements (#6911, #8165, #11382, #11764)
+                               if ( cur.nodeType === 1 && !( event.type === "click" && cur.disabled === true ) ) {
+                                       matchedHandlers = [];
+                                       matchedSelectors = {};
+                                       for ( i = 0; i < delegateCount; i++ ) {
+                                               handleObj = handlers[ i ];
+
+                                               // Don't conflict with Object.prototype properties (#13203)
+                                               sel = handleObj.selector + " ";
+
+                                               if ( matchedSelectors[ sel ] === undefined ) {
+                                                       matchedSelectors[ sel ] = handleObj.needsContext ?
+                                                               jQuery( sel, this ).index( cur ) > -1 :
+                                                               jQuery.find( sel, this, null, [ cur ] ).length;
+                                               }
+                                               if ( matchedSelectors[ sel ] ) {
+                                                       matchedHandlers.push( handleObj );
+                                               }
+                                       }
+                                       if ( matchedHandlers.length ) {
+                                               handlerQueue.push( { elem: cur, handlers: matchedHandlers } );
+                                       }
+                               }
+                       }
+               }
+
+               // Add the remaining (directly-bound) handlers
+               cur = this;
+               if ( delegateCount < handlers.length ) {
+                       handlerQueue.push( { elem: cur, handlers: handlers.slice( delegateCount ) } );
+               }
+
+               return handlerQueue;
+       },
+
+       addProp: function( name, hook ) {
+               Object.defineProperty( jQuery.Event.prototype, name, {
+                       enumerable: true,
+                       configurable: true,
+
+                       get: isFunction( hook ) ?
+                               function() {
+                                       if ( this.originalEvent ) {
+                                               return hook( this.originalEvent );
+                                       }
+                               } :
+                               function() {
+                                       if ( this.originalEvent ) {
+                                               return this.originalEvent[ name ];
+                                       }
+                               },
+
+                       set: function( value ) {
+                               Object.defineProperty( this, name, {
+                                       enumerable: true,
+                                       configurable: true,
+                                       writable: true,
+                                       value: value
+                               } );
+                       }
+               } );
+       },
+
+       fix: function( originalEvent ) {
+               return originalEvent[ jQuery.expando ] ?
+                       originalEvent :
+                       new jQuery.Event( originalEvent );
+       },
+
+       special: {
+               load: {
+
+                       // Prevent triggered image.load events from bubbling to window.load
+                       noBubble: true
+               },
+               click: {
+
+                       // Utilize native event to ensure correct state for checkable inputs
+                       setup: function( data ) {
+
+                               // For mutual compressibility with _default, replace `this` access with a local var.
+                               // `|| data` is dead code meant only to preserve the variable through minification.
+                               var el = this || data;
+
+                               // Claim the first handler
+                               if ( rcheckableType.test( el.type ) &&
+                                       el.click && nodeName( el, "input" ) ) {
+
+                                       // dataPriv.set( el, "click", ... )
+                                       leverageNative( el, "click", returnTrue );
+                               }
+
+                               // Return false to allow normal processing in the caller
+                               return false;
+                       },
+                       trigger: function( data ) {
+
+                               // For mutual compressibility with _default, replace `this` access with a local var.
+                               // `|| data` is dead code meant only to preserve the variable through minification.
+                               var el = this || data;
+
+                               // Force setup before triggering a click
+                               if ( rcheckableType.test( el.type ) &&
+                                       el.click && nodeName( el, "input" ) ) {
+
+                                       leverageNative( el, "click" );
+                               }
+
+                               // Return non-false to allow normal event-path propagation
+                               return true;
+                       },
+
+                       // For cross-browser consistency, suppress native .click() on links
+                       // Also prevent it if we're currently inside a leveraged native-event stack
+                       _default: function( event ) {
+                               var target = event.target;
+                               return rcheckableType.test( target.type ) &&
+                                       target.click && nodeName( target, "input" ) &&
+                                       dataPriv.get( target, "click" ) ||
+                                       nodeName( target, "a" );
+                       }
+               },
+
+               beforeunload: {
+                       postDispatch: function( event ) {
+
+                               // Support: Firefox 20+
+                               // Firefox doesn't alert if the returnValue field is not set.
+                               if ( event.result !== undefined && event.originalEvent ) {
+                                       event.originalEvent.returnValue = event.result;
+                               }
+                       }
+               }
+       }
+};
+
+// Ensure the presence of an event listener that handles manually-triggered
+// synthetic events by interrupting progress until reinvoked in response to
+// *native* events that it fires directly, ensuring that state changes have
+// already occurred before other listeners are invoked.
+function leverageNative( el, type, expectSync ) {
+
+       // Missing expectSync indicates a trigger call, which must force setup through jQuery.event.add
+       if ( !expectSync ) {
+               if ( dataPriv.get( el, type ) === undefined ) {
+                       jQuery.event.add( el, type, returnTrue );
+               }
+               return;
+       }
+
+       // Register the controller as a special universal handler for all event namespaces
+       dataPriv.set( el, type, false );
+       jQuery.event.add( el, type, {
+               namespace: false,
+               handler: function( event ) {
+                       var notAsync, result,
+                               saved = dataPriv.get( this, type );
+
+                       if ( ( event.isTrigger & 1 ) && this[ type ] ) {
+
+                               // Interrupt processing of the outer synthetic .trigger()ed event
+                               // Saved data should be false in such cases, but might be a leftover capture object
+                               // from an async native handler (gh-4350)
+                               if ( !saved.length ) {
+
+                                       // Store arguments for use when handling the inner native event
+                                       // There will always be at least one argument (an event object), so this array
+                                       // will not be confused with a leftover capture object.
+                                       saved = slice.call( arguments );
+                                       dataPriv.set( this, type, saved );
+
+                                       // Trigger the native event and capture its result
+                                       // Support: IE <=9 - 11+
+                                       // focus() and blur() are asynchronous
+                                       notAsync = expectSync( this, type );
+                                       this[ type ]();
+                                       result = dataPriv.get( this, type );
+                                       if ( saved !== result || notAsync ) {
+                                               dataPriv.set( this, type, false );
+                                       } else {
+                                               result = {};
+                                       }
+                                       if ( saved !== result ) {
+
+                                               // Cancel the outer synthetic event
+                                               event.stopImmediatePropagation();
+                                               event.preventDefault();
+                                               return result.value;
+                                       }
+
+                               // If this is an inner synthetic event for an event with a bubbling surrogate
+                               // (focus or blur), assume that the surrogate already propagated from triggering the
+                               // native event and prevent that from happening again here.
+                               // This technically gets the ordering wrong w.r.t. `.trigger()` (in which the
+                               // bubbling surrogate propagates *after* the non-bubbling base), but that seems
+                               // less bad than duplication.
+                               } else if ( ( jQuery.event.special[ type ] || {} ).delegateType ) {
+                                       event.stopPropagation();
+                               }
+
+                       // If this is a native event triggered above, everything is now in order
+                       // Fire an inner synthetic event with the original arguments
+                       } else if ( saved.length ) {
+
+                               // ...and capture the result
+                               dataPriv.set( this, type, {
+                                       value: jQuery.event.trigger(
+
+                                               // Support: IE <=9 - 11+
+                                               // Extend with the prototype to reset the above stopImmediatePropagation()
+                                               jQuery.extend( saved[ 0 ], jQuery.Event.prototype ),
+                                               saved.slice( 1 ),
+                                               this
+                                       )
+                               } );
+
+                               // Abort handling of the native event
+                               event.stopImmediatePropagation();
+                       }
+               }
+       } );
+}
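+
+// leverageNative() is wired up by the "click" special event above and the
+// "focus"/"blur" special events below so that, for example,
+// jQuery( checkbox ).trigger( "click" ) lets the native click fire first and
+// handlers observe the updated checked state. (The checkbox variable is an
+// illustrative assumption.)
+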
+
+jQuery.removeEvent = function( elem, type, handle ) {
+
+       // This "if" is needed for plain objects
+       if ( elem.removeEventListener ) {
+               elem.removeEventListener( type, handle );
+       }
+};
+
+jQuery.Event = function( src, props ) {
+
+       // Allow instantiation without the 'new' keyword
+       if ( !( this instanceof jQuery.Event ) ) {
+               return new jQuery.Event( src, props );
+       }
+
+       // Event object
+       if ( src && src.type ) {
+               this.originalEvent = src;
+               this.type = src.type;
+
+               // Events bubbling up the document may have been marked as prevented
+               // by a handler lower down the tree; reflect the correct value.
+               this.isDefaultPrevented = src.defaultPrevented ||
+                               src.defaultPrevented === undefined &&
+
+                               // Support: Android <=2.3 only
+                               src.returnValue === false ?
+                       returnTrue :
+                       returnFalse;
+
+               // Create target properties
+               // Support: Safari <=6 - 7 only
+               // Target should not be a text node (#504, #13143)
+               this.target = ( src.target && src.target.nodeType === 3 ) ?
+                       src.target.parentNode :
+                       src.target;
+
+               this.currentTarget = src.currentTarget;
+               this.relatedTarget = src.relatedTarget;
+
+       // Event type
+       } else {
+               this.type = src;
+       }
+
+       // Put explicitly provided properties onto the event object
+       if ( props ) {
+               jQuery.extend( this, props );
+       }
+
+       // Create a timestamp if incoming event doesn't have one
+       this.timeStamp = src && src.timeStamp || Date.now();
+
+       // Mark it as fixed
+       this[ jQuery.expando ] = true;
+};
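+
+// Usage sketch: jQuery.Event can wrap a native event or describe a synthetic
+// one (the property values and selector are illustrative assumptions):
+//
+//   var e = jQuery.Event( "keydown", { keyCode: 13 } );
+//   jQuery( "input" ).trigger( e );
+//   if ( e.isDefaultPrevented() ) { /* a handler called e.preventDefault() */ }
+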
+
+// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding
+// https://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html
+jQuery.Event.prototype = {
+       constructor: jQuery.Event,
+       isDefaultPrevented: returnFalse,
+       isPropagationStopped: returnFalse,
+       isImmediatePropagationStopped: returnFalse,
+       isSimulated: false,
+
+       preventDefault: function() {
+               var e = this.originalEvent;
+
+               this.isDefaultPrevented = returnTrue;
+
+               if ( e && !this.isSimulated ) {
+                       e.preventDefault();
+               }
+       },
+       stopPropagation: function() {
+               var e = this.originalEvent;
+
+               this.isPropagationStopped = returnTrue;
+
+               if ( e && !this.isSimulated ) {
+                       e.stopPropagation();
+               }
+       },
+       stopImmediatePropagation: function() {
+               var e = this.originalEvent;
+
+               this.isImmediatePropagationStopped = returnTrue;
+
+               if ( e && !this.isSimulated ) {
+                       e.stopImmediatePropagation();
+               }
+
+               this.stopPropagation();
+       }
+};
+
+// Includes all common event props including KeyEvent and MouseEvent specific props
+jQuery.each( {
+       altKey: true,
+       bubbles: true,
+       cancelable: true,
+       changedTouches: true,
+       ctrlKey: true,
+       detail: true,
+       eventPhase: true,
+       metaKey: true,
+       pageX: true,
+       pageY: true,
+       shiftKey: true,
+       view: true,
+       "char": true,
+       code: true,
+       charCode: true,
+       key: true,
+       keyCode: true,
+       button: true,
+       buttons: true,
+       clientX: true,
+       clientY: true,
+       offsetX: true,
+       offsetY: true,
+       pointerId: true,
+       pointerType: true,
+       screenX: true,
+       screenY: true,
+       targetTouches: true,
+       toElement: true,
+       touches: true,
+
+       which: function( event ) {
+               var button = event.button;
+
+               // Add which for key events
+               if ( event.which == null && rkeyEvent.test( event.type ) ) {
+                       return event.charCode != null ? event.charCode : event.keyCode;
+               }
+
+               // Add which for click: 1 === left; 2 === middle; 3 === right
+               if ( !event.which && button !== undefined && rmouseEvent.test( event.type ) ) {
+                       if ( button & 1 ) {
+                               return 1;
+                       }
+
+                       if ( button & 2 ) {
+                               return 3;
+                       }
+
+                       if ( button & 4 ) {
+                               return 2;
+                       }
+
+                       return 0;
+               }
+
+               return event.which;
+       }
+}, jQuery.event.addProp );
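+
+// The addProp() definitions above copy each listed property from the native
+// event the first time it is read; the "which" hook normalizes key and mouse
+// events so that, e.g., a left-button mousedown reports event.which === 1.
+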
+
+jQuery.each( { focus: "focusin", blur: "focusout" }, function( type, delegateType ) {
+       jQuery.event.special[ type ] = {
+
+               // Utilize native event if possible so blur/focus sequence is correct
+               setup: function() {
+
+                       // Claim the first handler
+                       // dataPriv.set( this, "focus", ... )
+                       // dataPriv.set( this, "blur", ... )
+                       leverageNative( this, type, expectSync );
+
+                       // Return false to allow normal processing in the caller
+                       return false;
+               },
+               trigger: function() {
+
+                       // Force setup before trigger
+                       leverageNative( this, type );
+
+                       // Return non-false to allow normal event-path propagation
+                       return true;
+               },
+
+               delegateType: delegateType
+       };
+} );
+
+// Create mouseenter/leave events using mouseover/out and event-time checks
+// so that event delegation works in jQuery.
+// Do the same for pointerenter/pointerleave and pointerover/pointerout
+//
+// Support: Safari 7 only
+// Safari sends mouseenter too often; see:
+// https://bugs.chromium.org/p/chromium/issues/detail?id=470258
+// for the description of the bug (it existed in older Chrome versions as well).
+jQuery.each( {
+       mouseenter: "mouseover",
+       mouseleave: "mouseout",
+       pointerenter: "pointerover",
+       pointerleave: "pointerout"
+}, function( orig, fix ) {
+       jQuery.event.special[ orig ] = {
+               delegateType: fix,
+               bindType: fix,
+
+               handle: function( event ) {
+                       var ret,
+                               target = this,
+                               related = event.relatedTarget,
+                               handleObj = event.handleObj;
+
+                       // For mouseenter/leave call the handler if related is outside the target.
+                       // NB: No relatedTarget if the mouse left/entered the browser window
+                       if ( !related || ( related !== target && !jQuery.contains( target, related ) ) ) {
+                               event.type = handleObj.origType;
+                               ret = handleObj.handler.apply( this, arguments );
+                               event.type = fix;
+                       }
+                       return ret;
+               }
+       };
+} );
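+
+// This mapping is what makes delegated mouseenter/mouseleave work; e.g. (the
+// selector and handler are illustrative assumptions):
+//
+//   jQuery( "#list" ).on( "mouseenter", "li", handler );
+//
+// binds "mouseover" internally and only invokes the handler when the pointer
+// actually crosses an <li> boundary.
+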
+
+jQuery.fn.extend( {
+
+       on: function( types, selector, data, fn ) {
+               return on( this, types, selector, data, fn );
+       },
+       one: function( types, selector, data, fn ) {
+               return on( this, types, selector, data, fn, 1 );
+       },
+       off: function( types, selector, fn ) {
+               var handleObj, type;
+               if ( types && types.preventDefault && types.handleObj ) {
+
+                       // ( event )  dispatched jQuery.Event
+                       handleObj = types.handleObj;
+                       jQuery( types.delegateTarget ).off(
+                               handleObj.namespace ?
+                                       handleObj.origType + "." + handleObj.namespace :
+                                       handleObj.origType,
+                               handleObj.selector,
+                               handleObj.handler
+                       );
+                       return this;
+               }
+               if ( typeof types === "object" ) {
+
+                       // ( types-object [, selector] )
+                       for ( type in types ) {
+                               this.off( type, selector, types[ type ] );
+                       }
+                       return this;
+               }
+               if ( selector === false || typeof selector === "function" ) {
+
+                       // ( types [, fn] )
+                       fn = selector;
+                       selector = undefined;
+               }
+               if ( fn === false ) {
+                       fn = returnFalse;
+               }
+               return this.each( function() {
+                       jQuery.event.remove( this, types, fn, selector );
+               } );
+       }
+} );
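+
+// Usage sketch for .on()/.one()/.off() with namespaces (the selector, handler
+// and namespace are illustrative assumptions):
+//
+//   jQuery( "#save" ).one( "click.wizard", handler ); // runs at most once
+//   jQuery( "#save" ).off( ".wizard" );               // removes handlers by namespace
+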
+
+
+var
+
+       /* eslint-disable max-len */
+
+       // See https://github.com/eslint/eslint/issues/3229
+       rxhtmlTag = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([a-z][^\/\0>\x20\t\r\n\f]*)[^>]*)\/>/gi,
+
+       /* eslint-enable */
+
+       // Support: IE <=10 - 11, Edge 12 - 13 only
+       // In IE/Edge using regex groups here causes severe slowdowns.
+       // See https://connect.microsoft.com/IE/feedback/details/1736512/
+       rnoInnerhtml = /<script|<style|<link/i,
+
+       // checked="checked" or checked
+       rchecked = /checked\s*(?:[^=]|=\s*.checked.)/i,
+       rcleanScript = /^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g;
+
+// Prefer a tbody over its parent table for containing new rows
+function manipulationTarget( elem, content ) {
+       if ( nodeName( elem, "table" ) &&
+               nodeName( content.nodeType !== 11 ? content : content.firstChild, "tr" ) ) {
+
+               return jQuery( elem ).children( "tbody" )[ 0 ] || elem;
+       }
+
+       return elem;
+}
+
+// Replace/restore the type attribute of script elements for safe DOM manipulation
+function disableScript( elem ) {
+       elem.type = ( elem.getAttribute( "type" ) !== null ) + "/" + elem.type;
+       return elem;
+}
+function restoreScript( elem ) {
+       if ( ( elem.type || "" ).slice( 0, 5 ) === "true/" ) {
+               elem.type = elem.type.slice( 5 );
+       } else {
+               elem.removeAttribute( "type" );
+       }
+
+       return elem;
+}
+
+function cloneCopyEvent( src, dest ) {
+       var i, l, type, pdataOld, pdataCur, udataOld, udataCur, events;
+
+       if ( dest.nodeType !== 1 ) {
+               return;
+       }
+
+       // 1. Copy private data: events, handlers, etc.
+       if ( dataPriv.hasData( src ) ) {
+               pdataOld = dataPriv.access( src );
+               pdataCur = dataPriv.set( dest, pdataOld );
+               events = pdataOld.events;
+
+               if ( events ) {
+                       delete pdataCur.handle;
+                       pdataCur.events = {};
+
+                       for ( type in events ) {
+                               for ( i = 0, l = events[ type ].length; i < l; i++ ) {
+                                       jQuery.event.add( dest, type, events[ type ][ i ] );
+                               }
+                       }
+               }
+       }
+
+       // 2. Copy user data
+       if ( dataUser.hasData( src ) ) {
+               udataOld = dataUser.access( src );
+               udataCur = jQuery.extend( {}, udataOld );
+
+               dataUser.set( dest, udataCur );
+       }
+}
+
+// Fix IE bugs, see support tests
+function fixInput( src, dest ) {
+       var nodeName = dest.nodeName.toLowerCase();
+
+       // Fails to persist the checked state of a cloned checkbox or radio button.
+       if ( nodeName === "input" && rcheckableType.test( src.type ) ) {
+               dest.checked = src.checked;
+
+       // Fails to return the selected option to the default selected state when cloning options
+       } else if ( nodeName === "input" || nodeName === "textarea" ) {
+               dest.defaultValue = src.defaultValue;
+       }
+}
+
+function domManip( collection, args, callback, ignored ) {
+
+       // Flatten any nested arrays
+       args = concat.apply( [], args );
+
+       var fragment, first, scripts, hasScripts, node, doc,
+               i = 0,
+               l = collection.length,
+               iNoClone = l - 1,
+               value = args[ 0 ],
+               valueIsFunction = isFunction( value );
+
+       // We can't use cloneNode on fragments that contain checked inputs in WebKit
+       if ( valueIsFunction ||
+                       ( l > 1 && typeof value === "string" &&
+                               !support.checkClone && rchecked.test( value ) ) ) {
+               return collection.each( function( index ) {
+                       var self = collection.eq( index );
+                       if ( valueIsFunction ) {
+                               args[ 0 ] = value.call( this, index, self.html() );
+                       }
+                       domManip( self, args, callback, ignored );
+               } );
+       }
+
+       if ( l ) {
+               fragment = buildFragment( args, collection[ 0 ].ownerDocument, false, collection, ignored );
+               first = fragment.firstChild;
+
+               if ( fragment.childNodes.length === 1 ) {
+                       fragment = first;
+               }
+
+               // Require either new content or an interest in ignored elements to invoke the callback
+               if ( first || ignored ) {
+                       scripts = jQuery.map( getAll( fragment, "script" ), disableScript );
+                       hasScripts = scripts.length;
+
+                       // Use the original fragment for the last item
+                       // instead of the first because it can end up
+                       // being emptied incorrectly in certain situations (#8070).
+                       for ( ; i < l; i++ ) {
+                               node = fragment;
+
+                               if ( i !== iNoClone ) {
+                                       node = jQuery.clone( node, true, true );
+
+                                       // Keep references to cloned scripts for later restoration
+                                       if ( hasScripts ) {
+
+                                               // Support: Android <=4.0 only, PhantomJS 1 only
+                                               // push.apply(_, arraylike) throws on ancient WebKit
+                                               jQuery.merge( scripts, getAll( node, "script" ) );
+                                       }
+                               }
+
+                               callback.call( collection[ i ], node, i );
+                       }
+
+                       if ( hasScripts ) {
+                               doc = scripts[ scripts.length - 1 ].ownerDocument;
+
+                               // Reenable scripts
+                               jQuery.map( scripts, restoreScript );
+
+                               // Evaluate executable scripts on first document insertion
+                               for ( i = 0; i < hasScripts; i++ ) {
+                                       node = scripts[ i ];
+                                       if ( rscriptType.test( node.type || "" ) &&
+                                               !dataPriv.access( node, "globalEval" ) &&
+                                               jQuery.contains( doc, node ) ) {
+
+                                               if ( node.src && ( node.type || "" ).toLowerCase()  !== "module" ) {
+
+                                                       // Optional AJAX dependency, but won't run scripts if not present
+                                                       if ( jQuery._evalUrl && !node.noModule ) {
+                                                               jQuery._evalUrl( node.src, {
+                                                                       nonce: node.nonce || node.getAttribute( "nonce" )
+                                                               } );
+                                                       }
+                                               } else {
+                                                       DOMEval( node.textContent.replace( rcleanScript, "" ), node, doc );
+                                               }
+                                       }
+                               }
+                       }
+               }
+       }
+
+       return collection;
+}
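+
+// Usage note (illustrative selector): the valueIsFunction branch above is what
+// backs the function form of the manipulation methods, e.g.
+//   $( "li" ).append( function( index, html ) { return "<em>#" + index + "</em>"; } );
+// where each element receives the value returned for its own index and current HTML.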
+
+function remove( elem, selector, keepData ) {
+       var node,
+               nodes = selector ? jQuery.filter( selector, elem ) : elem,
+               i = 0;
+
+       for ( ; ( node = nodes[ i ] ) != null; i++ ) {
+               if ( !keepData && node.nodeType === 1 ) {
+                       jQuery.cleanData( getAll( node ) );
+               }
+
+               if ( node.parentNode ) {
+                       if ( keepData && isAttached( node ) ) {
+                               setGlobalEval( getAll( node, "script" ) );
+                       }
+                       node.parentNode.removeChild( node );
+               }
+       }
+
+       return elem;
+}
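+
+// Usage note (illustrative id): $( "#widget" ).detach() keeps jQuery data and
+// bound handlers so the node can be re-inserted later, whereas
+// $( "#widget" ).remove() runs jQuery.cleanData() over the subtree first.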
+
+jQuery.extend( {
+       htmlPrefilter: function( html ) {
+               return html.replace( rxhtmlTag, "<$1></$2>" );
+       },
+
+       clone: function( elem, dataAndEvents, deepDataAndEvents ) {
+               var i, l, srcElements, destElements,
+                       clone = elem.cloneNode( true ),
+                       inPage = isAttached( elem );
+
+               // Fix IE cloning issues
+               if ( !support.noCloneChecked && ( elem.nodeType === 1 || elem.nodeType === 11 ) &&
+                               !jQuery.isXMLDoc( elem ) ) {
+
+                       // We eschew Sizzle here for performance reasons: https://jsperf.com/getall-vs-sizzle/2
+                       destElements = getAll( clone );
+                       srcElements = getAll( elem );
+
+                       for ( i = 0, l = srcElements.length; i < l; i++ ) {
+                               fixInput( srcElements[ i ], destElements[ i ] );
+                       }
+               }
+
+               // Copy the events from the original to the clone
+               if ( dataAndEvents ) {
+                       if ( deepDataAndEvents ) {
+                               srcElements = srcElements || getAll( elem );
+                               destElements = destElements || getAll( clone );
+
+                               for ( i = 0, l = srcElements.length; i < l; i++ ) {
+                                       cloneCopyEvent( srcElements[ i ], destElements[ i ] );
+                               }
+                       } else {
+                               cloneCopyEvent( elem, clone );
+                       }
+               }
+
+               // Preserve script evaluation history
+               destElements = getAll( clone, "script" );
+               if ( destElements.length > 0 ) {
+                       setGlobalEval( destElements, !inPage && getAll( elem, "script" ) );
+               }
+
+               // Return the cloned set
+               return clone;
+       },
+
+       cleanData: function( elems ) {
+               var data, elem, type,
+                       special = jQuery.event.special,
+                       i = 0;
+
+               for ( ; ( elem = elems[ i ] ) !== undefined; i++ ) {
+                       if ( acceptData( elem ) ) {
+                               if ( ( data = elem[ dataPriv.expando ] ) ) {
+                                       if ( data.events ) {
+                                               for ( type in data.events ) {
+                                                       if ( special[ type ] ) {
+                                                               jQuery.event.remove( elem, type );
+
+                                                       // This is a shortcut to avoid jQuery.event.remove's overhead
+                                                       } else {
+                                                               jQuery.removeEvent( elem, type, data.handle );
+                                                       }
+                                               }
+                                       }
+
+                                       // Support: Chrome <=35 - 45+
+                                       // Assign undefined instead of using delete, see Data#remove
+                                       elem[ dataPriv.expando ] = undefined;
+                               }
+                               if ( elem[ dataUser.expando ] ) {
+
+                                       // Support: Chrome <=35 - 45+
+                                       // Assign undefined instead of using delete, see Data#remove
+                                       elem[ dataUser.expando ] = undefined;
+                               }
+                       }
+               }
+       }
+} );
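+
+// Example (illustrative id): var $copy = jQuery( "#box" ).clone( true ); copies
+// the markup plus event handlers and user data via cloneCopyEvent(), while a
+// plain .clone() copies markup only; cleanData() is the counterpart that
+// releases that data when elements are emptied or removed.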
+
+jQuery.fn.extend( {
+       detach: function( selector ) {
+               return remove( this, selector, true );
+       },
+
+       remove: function( selector ) {
+               return remove( this, selector );
+       },
+
+       text: function( value ) {
+               return access( this, function( value ) {
+                       return value === undefined ?
+                               jQuery.text( this ) :
+                               this.empty().each( function() {
+                                       if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) {
+                                               this.textContent = value;
+                                       }
+                               } );
+               }, null, value, arguments.length );
+       },
+
+       append: function() {
+               return domManip( this, arguments, function( elem ) {
+                       if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) {
+                               var target = manipulationTarget( this, elem );
+                               target.appendChild( elem );
+                       }
+               } );
+       },
+
+       prepend: function() {
+               return domManip( this, arguments, function( elem ) {
+                       if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) {
+                               var target = manipulationTarget( this, elem );
+                               target.insertBefore( elem, target.firstChild );
+                       }
+               } );
+       },
+
+       before: function() {
+               return domManip( this, arguments, function( elem ) {
+                       if ( this.parentNode ) {
+                               this.parentNode.insertBefore( elem, this );
+                       }
+               } );
+       },
+
+       after: function() {
+               return domManip( this, arguments, function( elem ) {
+                       if ( this.parentNode ) {
+                               this.parentNode.insertBefore( elem, this.nextSibling );
+                       }
+               } );
+       },
+
+       empty: function() {
+               var elem,
+                       i = 0;
+
+               for ( ; ( elem = this[ i ] ) != null; i++ ) {
+                       if ( elem.nodeType === 1 ) {
+
+                               // Prevent memory leaks
+                               jQuery.cleanData( getAll( elem, false ) );
+
+                               // Remove any remaining nodes
+                               elem.textContent = "";
+                       }
+               }
+
+               return this;
+       },
+
+       clone: function( dataAndEvents, deepDataAndEvents ) {
+               dataAndEvents = dataAndEvents == null ? false : dataAndEvents;
+               deepDataAndEvents = deepDataAndEvents == null ? dataAndEvents : deepDataAndEvents;
+
+               return this.map( function() {
+                       return jQuery.clone( this, dataAndEvents, deepDataAndEvents );
+               } );
+       },
+
+       html: function( value ) {
+               return access( this, function( value ) {
+                       var elem = this[ 0 ] || {},
+                               i = 0,
+                               l = this.length;
+
+                       if ( value === undefined && elem.nodeType === 1 ) {
+                               return elem.innerHTML;
+                       }
+
+                       // See if we can take a shortcut and just use innerHTML
+                       if ( typeof value === "string" && !rnoInnerhtml.test( value ) &&
+                               !wrapMap[ ( rtagName.exec( value ) || [ "", "" ] )[ 1 ].toLowerCase() ] ) {
+
+                               value = jQuery.htmlPrefilter( value );
+
+                               try {
+                                       for ( ; i < l; i++ ) {
+                                               elem = this[ i ] || {};
+
+                                               // Remove element nodes and prevent memory leaks
+                                               if ( elem.nodeType === 1 ) {
+                                                       jQuery.cleanData( getAll( elem, false ) );
+                                                       elem.innerHTML = value;
+                                               }
+                                       }
+
+                                       elem = 0;
+
+                               // If using innerHTML throws an exception, use the fallback method
+                               } catch ( e ) {}
+                       }
+
+                       if ( elem ) {
+                               this.empty().append( value );
+                       }
+               }, null, value, arguments.length );
+       },
+
+       replaceWith: function() {
+               var ignored = [];
+
+               // Make the changes, replacing each non-ignored context element with the new content
+               return domManip( this, arguments, function( elem ) {
+                       var parent = this.parentNode;
+
+                       if ( jQuery.inArray( this, ignored ) < 0 ) {
+                               jQuery.cleanData( getAll( this ) );
+                               if ( parent ) {
+                                       parent.replaceChild( elem, this );
+                               }
+                       }
+
+               // Force callback invocation
+               }, ignored );
+       }
+} );
+
+jQuery.each( {
+       appendTo: "append",
+       prependTo: "prepend",
+       insertBefore: "before",
+       insertAfter: "after",
+       replaceAll: "replaceWith"
+}, function( name, original ) {
+       jQuery.fn[ name ] = function( selector ) {
+               var elems,
+                       ret = [],
+                       insert = jQuery( selector ),
+                       last = insert.length - 1,
+                       i = 0;
+
+               for ( ; i <= last; i++ ) {
+                       elems = i === last ? this : this.clone( true );
+                       jQuery( insert[ i ] )[ original ]( elems );
+
+                       // Support: Android <=4.0 only, PhantomJS 1 only
+                       // .get() because push.apply(_, arraylike) throws on ancient WebKit
+                       push.apply( ret, elems.get() );
+               }
+
+               return this.pushStack( ret );
+       };
+} );
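+
+// Example (illustrative ids): $( "<li>new</li>" ).appendTo( "#list" ) is the
+// argument-reversed form of $( "#list" ).append( "<li>new</li>" ); when the
+// target matches several elements, the content is cloned for every insertion
+// except the last, as the loop above shows.
+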
+var rnumnonpx = new RegExp( "^(" + pnum + ")(?!px)[a-z%]+$", "i" );
+
+var getStyles = function( elem ) {
+
+               // Support: IE <=11 only, Firefox <=30 (#15098, #14150)
+               // IE throws on elements created in popups
+               // FF meanwhile throws on frame elements through "defaultView.getComputedStyle"
+               var view = elem.ownerDocument.defaultView;
+
+               if ( !view || !view.opener ) {
+                       view = window;
+               }
+
+               return view.getComputedStyle( elem );
+       };
+
+var rboxStyle = new RegExp( cssExpand.join( "|" ), "i" );
+
+
+
+( function() {
+
+       // Executing both the pixelPosition & boxSizingReliable tests requires only one layout,
+       // so they're executed at the same time to save a second computation.
+       function computeStyleTests() {
+
+               // This is a singleton; we need to execute it only once
+               if ( !div ) {
+                       return;
+               }
+
+               container.style.cssText = "position:absolute;left:-11111px;width:60px;" +
+                       "margin-top:1px;padding:0;border:0";
+               div.style.cssText =
+                       "position:relative;display:block;box-sizing:border-box;overflow:scroll;" +
+                       "margin:auto;border:1px;padding:1px;" +
+                       "width:60%;top:1%";
+               documentElement.appendChild( container ).appendChild( div );
+
+               var divStyle = window.getComputedStyle( div );
+               pixelPositionVal = divStyle.top !== "1%";
+
+               // Support: Android 4.0 - 4.3 only, Firefox <=3 - 44
+               reliableMarginLeftVal = roundPixelMeasures( divStyle.marginLeft ) === 12;
+
+               // Support: Android 4.0 - 4.3 only, Safari <=9.1 - 10.1, iOS <=7.0 - 9.3
+               // Some styles come back with percentage values, even though they shouldn't
+               div.style.right = "60%";
+               pixelBoxStylesVal = roundPixelMeasures( divStyle.right ) === 36;
+
+               // Support: IE 9 - 11 only
+               // Detect misreporting of content dimensions for box-sizing:border-box elements
+               boxSizingReliableVal = roundPixelMeasures( divStyle.width ) === 36;
+
+               // Support: IE 9 only
+               // Detect overflow:scroll screwiness (gh-3699)
+               // Support: Chrome <=64
+               // Don't get tricked when zoom affects offsetWidth (gh-4029)
+               div.style.position = "absolute";
+               scrollboxSizeVal = roundPixelMeasures( div.offsetWidth / 3 ) === 12;
+
+               documentElement.removeChild( container );
+
+               // Nullify the div so it isn't kept in memory; a null div is also
+               // a sign that the checks have already been performed
+               div = null;
+       }
+
+       function roundPixelMeasures( measure ) {
+               return Math.round( parseFloat( measure ) );
+       }
+
+       var pixelPositionVal, boxSizingReliableVal, scrollboxSizeVal, pixelBoxStylesVal,
+               reliableMarginLeftVal,
+               container = document.createElement( "div" ),
+               div = document.createElement( "div" );
+
+       // Finish early in limited (non-browser) environments
+       if ( !div.style ) {
+               return;
+       }
+
+       // Support: IE <=9 - 11 only
+       // Setting a style on a cloned element affects the source element (#8908)
+       div.style.backgroundClip = "content-box";
+       div.cloneNode( true ).style.backgroundClip = "";
+       support.clearCloneStyle = div.style.backgroundClip === "content-box";
+
+       jQuery.extend( support, {
+               boxSizingReliable: function() {
+                       computeStyleTests();
+                       return boxSizingReliableVal;
+               },
+               pixelBoxStyles: function() {
+                       computeStyleTests();
+                       return pixelBoxStylesVal;
+               },
+               pixelPosition: function() {
+                       computeStyleTests();
+                       return pixelPositionVal;
+               },
+               reliableMarginLeft: function() {
+                       computeStyleTests();
+                       return reliableMarginLeftVal;
+               },
+               scrollboxSize: function() {
+                       computeStyleTests();
+                       return scrollboxSizeVal;
+               }
+       } );
+} )();
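+
+// Usage note: each support.*() accessor above calls computeStyleTests() lazily,
+// so the probe <div> is only laid out the first time a CSS workaround actually
+// has to be decided, and the null-ed `div` marks the tests as already run.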
+
+
+function curCSS( elem, name, computed ) {
+       var width, minWidth, maxWidth, ret,
+
+               // Support: Firefox 51+
+               // Retrieving style before computed somehow
+               // fixes an issue with getting wrong values
+               // on detached elements
+               style = elem.style;
+
+       computed = computed || getStyles( elem );
+
+       // getPropertyValue is needed for:
+       //   .css('filter') (IE 9 only, #12537)
+       //   .css('--customProperty') (#3144)
+       if ( computed ) {
+               ret = computed.getPropertyValue( name ) || computed[ name ];
+
+               if ( ret === "" && !isAttached( elem ) ) {
+                       ret = jQuery.style( elem, name );
+               }
+
+               // A tribute to the "awesome hack by Dean Edwards"
+               // Android Browser returns percentage for some values,
+               // but width seems to be reliably pixels.
+               // This is against the CSSOM draft spec:
+               // https://drafts.csswg.org/cssom/#resolved-values
+               if ( !support.pixelBoxStyles() && rnumnonpx.test( ret ) && rboxStyle.test( name ) ) {
+
+                       // Remember the original values
+                       width = style.width;
+                       minWidth = style.minWidth;
+                       maxWidth = style.maxWidth;
+
+                       // Put in the new values to get a computed value out
+                       style.minWidth = style.maxWidth = style.width = ret;
+                       ret = computed.width;
+
+                       // Revert the changed values
+                       style.width = width;
+                       style.minWidth = minWidth;
+                       style.maxWidth = maxWidth;
+               }
+       }
+
+       return ret !== undefined ?
+
+               // Support: IE <=9 - 11 only
+               // IE returns zIndex value as an integer.
+               ret + "" :
+               ret;
+}
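+
+// Sketch of the fallback above (illustrative): when a browser reports e.g. "50%"
+// for a box-style property, the element's own min/max/width inline styles are
+// temporarily set to that value so computed.width yields the equivalent pixel
+// measurement, and then the original inline styles are restored.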
+
+
+function addGetHookIf( conditionFn, hookFn ) {
+
+       // Define the hook; we'll check on the first run whether it's really needed.
+       return {
+               get: function() {
+                       if ( conditionFn() ) {
+
+                               // Hook not needed (or it's not possible to use it due
+                               // to missing dependency), remove it.
+                               delete this.get;
+                               return;
+                       }
+
+                       // Hook needed; redefine it so that the support test is not executed again.
+                       return ( this.get = hookFn ).apply( this, arguments );
+               }
+       };
+}
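+
+// Sketch (illustrative): addGetHookIf( support.reliableMarginLeft, fn ) yields a
+// cssHook whose get() removes itself the first time the support test passes, or
+// replaces itself with fn when the workaround is needed, so the test runs at
+// most once; the marginLeft hook further down uses exactly this pattern.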
+
+
+var cssPrefixes = [ "Webkit", "Moz", "ms" ],
+       emptyStyle = document.createElement( "div" ).style,
+       vendorProps = {};
+
+// Return a vendor-prefixed property or undefined
+function vendorPropName( name ) {
+
+       // Check for vendor prefixed names
+       var capName = name[ 0 ].toUpperCase() + name.slice( 1 ),
+               i = cssPrefixes.length;
+
+       while ( i-- ) {
+               name = cssPrefixes[ i ] + capName;
+               if ( name in emptyStyle ) {
+                       return name;
+               }
+       }
+}
+
+// Return a potentially-mapped jQuery.cssProps or vendor prefixed property
+function finalPropName( name ) {
+       var final = jQuery.cssProps[ name ] || vendorProps[ name ];
+
+       if ( final ) {
+               return final;
+       }
+       if ( name in emptyStyle ) {
+               return name;
+       }
+       return vendorProps[ name ] = vendorPropName( name ) || name;
+}
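+
+// Example (illustrative property): on a browser that only ships a prefixed
+// implementation, finalPropName( "userSelect" ) would fall through to
+// vendorPropName() and resolve to something like "WebkitUserSelect", caching the
+// result in vendorProps; entries in jQuery.cssProps always win over that lookup.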
+
+
+var
+
+       // Swappable if display is none or starts with table
+       // except "table", "table-cell", or "table-caption"
+       // See here for display values: https://developer.mozilla.org/en-US/docs/CSS/display
+       rdisplayswap = /^(none|table(?!-c[ea]).+)/,
+       rcustomProp = /^--/,
+       cssShow = { position: "absolute", visibility: "hidden", display: "block" },
+       cssNormalTransform = {
+               letterSpacing: "0",
+               fontWeight: "400"
+       };
+
+function setPositiveNumber( elem, value, subtract ) {
+
+       // Any relative (+/-) values have already been
+       // normalized at this point
+       var matches = rcssNum.exec( value );
+       return matches ?
+
+               // Guard against undefined "subtract", e.g., when used as in cssHooks
+               Math.max( 0, matches[ 2 ] - ( subtract || 0 ) ) + ( matches[ 3 ] || "px" ) :
+               value;
+}
+
+function boxModelAdjustment( elem, dimension, box, isBorderBox, styles, computedVal ) {
+       var i = dimension === "width" ? 1 : 0,
+               extra = 0,
+               delta = 0;
+
+       // Adjustment may not be necessary
+       if ( box === ( isBorderBox ? "border" : "content" ) ) {
+               return 0;
+       }
+
+       for ( ; i < 4; i += 2 ) {
+
+               // Both box models exclude margin
+               if ( box === "margin" ) {
+                       delta += jQuery.css( elem, box + cssExpand[ i ], true, styles );
+               }
+
+               // If we get here with a content-box, we're seeking "padding" or "border" or "margin"
+               if ( !isBorderBox ) {
+
+                       // Add padding
+                       delta += jQuery.css( elem, "padding" + cssExpand[ i ], true, styles );
+
+                       // For "border" or "margin", add border
+                       if ( box !== "padding" ) {
+                               delta += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles );
+
+                       // But still keep track of it otherwise
+                       } else {
+                               extra += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles );
+                       }
+
+               // If we get here with a border-box (content + padding + border), we're seeking "content" or
+               // "padding" or "margin"
+               } else {
+
+                       // For "content", subtract padding
+                       if ( box === "content" ) {
+                               delta -= jQuery.css( elem, "padding" + cssExpand[ i ], true, styles );
+                       }
+
+                       // For "content" or "padding", subtract border
+                       if ( box !== "margin" ) {
+                               delta -= jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles );
+                       }
+               }
+       }
+
+       // Account for positive content-box scroll gutter when requested by providing computedVal
+       if ( !isBorderBox && computedVal >= 0 ) {
+
+               // offsetWidth/offsetHeight is a rounded sum of content, padding, scroll gutter, and border
+               // Assuming integer scroll gutter, subtract the rest and round down
+               delta += Math.max( 0, Math.ceil(
+                       elem[ "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ) ] -
+                       computedVal -
+                       delta -
+                       extra -
+                       0.5
+
+               // If offsetWidth/offsetHeight is unknown, then we can't determine content-box scroll gutter
+               // Use an explicit zero to avoid NaN (gh-3964)
+               ) ) || 0;
+       }
+
+       return delta;
+}
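+
+// Worked example (illustrative numbers, no scrollbar): for a content-box element
+// with a computed width of 100px, 10px left/right padding and 2px left/right
+// borders, boxModelAdjustment( elem, "width", "border", false, styles ) returns
+// 10 + 10 + 2 + 2 = 24, which is how .outerWidth() arrives at 124.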
+
+function getWidthOrHeight( elem, dimension, extra ) {
+
+       // Start with computed style
+       var styles = getStyles( elem ),
+
+               // To avoid forcing a reflow, only fetch boxSizing if we need it (gh-4322).
+               // Fake content-box until we know it's needed to know the true value.
+               boxSizingNeeded = !support.boxSizingReliable() || extra,
+               isBorderBox = boxSizingNeeded &&
+                       jQuery.css( elem, "boxSizing", false, styles ) === "border-box",
+               valueIsBorderBox = isBorderBox,
+
+               val = curCSS( elem, dimension, styles ),
+               offsetProp = "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 );
+
+       // Support: Firefox <=54
+       // Return a confounding non-pixel value or feign ignorance, as appropriate.
+       if ( rnumnonpx.test( val ) ) {
+               if ( !extra ) {
+                       return val;
+               }
+               val = "auto";
+       }
+
+
+       // Fall back to offsetWidth/offsetHeight when value is "auto"
+       // This happens for inline elements with no explicit setting (gh-3571)
+       // Support: Android <=4.1 - 4.3 only
+       // Also use offsetWidth/offsetHeight for misreported inline dimensions (gh-3602)
+       // Support: IE 9-11 only
+       // Also use offsetWidth/offsetHeight for when box sizing is unreliable
+       // We use getClientRects() to check for hidden/disconnected.
+       // In those cases, the computed value can be trusted to be border-box
+       if ( ( !support.boxSizingReliable() && isBorderBox ||
+               val === "auto" ||
+               !parseFloat( val ) && jQuery.css( elem, "display", false, styles ) === "inline" ) &&
+               elem.getClientRects().length ) {
+
+               isBorderBox = jQuery.css( elem, "boxSizing", false, styles ) === "border-box";
+
+               // Where available, offsetWidth/offsetHeight approximate border box dimensions.
+               // Where not available (e.g., SVG), assume unreliable box-sizing and interpret the
+               // retrieved value as a content box dimension.
+               valueIsBorderBox = offsetProp in elem;
+               if ( valueIsBorderBox ) {
+                       val = elem[ offsetProp ];
+               }
+       }
+
+       // Normalize "" and auto
+       val = parseFloat( val ) || 0;
+
+       // Adjust for the element's box model
+       return ( val +
+               boxModelAdjustment(
+                       elem,
+                       dimension,
+                       extra || ( isBorderBox ? "border" : "content" ),
+                       valueIsBorderBox,
+                       styles,
+
+                       // Provide the current computed size to request scroll gutter calculation (gh-3589)
+                       val
+               )
+       ) + "px";
+}
+
+jQuery.extend( {
+
+       // Add in style property hooks for overriding the default
+       // behavior of getting and setting a style property
+       cssHooks: {
+               opacity: {
+                       get: function( elem, computed ) {
+                               if ( computed ) {
+
+                                       // We should always get a number back from opacity
+                                       var ret = curCSS( elem, "opacity" );
+                                       return ret === "" ? "1" : ret;
+                               }
+                       }
+               }
+       },
+
+       // Don't automatically add "px" to these possibly-unitless properties
+       cssNumber: {
+               "animationIterationCount": true,
+               "columnCount": true,
+               "fillOpacity": true,
+               "flexGrow": true,
+               "flexShrink": true,
+               "fontWeight": true,
+               "gridArea": true,
+               "gridColumn": true,
+               "gridColumnEnd": true,
+               "gridColumnStart": true,
+               "gridRow": true,
+               "gridRowEnd": true,
+               "gridRowStart": true,
+               "lineHeight": true,
+               "opacity": true,
+               "order": true,
+               "orphans": true,
+               "widows": true,
+               "zIndex": true,
+               "zoom": true
+       },
+
+       // Add in properties whose names you wish to fix before
+       // setting or getting the value
+       cssProps: {},
+
+       // Get and set the style property on a DOM Node
+       style: function( elem, name, value, extra ) {
+
+               // Don't set styles on text and comment nodes
+               if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 || !elem.style ) {
+                       return;
+               }
+
+               // Make sure that we're working with the right name
+               var ret, type, hooks,
+                       origName = camelCase( name ),
+                       isCustomProp = rcustomProp.test( name ),
+                       style = elem.style;
+
+               // Make sure that we're working with the right name. We don't
+               // want to query the value if it is a CSS custom property
+               // since they are user-defined.
+               if ( !isCustomProp ) {
+                       name = finalPropName( origName );
+               }
+
+               // Gets hook for the prefixed version, then unprefixed version
+               hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ];
+
+               // Check if we're setting a value
+               if ( value !== undefined ) {
+                       type = typeof value;
+
+                       // Convert "+=" or "-=" to relative numbers (#7345)
+                       if ( type === "string" && ( ret = rcssNum.exec( value ) ) && ret[ 1 ] ) {
+                               value = adjustCSS( elem, name, ret );
+
+                               // Fixes bug #9237
+                               type = "number";
+                       }
+
+                       // Make sure that null and NaN values aren't set (#7116)
+                       if ( value == null || value !== value ) {
+                               return;
+                       }
+
+                       // If a number was passed in, add the unit (except for certain CSS properties)
+                       // The isCustomProp check can be removed in jQuery 4.0 when we only auto-append
+                       // "px" to a few hardcoded values.
+                       if ( type === "number" && !isCustomProp ) {
+                               value += ret && ret[ 3 ] || ( jQuery.cssNumber[ origName ] ? "" : "px" );
+                       }
+
+                       // background-* props affect original clone's values
+                       if ( !support.clearCloneStyle && value === "" && name.indexOf( "background" ) === 0 ) {
+                               style[ name ] = "inherit";
+                       }
+
+                       // If a hook was provided, use that value, otherwise just set the specified value
+                       if ( !hooks || !( "set" in hooks ) ||
+                               ( value = hooks.set( elem, value, extra ) ) !== undefined ) {
+
+                               if ( isCustomProp ) {
+                                       style.setProperty( name, value );
+                               } else {
+                                       style[ name ] = value;
+                               }
+                       }
+
+               } else {
+
+                       // If a hook was provided get the non-computed value from there
+                       if ( hooks && "get" in hooks &&
+                               ( ret = hooks.get( elem, false, extra ) ) !== undefined ) {
+
+                               return ret;
+                       }
+
+                       // Otherwise just get the value from the style object
+                       return style[ name ];
+               }
+       },
+
+       css: function( elem, name, extra, styles ) {
+               var val, num, hooks,
+                       origName = camelCase( name ),
+                       isCustomProp = rcustomProp.test( name );
+
+               // Make sure that we're working with the right name. We don't
+               // want to modify the value if it is a CSS custom property
+               // since they are user-defined.
+               if ( !isCustomProp ) {
+                       name = finalPropName( origName );
+               }
+
+               // Try prefixed name followed by the unprefixed name
+               hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ];
+
+               // If a hook was provided get the computed value from there
+               if ( hooks && "get" in hooks ) {
+                       val = hooks.get( elem, true, extra );
+               }
+
+               // Otherwise, if a way to get the computed value exists, use that
+               if ( val === undefined ) {
+                       val = curCSS( elem, name, styles );
+               }
+
+               // Convert "normal" to computed value
+               if ( val === "normal" && name in cssNormalTransform ) {
+                       val = cssNormalTransform[ name ];
+               }
+
+               // Make numeric if forced or a qualifier was provided and val looks numeric
+               if ( extra === "" || extra ) {
+                       num = parseFloat( val );
+                       return extra === true || isFinite( num ) ? num || 0 : val;
+               }
+
+               return val;
+       }
+} );
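+
+// Examples (illustrative): jQuery.style( elem, "width", "+=10" ) resolves the
+// relative value through adjustCSS(); a bare number gains "px" unless the
+// property is listed in jQuery.cssNumber, so .css( "opacity", 0.5 ) stays
+// unitless; jQuery.css( elem, "width", "" ) attempts a parseFloat of the
+// computed value and falls back to the string, which is how
+// Tween.propHooks._default.get() asks for numeric values further down.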
+
+jQuery.each( [ "height", "width" ], function( i, dimension ) {
+       jQuery.cssHooks[ dimension ] = {
+               get: function( elem, computed, extra ) {
+                       if ( computed ) {
+
+                               // Certain elements can have dimension info if we invisibly show them,
+                               // but they must have a current display style that would benefit
+                               return rdisplayswap.test( jQuery.css( elem, "display" ) ) &&
+
+                                       // Support: Safari 8+
+                                       // Table columns in Safari have non-zero offsetWidth & zero
+                                       // getBoundingClientRect().width unless display is changed.
+                                       // Support: IE <=11 only
+                                       // Running getBoundingClientRect on a disconnected node
+                                       // in IE throws an error.
+                                       ( !elem.getClientRects().length || !elem.getBoundingClientRect().width ) ?
+                                               swap( elem, cssShow, function() {
+                                                       return getWidthOrHeight( elem, dimension, extra );
+                                               } ) :
+                                               getWidthOrHeight( elem, dimension, extra );
+                       }
+               },
+
+               set: function( elem, value, extra ) {
+                       var matches,
+                               styles = getStyles( elem ),
+
+                               // Only read styles.position if the test has a chance to fail
+                               // to avoid forcing a reflow.
+                               scrollboxSizeBuggy = !support.scrollboxSize() &&
+                                       styles.position === "absolute",
+
+                               // To avoid forcing a reflow, only fetch boxSizing if we need it (gh-3991)
+                               boxSizingNeeded = scrollboxSizeBuggy || extra,
+                               isBorderBox = boxSizingNeeded &&
+                                       jQuery.css( elem, "boxSizing", false, styles ) === "border-box",
+                               subtract = extra ?
+                                       boxModelAdjustment(
+                                               elem,
+                                               dimension,
+                                               extra,
+                                               isBorderBox,
+                                               styles
+                                       ) :
+                                       0;
+
+                       // Account for unreliable border-box dimensions by comparing offset* to computed and
+                       // faking a content-box to get border and padding (gh-3699)
+                       if ( isBorderBox && scrollboxSizeBuggy ) {
+                               subtract -= Math.ceil(
+                                       elem[ "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ) ] -
+                                       parseFloat( styles[ dimension ] ) -
+                                       boxModelAdjustment( elem, dimension, "border", false, styles ) -
+                                       0.5
+                               );
+                       }
+
+                       // Convert to pixels if value adjustment is needed
+                       if ( subtract && ( matches = rcssNum.exec( value ) ) &&
+                               ( matches[ 3 ] || "px" ) !== "px" ) {
+
+                               elem.style[ dimension ] = value;
+                               value = jQuery.css( elem, dimension );
+                       }
+
+                       return setPositiveNumber( elem, value, subtract );
+               }
+       };
+} );
+
+jQuery.cssHooks.marginLeft = addGetHookIf( support.reliableMarginLeft,
+       function( elem, computed ) {
+               if ( computed ) {
+                       return ( parseFloat( curCSS( elem, "marginLeft" ) ) ||
+                               elem.getBoundingClientRect().left -
+                                       swap( elem, { marginLeft: 0 }, function() {
+                                               return elem.getBoundingClientRect().left;
+                                       } )
+                               ) + "px";
+               }
+       }
+);
+
+// These hooks are used by animate to expand properties
+jQuery.each( {
+       margin: "",
+       padding: "",
+       border: "Width"
+}, function( prefix, suffix ) {
+       jQuery.cssHooks[ prefix + suffix ] = {
+               expand: function( value ) {
+                       var i = 0,
+                               expanded = {},
+
+                               // Assumes a single number if not a string
+                               parts = typeof value === "string" ? value.split( " " ) : [ value ];
+
+                       for ( ; i < 4; i++ ) {
+                               expanded[ prefix + cssExpand[ i ] + suffix ] =
+                                       parts[ i ] || parts[ i - 2 ] || parts[ 0 ];
+                       }
+
+                       return expanded;
+               }
+       };
+
+       if ( prefix !== "margin" ) {
+               jQuery.cssHooks[ prefix + suffix ].set = setPositiveNumber;
+       }
+} );
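+
+// Example: the generated "margin" hook's expand( "1px 2px" ) returns
+// { marginTop: "1px", marginRight: "2px", marginBottom: "1px", marginLeft: "2px" },
+// mirroring CSS shorthand order, so animate() can tween each side separately.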
+
+jQuery.fn.extend( {
+       css: function( name, value ) {
+               return access( this, function( elem, name, value ) {
+                       var styles, len,
+                               map = {},
+                               i = 0;
+
+                       if ( Array.isArray( name ) ) {
+                               styles = getStyles( elem );
+                               len = name.length;
+
+                               for ( ; i < len; i++ ) {
+                                       map[ name[ i ] ] = jQuery.css( elem, name[ i ], false, styles );
+                               }
+
+                               return map;
+                       }
+
+                       return value !== undefined ?
+                               jQuery.style( elem, name, value ) :
+                               jQuery.css( elem, name );
+               }, name, value, arguments.length > 1 );
+       }
+} );
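+
+// Example (illustrative id): $( "#box" ).css( [ "width", "height" ] ) returns a
+// plain object such as { width: "100px", height: "50px" }, computed from a
+// single getStyles() call rather than one per property.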
+
+
+function Tween( elem, options, prop, end, easing ) {
+       return new Tween.prototype.init( elem, options, prop, end, easing );
+}
+jQuery.Tween = Tween;
+
+Tween.prototype = {
+       constructor: Tween,
+       init: function( elem, options, prop, end, easing, unit ) {
+               this.elem = elem;
+               this.prop = prop;
+               this.easing = easing || jQuery.easing._default;
+               this.options = options;
+               this.start = this.now = this.cur();
+               this.end = end;
+               this.unit = unit || ( jQuery.cssNumber[ prop ] ? "" : "px" );
+       },
+       cur: function() {
+               var hooks = Tween.propHooks[ this.prop ];
+
+               return hooks && hooks.get ?
+                       hooks.get( this ) :
+                       Tween.propHooks._default.get( this );
+       },
+       run: function( percent ) {
+               var eased,
+                       hooks = Tween.propHooks[ this.prop ];
+
+               if ( this.options.duration ) {
+                       this.pos = eased = jQuery.easing[ this.easing ](
+                               percent, this.options.duration * percent, 0, 1, this.options.duration
+                       );
+               } else {
+                       this.pos = eased = percent;
+               }
+               this.now = ( this.end - this.start ) * eased + this.start;
+
+               if ( this.options.step ) {
+                       this.options.step.call( this.elem, this.now, this );
+               }
+
+               if ( hooks && hooks.set ) {
+                       hooks.set( this );
+               } else {
+                       Tween.propHooks._default.set( this );
+               }
+               return this;
+       }
+};
+
+Tween.prototype.init.prototype = Tween.prototype;
+
+Tween.propHooks = {
+       _default: {
+               get: function( tween ) {
+                       var result;
+
+                       // Use a property on the element directly when it is not a DOM element,
+                       // or when no matching style property exists.
+                       if ( tween.elem.nodeType !== 1 ||
+                               tween.elem[ tween.prop ] != null && tween.elem.style[ tween.prop ] == null ) {
+                               return tween.elem[ tween.prop ];
+                       }
+
+                       // Passing an empty string as a 3rd parameter to .css will automatically
+                       // attempt a parseFloat and fallback to a string if the parse fails.
+                       // Simple values such as "10px" are parsed to Float;
+                       // complex values such as "rotate(1rad)" are returned as-is.
+                       result = jQuery.css( tween.elem, tween.prop, "" );
+
+                       // Empty strings, null, undefined and "auto" are converted to 0.
+                       return !result || result === "auto" ? 0 : result;
+               },
+               set: function( tween ) {
+
+                       // Use the step hook for back compat.
+                       // Use the cssHook if it's there.
+                       // Use .style if available, falling back to plain properties.
+                       if ( jQuery.fx.step[ tween.prop ] ) {
+                               jQuery.fx.step[ tween.prop ]( tween );
+                       } else if ( tween.elem.nodeType === 1 && (
+                                       jQuery.cssHooks[ tween.prop ] ||
+                                       tween.elem.style[ finalPropName( tween.prop ) ] != null ) ) {
+                               jQuery.style( tween.elem, tween.prop, tween.now + tween.unit );
+                       } else {
+                               tween.elem[ tween.prop ] = tween.now;
+                       }
+               }
+       }
+};
+
+// Support: IE <=9 only
+// Panic-based approach to setting things on disconnected nodes
+Tween.propHooks.scrollTop = Tween.propHooks.scrollLeft = {
+       set: function( tween ) {
+               if ( tween.elem.nodeType && tween.elem.parentNode ) {
+                       tween.elem[ tween.prop ] = tween.now;
+               }
+       }
+};
+
+jQuery.easing = {
+       linear: function( p ) {
+               return p;
+       },
+       swing: function( p ) {
+               return 0.5 - Math.cos( p * Math.PI ) / 2;
+       },
+       _default: "swing"
+};
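+
+// Worked example: swing( 0.25 ) = 0.5 - Math.cos( 0.25 * Math.PI ) / 2, roughly
+// 0.146, so the default "swing" easing starts and ends more gently than the
+// constant-rate "linear" easing.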
+
+jQuery.fx = Tween.prototype.init;
+
+// Back compat <1.8 extension point
+jQuery.fx.step = {};
+
+
+
+
+var
+       fxNow, inProgress,
+       rfxtypes = /^(?:toggle|show|hide)$/,
+       rrun = /queueHooks$/;
+
+function schedule() {
+       if ( inProgress ) {
+               if ( document.hidden === false && window.requestAnimationFrame ) {
+                       window.requestAnimationFrame( schedule );
+               } else {
+                       window.setTimeout( schedule, jQuery.fx.interval );
+               }
+
+               jQuery.fx.tick();
+       }
+}
+
+// Animations created synchronously will run synchronously
+function createFxNow() {
+       window.setTimeout( function() {
+               fxNow = undefined;
+       } );
+       return ( fxNow = Date.now() );
+}
+
+// Generate parameters to create a standard animation
+function genFx( type, includeWidth ) {
+       var which,
+               i = 0,
+               attrs = { height: type };
+
+       // If we include width, step value is 1 to do all cssExpand values,
+       // otherwise step value is 2 to skip over Left and Right
+       includeWidth = includeWidth ? 1 : 0;
+       for ( ; i < 4; i += 2 - includeWidth ) {
+               which = cssExpand[ i ];
+               attrs[ "margin" + which ] = attrs[ "padding" + which ] = type;
+       }
+
+       if ( includeWidth ) {
+               attrs.opacity = attrs.width = type;
+       }
+
+       return attrs;
+}
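+
+// Example: genFx( "show" ) builds the property map used by slideDown() elsewhere
+// in this module: { height: "show", marginTop: "show", marginBottom: "show",
+// paddingTop: "show", paddingBottom: "show" }; passing includeWidth extends the
+// toggle to width, opacity and the Left/Right sides, as show()/hide() do.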
+
+function createTween( value, prop, animation ) {
+       var tween,
+               collection = ( Animation.tweeners[ prop ] || [] ).concat( Animation.tweeners[ "*" ] ),
+               index = 0,
+               length = collection.length;
+       for ( ; index < length; index++ ) {
+               if ( ( tween = collection[ index ].call( animation, prop, value ) ) ) {
+
+                       // We're done with this property
+                       return tween;
+               }
+       }
+}
+
+function defaultPrefilter( elem, props, opts ) {
+       var prop, value, toggle, hooks, oldfire, propTween, restoreDisplay, display,
+               isBox = "width" in props || "height" in props,
+               anim = this,
+               orig = {},
+               style = elem.style,
+               hidden = elem.nodeType && isHiddenWithinTree( elem ),
+               dataShow = dataPriv.get( elem, "fxshow" );
+
+       // Queue-skipping animations hijack the fx hooks
+       if ( !opts.queue ) {
+               hooks = jQuery._queueHooks( elem, "fx" );
+               if ( hooks.unqueued == null ) {
+                       hooks.unqueued = 0;
+                       oldfire = hooks.empty.fire;
+                       hooks.empty.fire = function() {
+                               if ( !hooks.unqueued ) {
+                                       oldfire();
+                               }
+                       };
+               }
+               hooks.unqueued++;
+
+               anim.always( function() {
+
+                       // Ensure the complete handler is called before this completes
+                       anim.always( function() {
+                               hooks.unqueued--;
+                               if ( !jQuery.queue( elem, "fx" ).length ) {
+                                       hooks.empty.fire();
+                               }
+                       } );
+               } );
+       }
+
+       // Detect show/hide animations
+       for ( prop in props ) {
+               value = props[ prop ];
+               if ( rfxtypes.test( value ) ) {
+                       delete props[ prop ];
+                       toggle = toggle || value === "toggle";
+                       if ( value === ( hidden ? "hide" : "show" ) ) {
+
+                               // Pretend to be hidden if this is a "show" and
+                               // there is still data from a stopped show/hide
+                               if ( value === "show" && dataShow && dataShow[ prop ] !== undefined ) {
+                                       hidden = true;
+
+                               // Ignore all other no-op show/hide data
+                               } else {
+                                       continue;
+                               }
+                       }
+                       orig[ prop ] = dataShow && dataShow[ prop ] || jQuery.style( elem, prop );
+               }
+       }
+
+       // Bail out if this is a no-op like .hide().hide()
+       propTween = !jQuery.isEmptyObject( props );
+       if ( !propTween && jQuery.isEmptyObject( orig ) ) {
+               return;
+       }
+
+       // Restrict "overflow" and "display" styles during box animations
+       if ( isBox && elem.nodeType === 1 ) {
+
+               // Support: IE <=9 - 11, Edge 12 - 15
+               // Record all 3 overflow attributes because IE does not infer the shorthand
+               // from identically-valued overflowX and overflowY and Edge just mirrors
+               // the overflowX value there.
+               opts.overflow = [ style.overflow, style.overflowX, style.overflowY ];
+
+               // Identify a display type, preferring old show/hide data over the CSS cascade
+               restoreDisplay = dataShow && dataShow.display;
+               if ( restoreDisplay == null ) {
+                       restoreDisplay = dataPriv.get( elem, "display" );
+               }
+               display = jQuery.css( elem, "display" );
+               if ( display === "none" ) {
+                       if ( restoreDisplay ) {
+                               display = restoreDisplay;
+                       } else {
+
+                               // Get nonempty value(s) by temporarily forcing visibility
+                               showHide( [ elem ], true );
+                               restoreDisplay = elem.style.display || restoreDisplay;
+                               display = jQuery.css( elem, "display" );
+                               showHide( [ elem ] );
+                       }
+               }
+
+               // Animate inline elements as inline-block
+               if ( display === "inline" || display === "inline-block" && restoreDisplay != null ) {
+                       if ( jQuery.css( elem, "float" ) === "none" ) {
+
+                               // Restore the original display value at the end of pure show/hide animations
+                               if ( !propTween ) {
+                                       anim.done( function() {
+                                               style.display = restoreDisplay;
+                                       } );
+                                       if ( restoreDisplay == null ) {
+                                               display = style.display;
+                                               restoreDisplay = display === "none" ? "" : display;
+                                       }
+                               }
+                               style.display = "inline-block";
+                       }
+               }
+       }
+
+       if ( opts.overflow ) {
+               style.overflow = "hidden";
+               anim.always( function() {
+                       style.overflow = opts.overflow[ 0 ];
+                       style.overflowX = opts.overflow[ 1 ];
+                       style.overflowY = opts.overflow[ 2 ];
+               } );
+       }
+
+       // Implement show/hide animations
+       propTween = false;
+       for ( prop in orig ) {
+
+               // General show/hide setup for this element animation
+               if ( !propTween ) {
+                       if ( dataShow ) {
+                               if ( "hidden" in dataShow ) {
+                                       hidden = dataShow.hidden;
+                               }
+                       } else {
+                               dataShow = dataPriv.access( elem, "fxshow", { display: restoreDisplay } );
+                       }
+
+                       // Store hidden/visible for toggle so `.stop().toggle()` "reverses"
+                       if ( toggle ) {
+                               dataShow.hidden = !hidden;
+                       }
+
+                       // Show elements before animating them
+                       if ( hidden ) {
+                               showHide( [ elem ], true );
+                       }
+
+                       /* eslint-disable no-loop-func */
+
+                       anim.done( function() {
+
+                       /* eslint-enable no-loop-func */
+
+                               // The final step of a "hide" animation is actually hiding the element
+                               if ( !hidden ) {
+                                       showHide( [ elem ] );
+                               }
+                               dataPriv.remove( elem, "fxshow" );
+                               for ( prop in orig ) {
+                                       jQuery.style( elem, prop, orig[ prop ] );
+                               }
+                       } );
+               }
+
+               // Per-property setup
+               propTween = createTween( hidden ? dataShow[ prop ] : 0, prop, anim );
+               if ( !( prop in dataShow ) ) {
+                       dataShow[ prop ] = propTween.start;
+                       if ( hidden ) {
+                               propTween.end = propTween.start;
+                               propTween.start = 0;
+                       }
+               }
+       }
+}
+
+function propFilter( props, specialEasing ) {
+       var index, name, easing, value, hooks;
+
+       // camelCase, specialEasing and expand cssHook pass
+       for ( index in props ) {
+               name = camelCase( index );
+               easing = specialEasing[ name ];
+               value = props[ index ];
+               if ( Array.isArray( value ) ) {
+                       easing = value[ 1 ];
+                       value = props[ index ] = value[ 0 ];
+               }
+
+               if ( index !== name ) {
+                       props[ name ] = value;
+                       delete props[ index ];
+               }
+
+               hooks = jQuery.cssHooks[ name ];
+               if ( hooks && "expand" in hooks ) {
+                       value = hooks.expand( value );
+                       delete props[ name ];
+
+			// Not quite $.extend; this won't overwrite existing keys.
+			// Reusing 'index' because we already have the correct "name"
+                       for ( index in value ) {
+                               if ( !( index in props ) ) {
+                                       props[ index ] = value[ index ];
+                                       specialEasing[ index ] = easing;
+                               }
+                       }
+               } else {
+                       specialEasing[ name ] = easing;
+               }
+       }
+}
+
+function Animation( elem, properties, options ) {
+       var result,
+               stopped,
+               index = 0,
+               length = Animation.prefilters.length,
+               deferred = jQuery.Deferred().always( function() {
+
+                       // Don't match elem in the :animated selector
+                       delete tick.elem;
+               } ),
+               tick = function() {
+                       if ( stopped ) {
+                               return false;
+                       }
+                       var currentTime = fxNow || createFxNow(),
+                               remaining = Math.max( 0, animation.startTime + animation.duration - currentTime ),
+
+                               // Support: Android 2.3 only
+                               // Archaic crash bug won't allow us to use `1 - ( 0.5 || 0 )` (#12497)
+                               temp = remaining / animation.duration || 0,
+                               percent = 1 - temp,
+                               index = 0,
+                               length = animation.tweens.length;
+
+                       for ( ; index < length; index++ ) {
+                               animation.tweens[ index ].run( percent );
+                       }
+
+                       deferred.notifyWith( elem, [ animation, percent, remaining ] );
+
+                       // If there's more to do, yield
+                       if ( percent < 1 && length ) {
+                               return remaining;
+                       }
+
+                       // If this was an empty animation, synthesize a final progress notification
+                       if ( !length ) {
+                               deferred.notifyWith( elem, [ animation, 1, 0 ] );
+                       }
+
+                       // Resolve the animation and report its conclusion
+                       deferred.resolveWith( elem, [ animation ] );
+                       return false;
+               },
+               animation = deferred.promise( {
+                       elem: elem,
+                       props: jQuery.extend( {}, properties ),
+                       opts: jQuery.extend( true, {
+                               specialEasing: {},
+                               easing: jQuery.easing._default
+                       }, options ),
+                       originalProperties: properties,
+                       originalOptions: options,
+                       startTime: fxNow || createFxNow(),
+                       duration: options.duration,
+                       tweens: [],
+                       createTween: function( prop, end ) {
+                               var tween = jQuery.Tween( elem, animation.opts, prop, end,
+                                               animation.opts.specialEasing[ prop ] || animation.opts.easing );
+                               animation.tweens.push( tween );
+                               return tween;
+                       },
+                       stop: function( gotoEnd ) {
+                               var index = 0,
+
+                                       // If we are going to the end, we want to run all the tweens
+                                       // otherwise we skip this part
+                                       length = gotoEnd ? animation.tweens.length : 0;
+                               if ( stopped ) {
+                                       return this;
+                               }
+                               stopped = true;
+                               for ( ; index < length; index++ ) {
+                                       animation.tweens[ index ].run( 1 );
+                               }
+
+                               // Resolve when we played the last frame; otherwise, reject
+                               if ( gotoEnd ) {
+                                       deferred.notifyWith( elem, [ animation, 1, 0 ] );
+                                       deferred.resolveWith( elem, [ animation, gotoEnd ] );
+                               } else {
+                                       deferred.rejectWith( elem, [ animation, gotoEnd ] );
+                               }
+                               return this;
+                       }
+               } ),
+               props = animation.props;
+
+       propFilter( props, animation.opts.specialEasing );
+
+       for ( ; index < length; index++ ) {
+               result = Animation.prefilters[ index ].call( animation, elem, props, animation.opts );
+               if ( result ) {
+                       if ( isFunction( result.stop ) ) {
+                               jQuery._queueHooks( animation.elem, animation.opts.queue ).stop =
+                                       result.stop.bind( result );
+                       }
+                       return result;
+               }
+       }
+
+       jQuery.map( props, createTween, animation );
+
+       if ( isFunction( animation.opts.start ) ) {
+               animation.opts.start.call( elem, animation );
+       }
+
+       // Attach callbacks from options
+       animation
+               .progress( animation.opts.progress )
+               .done( animation.opts.done, animation.opts.complete )
+               .fail( animation.opts.fail )
+               .always( animation.opts.always );
+
+       jQuery.fx.timer(
+               jQuery.extend( tick, {
+                       elem: elem,
+                       anim: animation,
+                       queue: animation.opts.queue
+               } )
+       );
+
+       return animation;
+}
+
+jQuery.Animation = jQuery.extend( Animation, {
+
+       tweeners: {
+               "*": [ function( prop, value ) {
+                       var tween = this.createTween( prop, value );
+                       adjustCSS( tween.elem, prop, rcssNum.exec( value ), tween );
+                       return tween;
+               } ]
+       },
+
+       tweener: function( props, callback ) {
+               if ( isFunction( props ) ) {
+                       callback = props;
+                       props = [ "*" ];
+               } else {
+                       props = props.match( rnothtmlwhite );
+               }
+
+               var prop,
+                       index = 0,
+                       length = props.length;
+
+               for ( ; index < length; index++ ) {
+                       prop = props[ index ];
+                       Animation.tweeners[ prop ] = Animation.tweeners[ prop ] || [];
+                       Animation.tweeners[ prop ].unshift( callback );
+               }
+       },
+
+       prefilters: [ defaultPrefilter ],
+
+       prefilter: function( callback, prepend ) {
+               if ( prepend ) {
+                       Animation.prefilters.unshift( callback );
+               } else {
+                       Animation.prefilters.push( callback );
+               }
+       }
+} );
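
The registration helpers exposed here, jQuery.Animation.tweener() and jQuery.Animation.prefilter(), are the extension points this module itself uses (defaultPrefilter is registered via prefilters above). A minimal, hypothetical sketch of how extension code might call them, assuming jQuery is already loaded; the "fooHeight" property name and the logging are illustrative only:

// Illustrative only: run before defaultPrefilter (the `true` flag prepends it)
// and log which properties are about to be animated. Returning nothing lets
// Animation() fall through to its normal tweeners.
jQuery.Animation.prefilter( function( elem, props, opts ) {
	console.log( "animating", Object.keys( props ), "over", opts.duration, "ms" );
}, true );

// Illustrative only: register a tweener for a hypothetical "fooHeight" property.
// tweener() splits the string on whitespace and unshifts the callback onto
// Animation.tweeners for each name; "*" would match every property.
jQuery.Animation.tweener( "fooHeight", function( prop, value ) {
	return this.createTween( prop, value );
} );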
+
+jQuery.speed = function( speed, easing, fn ) {
+       var opt = speed && typeof speed === "object" ? jQuery.extend( {}, speed ) : {
+               complete: fn || !fn && easing ||
+                       isFunction( speed ) && speed,
+               duration: speed,
+               easing: fn && easing || easing && !isFunction( easing ) && easing
+       };
+
+       // Go to the end state if fx are off
+       if ( jQuery.fx.off ) {
+               opt.duration = 0;
+
+       } else {
+               if ( typeof opt.duration !== "number" ) {
+                       if ( opt.duration in jQuery.fx.speeds ) {
+                               opt.duration = jQuery.fx.speeds[ opt.duration ];
+
+                       } else {
+                               opt.duration = jQuery.fx.speeds._default;
+                       }
+               }
+       }
+
+       // Normalize opt.queue - true/undefined/null -> "fx"
+       if ( opt.queue == null || opt.queue === true ) {
+               opt.queue = "fx";
+       }
+
+       // Queueing
+       opt.old = opt.complete;
+
+       opt.complete = function() {
+               if ( isFunction( opt.old ) ) {
+                       opt.old.call( this );
+               }
+
+               if ( opt.queue ) {
+                       jQuery.dequeue( this, opt.queue );
+               }
+       };
+
+       return opt;
+};
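
For reference, a hedged sketch of what jQuery.speed() normalizes a typical call into; the named duration is looked up in jQuery.fx.speeds (defined further below) and the callback is illustrative:

// The queue defaults to "fx", the original complete callback is kept on
// opt.old, and opt.complete is replaced by a wrapper that also dequeues.
var opt = jQuery.speed( "slow", "swing", function() {} );
// opt.duration === 600, opt.easing === "swing", opt.queue === "fx"
// With jQuery.fx.off set to true, opt.duration would be forced to 0.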
+
+jQuery.fn.extend( {
+       fadeTo: function( speed, to, easing, callback ) {
+
+               // Show any hidden elements after setting opacity to 0
+               return this.filter( isHiddenWithinTree ).css( "opacity", 0 ).show()
+
+                       // Animate to the value specified
+                       .end().animate( { opacity: to }, speed, easing, callback );
+       },
+       animate: function( prop, speed, easing, callback ) {
+               var empty = jQuery.isEmptyObject( prop ),
+                       optall = jQuery.speed( speed, easing, callback ),
+                       doAnimation = function() {
+
+                               // Operate on a copy of prop so per-property easing won't be lost
+                               var anim = Animation( this, jQuery.extend( {}, prop ), optall );
+
+                               // Empty animations, or finishing resolves immediately
+                               if ( empty || dataPriv.get( this, "finish" ) ) {
+                                       anim.stop( true );
+                               }
+                       };
+		doAnimation.finish = doAnimation;
+
+               return empty || optall.queue === false ?
+                       this.each( doAnimation ) :
+                       this.queue( optall.queue, doAnimation );
+       },
+       stop: function( type, clearQueue, gotoEnd ) {
+               var stopQueue = function( hooks ) {
+                       var stop = hooks.stop;
+                       delete hooks.stop;
+                       stop( gotoEnd );
+               };
+
+               if ( typeof type !== "string" ) {
+                       gotoEnd = clearQueue;
+                       clearQueue = type;
+                       type = undefined;
+               }
+               if ( clearQueue && type !== false ) {
+                       this.queue( type || "fx", [] );
+               }
+
+               return this.each( function() {
+                       var dequeue = true,
+                               index = type != null && type + "queueHooks",
+                               timers = jQuery.timers,
+                               data = dataPriv.get( this );
+
+                       if ( index ) {
+                               if ( data[ index ] && data[ index ].stop ) {
+                                       stopQueue( data[ index ] );
+                               }
+                       } else {
+                               for ( index in data ) {
+                                       if ( data[ index ] && data[ index ].stop && rrun.test( index ) ) {
+                                               stopQueue( data[ index ] );
+                                       }
+                               }
+                       }
+
+                       for ( index = timers.length; index--; ) {
+                               if ( timers[ index ].elem === this &&
+                                       ( type == null || timers[ index ].queue === type ) ) {
+
+                                       timers[ index ].anim.stop( gotoEnd );
+                                       dequeue = false;
+                                       timers.splice( index, 1 );
+                               }
+                       }
+
+                       // Start the next in the queue if the last step wasn't forced.
+                       // Timers currently will call their complete callbacks, which
+                       // will dequeue but only if they were gotoEnd.
+                       if ( dequeue || !gotoEnd ) {
+                               jQuery.dequeue( this, type );
+                       }
+               } );
+       },
+       finish: function( type ) {
+               if ( type !== false ) {
+                       type = type || "fx";
+               }
+               return this.each( function() {
+                       var index,
+                               data = dataPriv.get( this ),
+                               queue = data[ type + "queue" ],
+                               hooks = data[ type + "queueHooks" ],
+                               timers = jQuery.timers,
+                               length = queue ? queue.length : 0;
+
+                       // Enable finishing flag on private data
+                       data.finish = true;
+
+                       // Empty the queue first
+                       jQuery.queue( this, type, [] );
+
+                       if ( hooks && hooks.stop ) {
+                               hooks.stop.call( this, true );
+                       }
+
+                       // Look for any active animations, and finish them
+                       for ( index = timers.length; index--; ) {
+                               if ( timers[ index ].elem === this && timers[ index ].queue === type ) {
+                                       timers[ index ].anim.stop( true );
+                                       timers.splice( index, 1 );
+                               }
+                       }
+
+                       // Look for any animations in the old queue and finish them
+                       for ( index = 0; index < length; index++ ) {
+                               if ( queue[ index ] && queue[ index ].finish ) {
+                                       queue[ index ].finish.call( this );
+                               }
+                       }
+
+                       // Turn off finishing flag
+                       delete data.finish;
+               } );
+       }
+} );
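
A short usage sketch of the instance methods defined above (.animate(), .fadeTo(), .stop(), .finish()), assuming jQuery is loaded on a page; the "#box" selector and values are illustrative:

// Per-property easing uses the [ value, easing ] form that propFilter() above
// splits into props + specialEasing entries.
jQuery( "#box" )
	.animate( { width: [ "200px", "linear" ], opacity: 0.5 }, 400 )
	.fadeTo( 300, 1 );

// .stop( true, true ) clears the rest of the "fx" queue and jumps the running
// animation to its end; .finish() additionally ends every queued animation.
jQuery( "#box" ).stop( true, true );
jQuery( "#box" ).finish();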
+
+jQuery.each( [ "toggle", "show", "hide" ], function( i, name ) {
+       var cssFn = jQuery.fn[ name ];
+       jQuery.fn[ name ] = function( speed, easing, callback ) {
+               return speed == null || typeof speed === "boolean" ?
+                       cssFn.apply( this, arguments ) :
+                       this.animate( genFx( name, true ), speed, easing, callback );
+       };
+} );
+
+// Generate shortcuts for custom animations
+jQuery.each( {
+       slideDown: genFx( "show" ),
+       slideUp: genFx( "hide" ),
+       slideToggle: genFx( "toggle" ),
+       fadeIn: { opacity: "show" },
+       fadeOut: { opacity: "hide" },
+       fadeToggle: { opacity: "toggle" }
+}, function( name, props ) {
+       jQuery.fn[ name ] = function( speed, easing, callback ) {
+               return this.animate( props, speed, easing, callback );
+       };
+} );
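
The two loops above only generate thin wrappers; a hedged equivalence sketch (the "#panel" selector is illustrative):

// fadeOut() is simply animate() on the { opacity: "hide" } map registered above,
// while slideDown()/slideUp()/slideToggle() animate the property maps produced
// by genFx( "show" / "hide" / "toggle" ).
jQuery( "#panel" ).fadeOut( "fast" );   // same as .animate( { opacity: "hide" }, "fast" )
jQuery( "#panel" ).slideDown( 300 );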
+
+jQuery.timers = [];
+jQuery.fx.tick = function() {
+       var timer,
+               i = 0,
+               timers = jQuery.timers;
+
+       fxNow = Date.now();
+
+       for ( ; i < timers.length; i++ ) {
+               timer = timers[ i ];
+
+               // Run the timer and safely remove it when done (allowing for external removal)
+               if ( !timer() && timers[ i ] === timer ) {
+                       timers.splice( i--, 1 );
+               }
+       }
+
+       if ( !timers.length ) {
+               jQuery.fx.stop();
+       }
+       fxNow = undefined;
+};
+
+jQuery.fx.timer = function( timer ) {
+       jQuery.timers.push( timer );
+       jQuery.fx.start();
+};
+
+jQuery.fx.interval = 13;
+jQuery.fx.start = function() {
+       if ( inProgress ) {
+               return;
+       }
+
+       inProgress = true;
+       schedule();
+};
+
+jQuery.fx.stop = function() {
+       inProgress = null;
+};
+
+jQuery.fx.speeds = {
+       slow: 600,
+       fast: 200,
+
+       // Default speed
+       _default: 400
+};
+
+
+// Based off of the plugin by Clint Helfers, with permission.
+// https://web.archive.org/web/20100324014747/http://blindsignals.com/index.php/2009/07/jquery-delay/
+jQuery.fn.delay = function( time, type ) {
+       time = jQuery.fx ? jQuery.fx.speeds[ time ] || time : time;
+       type = type || "fx";
+
+       return this.queue( type, function( next, hooks ) {
+               var timeout = window.setTimeout( next, time );
+               hooks.stop = function() {
+                       window.clearTimeout( timeout );
+               };
+       } );
+};
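
A hedged usage sketch of .delay(); the selector and durations are illustrative. The hooks.stop registered here is what lets .stop() (via stopQueue above) and .finish() clear a pending delay's timeout:

// Wait 800ms on the "fx" queue before fading in.
jQuery( "#msg" ).delay( 800 ).fadeIn( 200 );

// Named durations are accepted too, since jQuery.fx.speeds is consulted first.
jQuery( "#msg" ).delay( "fast" ).fadeOut();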
+
+
+( function() {
+       var input = document.createElement( "input" ),
+               select = document.createElement( "select" ),
+               opt = select.appendChild( document.createElement( "option" ) );
+
+       input.type = "checkbox";
+
+       // Support: Android <=4.3 only
+       // Default value for a checkbox should be "on"
+       support.checkOn = input.value !== "";
+
+       // Support: IE <=11 only
+       // Must access selectedIndex to make default options select
+       support.optSelected = opt.selected;
+
+       // Support: IE <=11 only
+       // An input loses its value after becoming a radio
+       input = document.createElement( "input" );
+       input.value = "t";
+       input.type = "radio";
+       support.radioValue = input.value === "t";
+} )();
+
+
+var boolHook,
+       attrHandle = jQuery.expr.attrHandle;
+
+jQuery.fn.extend( {
+       attr: function( name, value ) {
+               return access( this, jQuery.attr, name, value, arguments.length > 1 );
+       },
+
+       removeAttr: function( name ) {
+               return this.each( function() {
+                       jQuery.removeAttr( this, name );
+               } );
+       }
+} );
+
+jQuery.extend( {
+       attr: function( elem, name, value ) {
+               var ret, hooks,
+                       nType = elem.nodeType;
+
+               // Don't get/set attributes on text, comment and attribute nodes
+               if ( nType === 3 || nType === 8 || nType === 2 ) {
+                       return;
+               }
+
+               // Fallback to prop when attributes are not supported
+               if ( typeof elem.getAttribute === "undefined" ) {
+                       return jQuery.prop( elem, name, value );
+               }
+
+               // Attribute hooks are determined by the lowercase version
+               // Grab necessary hook if one is defined
+               if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) {
+                       hooks = jQuery.attrHooks[ name.toLowerCase() ] ||
+                               ( jQuery.expr.match.bool.test( name ) ? boolHook : undefined );
+               }
+
+               if ( value !== undefined ) {
+                       if ( value === null ) {
+                               jQuery.removeAttr( elem, name );
+                               return;
+                       }
+
+                       if ( hooks && "set" in hooks &&
+                               ( ret = hooks.set( elem, value, name ) ) !== undefined ) {
+                               return ret;
+                       }
+
+                       elem.setAttribute( name, value + "" );
+                       return value;
+               }
+
+               if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) {
+                       return ret;
+               }
+
+               ret = jQuery.find.attr( elem, name );
+
+		// Non-existent attributes return null; we normalize to undefined
+               return ret == null ? undefined : ret;
+       },
+
+       attrHooks: {
+               type: {
+                       set: function( elem, value ) {
+                               if ( !support.radioValue && value === "radio" &&
+                                       nodeName( elem, "input" ) ) {
+                                       var val = elem.value;
+                                       elem.setAttribute( "type", value );
+                                       if ( val ) {
+                                               elem.value = val;
+                                       }
+                                       return value;
+                               }
+                       }
+               }
+       },
+
+       removeAttr: function( elem, value ) {
+               var name,
+                       i = 0,
+
+                       // Attribute names can contain non-HTML whitespace characters
+                       // https://html.spec.whatwg.org/multipage/syntax.html#attributes-2
+                       attrNames = value && value.match( rnothtmlwhite );
+
+               if ( attrNames && elem.nodeType === 1 ) {
+                       while ( ( name = attrNames[ i++ ] ) ) {
+                               elem.removeAttribute( name );
+                       }
+               }
+       }
+} );
+
+// Hooks for boolean attributes
+boolHook = {
+       set: function( elem, value, name ) {
+               if ( value === false ) {
+
+                       // Remove boolean attributes when set to false
+                       jQuery.removeAttr( elem, name );
+               } else {
+                       elem.setAttribute( name, name );
+               }
+               return name;
+       }
+};
+
+jQuery.each( jQuery.expr.match.bool.source.match( /\w+/g ), function( i, name ) {
+       var getter = attrHandle[ name ] || jQuery.find.attr;
+
+       attrHandle[ name ] = function( elem, name, isXML ) {
+               var ret, handle,
+                       lowercaseName = name.toLowerCase();
+
+               if ( !isXML ) {
+
+                       // Avoid an infinite loop by temporarily removing this function from the getter
+                       handle = attrHandle[ lowercaseName ];
+                       attrHandle[ lowercaseName ] = ret;
+                       ret = getter( elem, name, isXML ) != null ?
+                               lowercaseName :
+                               null;
+                       attrHandle[ lowercaseName ] = handle;
+               }
+               return ret;
+       };
+} );
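
A hedged usage sketch for the attribute layer above; the "#agree" element and attribute names are illustrative. Boolean attributes matched by jQuery.expr.match.bool go through boolHook, so false removes the attribute and any other value writes name="name":

jQuery( "#agree" ).attr( "checked", true );        // sets checked="checked"
jQuery( "#agree" ).attr( "checked", false );       // removes the attribute
jQuery( "#agree" ).attr( "title", null );          // null also removes (see jQuery.attr)
jQuery( "#agree" ).removeAttr( "title data-tmp" ); // space-separated list supported
jQuery( "#agree" ).attr( "missing" );              // undefined, never null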
+
+
+
+
+var rfocusable = /^(?:input|select|textarea|button)$/i,
+       rclickable = /^(?:a|area)$/i;
+
+jQuery.fn.extend( {
+       prop: function( name, value ) {
+               return access( this, jQuery.prop, name, value, arguments.length > 1 );
+       },
+
+       removeProp: function( name ) {
+               return this.each( function() {
+                       delete this[ jQuery.propFix[ name ] || name ];
+               } );
+       }
+} );
+
+jQuery.extend( {
+       prop: function( elem, name, value ) {
+               var ret, hooks,
+                       nType = elem.nodeType;
+
+               // Don't get/set properties on text, comment and attribute nodes
+               if ( nType === 3 || nType === 8 || nType === 2 ) {
+                       return;
+               }
+
+               if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) {
+
+                       // Fix name and attach hooks
+                       name = jQuery.propFix[ name ] || name;
+                       hooks = jQuery.propHooks[ name ];
+               }
+
+               if ( value !== undefined ) {
+                       if ( hooks && "set" in hooks &&
+                               ( ret = hooks.set( elem, value, name ) ) !== undefined ) {
+                               return ret;
+                       }
+
+                       return ( elem[ name ] = value );
+               }
+
+               if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) {
+                       return ret;
+               }
+
+               return elem[ name ];
+       },
+
+       propHooks: {
+               tabIndex: {
+                       get: function( elem ) {
+
+                               // Support: IE <=9 - 11 only
+                               // elem.tabIndex doesn't always return the
+                               // correct value when it hasn't been explicitly set
+                               // https://web.archive.org/web/20141116233347/http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/
+				// Use proper attribute retrieval (#12072)
+                               var tabindex = jQuery.find.attr( elem, "tabindex" );
+
+                               if ( tabindex ) {
+                                       return parseInt( tabindex, 10 );
+                               }
+
+                               if (
+                                       rfocusable.test( elem.nodeName ) ||
+                                       rclickable.test( elem.nodeName ) &&
+                                       elem.href
+                               ) {
+                                       return 0;
+                               }
+
+                               return -1;
+                       }
+               }
+       },
+
+       propFix: {
+               "for": "htmlFor",
+               "class": "className"
+       }
+} );
+
+// Support: IE <=11 only
+// Accessing the selectedIndex property
+// forces the browser to respect setting selected
+// on the option
+// The getter ensures a default option is selected
+// when in an optgroup
+// The eslint rule "no-unused-expressions" is disabled for this code
+// since it considers such accesses to be no-ops
+if ( !support.optSelected ) {
+       jQuery.propHooks.selected = {
+               get: function( elem ) {
+
+                       /* eslint no-unused-expressions: "off" */
+
+                       var parent = elem.parentNode;
+                       if ( parent && parent.parentNode ) {
+                               parent.parentNode.selectedIndex;
+                       }
+                       return null;
+               },
+               set: function( elem ) {
+
+                       /* eslint no-unused-expressions: "off" */
+
+                       var parent = elem.parentNode;
+                       if ( parent ) {
+                               parent.selectedIndex;
+
+                               if ( parent.parentNode ) {
+                                       parent.parentNode.selectedIndex;
+                               }
+                       }
+               }
+       };
+}
+
+jQuery.each( [
+       "tabIndex",
+       "readOnly",
+       "maxLength",
+       "cellSpacing",
+       "cellPadding",
+       "rowSpan",
+       "colSpan",
+       "useMap",
+       "frameBorder",
+       "contentEditable"
+], function() {
+       jQuery.propFix[ this.toLowerCase() ] = this;
+} );
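
A hedged sketch contrasting .prop() with .attr(); the selectors are illustrative. propFix maps reserved words such as "for" and "class", and the tabIndex hook above normalizes the IE behaviour noted in its comment:

jQuery( "#agree" ).prop( "checked" );   // live boolean state of the checkbox
jQuery( "#agree" ).attr( "checked" );   // the attribute as written in the markup
jQuery( "label" ).prop( "for" );        // routed to the htmlFor property via propFix
jQuery( "a" ).prop( "tabIndex" );       // explicit value, else 0 for links, else -1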
+
+
+
+
+// Strip and collapse whitespace according to HTML spec
+// https://infra.spec.whatwg.org/#strip-and-collapse-ascii-whitespace
+function stripAndCollapse( value ) {
+	var tokens = value.match( rnothtmlwhite ) || [];
+	return tokens.join( " " );
+}
+
+
+function getClass( elem ) {
+       return elem.getAttribute && elem.getAttribute( "class" ) || "";
+}
+
+function classesToArray( value ) {
+       if ( Array.isArray( value ) ) {
+               return value;
+       }
+       if ( typeof value === "string" ) {
+               return value.match( rnothtmlwhite ) || [];
+       }
+       return [];
+}
+
+jQuery.fn.extend( {
+       addClass: function( value ) {
+               var classes, elem, cur, curValue, clazz, j, finalValue,
+                       i = 0;
+
+               if ( isFunction( value ) ) {
+                       return this.each( function( j ) {
+                               jQuery( this ).addClass( value.call( this, j, getClass( this ) ) );
+                       } );
+               }
+
+               classes = classesToArray( value );
+
+               if ( classes.length ) {
+                       while ( ( elem = this[ i++ ] ) ) {
+                               curValue = getClass( elem );
+                               cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " );
+
+                               if ( cur ) {
+                                       j = 0;
+                                       while ( ( clazz = classes[ j++ ] ) ) {
+                                               if ( cur.indexOf( " " + clazz + " " ) < 0 ) {
+                                                       cur += clazz + " ";
+                                               }
+                                       }
+
+                                       // Only assign if different to avoid unneeded rendering.
+                                       finalValue = stripAndCollapse( cur );
+                                       if ( curValue !== finalValue ) {
+                                               elem.setAttribute( "class", finalValue );
+                                       }
+                               }
+                       }
+               }
+
+               return this;
+       },
+
+       removeClass: function( value ) {
+               var classes, elem, cur, curValue, clazz, j, finalValue,
+                       i = 0;
+
+               if ( isFunction( value ) ) {
+                       return this.each( function( j ) {
+                               jQuery( this ).removeClass( value.call( this, j, getClass( this ) ) );
+                       } );
+               }
+
+               if ( !arguments.length ) {
+                       return this.attr( "class", "" );
+               }
+
+               classes = classesToArray( value );
+
+               if ( classes.length ) {
+                       while ( ( elem = this[ i++ ] ) ) {
+                               curValue = getClass( elem );
+
+                               // This expression is here for better compressibility (see addClass)
+                               cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " );
+
+                               if ( cur ) {
+                                       j = 0;
+                                       while ( ( clazz = classes[ j++ ] ) ) {
+
+                                               // Remove *all* instances
+                                               while ( cur.indexOf( " " + clazz + " " ) > -1 ) {
+                                                       cur = cur.replace( " " + clazz + " ", " " );
+                                               }
+                                       }
+
+                                       // Only assign if different to avoid unneeded rendering.
+                                       finalValue = stripAndCollapse( cur );
+                                       if ( curValue !== finalValue ) {
+                                               elem.setAttribute( "class", finalValue );
+                                       }
+                               }
+                       }
+               }
+
+               return this;
+       },
+
+       toggleClass: function( value, stateVal ) {
+               var type = typeof value,
+                       isValidValue = type === "string" || Array.isArray( value );
+
+               if ( typeof stateVal === "boolean" && isValidValue ) {
+                       return stateVal ? this.addClass( value ) : this.removeClass( value );
+               }
+
+               if ( isFunction( value ) ) {
+                       return this.each( function( i ) {
+                               jQuery( this ).toggleClass(
+                                       value.call( this, i, getClass( this ), stateVal ),
+                                       stateVal
+                               );
+                       } );
+               }
+
+               return this.each( function() {
+                       var className, i, self, classNames;
+
+                       if ( isValidValue ) {
+
+                               // Toggle individual class names
+                               i = 0;
+                               self = jQuery( this );
+                               classNames = classesToArray( value );
+
+                               while ( ( className = classNames[ i++ ] ) ) {
+
+                                       // Check each className given, space separated list
+                                       if ( self.hasClass( className ) ) {
+                                               self.removeClass( className );
+                                       } else {
+                                               self.addClass( className );
+                                       }
+                               }
+
+                       // Toggle whole class name
+                       } else if ( value === undefined || type === "boolean" ) {
+                               className = getClass( this );
+                               if ( className ) {
+
+                                       // Store className if set
+                                       dataPriv.set( this, "__className__", className );
+                               }
+
+                               // If the element has a class name or if we're passed `false`,
+                               // then remove the whole classname (if there was one, the above saved it).
+                               // Otherwise bring back whatever was previously saved (if anything),
+                               // falling back to the empty string if nothing was stored.
+                               if ( this.setAttribute ) {
+                                       this.setAttribute( "class",
+                                               className || value === false ?
+                                               "" :
+                                               dataPriv.get( this, "__className__" ) || ""
+                                       );
+                               }
+                       }
+               } );
+       },
+
+       hasClass: function( selector ) {
+               var className, elem,
+                       i = 0;
+
+               className = " " + selector + " ";
+               while ( ( elem = this[ i++ ] ) ) {
+                       if ( elem.nodeType === 1 &&
+                               ( " " + stripAndCollapse( getClass( elem ) ) + " " ).indexOf( className ) > -1 ) {
+                                       return true;
+                       }
+               }
+
+               return false;
+       }
+} );
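
A hedged usage sketch for the class helpers above; the selector and class names are illustrative. Values may be space-separated strings, arrays, or functions returning either:

var $item = jQuery( "#item" );
$item.addClass( "active highlighted" );
$item.removeClass( [ "highlighted" ] );
$item.toggleClass( "active", true );    // boolean second argument forces add/remove
$item.hasClass( "active" );             // true
$item.addClass( function( i, current ) {
	// The function form receives the index and the current class string.
	return current.indexOf( "active" ) > -1 ? "was-active" : "";
} );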
+
+
+
+
+var rreturn = /\r/g;
+
+jQuery.fn.extend( {
+       val: function( value ) {
+               var hooks, ret, valueIsFunction,
+                       elem = this[ 0 ];
+
+               if ( !arguments.length ) {
+                       if ( elem ) {
+                               hooks = jQuery.valHooks[ elem.type ] ||
+                                       jQuery.valHooks[ elem.nodeName.toLowerCase() ];
+
+                               if ( hooks &&
+                                       "get" in hooks &&
+                                       ( ret = hooks.get( elem, "value" ) ) !== undefined
+                               ) {
+                                       return ret;
+                               }
+
+                               ret = elem.value;
+
+                               // Handle most common string cases
+                               if ( typeof ret === "string" ) {
+                                       return ret.replace( rreturn, "" );
+                               }
+
+                               // Handle cases where value is null/undef or number
+                               return ret == null ? "" : ret;
+                       }
+
+                       return;
+               }
+
+               valueIsFunction = isFunction( value );
+
+               return this.each( function( i ) {
+                       var val;
+
+                       if ( this.nodeType !== 1 ) {
+                               return;
+                       }
+
+                       if ( valueIsFunction ) {
+                               val = value.call( this, i, jQuery( this ).val() );
+                       } else {
+                               val = value;
+                       }
+
+                       // Treat null/undefined as ""; convert numbers to string
+                       if ( val == null ) {
+                               val = "";
+
+                       } else if ( typeof val === "number" ) {
+                               val += "";
+
+                       } else if ( Array.isArray( val ) ) {
+                               val = jQuery.map( val, function( value ) {
+                                       return value == null ? "" : value + "";
+                               } );
+                       }
+
+                       hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ];
+
+                       // If set returns undefined, fall back to normal setting
+                       if ( !hooks || !( "set" in hooks ) || hooks.set( this, val, "value" ) === undefined ) {
+                               this.value = val;
+                       }
+               } );
+       }
+} );
+
+jQuery.extend( {
+       valHooks: {
+               option: {
+                       get: function( elem ) {
+
+                               var val = jQuery.find.attr( elem, "value" );
+                               return val != null ?
+                                       val :
+
+                                       // Support: IE <=10 - 11 only
+                                       // option.text throws exceptions (#14686, #14858)
+                                       // Strip and collapse whitespace
+                                       // https://html.spec.whatwg.org/#strip-and-collapse-whitespace
+                                       stripAndCollapse( jQuery.text( elem ) );
+                       }
+               },
+               select: {
+                       get: function( elem ) {
+                               var value, option, i,
+                                       options = elem.options,
+                                       index = elem.selectedIndex,
+                                       one = elem.type === "select-one",
+                                       values = one ? null : [],
+                                       max = one ? index + 1 : options.length;
+
+                               if ( index < 0 ) {
+                                       i = max;
+
+                               } else {
+                                       i = one ? index : 0;
+                               }
+
+                               // Loop through all the selected options
+                               for ( ; i < max; i++ ) {
+                                       option = options[ i ];
+
+                                       // Support: IE <=9 only
+                                       // IE8-9 doesn't update selected after form reset (#2551)
+                                       if ( ( option.selected || i === index ) &&
+
+                                                       // Don't return options that are disabled or in a disabled optgroup
+                                                       !option.disabled &&
+                                                       ( !option.parentNode.disabled ||
+                                                               !nodeName( option.parentNode, "optgroup" ) ) ) {
+
+                                               // Get the specific value for the option
+                                               value = jQuery( option ).val();
+
+						// We don't need an array for select-one elements
+                                               if ( one ) {
+                                                       return value;
+                                               }
+
+                                               // Multi-Selects return an array
+                                               values.push( value );
+                                       }
+                               }
+
+                               return values;
+                       },
+
+                       set: function( elem, value ) {
+                               var optionSet, option,
+                                       options = elem.options,
+                                       values = jQuery.makeArray( value ),
+                                       i = options.length;
+
+                               while ( i-- ) {
+                                       option = options[ i ];
+
+                                       /* eslint-disable no-cond-assign */
+
+                                       if ( option.selected =
+                                               jQuery.inArray( jQuery.valHooks.option.get( option ), values ) > -1
+                                       ) {
+                                               optionSet = true;
+                                       }
+
+                                       /* eslint-enable no-cond-assign */
+                               }
+
+                               // Force browsers to behave consistently when non-matching value is set
+                               if ( !optionSet ) {
+                                       elem.selectedIndex = -1;
+                               }
+                               return values;
+                       }
+               }
+       }
+} );
+
+// Radios and checkboxes getter/setter
+jQuery.each( [ "radio", "checkbox" ], function() {
+       jQuery.valHooks[ this ] = {
+               set: function( elem, value ) {
+                       if ( Array.isArray( value ) ) {
+                               return ( elem.checked = jQuery.inArray( jQuery( elem ).val(), value ) > -1 );
+                       }
+               }
+       };
+       if ( !support.checkOn ) {
+               jQuery.valHooks[ this ].get = function( elem ) {
+                       return elem.getAttribute( "value" ) === null ? "on" : elem.value;
+               };
+       }
+} );
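
A hedged usage sketch for .val() and the value hooks above; the selectors and values are illustrative:

// The select hook returns an array of selected values for <select multiple>.
jQuery( "#colors" ).val();                     // e.g. [ "red", "blue" ]
jQuery( "#colors" ).val( [ "red", "green" ] ); // selects matching options only

// Radios/checkboxes set from an array become checked when their value matches.
jQuery( "input[name=fruit]" ).val( [ "apple" ] );

// Carriage returns are stripped from string values on read (rreturn above).
jQuery( "#comment" ).val();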
+
+
+
+
+// Return jQuery for attributes-only inclusion
+
+
+support.focusin = "onfocusin" in window;
+
+
+var rfocusMorph = /^(?:focusinfocus|focusoutblur)$/,
+       stopPropagationCallback = function( e ) {
+               e.stopPropagation();
+       };
+
+jQuery.extend( jQuery.event, {
+
+       trigger: function( event, data, elem, onlyHandlers ) {
+
+               var i, cur, tmp, bubbleType, ontype, handle, special, lastElement,
+                       eventPath = [ elem || document ],
+                       type = hasOwn.call( event, "type" ) ? event.type : event,
+                       namespaces = hasOwn.call( event, "namespace" ) ? event.namespace.split( "." ) : [];
+
+               cur = lastElement = tmp = elem = elem || document;
+
+               // Don't do events on text and comment nodes
+               if ( elem.nodeType === 3 || elem.nodeType === 8 ) {
+                       return;
+               }
+
+               // focus/blur morphs to focusin/out; ensure we're not firing them right now
+               if ( rfocusMorph.test( type + jQuery.event.triggered ) ) {
+                       return;
+               }
+
+               if ( type.indexOf( "." ) > -1 ) {
+
+                       // Namespaced trigger; create a regexp to match event type in handle()
+                       namespaces = type.split( "." );
+                       type = namespaces.shift();
+                       namespaces.sort();
+               }
+               ontype = type.indexOf( ":" ) < 0 && "on" + type;
+
+               // Caller can pass in a jQuery.Event object, Object, or just an event type string
+               event = event[ jQuery.expando ] ?
+                       event :
+                       new jQuery.Event( type, typeof event === "object" && event );
+
+               // Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true)
+               event.isTrigger = onlyHandlers ? 2 : 3;
+               event.namespace = namespaces.join( "." );
+               event.rnamespace = event.namespace ?
+                       new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ) :
+                       null;
+
+               // Clean up the event in case it is being reused
+               event.result = undefined;
+               if ( !event.target ) {
+                       event.target = elem;
+               }
+
+               // Clone any incoming data and prepend the event, creating the handler arg list
+               data = data == null ?
+                       [ event ] :
+                       jQuery.makeArray( data, [ event ] );
+
+               // Allow special events to draw outside the lines
+               special = jQuery.event.special[ type ] || {};
+               if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) {
+                       return;
+               }
+
+               // Determine event propagation path in advance, per W3C events spec (#9951)
+               // Bubble up to document, then to window; watch for a global ownerDocument var (#9724)
+               if ( !onlyHandlers && !special.noBubble && !isWindow( elem ) ) {
+
+                       bubbleType = special.delegateType || type;
+                       if ( !rfocusMorph.test( bubbleType + type ) ) {
+                               cur = cur.parentNode;
+                       }
+                       for ( ; cur; cur = cur.parentNode ) {
+                               eventPath.push( cur );
+                               tmp = cur;
+                       }
+
+                       // Only add window if we got to document (e.g., not plain obj or detached DOM)
+                       if ( tmp === ( elem.ownerDocument || document ) ) {
+                               eventPath.push( tmp.defaultView || tmp.parentWindow || window );
+                       }
+               }
+
+               // Fire handlers on the event path
+               i = 0;
+               while ( ( cur = eventPath[ i++ ] ) && !event.isPropagationStopped() ) {
+                       lastElement = cur;
+                       event.type = i > 1 ?
+                               bubbleType :
+                               special.bindType || type;
+
+                       // jQuery handler
+                       handle = ( dataPriv.get( cur, "events" ) || {} )[ event.type ] &&
+                               dataPriv.get( cur, "handle" );
+                       if ( handle ) {
+                               handle.apply( cur, data );
+                       }
+
+                       // Native handler
+                       handle = ontype && cur[ ontype ];
+                       if ( handle && handle.apply && acceptData( cur ) ) {
+                               event.result = handle.apply( cur, data );
+                               if ( event.result === false ) {
+                                       event.preventDefault();
+                               }
+                       }
+               }
+               event.type = type;
+
+               // If nobody prevented the default action, do it now
+               if ( !onlyHandlers && !event.isDefaultPrevented() ) {
+
+                       if ( ( !special._default ||
+                               special._default.apply( eventPath.pop(), data ) === false ) &&
+                               acceptData( elem ) ) {
+
+                               // Call a native DOM method on the target with the same name as the event.
+				// Don't do default actions on window; that's where global variables live (#6170)
+                               if ( ontype && isFunction( elem[ type ] ) && !isWindow( elem ) ) {
+
+                                       // Don't re-trigger an onFOO event when we call its FOO() method
+                                       tmp = elem[ ontype ];
+
+                                       if ( tmp ) {
+                                               elem[ ontype ] = null;
+                                       }
+
+                                       // Prevent re-triggering of the same event, since we already bubbled it above
+                                       jQuery.event.triggered = type;
+
+                                       if ( event.isPropagationStopped() ) {
+                                               lastElement.addEventListener( type, stopPropagationCallback );
+                                       }
+
+                                       elem[ type ]();
+
+                                       if ( event.isPropagationStopped() ) {
+                                               lastElement.removeEventListener( type, stopPropagationCallback );
+                                       }
+
+                                       jQuery.event.triggered = undefined;
+
+                                       if ( tmp ) {
+                                               elem[ ontype ] = tmp;
+                                       }
+                               }
+                       }
+               }
+
+               return event.result;
+       },
+
+       // Piggyback on a donor event to simulate a different one
+       // Used only for `focus(in | out)` events
+       simulate: function( type, elem, event ) {
+               var e = jQuery.extend(
+                       new jQuery.Event(),
+                       event,
+                       {
+                               type: type,
+                               isSimulated: true
+                       }
+               );
+
+               jQuery.event.trigger( e, null, elem );
+       }
+
+} );
+
+jQuery.fn.extend( {
+
+       trigger: function( type, data ) {
+               return this.each( function() {
+                       jQuery.event.trigger( type, data, this );
+               } );
+       },
+       triggerHandler: function( type, data ) {
+               var elem = this[ 0 ];
+               if ( elem ) {
+                       return jQuery.event.trigger( type, data, elem, true );
+               }
+       }
+} );
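
A hedged usage sketch for the trigger methods above; the selectors and event names are illustrative:

// .trigger() runs jQuery handlers along the propagation path built above and,
// unless the default is prevented, ends by calling the element's native method
// of the same name (here elem.click()).
jQuery( "#save" ).trigger( "click" );

// .triggerHandler() passes onlyHandlers = true: no bubbling beyond the first
// matched element and no native default action; it returns event.result.
jQuery( "#save" ).triggerHandler( "click" );

// Namespaces and extra data are supported; handlers receive them as arguments.
jQuery( document ).trigger( "refresh.widgets", [ { force: true } ] );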
+
+
+// Support: Firefox <=44
+// Firefox doesn't have focus(in | out) events
+// Related ticket - https://bugzilla.mozilla.org/show_bug.cgi?id=687787
+//
+// Support: Chrome <=48 - 49, Safari <=9.0 - 9.1
+// focus(in | out) events fire after focus & blur events,
+// which is spec violation - http://www.w3.org/TR/DOM-Level-3-Events/#events-focusevent-event-order
+// Related ticket - https://bugs.chromium.org/p/chromium/issues/detail?id=449857
+if ( !support.focusin ) {
+       jQuery.each( { focus: "focusin", blur: "focusout" }, function( orig, fix ) {
+
+               // Attach a single capturing handler on the document while someone wants focusin/focusout
+               var handler = function( event ) {
+                       jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ) );
+               };
+
+               jQuery.event.special[ fix ] = {
+                       setup: function() {
+                               var doc = this.ownerDocument || this,
+                                       attaches = dataPriv.access( doc, fix );
+
+                               if ( !attaches ) {
+                                       doc.addEventListener( orig, handler, true );
+                               }
+                               dataPriv.access( doc, fix, ( attaches || 0 ) + 1 );
+                       },
+                       teardown: function() {
+                               var doc = this.ownerDocument || this,
+                                       attaches = dataPriv.access( doc, fix ) - 1;
+
+                               if ( !attaches ) {
+                                       doc.removeEventListener( orig, handler, true );
+                                       dataPriv.remove( doc, fix );
+
+                               } else {
+                                       dataPriv.access( doc, fix, attaches );
+                               }
+                       }
+               };
+       } );
+}
+var location = window.location;
+
+var nonce = Date.now();
+
+var rquery = ( /\?/ );
+
+
+
+// Cross-browser xml parsing
+jQuery.parseXML = function( data ) {
+       var xml;
+       if ( !data || typeof data !== "string" ) {
+               return null;
+       }
+
+       // Support: IE 9 - 11 only
+       // IE throws on parseFromString with invalid input.
+       try {
+               xml = ( new window.DOMParser() ).parseFromString( data, "text/xml" );
+       } catch ( e ) {
+               xml = undefined;
+       }
+
+       if ( !xml || xml.getElementsByTagName( "parsererror" ).length ) {
+               jQuery.error( "Invalid XML: " + data );
+       }
+       return xml;
+};
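
A hedged usage sketch for jQuery.parseXML; the markup is illustrative:

var xmlDoc = jQuery.parseXML( "<list><item id='1'>first</item></list>" );
jQuery( xmlDoc ).find( "item" ).attr( "id" );   // "1"

jQuery.parseXML( "" );        // null for empty or non-string input
jQuery.parseXML( "<oops" );   // throws via jQuery.error( "Invalid XML: ..." )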
+
+
+var
+       rbracket = /\[\]$/,
+       rCRLF = /\r?\n/g,
+       rsubmitterTypes = /^(?:submit|button|image|reset|file)$/i,
+       rsubmittable = /^(?:input|select|textarea|keygen)/i;
+
+function buildParams( prefix, obj, traditional, add ) {
+       var name;
+
+       if ( Array.isArray( obj ) ) {
+
+               // Serialize array item.
+               jQuery.each( obj, function( i, v ) {
+                       if ( traditional || rbracket.test( prefix ) ) {
+
+                               // Treat each array item as a scalar.
+                               add( prefix, v );
+
+                       } else {
+
+				// Item is non-scalar (array or object); encode its numeric index.
+                               buildParams(
+                                       prefix + "[" + ( typeof v === "object" && v != null ? i : "" ) + "]",
+                                       v,
+                                       traditional,
+                                       add
+                               );
+                       }
+               } );
+
+       } else if ( !traditional && toType( obj ) === "object" ) {
+
+               // Serialize object item.
+               for ( name in obj ) {
+                       buildParams( prefix + "[" + name + "]", obj[ name ], traditional, add );
+               }
+
+       } else {
+
+               // Serialize scalar item.
+               add( prefix, obj );
+       }
+}
+
+// Serialize an array of form elements or a set of
+// key/values into a query string
+jQuery.param = function( a, traditional ) {
+       var prefix,
+               s = [],
+               add = function( key, valueOrFunction ) {
+
+                       // If value is a function, invoke it and use its return value
+                       var value = isFunction( valueOrFunction ) ?
+                               valueOrFunction() :
+                               valueOrFunction;
+
+                       s[ s.length ] = encodeURIComponent( key ) + "=" +
+                               encodeURIComponent( value == null ? "" : value );
+               };
+
+       if ( a == null ) {
+               return "";
+       }
+
+       // If an array was passed in, assume that it is an array of form elements.
+       if ( Array.isArray( a ) || ( a.jquery && !jQuery.isPlainObject( a ) ) ) {
+
+               // Serialize the form elements
+               jQuery.each( a, function() {
+                       add( this.name, this.value );
+               } );
+
+       } else {
+
+               // If traditional, encode the "old" way (the way 1.3.2 or older
+               // did it), otherwise encode params recursively.
+               for ( prefix in a ) {
+                       buildParams( prefix, a[ prefix ], traditional, add );
+               }
+       }
+
+       // Return the resulting serialization
+       return s.join( "&" );
+};
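+
+// Editorial note: an illustrative sketch (not upstream code) of the two
+// serialization modes implemented above; the key names are made up. With
+// traditional set to true, the recursive buildParams handling of arrays and
+// objects is skipped and every value is emitted as a scalar under its plain key.
+/*
+jQuery.param( { ids: [ 1, 2 ] } );       // "ids%5B%5D=1&ids%5B%5D=2" (i.e. ids[]=1&ids[]=2)
+jQuery.param( { ids: [ 1, 2 ] }, true ); // "ids=1&ids=2"
+*/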
+
+jQuery.fn.extend( {
+       serialize: function() {
+               return jQuery.param( this.serializeArray() );
+       },
+       serializeArray: function() {
+               return this.map( function() {
+
+                       // Can add propHook for "elements" to filter or add form elements
+                       var elements = jQuery.prop( this, "elements" );
+                       return elements ? jQuery.makeArray( elements ) : this;
+               } )
+               .filter( function() {
+                       var type = this.type;
+
+                       // Use .is( ":disabled" ) so that fieldset[disabled] works
+                       return this.name && !jQuery( this ).is( ":disabled" ) &&
+                               rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) &&
+                               ( this.checked || !rcheckableType.test( type ) );
+               } )
+               .map( function( i, elem ) {
+                       var val = jQuery( this ).val();
+
+                       if ( val == null ) {
+                               return null;
+                       }
+
+                       if ( Array.isArray( val ) ) {
+                               return jQuery.map( val, function( val ) {
+                                       return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) };
+                               } );
+                       }
+
+                       return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) };
+               } ).get();
+       }
+} );
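+
+// Editorial note: illustrative only, not upstream code; the form fields are
+// hypothetical. serializeArray() yields name/value pairs for successful
+// controls (named, enabled, submittable, and checked where applicable), and
+// serialize() runs that result through jQuery.param above.
+/*
+// Given <input name="q" value="lexer"> and a checked
+// <input type="checkbox" name="lang" value="python"> inside the form:
+jQuery( "form" ).serializeArray();
+// => [ { name: "q", value: "lexer" }, { name: "lang", value: "python" } ]
+jQuery( "form" ).serialize();
+// => "q=lexer&lang=python"
+*/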
+
+
+var
+       r20 = /%20/g,
+       rhash = /#.*$/,
+       rantiCache = /([?&])_=[^&]*/,
+       rheaders = /^(.*?):[ \t]*([^\r\n]*)$/mg,
+
+       // #7653, #8125, #8152: local protocol detection
+       rlocalProtocol = /^(?:about|app|app-storage|.+-extension|file|res|widget):$/,
+       rnoContent = /^(?:GET|HEAD)$/,
+       rprotocol = /^\/\//,
+
+       /* Prefilters
+        * 1) They are useful to introduce custom dataTypes (see ajax/jsonp.js for an example)
+        * 2) These are called:
+        *    - BEFORE asking for a transport
+        *    - AFTER param serialization (s.data is a string if s.processData is true)
+        * 3) key is the dataType
+        * 4) the catchall symbol "*" can be used
+        * 5) execution will start with transport dataType and THEN continue down to "*" if needed
+        */
+       prefilters = {},
+
+       /* Transports bindings
+        * 1) key is the dataType
+        * 2) the catchall symbol "*" can be used
+        * 3) selection will start with transport dataType and THEN go to "*" if needed
+        */
+       transports = {},
+
+       // Avoid comment-prolog char sequence (#10098); must appease lint and evade compression
+       allTypes = "*/".concat( "*" ),
+
+       // Anchor tag for parsing the document origin
+       originAnchor = document.createElement( "a" );
+       originAnchor.href = location.href;
+
+// Base "constructor" for jQuery.ajaxPrefilter and jQuery.ajaxTransport
+function addToPrefiltersOrTransports( structure ) {
+
+       // dataTypeExpression is optional and defaults to "*"
+       return function( dataTypeExpression, func ) {
+
+               if ( typeof dataTypeExpression !== "string" ) {
+                       func = dataTypeExpression;
+                       dataTypeExpression = "*";
+               }
+
+               var dataType,
+                       i = 0,
+                       dataTypes = dataTypeExpression.toLowerCase().match( rnothtmlwhite ) || [];
+
+               if ( isFunction( func ) ) {
+
+                       // For each dataType in the dataTypeExpression
+                       while ( ( dataType = dataTypes[ i++ ] ) ) {
+
+                               // Prepend if requested
+                               if ( dataType[ 0 ] === "+" ) {
+                                       dataType = dataType.slice( 1 ) || "*";
+                                       ( structure[ dataType ] = structure[ dataType ] || [] ).unshift( func );
+
+                               // Otherwise append
+                               } else {
+                                       ( structure[ dataType ] = structure[ dataType ] || [] ).push( func );
+                               }
+                       }
+               }
+       };
+}
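+
+// Editorial note: an illustrative sketch, not upstream code. The factory above
+// backs both jQuery.ajaxPrefilter and jQuery.ajaxTransport (assigned further
+// down); the first argument is an optional whitespace-separated dataType list,
+// and a leading "+" prepends the handler instead of appending it. The tweak
+// inside the handler is just an example.
+/*
+jQuery.ajaxPrefilter( "json script", function( options ) {
+	// Runs before a transport is chosen, only when the resolved dataType
+	// is "json" or "script".
+	if ( options.crossDomain ) {
+		options.cache = false;
+	}
+} );
+*/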
+
+// Base inspection function for prefilters and transports
+function inspectPrefiltersOrTransports( structure, options, originalOptions, jqXHR ) {
+
+       var inspected = {},
+               seekingTransport = ( structure === transports );
+
+       function inspect( dataType ) {
+               var selected;
+               inspected[ dataType ] = true;
+               jQuery.each( structure[ dataType ] || [], function( _, prefilterOrFactory ) {
+                       var dataTypeOrTransport = prefilterOrFactory( options, originalOptions, jqXHR );
+                       if ( typeof dataTypeOrTransport === "string" &&
+                               !seekingTransport && !inspected[ dataTypeOrTransport ] ) {
+
+                               options.dataTypes.unshift( dataTypeOrTransport );
+                               inspect( dataTypeOrTransport );
+                               return false;
+                       } else if ( seekingTransport ) {
+                               return !( selected = dataTypeOrTransport );
+                       }
+               } );
+               return selected;
+       }
+
+       return inspect( options.dataTypes[ 0 ] ) || !inspected[ "*" ] && inspect( "*" );
+}
+
+// A special extend for ajax options
+// that takes "flat" options (not to be deep extended)
+// Fixes #9887
+function ajaxExtend( target, src ) {
+       var key, deep,
+               flatOptions = jQuery.ajaxSettings.flatOptions || {};
+
+       for ( key in src ) {
+               if ( src[ key ] !== undefined ) {
+                       ( flatOptions[ key ] ? target : ( deep || ( deep = {} ) ) )[ key ] = src[ key ];
+               }
+       }
+       if ( deep ) {
+               jQuery.extend( true, target, deep );
+       }
+
+       return target;
+}
+
+/* Handles responses to an ajax request:
+ * - finds the right dataType (mediates between content-type and expected dataType)
+ * - returns the corresponding response
+ */
+function ajaxHandleResponses( s, jqXHR, responses ) {
+
+       var ct, type, finalDataType, firstDataType,
+               contents = s.contents,
+               dataTypes = s.dataTypes;
+
+       // Remove auto dataType and get content-type in the process
+       while ( dataTypes[ 0 ] === "*" ) {
+               dataTypes.shift();
+               if ( ct === undefined ) {
+                       ct = s.mimeType || jqXHR.getResponseHeader( "Content-Type" );
+               }
+       }
+
+       // Check if we're dealing with a known content-type
+       if ( ct ) {
+               for ( type in contents ) {
+                       if ( contents[ type ] && contents[ type ].test( ct ) ) {
+                               dataTypes.unshift( type );
+                               break;
+                       }
+               }
+       }
+
+       // Check to see if we have a response for the expected dataType
+       if ( dataTypes[ 0 ] in responses ) {
+               finalDataType = dataTypes[ 0 ];
+       } else {
+
+               // Try convertible dataTypes
+               for ( type in responses ) {
+                       if ( !dataTypes[ 0 ] || s.converters[ type + " " + dataTypes[ 0 ] ] ) {
+                               finalDataType = type;
+                               break;
+                       }
+                       if ( !firstDataType ) {
+                               firstDataType = type;
+                       }
+               }
+
+               // Or just use first one
+               finalDataType = finalDataType || firstDataType;
+       }
+
+       // If we found a dataType
+       // We add the dataType to the list if needed
+       // and return the corresponding response
+       if ( finalDataType ) {
+               if ( finalDataType !== dataTypes[ 0 ] ) {
+                       dataTypes.unshift( finalDataType );
+               }
+               return responses[ finalDataType ];
+       }
+}
+
+/* Chain conversions given the request and the original response
+ * Also sets the responseXXX fields on the jqXHR instance
+ */
+function ajaxConvert( s, response, jqXHR, isSuccess ) {
+       var conv2, current, conv, tmp, prev,
+               converters = {},
+
+               // Work with a copy of dataTypes in case we need to modify it for conversion
+               dataTypes = s.dataTypes.slice();
+
+       // Create converters map with lowercased keys
+       if ( dataTypes[ 1 ] ) {
+               for ( conv in s.converters ) {
+                       converters[ conv.toLowerCase() ] = s.converters[ conv ];
+               }
+       }
+
+       current = dataTypes.shift();
+
+       // Convert to each sequential dataType
+       while ( current ) {
+
+               if ( s.responseFields[ current ] ) {
+                       jqXHR[ s.responseFields[ current ] ] = response;
+               }
+
+               // Apply the dataFilter if provided
+               if ( !prev && isSuccess && s.dataFilter ) {
+                       response = s.dataFilter( response, s.dataType );
+               }
+
+               prev = current;
+               current = dataTypes.shift();
+
+               if ( current ) {
+
+                       // There's only work to do if current dataType is non-auto
+                       if ( current === "*" ) {
+
+                               current = prev;
+
+                       // Convert response if prev dataType is non-auto and differs from current
+                       } else if ( prev !== "*" && prev !== current ) {
+
+                               // Seek a direct converter
+                               conv = converters[ prev + " " + current ] || converters[ "* " + current ];
+
+                               // If none found, seek a pair
+                               if ( !conv ) {
+                                       for ( conv2 in converters ) {
+
+                                               // If conv2 outputs current
+                                               tmp = conv2.split( " " );
+                                               if ( tmp[ 1 ] === current ) {
+
+                                                       // If prev can be converted to accepted input
+                                                       conv = converters[ prev + " " + tmp[ 0 ] ] ||
+                                                               converters[ "* " + tmp[ 0 ] ];
+                                                       if ( conv ) {
+
+                                                               // Condense equivalence converters
+                                                               if ( conv === true ) {
+                                                                       conv = converters[ conv2 ];
+
+                                                               // Otherwise, insert the intermediate dataType
+                                                               } else if ( converters[ conv2 ] !== true ) {
+                                                                       current = tmp[ 0 ];
+                                                                       dataTypes.unshift( tmp[ 1 ] );
+                                                               }
+                                                               break;
+                                                       }
+                                               }
+                                       }
+                               }
+
+                               // Apply converter (if not an equivalence)
+                               if ( conv !== true ) {
+
+                                       // Unless errors are allowed to bubble, catch and return them
+                                       if ( conv && s.throws ) {
+                                               response = conv( response );
+                                       } else {
+                                               try {
+                                                       response = conv( response );
+                                               } catch ( e ) {
+                                                       return {
+                                                               state: "parsererror",
+                                                               error: conv ? e : "No conversion from " + prev + " to " + current
+                                                       };
+                                               }
+                                       }
+                               }
+                       }
+               }
+       }
+
+       return { state: "success", data: response };
+}
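+
+// Editorial note: an illustrative sketch, not upstream code. The converter
+// chain above is what makes user-defined dataTypes work: a hypothetical "csv"
+// type only needs a "text csv" converter registered via jQuery.ajaxSetup.
+/*
+jQuery.ajaxSetup( {
+	converters: {
+		"text csv": function( text ) {
+			return text.trim().split( "\n" ).map( function( line ) {
+				return line.split( "," );
+			} );
+		}
+	}
+} );
+// jQuery.ajax( { url: "report.csv", dataType: "csv" } ) would then resolve
+// with an array of rows instead of the raw response text.
+*/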
+
+jQuery.extend( {
+
+       // Counter for holding the number of active queries
+       active: 0,
+
+       // Last-Modified header cache for next request
+       lastModified: {},
+       etag: {},
+
+       ajaxSettings: {
+               url: location.href,
+               type: "GET",
+               isLocal: rlocalProtocol.test( location.protocol ),
+               global: true,
+               processData: true,
+               async: true,
+               contentType: "application/x-www-form-urlencoded; charset=UTF-8",
+
+               /*
+               timeout: 0,
+               data: null,
+               dataType: null,
+               username: null,
+               password: null,
+               cache: null,
+               throws: false,
+               traditional: false,
+               headers: {},
+               */
+
+               accepts: {
+                       "*": allTypes,
+                       text: "text/plain",
+                       html: "text/html",
+                       xml: "application/xml, text/xml",
+                       json: "application/json, text/javascript"
+               },
+
+               contents: {
+                       xml: /\bxml\b/,
+                       html: /\bhtml/,
+                       json: /\bjson\b/
+               },
+
+               responseFields: {
+                       xml: "responseXML",
+                       text: "responseText",
+                       json: "responseJSON"
+               },
+
+               // Data converters
+               // Keys separate source (or catchall "*") and destination types with a single space
+               converters: {
+
+                       // Convert anything to text
+                       "* text": String,
+
+                       // Text to html (true = no transformation)
+                       "text html": true,
+
+                       // Evaluate text as a json expression
+                       "text json": JSON.parse,
+
+                       // Parse text as xml
+                       "text xml": jQuery.parseXML
+               },
+
+               // For options that shouldn't be deep extended:
+               // you can add your own custom options here if
+               // and when you create one that shouldn't be
+               // deep extended (see ajaxExtend)
+               flatOptions: {
+                       url: true,
+                       context: true
+               }
+       },
+
+       // Creates a full fledged settings object into target
+       // with both ajaxSettings and settings fields.
+       // If target is omitted, writes into ajaxSettings.
+       ajaxSetup: function( target, settings ) {
+               return settings ?
+
+                       // Building a settings object
+                       ajaxExtend( ajaxExtend( target, jQuery.ajaxSettings ), settings ) :
+
+                       // Extending ajaxSettings
+                       ajaxExtend( jQuery.ajaxSettings, target );
+       },
+
+       ajaxPrefilter: addToPrefiltersOrTransports( prefilters ),
+       ajaxTransport: addToPrefiltersOrTransports( transports ),
+
+       // Main method
+       ajax: function( url, options ) {
+
+               // If url is an object, simulate pre-1.5 signature
+               if ( typeof url === "object" ) {
+                       options = url;
+                       url = undefined;
+               }
+
+               // Force options to be an object
+               options = options || {};
+
+               var transport,
+
+                       // URL without anti-cache param
+                       cacheURL,
+
+                       // Response headers
+                       responseHeadersString,
+                       responseHeaders,
+
+                       // timeout handle
+                       timeoutTimer,
+
+                       // Url cleanup var
+                       urlAnchor,
+
+                       // Request state (becomes false upon send and true upon completion)
+                       completed,
+
+                       // To know if global events are to be dispatched
+                       fireGlobals,
+
+                       // Loop variable
+                       i,
+
+                       // uncached part of the url
+                       uncached,
+
+                       // Create the final options object
+                       s = jQuery.ajaxSetup( {}, options ),
+
+                       // Callbacks context
+                       callbackContext = s.context || s,
+
+                       // Context for global events is callbackContext if it is a DOM node or jQuery collection
+                       globalEventContext = s.context &&
+                               ( callbackContext.nodeType || callbackContext.jquery ) ?
+                                       jQuery( callbackContext ) :
+                                       jQuery.event,
+
+                       // Deferreds
+                       deferred = jQuery.Deferred(),
+                       completeDeferred = jQuery.Callbacks( "once memory" ),
+
+                       // Status-dependent callbacks
+                       statusCode = s.statusCode || {},
+
+                       // Headers (they are sent all at once)
+                       requestHeaders = {},
+                       requestHeadersNames = {},
+
+                       // Default abort message
+                       strAbort = "canceled",
+
+                       // Fake xhr
+                       jqXHR = {
+                               readyState: 0,
+
+                               // Builds headers hashtable if needed
+                               getResponseHeader: function( key ) {
+                                       var match;
+                                       if ( completed ) {
+                                               if ( !responseHeaders ) {
+                                                       responseHeaders = {};
+                                                       while ( ( match = rheaders.exec( responseHeadersString ) ) ) {
+                                                               responseHeaders[ match[ 1 ].toLowerCase() + " " ] =
+                                                                       ( responseHeaders[ match[ 1 ].toLowerCase() + " " ] || [] )
+                                                                               .concat( match[ 2 ] );
+                                                       }
+                                               }
+                                               match = responseHeaders[ key.toLowerCase() + " " ];
+                                       }
+                                       return match == null ? null : match.join( ", " );
+                               },
+
+                               // Raw string
+                               getAllResponseHeaders: function() {
+                                       return completed ? responseHeadersString : null;
+                               },
+
+                               // Caches the header
+                               setRequestHeader: function( name, value ) {
+                                       if ( completed == null ) {
+                                               name = requestHeadersNames[ name.toLowerCase() ] =
+                                                       requestHeadersNames[ name.toLowerCase() ] || name;
+                                               requestHeaders[ name ] = value;
+                                       }
+                                       return this;
+                               },
+
+                               // Overrides response content-type header
+                               overrideMimeType: function( type ) {
+                                       if ( completed == null ) {
+                                               s.mimeType = type;
+                                       }
+                                       return this;
+                               },
+
+                               // Status-dependent callbacks
+                               statusCode: function( map ) {
+                                       var code;
+                                       if ( map ) {
+                                               if ( completed ) {
+
+                                                       // Execute the appropriate callbacks
+                                                       jqXHR.always( map[ jqXHR.status ] );
+                                               } else {
+
+                                                       // Lazy-add the new callbacks in a way that preserves old ones
+                                                       for ( code in map ) {
+                                                               statusCode[ code ] = [ statusCode[ code ], map[ code ] ];
+                                                       }
+                                               }
+                                       }
+                                       return this;
+                               },
+
+                               // Cancel the request
+                               abort: function( statusText ) {
+                                       var finalText = statusText || strAbort;
+                                       if ( transport ) {
+                                               transport.abort( finalText );
+                                       }
+                                       done( 0, finalText );
+                                       return this;
+                               }
+                       };
+
+               // Attach deferreds
+               deferred.promise( jqXHR );
+
+               // Add protocol if not provided (prefilters might expect it)
+               // Handle falsy url in the settings object (#10093: consistency with old signature)
+               // We also use the url parameter if available
+               s.url = ( ( url || s.url || location.href ) + "" )
+                       .replace( rprotocol, location.protocol + "//" );
+
+               // Alias method option to type as per ticket #12004
+               s.type = options.method || options.type || s.method || s.type;
+
+               // Extract dataTypes list
+               s.dataTypes = ( s.dataType || "*" ).toLowerCase().match( rnothtmlwhite ) || [ "" ];
+
+               // A cross-domain request is in order when the origin doesn't match the current origin.
+               if ( s.crossDomain == null ) {
+                       urlAnchor = document.createElement( "a" );
+
+                       // Support: IE <=8 - 11, Edge 12 - 15
+                       // IE throws exception on accessing the href property if url is malformed,
+                       // e.g. http://example.com:80x/
+                       try {
+                               urlAnchor.href = s.url;
+
+                               // Support: IE <=8 - 11 only
+                               // Anchor's host property isn't correctly set when s.url is relative
+                               urlAnchor.href = urlAnchor.href;
+                               s.crossDomain = originAnchor.protocol + "//" + originAnchor.host !==
+                                       urlAnchor.protocol + "//" + urlAnchor.host;
+                       } catch ( e ) {
+
+                               // If there is an error parsing the URL, assume it is crossDomain,
+                               // it can be rejected by the transport if it is invalid
+                               s.crossDomain = true;
+                       }
+               }
+
+               // Convert data if not already a string
+               if ( s.data && s.processData && typeof s.data !== "string" ) {
+                       s.data = jQuery.param( s.data, s.traditional );
+               }
+
+               // Apply prefilters
+               inspectPrefiltersOrTransports( prefilters, s, options, jqXHR );
+
+               // If request was aborted inside a prefilter, stop there
+               if ( completed ) {
+                       return jqXHR;
+               }
+
+               // We can fire global events as of now if asked to
+               // Don't fire events if jQuery.event is undefined in an AMD-usage scenario (#15118)
+               fireGlobals = jQuery.event && s.global;
+
+               // Watch for a new set of requests
+               if ( fireGlobals && jQuery.active++ === 0 ) {
+                       jQuery.event.trigger( "ajaxStart" );
+               }
+
+               // Uppercase the type
+               s.type = s.type.toUpperCase();
+
+               // Determine if request has content
+               s.hasContent = !rnoContent.test( s.type );
+
+               // Save the URL in case we're toying with the If-Modified-Since
+               // and/or If-None-Match header later on
+               // Remove hash to simplify url manipulation
+               cacheURL = s.url.replace( rhash, "" );
+
+               // More options handling for requests with no content
+               if ( !s.hasContent ) {
+
+                       // Remember the hash so we can put it back
+                       uncached = s.url.slice( cacheURL.length );
+
+                       // If data is available and should be processed, append data to url
+                       if ( s.data && ( s.processData || typeof s.data === "string" ) ) {
+                               cacheURL += ( rquery.test( cacheURL ) ? "&" : "?" ) + s.data;
+
+                               // #9682: remove data so that it's not used in an eventual retry
+                               delete s.data;
+                       }
+
+                       // Add or update anti-cache param if needed
+                       if ( s.cache === false ) {
+                               cacheURL = cacheURL.replace( rantiCache, "$1" );
+                               uncached = ( rquery.test( cacheURL ) ? "&" : "?" ) + "_=" + ( nonce++ ) + uncached;
+                       }
+
+                       // Put hash and anti-cache on the URL that will be requested (gh-1732)
+                       s.url = cacheURL + uncached;
+
+               // Change '%20' to '+' if this is encoded form body content (gh-2658)
+               } else if ( s.data && s.processData &&
+                       ( s.contentType || "" ).indexOf( "application/x-www-form-urlencoded" ) === 0 ) {
+                       s.data = s.data.replace( r20, "+" );
+               }
+
+               // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode.
+               if ( s.ifModified ) {
+                       if ( jQuery.lastModified[ cacheURL ] ) {
+                               jqXHR.setRequestHeader( "If-Modified-Since", jQuery.lastModified[ cacheURL ] );
+                       }
+                       if ( jQuery.etag[ cacheURL ] ) {
+                               jqXHR.setRequestHeader( "If-None-Match", jQuery.etag[ cacheURL ] );
+                       }
+               }
+
+               // Set the correct header, if data is being sent
+               if ( s.data && s.hasContent && s.contentType !== false || options.contentType ) {
+                       jqXHR.setRequestHeader( "Content-Type", s.contentType );
+               }
+
+               // Set the Accepts header for the server, depending on the dataType
+               jqXHR.setRequestHeader(
+                       "Accept",
+                       s.dataTypes[ 0 ] && s.accepts[ s.dataTypes[ 0 ] ] ?
+                               s.accepts[ s.dataTypes[ 0 ] ] +
+                                       ( s.dataTypes[ 0 ] !== "*" ? ", " + allTypes + "; q=0.01" : "" ) :
+                               s.accepts[ "*" ]
+               );
+
+               // Check for headers option
+               for ( i in s.headers ) {
+                       jqXHR.setRequestHeader( i, s.headers[ i ] );
+               }
+
+               // Allow custom headers/mimetypes and early abort
+               if ( s.beforeSend &&
+                       ( s.beforeSend.call( callbackContext, jqXHR, s ) === false || completed ) ) {
+
+                       // Abort if not done already and return
+                       return jqXHR.abort();
+               }
+
+               // Aborting is no longer a cancellation
+               strAbort = "abort";
+
+               // Install callbacks on deferreds
+               completeDeferred.add( s.complete );
+               jqXHR.done( s.success );
+               jqXHR.fail( s.error );
+
+               // Get transport
+               transport = inspectPrefiltersOrTransports( transports, s, options, jqXHR );
+
+               // If no transport, we auto-abort
+               if ( !transport ) {
+                       done( -1, "No Transport" );
+               } else {
+                       jqXHR.readyState = 1;
+
+                       // Send global event
+                       if ( fireGlobals ) {
+                               globalEventContext.trigger( "ajaxSend", [ jqXHR, s ] );
+                       }
+
+                       // If request was aborted inside ajaxSend, stop there
+                       if ( completed ) {
+                               return jqXHR;
+                       }
+
+                       // Timeout
+                       if ( s.async && s.timeout > 0 ) {
+                               timeoutTimer = window.setTimeout( function() {
+                                       jqXHR.abort( "timeout" );
+                               }, s.timeout );
+                       }
+
+                       try {
+                               completed = false;
+                               transport.send( requestHeaders, done );
+                       } catch ( e ) {
+
+                               // Rethrow post-completion exceptions
+                               if ( completed ) {
+                                       throw e;
+                               }
+
+                               // Propagate others as results
+                               done( -1, e );
+                       }
+               }
+
+               // Callback for when everything is done
+               function done( status, nativeStatusText, responses, headers ) {
+                       var isSuccess, success, error, response, modified,
+                               statusText = nativeStatusText;
+
+                       // Ignore repeat invocations
+                       if ( completed ) {
+                               return;
+                       }
+
+                       completed = true;
+
+                       // Clear timeout if it exists
+                       if ( timeoutTimer ) {
+                               window.clearTimeout( timeoutTimer );
+                       }
+
+                       // Dereference transport for early garbage collection
+                       // (no matter how long the jqXHR object will be used)
+                       transport = undefined;
+
+                       // Cache response headers
+                       responseHeadersString = headers || "";
+
+                       // Set readyState
+                       jqXHR.readyState = status > 0 ? 4 : 0;
+
+                       // Determine if successful
+                       isSuccess = status >= 200 && status < 300 || status === 304;
+
+                       // Get response data
+                       if ( responses ) {
+                               response = ajaxHandleResponses( s, jqXHR, responses );
+                       }
+
+                       // Convert no matter what (that way responseXXX fields are always set)
+                       response = ajaxConvert( s, response, jqXHR, isSuccess );
+
+                       // If successful, handle type chaining
+                       if ( isSuccess ) {
+
+                               // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode.
+                               if ( s.ifModified ) {
+                                       modified = jqXHR.getResponseHeader( "Last-Modified" );
+                                       if ( modified ) {
+                                               jQuery.lastModified[ cacheURL ] = modified;
+                                       }
+                                       modified = jqXHR.getResponseHeader( "etag" );
+                                       if ( modified ) {
+                                               jQuery.etag[ cacheURL ] = modified;
+                                       }
+                               }
+
+                               // if no content
+                               if ( status === 204 || s.type === "HEAD" ) {
+                                       statusText = "nocontent";
+
+                               // if not modified
+                               } else if ( status === 304 ) {
+                                       statusText = "notmodified";
+
+                               // If we have data, let's convert it
+                               } else {
+                                       statusText = response.state;
+                                       success = response.data;
+                                       error = response.error;
+                                       isSuccess = !error;
+                               }
+                       } else {
+
+                               // Extract error from statusText and normalize for non-aborts
+                               error = statusText;
+                               if ( status || !statusText ) {
+                                       statusText = "error";
+                                       if ( status < 0 ) {
+                                               status = 0;
+                                       }
+                               }
+                       }
+
+                       // Set data for the fake xhr object
+                       jqXHR.status = status;
+                       jqXHR.statusText = ( nativeStatusText || statusText ) + "";
+
+                       // Success/Error
+                       if ( isSuccess ) {
+                               deferred.resolveWith( callbackContext, [ success, statusText, jqXHR ] );
+                       } else {
+                               deferred.rejectWith( callbackContext, [ jqXHR, statusText, error ] );
+                       }
+
+                       // Status-dependent callbacks
+                       jqXHR.statusCode( statusCode );
+                       statusCode = undefined;
+
+                       if ( fireGlobals ) {
+                               globalEventContext.trigger( isSuccess ? "ajaxSuccess" : "ajaxError",
+                                       [ jqXHR, s, isSuccess ? success : error ] );
+                       }
+
+                       // Complete
+                       completeDeferred.fireWith( callbackContext, [ jqXHR, statusText ] );
+
+                       if ( fireGlobals ) {
+                               globalEventContext.trigger( "ajaxComplete", [ jqXHR, s ] );
+
+                               // Handle the global AJAX counter
+                               if ( !( --jQuery.active ) ) {
+                                       jQuery.event.trigger( "ajaxStop" );
+                               }
+                       }
+               }
+
+               return jqXHR;
+       },
+
+       getJSON: function( url, data, callback ) {
+               return jQuery.get( url, data, callback, "json" );
+       },
+
+       getScript: function( url, callback ) {
+               return jQuery.get( url, undefined, callback, "script" );
+       }
+} );
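+
+// Editorial note: an illustrative sketch of the main entry point defined
+// above, not upstream code; the URL and parameters are hypothetical. The
+// returned jqXHR is promise-like, so done/fail/always mirror the success,
+// error and complete options.
+/*
+jQuery.ajax( {
+	url: "/api/search",
+	type: "GET",
+	dataType: "json",
+	data: { q: "pygments" },   // appended to the URL, since GET has no body
+	timeout: 5000
+} ).done( function( data, textStatus, jqXHR ) {
+	// use the parsed JSON
+} ).fail( function( jqXHR, textStatus, errorThrown ) {
+	// handle "timeout", "error", "abort" or "parsererror"
+} );
+*/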
+
+jQuery.each( [ "get", "post" ], function( i, method ) {
+       jQuery[ method ] = function( url, data, callback, type ) {
+
+               // Shift arguments if data argument was omitted
+               if ( isFunction( data ) ) {
+                       type = type || callback;
+                       callback = data;
+                       data = undefined;
+               }
+
+               // The url can be an options object (which then must have .url)
+               return jQuery.ajax( jQuery.extend( {
+                       url: url,
+                       type: method,
+                       dataType: type,
+                       data: data,
+                       success: callback
+               }, jQuery.isPlainObject( url ) && url ) );
+       };
+} );
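+
+// Editorial note: an illustrative sketch of the shorthands generated above,
+// not upstream code; the URL, data and the onData callback are hypothetical.
+/*
+// Equivalent to jQuery.ajax( { url: "/api/items", type: "get",
+// data: { page: 2 }, dataType: "json", success: onData } ):
+jQuery.get( "/api/items", { page: 2 }, onData, "json" );
+
+// getJSON and getScript simply pre-fill the dataType:
+jQuery.getJSON( "/api/items", onData );
+*/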
+
+
+jQuery._evalUrl = function( url, options ) {
+       return jQuery.ajax( {
+               url: url,
+
+               // Make this explicit, since user can override this through ajaxSetup (#11264)
+               type: "GET",
+               dataType: "script",
+               cache: true,
+               async: false,
+               global: false,
+
+               // Only evaluate the response if it is successful (gh-4126)
+               // dataFilter is not invoked for failure responses, so using it instead
+               // of the default converter is kludgy but it works.
+               converters: {
+                       "text script": function() {}
+               },
+               dataFilter: function( response ) {
+                       jQuery.globalEval( response, options );
+               }
+       } );
+};
+
+
+jQuery.fn.extend( {
+       wrapAll: function( html ) {
+               var wrap;
+
+               if ( this[ 0 ] ) {
+                       if ( isFunction( html ) ) {
+                               html = html.call( this[ 0 ] );
+                       }
+
+                       // The elements to wrap the target around
+                       wrap = jQuery( html, this[ 0 ].ownerDocument ).eq( 0 ).clone( true );
+
+                       if ( this[ 0 ].parentNode ) {
+                               wrap.insertBefore( this[ 0 ] );
+                       }
+
+                       wrap.map( function() {
+                               var elem = this;
+
+                               while ( elem.firstElementChild ) {
+                                       elem = elem.firstElementChild;
+                               }
+
+                               return elem;
+                       } ).append( this );
+               }
+
+               return this;
+       },
+
+       wrapInner: function( html ) {
+               if ( isFunction( html ) ) {
+                       return this.each( function( i ) {
+                               jQuery( this ).wrapInner( html.call( this, i ) );
+                       } );
+               }
+
+               return this.each( function() {
+                       var self = jQuery( this ),
+                               contents = self.contents();
+
+                       if ( contents.length ) {
+                               contents.wrapAll( html );
+
+                       } else {
+                               self.append( html );
+                       }
+               } );
+       },
+
+       wrap: function( html ) {
+               var htmlIsFunction = isFunction( html );
+
+               return this.each( function( i ) {
+                       jQuery( this ).wrapAll( htmlIsFunction ? html.call( this, i ) : html );
+               } );
+       },
+
+       unwrap: function( selector ) {
+               this.parent( selector ).not( "body" ).each( function() {
+                       jQuery( this ).replaceWith( this.childNodes );
+               } );
+               return this;
+       }
+} );
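+
+// Editorial note: an illustrative sketch of the wrapping helpers above, not
+// upstream code; the markup is hypothetical.
+/*
+// Given <p>a</p><p>b</p>:
+jQuery( "p" ).wrapAll( "<div class='group'></div>" ); // one <div> around both paragraphs
+jQuery( "p" ).wrap( "<div></div>" );                  // a separate <div> around each one
+jQuery( "p" ).wrapInner( "<em></em>" );               // an <em> around each paragraph's contents
+jQuery( "p" ).unwrap();                               // remove the wrappers again
+*/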
+
+
+jQuery.expr.pseudos.hidden = function( elem ) {
+       return !jQuery.expr.pseudos.visible( elem );
+};
+jQuery.expr.pseudos.visible = function( elem ) {
+       return !!( elem.offsetWidth || elem.offsetHeight || elem.getClientRects().length );
+};
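+
+// Editorial note: illustrative only, not upstream code. With the definitions
+// above, :visible matches elements that occupy layout space (non-zero
+// offsetWidth/offsetHeight or any client rects) and :hidden is its negation;
+// the selectors are hypothetical.
+/*
+jQuery( "div:visible" ).length;           // count of rendered <div>s
+jQuery( ".panel" ).filter( ":hidden" );   // hidden elements with a made-up class
+*/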
+
+
+
+
+jQuery.ajaxSettings.xhr = function() {
+       try {
+               return new window.XMLHttpRequest();
+       } catch ( e ) {}
+};
+
+var xhrSuccessStatus = {
+
+               // File protocol always yields status code 0, assume 200
+               0: 200,
+
+               // Support: IE <=9 only
+               // #1450: sometimes IE returns 1223 when it should be 204
+               1223: 204
+       },
+       xhrSupported = jQuery.ajaxSettings.xhr();
+
+support.cors = !!xhrSupported && ( "withCredentials" in xhrSupported );
+support.ajax = xhrSupported = !!xhrSupported;
+
+jQuery.ajaxTransport( function( options ) {
+       var callback, errorCallback;
+
+       // Cross domain only allowed if supported through XMLHttpRequest
+       if ( support.cors || xhrSupported && !options.crossDomain ) {
+               return {
+                       send: function( headers, complete ) {
+                               var i,
+                                       xhr = options.xhr();
+
+                               xhr.open(
+                                       options.type,
+                                       options.url,
+                                       options.async,
+                                       options.username,
+                                       options.password
+                               );
+
+                               // Apply custom fields if provided
+                               if ( options.xhrFields ) {
+                                       for ( i in options.xhrFields ) {
+                                               xhr[ i ] = options.xhrFields[ i ];
+                                       }
+                               }
+
+                               // Override mime type if needed
+                               if ( options.mimeType && xhr.overrideMimeType ) {
+                                       xhr.overrideMimeType( options.mimeType );
+                               }
+
+                               // X-Requested-With header
+                               // For cross-domain requests, seeing as conditions for a preflight are
+                               // akin to a jigsaw puzzle, we simply never set it to be sure.
+                               // (it can always be set on a per-request basis or even using ajaxSetup)
+                               // For same-domain requests, won't change header if already provided.
+                               if ( !options.crossDomain && !headers[ "X-Requested-With" ] ) {
+                                       headers[ "X-Requested-With" ] = "XMLHttpRequest";
+                               }
+
+                               // Set headers
+                               for ( i in headers ) {
+                                       xhr.setRequestHeader( i, headers[ i ] );
+                               }
+
+                               // Callback
+                               callback = function( type ) {
+                                       return function() {
+                                               if ( callback ) {
+                                                       callback = errorCallback = xhr.onload =
+                                                               xhr.onerror = xhr.onabort = xhr.ontimeout =
+                                                                       xhr.onreadystatechange = null;
+
+                                                       if ( type === "abort" ) {
+                                                               xhr.abort();
+                                                       } else if ( type === "error" ) {
+
+                                                               // Support: IE <=9 only
+                                                               // On a manual native abort, IE9 throws
+                                                               // errors on any property access that is not readyState
+                                                               if ( typeof xhr.status !== "number" ) {
+                                                                       complete( 0, "error" );
+                                                               } else {
+                                                                       complete(
+
+                                                                               // File: protocol always yields status 0; see #8605, #14207
+                                                                               xhr.status,
+                                                                               xhr.statusText
+                                                                       );
+                                                               }
+                                                       } else {
+                                                               complete(
+                                                                       xhrSuccessStatus[ xhr.status ] || xhr.status,
+                                                                       xhr.statusText,
+
+                                                                       // Support: IE <=9 only
+                                                                       // IE9 has no XHR2 but throws on binary (trac-11426)
+                                                                       // For XHR2 non-text, let the caller handle it (gh-2498)
+                                                                       ( xhr.responseType || "text" ) !== "text"  ||
+                                                                       typeof xhr.responseText !== "string" ?
+                                                                               { binary: xhr.response } :
+                                                                               { text: xhr.responseText },
+                                                                       xhr.getAllResponseHeaders()
+                                                               );
+                                                       }
+                                               }
+                                       };
+                               };
+
+                               // Listen to events
+                               xhr.onload = callback();
+                               errorCallback = xhr.onerror = xhr.ontimeout = callback( "error" );
+
+                               // Support: IE 9 only
+                               // Use onreadystatechange to replace onabort
+                               // to handle uncaught aborts
+                               if ( xhr.onabort !== undefined ) {
+                                       xhr.onabort = errorCallback;
+                               } else {
+                                       xhr.onreadystatechange = function() {
+
+                                               // Check readyState before timeout as it changes
+                                               if ( xhr.readyState === 4 ) {
+
+                                                       // Allow onerror to be called first,
+                                                       // but that will not handle a native abort
+                                                       // Also, save errorCallback to a variable
+                                                       // as xhr.onerror cannot be accessed
+                                                       window.setTimeout( function() {
+                                                               if ( callback ) {
+                                                                       errorCallback();
+                                                               }
+                                                       } );
+                                               }
+                                       };
+                               }
+
+                               // Create the abort callback
+                               callback = callback( "abort" );
+
+                               try {
+
+                                       // Do send the request (this may raise an exception)
+                                       xhr.send( options.hasContent && options.data || null );
+                               } catch ( e ) {
+
+                                       // #14683: Only rethrow if this hasn't been notified as an error yet
+                                       if ( callback ) {
+                                               throw e;
+                                       }
+                               }
+                       },
+
+                       abort: function() {
+                               if ( callback ) {
+                                       callback();
+                               }
+                       }
+               };
+       }
+} );
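+
+// Editorial note: an illustrative sketch, not upstream code. The transport
+// above copies options.xhrFields onto the native XMLHttpRequest, which is how
+// per-request XHR settings such as withCredentials are applied; the URL is
+// hypothetical.
+/*
+jQuery.ajax( {
+	url: "https://api.example.com/profile",
+	dataType: "json",
+	xhrFields: {
+		withCredentials: true   // send cookies on this cross-origin request
+	}
+} );
+*/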
+
+
+
+
+// Prevent auto-execution of scripts when no explicit dataType was provided (See gh-2432)
+jQuery.ajaxPrefilter( function( s ) {
+       if ( s.crossDomain ) {
+               s.contents.script = false;
+       }
+} );
+
+// Install script dataType
+jQuery.ajaxSetup( {
+       accepts: {
+               script: "text/javascript, application/javascript, " +
+                       "application/ecmascript, application/x-ecmascript"
+       },
+       contents: {
+               script: /\b(?:java|ecma)script\b/
+       },
+       converters: {
+               "text script": function( text ) {
+                       jQuery.globalEval( text );
+                       return text;
+               }
+       }
+} );
+
+// Handle cache's special case and crossDomain
+jQuery.ajaxPrefilter( "script", function( s ) {
+       if ( s.cache === undefined ) {
+               s.cache = false;
+       }
+       if ( s.crossDomain ) {
+               s.type = "GET";
+       }
+} );
+
+// Bind script tag hack transport
+jQuery.ajaxTransport( "script", function( s ) {
+
+       // This transport only deals with cross domain or forced-by-attrs requests
+       if ( s.crossDomain || s.scriptAttrs ) {
+               var script, callback;
+               return {
+                       send: function( _, complete ) {
+                               script = jQuery( "<script>" )
+                                       .attr( s.scriptAttrs || {} )
+                                       .prop( { charset: s.scriptCharset, src: s.url } )
+                                       .on( "load error", callback = function( evt ) {
+                                               script.remove();
+                                               callback = null;
+                                               if ( evt ) {
+                                                       complete( evt.type === "error" ? 404 : 200, evt.type );
+                                               }
+                                       } );
+
+                               // Use native DOM manipulation to avoid our domManip AJAX trickery
+                               document.head.appendChild( script[ 0 ] );
+                       },
+                       abort: function() {
+                               if ( callback ) {
+                                       callback();
+                               }
+                       }
+               };
+       }
+} );
+
+
+
+
+var oldCallbacks = [],
+       rjsonp = /(=)\?(?=&|$)|\?\?/;
+
+// Default jsonp settings
+jQuery.ajaxSetup( {
+       jsonp: "callback",
+       jsonpCallback: function() {
+               var callback = oldCallbacks.pop() || ( jQuery.expando + "_" + ( nonce++ ) );
+               this[ callback ] = true;
+               return callback;
+       }
+} );
+
+// Detect, normalize options and install callbacks for jsonp requests
+jQuery.ajaxPrefilter( "json jsonp", function( s, originalSettings, jqXHR ) {
+
+       var callbackName, overwritten, responseContainer,
+               jsonProp = s.jsonp !== false && ( rjsonp.test( s.url ) ?
+                       "url" :
+                       typeof s.data === "string" &&
+                               ( s.contentType || "" )
+                                       .indexOf( "application/x-www-form-urlencoded" ) === 0 &&
+                               rjsonp.test( s.data ) && "data"
+               );
+
+       // Handle iff the expected data type is "jsonp" or we have a parameter to set
+       if ( jsonProp || s.dataTypes[ 0 ] === "jsonp" ) {
+
+               // Get callback name, remembering preexisting value associated with it
+               callbackName = s.jsonpCallback = isFunction( s.jsonpCallback ) ?
+                       s.jsonpCallback() :
+                       s.jsonpCallback;
+
+               // Insert callback into url or form data
+               if ( jsonProp ) {
+                       s[ jsonProp ] = s[ jsonProp ].replace( rjsonp, "$1" + callbackName );
+               } else if ( s.jsonp !== false ) {
+                       s.url += ( rquery.test( s.url ) ? "&" : "?" ) + s.jsonp + "=" + callbackName;
+               }
+
+               // Use data converter to retrieve json after script execution
+               s.converters[ "script json" ] = function() {
+                       if ( !responseContainer ) {
+                               jQuery.error( callbackName + " was not called" );
+                       }
+                       return responseContainer[ 0 ];
+               };
+
+               // Force json dataType
+               s.dataTypes[ 0 ] = "json";
+
+               // Install callback
+               overwritten = window[ callbackName ];
+               window[ callbackName ] = function() {
+                       responseContainer = arguments;
+               };
+
+               // Clean-up function (fires after converters)
+               jqXHR.always( function() {
+
+                       // If previous value didn't exist - remove it
+                       if ( overwritten === undefined ) {
+                               jQuery( window ).removeProp( callbackName );
+
+                       // Otherwise restore preexisting value
+                       } else {
+                               window[ callbackName ] = overwritten;
+                       }
+
+                       // Save back as free
+                       if ( s[ callbackName ] ) {
+
+                               // Make sure that re-using the options doesn't screw things around
+                               s.jsonpCallback = originalSettings.jsonpCallback;
+
+                               // Save the callback name for future use
+                               oldCallbacks.push( callbackName );
+                       }
+
+                       // Call if it was a function and we have a response
+                       if ( responseContainer && isFunction( overwritten ) ) {
+                               overwritten( responseContainer[ 0 ] );
+                       }
+
+                       responseContainer = overwritten = undefined;
+               } );
+
+               // Delegate to script
+               return "script";
+       }
+} );
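+
+// Editor's illustrative note (not part of the upstream patch): a minimal sketch of how the
+// JSONP prefilter above is exercised from user code. The URL and handlers are hypothetical;
+// the "callback=?" placeholder is what rjsonp rewrites to the generated callback name.
+//
+//     jQuery.ajax( {
+//         url: "https://api.example.com/items?callback=?",
+//         dataType: "jsonp"
+//     } ).done( function( data ) {
+//         console.log( "JSONP response:", data );
+//     } ).fail( function( jqXHR, status ) {
+//         console.log( "JSONP request failed:", status );
+//     } );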
+
+
+
+
+// Support: Safari 8 only
+// In Safari 8 documents created via document.implementation.createHTMLDocument
+// collapse sibling forms: the second one becomes a child of the first one.
+// Because of that, this security measure has to be disabled in Safari 8.
+// https://bugs.webkit.org/show_bug.cgi?id=137337
+support.createHTMLDocument = ( function() {
+       var body = document.implementation.createHTMLDocument( "" ).body;
+       body.innerHTML = "<form></form><form></form>";
+       return body.childNodes.length === 2;
+} )();
+
+
+// Argument "data" should be a string of HTML
+// context (optional): if specified, the fragment will be created in this context;
+// defaults to the document
+// keepScripts (optional): if true, scripts passed in the HTML string will be included
+jQuery.parseHTML = function( data, context, keepScripts ) {
+       if ( typeof data !== "string" ) {
+               return [];
+       }
+       if ( typeof context === "boolean" ) {
+               keepScripts = context;
+               context = false;
+       }
+
+       var base, parsed, scripts;
+
+       if ( !context ) {
+
+               // Stop scripts or inline event handlers from being executed immediately
+               // by using document.implementation
+               if ( support.createHTMLDocument ) {
+                       context = document.implementation.createHTMLDocument( "" );
+
+                       // Set the base href for the created document
+                       // so any parsed elements with URLs
+                       // are based on the document's URL (gh-2965)
+                       base = context.createElement( "base" );
+                       base.href = document.location.href;
+                       context.head.appendChild( base );
+               } else {
+                       context = document;
+               }
+       }
+
+       parsed = rsingleTag.exec( data );
+       scripts = !keepScripts && [];
+
+       // Single tag
+       if ( parsed ) {
+               return [ context.createElement( parsed[ 1 ] ) ];
+       }
+
+       parsed = buildFragment( [ data ], context, scripts );
+
+       if ( scripts && scripts.length ) {
+               jQuery( scripts ).remove();
+       }
+
+       return jQuery.merge( [], parsed.childNodes );
+};
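+
+// Editor's illustrative note (not part of the upstream patch): a minimal usage sketch of
+// jQuery.parseHTML as defined above; the markup and target selector are hypothetical.
+//
+//     var nodes = jQuery.parseHTML( "<p>first</p><p>second</p>" );   // array of two <p> elements
+//     jQuery( "#log" ).append( nodes );
+//
+//     // Scripts are stripped by default; pass a context document and keepScripts = true
+//     // as the second and third arguments to retain them.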
+
+
+/**
+ * Load a url into a page
+ */
+jQuery.fn.load = function( url, params, callback ) {
+       var selector, type, response,
+               self = this,
+               off = url.indexOf( " " );
+
+       if ( off > -1 ) {
+               selector = stripAndCollapse( url.slice( off ) );
+               url = url.slice( 0, off );
+       }
+
+       // If it's a function
+       if ( isFunction( params ) ) {
+
+               // We assume that it's the callback
+               callback = params;
+               params = undefined;
+
+       // Otherwise, build a param string
+       } else if ( params && typeof params === "object" ) {
+               type = "POST";
+       }
+
+       // If we have elements to modify, make the request
+       if ( self.length > 0 ) {
+               jQuery.ajax( {
+                       url: url,
+
+                       // If the "type" variable is undefined, the "GET" method will be used.
+                       // Make the value of this field explicit since
+                       // the user can override it through the ajaxSetup method
+                       type: type || "GET",
+                       dataType: "html",
+                       data: params
+               } ).done( function( responseText ) {
+
+                       // Save response for use in complete callback
+                       response = arguments;
+
+                       self.html( selector ?
+
+                               // If a selector was specified, locate the right elements in a dummy div
+                               // Exclude scripts to avoid IE 'Permission Denied' errors
+                               jQuery( "<div>" ).append( jQuery.parseHTML( responseText ) ).find( selector ) :
+
+                               // Otherwise use the full result
+                               responseText );
+
+               // If the request succeeds, this function gets "data", "status", "jqXHR"
+               // but they are ignored because response was set above.
+               // If it fails, this function gets "jqXHR", "status", "error"
+               } ).always( callback && function( jqXHR, status ) {
+                       self.each( function() {
+                               callback.apply( this, response || [ jqXHR.responseText, status, jqXHR ] );
+                       } );
+               } );
+       }
+
+       return this;
+};
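+
+// Editor's illustrative note (not part of the upstream patch): a usage sketch of .load() as
+// defined above. The URL, selector and element id are hypothetical; a space followed by a
+// selector in the url argument inserts only the matching fragment of the response.
+//
+//     jQuery( "#result" ).load( "/partials/list.html #items", function( response, status ) {
+//         console.log( "load finished with status:", status );
+//     } );
+//
+//     // Passing an object as the second argument switches the request to POST:
+//     jQuery( "#result" ).load( "/search.html", { q: "pygments" } );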
+
+
+
+
+// Attach a bunch of functions for handling common AJAX events
+jQuery.each( [
+       "ajaxStart",
+       "ajaxStop",
+       "ajaxComplete",
+       "ajaxError",
+       "ajaxSuccess",
+       "ajaxSend"
+], function( i, type ) {
+       jQuery.fn[ type ] = function( fn ) {
+               return this.on( type, fn );
+       };
+} );
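+
+// Editor's illustrative note (not part of the upstream patch): the shorthands above simply
+// delegate to .on(). In jQuery 3.x the global AJAX events fire only on document, so a
+// typical (hypothetical) use is toggling a global spinner:
+//
+//     jQuery( document ).ajaxStart( function() {
+//         jQuery( "#spinner" ).show();
+//     } ).ajaxStop( function() {
+//         jQuery( "#spinner" ).hide();
+//     } );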
+
+
+
+
+jQuery.expr.pseudos.animated = function( elem ) {
+       return jQuery.grep( jQuery.timers, function( fn ) {
+               return elem === fn.elem;
+       } ).length;
+};
+
+
+
+
+jQuery.offset = {
+       setOffset: function( elem, options, i ) {
+               var curPosition, curLeft, curCSSTop, curTop, curOffset, curCSSLeft, calculatePosition,
+                       position = jQuery.css( elem, "position" ),
+                       curElem = jQuery( elem ),
+                       props = {};
+
+               // Set position first, in case top/left are set even on a static elem
+               if ( position === "static" ) {
+                       elem.style.position = "relative";
+               }
+
+               curOffset = curElem.offset();
+               curCSSTop = jQuery.css( elem, "top" );
+               curCSSLeft = jQuery.css( elem, "left" );
+               calculatePosition = ( position === "absolute" || position === "fixed" ) &&
+                       ( curCSSTop + curCSSLeft ).indexOf( "auto" ) > -1;
+
+               // Need to be able to calculate position if either
+               // top or left is auto and position is either absolute or fixed
+               if ( calculatePosition ) {
+                       curPosition = curElem.position();
+                       curTop = curPosition.top;
+                       curLeft = curPosition.left;
+
+               } else {
+                       curTop = parseFloat( curCSSTop ) || 0;
+                       curLeft = parseFloat( curCSSLeft ) || 0;
+               }
+
+               if ( isFunction( options ) ) {
+
+                       // Use jQuery.extend here to allow modification of coordinates argument (gh-1848)
+                       options = options.call( elem, i, jQuery.extend( {}, curOffset ) );
+               }
+
+               if ( options.top != null ) {
+                       props.top = ( options.top - curOffset.top ) + curTop;
+               }
+               if ( options.left != null ) {
+                       props.left = ( options.left - curOffset.left ) + curLeft;
+               }
+
+               if ( "using" in options ) {
+                       options.using.call( elem, props );
+
+               } else {
+                       curElem.css( props );
+               }
+       }
+};
+
+jQuery.fn.extend( {
+
+       // offset() relates an element's border box to the document origin
+       offset: function( options ) {
+
+               // Preserve chaining for setter
+               if ( arguments.length ) {
+                       return options === undefined ?
+                               this :
+                               this.each( function( i ) {
+                                       jQuery.offset.setOffset( this, options, i );
+                               } );
+               }
+
+               var rect, win,
+                       elem = this[ 0 ];
+
+               if ( !elem ) {
+                       return;
+               }
+
+               // Return zeros for disconnected and hidden (display: none) elements (gh-2310)
+               // Support: IE <=11 only
+               // Running getBoundingClientRect on a
+               // disconnected node in IE throws an error
+               if ( !elem.getClientRects().length ) {
+                       return { top: 0, left: 0 };
+               }
+
+               // Get document-relative position by adding viewport scroll to viewport-relative gBCR
+               rect = elem.getBoundingClientRect();
+               win = elem.ownerDocument.defaultView;
+               return {
+                       top: rect.top + win.pageYOffset,
+                       left: rect.left + win.pageXOffset
+               };
+       },
+
+       // position() relates an element's margin box to its offset parent's padding box
+       // This corresponds to the behavior of CSS absolute positioning
+       position: function() {
+               if ( !this[ 0 ] ) {
+                       return;
+               }
+
+               var offsetParent, offset, doc,
+                       elem = this[ 0 ],
+                       parentOffset = { top: 0, left: 0 };
+
+               // position:fixed elements are offset from the viewport, which itself always has zero offset
+               if ( jQuery.css( elem, "position" ) === "fixed" ) {
+
+                       // Assume position:fixed implies availability of getBoundingClientRect
+                       offset = elem.getBoundingClientRect();
+
+               } else {
+                       offset = this.offset();
+
+                       // Account for the *real* offset parent, which can be the document or its root element
+                       // when a statically positioned element is identified
+                       doc = elem.ownerDocument;
+                       offsetParent = elem.offsetParent || doc.documentElement;
+                       while ( offsetParent &&
+                               ( offsetParent === doc.body || offsetParent === doc.documentElement ) &&
+                               jQuery.css( offsetParent, "position" ) === "static" ) {
+
+                               offsetParent = offsetParent.parentNode;
+                       }
+                       if ( offsetParent && offsetParent !== elem && offsetParent.nodeType === 1 ) {
+
+                               // Incorporate borders into its offset, since they are outside its content origin
+                               parentOffset = jQuery( offsetParent ).offset();
+                               parentOffset.top += jQuery.css( offsetParent, "borderTopWidth", true );
+                               parentOffset.left += jQuery.css( offsetParent, "borderLeftWidth", true );
+                       }
+               }
+
+               // Subtract parent offsets and element margins
+               return {
+                       top: offset.top - parentOffset.top - jQuery.css( elem, "marginTop", true ),
+                       left: offset.left - parentOffset.left - jQuery.css( elem, "marginLeft", true )
+               };
+       },
+
+       // This method will return documentElement in the following cases:
+       // 1) For an element inside an iframe without an offsetParent, this method returns
+       //    the documentElement of the parent window
+       // 2) For a hidden or detached element
+       // 3) For the body or html element, i.e. for the html node it returns itself
+       //
+       // Those exceptions were never presented as real-life use cases,
+       // and the results they produce might even be considered preferable.
+       //
+       // This logic, however, is not guaranteed and can change at any point in the future
+       offsetParent: function() {
+               return this.map( function() {
+                       var offsetParent = this.offsetParent;
+
+                       while ( offsetParent && jQuery.css( offsetParent, "position" ) === "static" ) {
+                               offsetParent = offsetParent.offsetParent;
+                       }
+
+                       return offsetParent || documentElement;
+               } );
+       }
+} );
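+
+// Editor's illustrative note (not part of the upstream patch): a sketch contrasting the
+// getters and the setter defined above; element ids are hypothetical.
+//
+//     var el = jQuery( "#box" );
+//     el.offset();            // { top, left } relative to the document
+//     el.position();          // { top, left } relative to the offset parent's padding box
+//     el.offsetParent();      // closest positioned ancestor (or documentElement)
+//
+//     // Setter form: move the element so its border box lands at document coordinates (100, 50)
+//     el.offset( { top: 100, left: 50 } );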
+
+// Create scrollLeft and scrollTop methods
+jQuery.each( { scrollLeft: "pageXOffset", scrollTop: "pageYOffset" }, function( method, prop ) {
+       var top = "pageYOffset" === prop;
+
+       jQuery.fn[ method ] = function( val ) {
+               return access( this, function( elem, method, val ) {
+
+                       // Coalesce documents and windows
+                       var win;
+                       if ( isWindow( elem ) ) {
+                               win = elem;
+                       } else if ( elem.nodeType === 9 ) {
+                               win = elem.defaultView;
+                       }
+
+                       if ( val === undefined ) {
+                               return win ? win[ prop ] : elem[ method ];
+                       }
+
+                       if ( win ) {
+                               win.scrollTo(
+                                       !top ? val : win.pageXOffset,
+                                       top ? val : win.pageYOffset
+                               );
+
+                       } else {
+                               elem[ method ] = val;
+                       }
+               }, method, val, arguments.length );
+       };
+} );
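+
+// Editor's illustrative note (not part of the upstream patch): usage sketch for the
+// scrollTop/scrollLeft accessors generated above; the element id is hypothetical.
+//
+//     jQuery( window ).scrollTop();        // current vertical scroll of the window
+//     jQuery( window ).scrollTop( 0 );     // scroll back to the top
+//     jQuery( "#pane" ).scrollLeft( 120 ); // scroll an overflowing element horizontally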
+
+// Support: Safari <=7 - 9.1, Chrome <=37 - 49
+// Add the top/left cssHooks using jQuery.fn.position
+// Webkit bug: https://bugs.webkit.org/show_bug.cgi?id=29084
+// Blink bug: https://bugs.chromium.org/p/chromium/issues/detail?id=589347
+// getComputedStyle returns percent when specified for top/left/bottom/right;
+// rather than make the css module depend on the offset module, just check for it here
+jQuery.each( [ "top", "left" ], function( i, prop ) {
+       jQuery.cssHooks[ prop ] = addGetHookIf( support.pixelPosition,
+               function( elem, computed ) {
+                       if ( computed ) {
+                               computed = curCSS( elem, prop );
+
+                               // If curCSS returns percentage, fallback to offset
+                               return rnumnonpx.test( computed ) ?
+                                       jQuery( elem ).position()[ prop ] + "px" :
+                                       computed;
+                       }
+               }
+       );
+} );
+
+
+// Create innerHeight, innerWidth, height, width, outerHeight and outerWidth methods
+jQuery.each( { Height: "height", Width: "width" }, function( name, type ) {
+       jQuery.each( { padding: "inner" + name, content: type, "": "outer" + name },
+               function( defaultExtra, funcName ) {
+
+               // Margin is only for outerHeight, outerWidth
+               jQuery.fn[ funcName ] = function( margin, value ) {
+                       var chainable = arguments.length && ( defaultExtra || typeof margin !== "boolean" ),
+                               extra = defaultExtra || ( margin === true || value === true ? "margin" : "border" );
+
+                       return access( this, function( elem, type, value ) {
+                               var doc;
+
+                               if ( isWindow( elem ) ) {
+
+                                       // $( window ).outerWidth/Height return w/h including scrollbars (gh-1729)
+                                       return funcName.indexOf( "outer" ) === 0 ?
+                                               elem[ "inner" + name ] :
+                                               elem.document.documentElement[ "client" + name ];
+                               }
+
+                               // Get document width or height
+                               if ( elem.nodeType === 9 ) {
+                                       doc = elem.documentElement;
+
+                                       // Either scroll[Width/Height] or offset[Width/Height] or client[Width/Height],
+                                       // whichever is greatest
+                                       return Math.max(
+                                               elem.body[ "scroll" + name ], doc[ "scroll" + name ],
+                                               elem.body[ "offset" + name ], doc[ "offset" + name ],
+                                               doc[ "client" + name ]
+                                       );
+                               }
+
+                               return value === undefined ?
+
+                                       // Get width or height on the element, requesting but not forcing parseFloat
+                                       jQuery.css( elem, type, extra ) :
+
+                                       // Set width or height on the element
+                                       jQuery.style( elem, type, value, extra );
+                       }, type, chainable ? margin : undefined, chainable );
+               };
+       } );
+} );
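+
+// Editor's illustrative note (not part of the upstream patch): the loop above generates six
+// getter/setter methods per axis. A sketch of what each one measures (element id hypothetical):
+//
+//     var el = jQuery( "#box" );
+//     el.width();                  // content box
+//     el.innerWidth();             // content + padding
+//     el.outerWidth();             // content + padding + border
+//     el.outerWidth( true );       // content + padding + border + margin
+//     jQuery( window ).width();    // viewport width, excluding scrollbars
+//     jQuery( document ).height(); // full document height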
+
+
+jQuery.each( ( "blur focus focusin focusout resize scroll click dblclick " +
+       "mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave " +
+       "change select submit keydown keypress keyup contextmenu" ).split( " " ),
+       function( i, name ) {
+
+       // Handle event binding
+       jQuery.fn[ name ] = function( data, fn ) {
+               return arguments.length > 0 ?
+                       this.on( name, null, data, fn ) :
+                       this.trigger( name );
+       };
+} );
+
+jQuery.fn.extend( {
+       hover: function( fnOver, fnOut ) {
+               return this.mouseenter( fnOver ).mouseleave( fnOut || fnOver );
+       }
+} );
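+
+// Editor's illustrative note (not part of the upstream patch): the generated shorthands are
+// thin wrappers around .on() and .trigger(); selectors and handlers below are hypothetical.
+//
+//     jQuery( "#save" ).click( function() {          // same as .on( "click", fn )
+//         console.log( "clicked" );
+//     } );
+//     jQuery( "#save" ).click();                     // no arguments: same as .trigger( "click" )
+//
+//     jQuery( "#menu" ).hover(
+//         function() { jQuery( this ).addClass( "open" ); },
+//         function() { jQuery( this ).removeClass( "open" ); }
+//     );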
+
+
+
+
+jQuery.fn.extend( {
+
+       bind: function( types, data, fn ) {
+               return this.on( types, null, data, fn );
+       },
+       unbind: function( types, fn ) {
+               return this.off( types, null, fn );
+       },
+
+       delegate: function( selector, types, data, fn ) {
+               return this.on( types, selector, data, fn );
+       },
+       undelegate: function( selector, types, fn ) {
+
+               // ( namespace ) or ( selector, types [, fn] )
+               return arguments.length === 1 ?
+                       this.off( selector, "**" ) :
+                       this.off( types, selector || "**", fn );
+       }
+} );
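+
+// Editor's illustrative note (not part of the upstream patch): these legacy wrappers map
+// directly onto .on() / .off(); the pairs below are equivalent (selector and handler hypothetical).
+//
+//     jQuery( "#list" ).delegate( "li", "click", handler );
+//     jQuery( "#list" ).on( "click", "li", handler );
+//
+//     jQuery( "#list" ).undelegate( "li", "click", handler );
+//     jQuery( "#list" ).off( "click", "li", handler );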
+
+// Bind a function to a context, optionally partially applying any
+// arguments.
+// jQuery.proxy is deprecated to promote standards (specifically Function#bind)
+// However, it is not slated for removal any time soon
+jQuery.proxy = function( fn, context ) {
+       var tmp, args, proxy;
+
+       if ( typeof context === "string" ) {
+               tmp = fn[ context ];
+               context = fn;
+               fn = tmp;
+       }
+
+       // Quick check to determine if the target is callable; in the spec
+       // this throws a TypeError, but we just return undefined.
+       if ( !isFunction( fn ) ) {
+               return undefined;
+       }
+
+       // Simulated bind
+       args = slice.call( arguments, 2 );
+       proxy = function() {
+               return fn.apply( context || this, args.concat( slice.call( arguments ) ) );
+       };
+
+       // Set the guid of unique handler to the same of original handler, so it can be removed
+       proxy.guid = fn.guid = fn.guid || jQuery.guid++;
+
+       return proxy;
+};
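+
+// Editor's illustrative note (not part of the upstream patch): jQuery.proxy mirrors
+// Function#bind, which is the recommended replacement; the names below are hypothetical.
+//
+//     var counter = {
+//         count: 0,
+//         tick: function() { this.count++; }
+//     };
+//     jQuery( "#btn" ).on( "click", jQuery.proxy( counter.tick, counter ) );
+//
+//     // Equivalent modern form:
+//     jQuery( "#btn" ).on( "click", counter.tick.bind( counter ) );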
+
+jQuery.holdReady = function( hold ) {
+       if ( hold ) {
+               jQuery.readyWait++;
+       } else {
+               jQuery.ready( true );
+       }
+};
+jQuery.isArray = Array.isArray;
+jQuery.parseJSON = JSON.parse;
+jQuery.nodeName = nodeName;
+jQuery.isFunction = isFunction;
+jQuery.isWindow = isWindow;
+jQuery.camelCase = camelCase;
+jQuery.type = toType;
+
+jQuery.now = Date.now;
+
+jQuery.isNumeric = function( obj ) {
+
+       // As of jQuery 3.0, isNumeric is limited to
+       // strings and numbers (primitives or objects)
+       // that can be coerced to finite numbers (gh-2662)
+       var type = jQuery.type( obj );
+       return ( type === "number" || type === "string" ) &&
+
+               // parseFloat NaNs numeric-cast false positives ("")
+               // ...but misinterprets leading-number strings, particularly hex literals ("0x...")
+               // subtraction forces infinities to NaN
+               !isNaN( obj - parseFloat( obj ) );
+};
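+
+// Editor's illustrative note (not part of the upstream patch): sample results for the
+// jQuery.isNumeric definition above.
+//
+//     jQuery.isNumeric( 42 );         // true
+//     jQuery.isNumeric( "42.5" );     // true  (numeric string)
+//     jQuery.isNumeric( "" );         // false (parseFloat yields NaN)
+//     jQuery.isNumeric( "7.2abc" );   // false
+//     jQuery.isNumeric( Infinity );   // false (subtraction forces NaN)
+//     jQuery.isNumeric( [] );         // false (type is "array", not number/string)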
+
+
+
+
+// Register as a named AMD module, since jQuery can be concatenated with other
+// files that may use define, but not via a proper concatenation script that
+// understands anonymous AMD modules. A named AMD module is the safest and most
+// robust way to register. Lowercase jquery is used because AMD module names are
+// derived from file names, and jQuery is normally delivered in a lowercase
+// file name. Do this after creating the global so that if an AMD module wants
+// to call noConflict to hide this version of jQuery, it will work.
+
+// Note that for maximum portability, libraries that are not jQuery should
+// declare themselves as anonymous modules, and avoid setting a global if an
+// AMD loader is present. jQuery is a special case. For more information, see
+// https://github.com/jrburke/requirejs/wiki/Updating-existing-libraries#wiki-anon
+
+if ( typeof define === "function" && define.amd ) {
+       define( "jquery", [], function() {
+               return jQuery;
+       } );
+}
+
+
+
+
+var
+
+       // Map over jQuery in case of overwrite
+       _jQuery = window.jQuery,
+
+       // Map over the $ in case of overwrite
+       _$ = window.$;
+
+jQuery.noConflict = function( deep ) {
+       if ( window.$ === jQuery ) {
+               window.$ = _$;
+       }
+
+       if ( deep && window.jQuery === jQuery ) {
+               window.jQuery = _jQuery;
+       }
+
+       return jQuery;
+};
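+
+// Editor's illustrative note (not part of the upstream patch): usage sketch for noConflict().
+//
+//     var jq = jQuery.noConflict();   // restore the previous window.$
+//     jq( "#box" ).hide();            // this copy of jQuery remains usable via jq
+//
+//     jQuery.noConflict( true );      // deep: also restore the previous window.jQuery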
+
+// Expose jQuery and $ identifiers, even in AMD
+// (#7102#comment:10, https://github.com/jquery/jquery/pull/557)
+// and CommonJS for browser emulators (#13566)
+if ( !noGlobal ) {
+       window.jQuery = window.$ = jQuery;
+}
+
+
+
+
+return jQuery;
+} );
diff --git a/doc/_build/html/_static/jquery.js b/doc/_build/html/_static/jquery.js
new file mode 100644 (file)
index 0000000..a1c07fd
--- /dev/null
@@ -0,0 +1,2 @@
+/*! jQuery v3.4.1 | (c) JS Foundation and other contributors | jquery.org/license */
+!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],E=C.document,r=Object.getPrototypeOf,s=t.slice,g=t.concat,u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l=a.call(Object),y={},m=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType},x=function(e){return null!=e&&e===e.window},c={type:!0,src:!0,nonce:!0,noModule:!0};function b(e,t,n){var r,i,o=(n=n||E).createElement("script");if(o.text=e,t)for(r in c)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function w(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[o.call(e)]||"object":typeof e}var f="3.4.1",k=function(e,t){return new k.fn.init(e,t)},p=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g;function d(e){var t=!!e&&"length"in e&&e.length,n=w(e);return!m(e)&&!x(e)&&("array"===n||0===t||"number"==typeof t&&0<t&&t-1 in e)}k.fn=k.prototype={jquery:f,constructor:k,length:0,toArray:function(){return s.call(this)},get:function(e){return null==e?s.call(this):e<0?this[e+this.length]:this[e]},pushStack:function(e){var t=k.merge(this.constructor(),e);return t.prevObject=this,t},each:function(e){return k.each(this,e)},map:function(n){return this.pushStack(k.map(this,function(e,t){return n.call(e,t,e)}))},slice:function(){return this.pushStack(s.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(e){var t=this.length,n=+e+(e<0?t:0);return this.pushStack(0<=n&&n<t?[this[n]]:[])},end:function(){return this.prevObject||this.constructor()},push:u,sort:t.sort,splice:t.splice},k.extend=k.fn.extend=function(){var e,t,n,r,i,o,a=arguments[0]||{},s=1,u=arguments.length,l=!1;for("boolean"==typeof a&&(l=a,a=arguments[s]||{},s++),"object"==typeof a||m(a)||(a={}),s===u&&(a=this,s--);s<u;s++)if(null!=(e=arguments[s]))for(t in e)r=e[t],"__proto__"!==t&&a!==r&&(l&&r&&(k.isPlainObject(r)||(i=Array.isArray(r)))?(n=a[t],o=i&&!Array.isArray(n)?[]:i||k.isPlainObject(n)?n:{},i=!1,a[t]=k.extend(l,o,r)):void 0!==r&&(a[t]=r));return a},k.extend({expando:"jQuery"+(f+Math.random()).replace(/\D/g,""),isReady:!0,error:function(e){throw new Error(e)},noop:function(){},isPlainObject:function(e){var t,n;return!(!e||"[object Object]"!==o.call(e))&&(!(t=r(e))||"function"==typeof(n=v.call(t,"constructor")&&t.constructor)&&a.call(n)===l)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},globalEval:function(e,t){b(e,{nonce:t&&t.nonce})},each:function(e,t){var n,r=0;if(d(e)){for(n=e.length;r<n;r++)if(!1===t.call(e[r],r,e[r]))break}else for(r in e)if(!1===t.call(e[r],r,e[r]))break;return e},trim:function(e){return null==e?"":(e+"").replace(p,"")},makeArray:function(e,t){var n=t||[];return null!=e&&(d(Object(e))?k.merge(n,"string"==typeof e?[e]:e):u.call(n,e)),n},inArray:function(e,t,n){return null==t?-1:i.call(t,e,n)},merge:function(e,t){for(var n=+t.length,r=0,i=e.length;r<n;r++)e[i++]=t[r];return e.length=i,e},grep:function(e,t,n){for(var r=[],i=0,o=e.length,a=!n;i<o;i++)!t(e[i],i)!==a&&r.push(e[i]);return r},map:function(e,t,n){var r,i,o=0,a=[];if(d(e))for(r=e.length;o<r;o++)null!=(i=t(e[o],o,n))&&a.push(i);else for(o in e)null!=(i=t(e[o],o,n))&&a.push(i);return g.apply([],a)},guid:1,support:y}),"function"==typeof 
Symbol&&(k.fn[Symbol.iterator]=t[Symbol.iterator]),k.each("Boolean Number String Function Array Date RegExp Object Error Symbol".split(" "),function(e,t){n["[object "+t+"]"]=t.toLowerCase()});var h=function(n){var e,d,b,o,i,h,f,g,w,u,l,T,C,a,E,v,s,c,y,k="sizzle"+1*new Date,m=n.document,S=0,r=0,p=ue(),x=ue(),N=ue(),A=ue(),D=function(e,t){return e===t&&(l=!0),0},j={}.hasOwnProperty,t=[],q=t.pop,L=t.push,H=t.push,O=t.slice,P=function(e,t){for(var n=0,r=e.length;n<r;n++)if(e[n]===t)return n;return-1},R="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",M="[\\x20\\t\\r\\n\\f]",I="(?:\\\\.|[\\w-]|[^\0-\\xa0])+",W="\\["+M+"*("+I+")(?:"+M+"*([*^$|!~]?=)"+M+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+I+"))|)"+M+"*\\]",$=":("+I+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+W+")*)|.*)\\)|)",F=new RegExp(M+"+","g"),B=new RegExp("^"+M+"+|((?:^|[^\\\\])(?:\\\\.)*)"+M+"+$","g"),_=new RegExp("^"+M+"*,"+M+"*"),z=new RegExp("^"+M+"*([>+~]|"+M+")"+M+"*"),U=new RegExp(M+"|>"),X=new RegExp($),V=new RegExp("^"+I+"$"),G={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+W),PSEUDO:new RegExp("^"+$),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,J=/^h\d$/i,K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new RegExp("\\\\([\\da-f]{1,6}"+M+"?|("+M+")|.)","ig"),ne=function(e,t,n){var r="0x"+t-65536;return r!=r||n?t:r<0?String.fromCharCode(r+65536):String.fromCharCode(r>>10|55296,1023&r|56320)},re=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ie=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},oe=function(){T()},ae=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{H.apply(t=O.call(m.childNodes),m.childNodes),t[m.childNodes.length].nodeType}catch(e){H={apply:t.length?function(e,t){L.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function se(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&((e?e.ownerDocument||e:m)!==C&&T(e),e=e||C,E)){if(11!==p&&(u=Z.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return n.push(a),n}else if(f&&(a=f.getElementById(i))&&y(e,a)&&a.id===i)return n.push(a),n}else{if(u[2])return H.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&d.getElementsByClassName&&e.getElementsByClassName)return H.apply(n,e.getElementsByClassName(i)),n}if(d.qsa&&!A[t+" "]&&(!v||!v.test(t))&&(1!==p||"object"!==e.nodeName.toLowerCase())){if(c=t,f=e,1===p&&U.test(t)){(s=e.getAttribute("id"))?s=s.replace(re,ie):e.setAttribute("id",s=k),o=(l=h(t)).length;while(o--)l[o]="#"+s+" "+xe(l[o]);c=l.join(","),f=ee.test(t)&&ye(e.parentNode)||e}try{return H.apply(n,f.querySelectorAll(c)),n}catch(e){A(t,!0)}finally{s===k&&e.removeAttribute("id")}}}return g(t.replace(B,"$1"),e,n,r)}function ue(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function 
le(e){return e[k]=!0,e}function ce(e){var t=C.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function fe(e,t){var n=e.split("|"),r=n.length;while(r--)b.attrHandle[n[r]]=t}function pe(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function de(t){return function(e){return"input"===e.nodeName.toLowerCase()&&e.type===t}}function he(n){return function(e){var t=e.nodeName.toLowerCase();return("input"===t||"button"===t)&&e.type===n}}function ge(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&ae(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function ve(a){return le(function(o){return o=+o,le(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function ye(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}for(e in d=se.support={},i=se.isXML=function(e){var t=e.namespaceURI,n=(e.ownerDocument||e).documentElement;return!Y.test(t||n&&n.nodeName||"HTML")},T=se.setDocument=function(e){var t,n,r=e?e.ownerDocument||e:m;return r!==C&&9===r.nodeType&&r.documentElement&&(a=(C=r).documentElement,E=!i(C),m!==C&&(n=C.defaultView)&&n.top!==n&&(n.addEventListener?n.addEventListener("unload",oe,!1):n.attachEvent&&n.attachEvent("onunload",oe)),d.attributes=ce(function(e){return e.className="i",!e.getAttribute("className")}),d.getElementsByTagName=ce(function(e){return e.appendChild(C.createComment("")),!e.getElementsByTagName("*").length}),d.getElementsByClassName=K.test(C.getElementsByClassName),d.getById=ce(function(e){return a.appendChild(e).id=k,!C.getElementsByName||!C.getElementsByName(k).length}),d.getById?(b.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(te,ne);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=d.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):d.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},b.find.CLASS=d.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&E)return t.getElementsByClassName(e)},s=[],v=[],(d.qsa=K.test(C.querySelectorAll))&&(ce(function(e){a.appendChild(e).innerHTML="<a id='"+k+"'></a><select id='"+k+"-\r\\' msallowcapture=''><option selected=''></option></select>",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+k+"-]").length||v.push("~="),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+k+"+*").length||v.push(".#.+[+~]")}),ce(function(e){e.innerHTML="<a href='' disabled='disabled'></a><select 
disabled='disabled'><option/></select>";var t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",$)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},D=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)===(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e===C||e.ownerDocument===m&&y(m,e)?-1:t===C||t.ownerDocument===m&&y(m,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e===C?-1:t===C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]===m?-1:s[r]===m?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if((e.ownerDocument||e)!==C&&T(e),d.matchesSelector&&E&&!A[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){A(t,!0)}return 0<se(t,C,null,[e]).length},se.contains=function(e,t){return(e.ownerDocument||e)!==C&&T(e),y(e,t)},se.attr=function(e,t){(e.ownerDocument||e)!==C&&T(e);var n=b.attrHandle[t.toLowerCase()],r=n&&j.call(b.attrHandle,t.toLowerCase())?n(e,t,!E):void 0;return void 0!==r?r:d.attributes||!E?e.getAttribute(t):(r=e.getAttributeNode(t))&&r.specified?r.value:null},se.escape=function(e){return(e+"").replace(re,ie)},se.error=function(e){throw new Error("Syntax error, unrecognized expression: "+e)},se.uniqueSort=function(e){var t,n=[],r=0,i=0;if(l=!d.detectDuplicates,u=!d.sortStable&&e.slice(0),e.sort(D),l){while(t=e[i++])t===e[i]&&(r=n.push(i));while(r--)e.splice(n[r],1)}return u=null,e},o=se.getText=function(e){var t,n="",r=0,i=e.nodeType;if(i){if(1===i||9===i||11===i){if("string"==typeof e.textContent)return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=o(e)}else if(3===i||4===i)return e.nodeValue}else while(t=e[r++])n+=o(t);return n},(b=se.selectors={cacheLength:50,createPseudo:le,match:G,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return 
e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=p[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&p(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=se.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1<t.indexOf(i):"$="===r?i&&t.slice(-i.length)===i:"~="===r?-1<(" "+t.replace(F," ")+" ").indexOf(i):"|="===r&&(t===i||t.slice(0,i.length+1)===i+"-"))}},CHILD:function(h,e,t,g,v){var y="nth"!==h.slice(0,3),m="last"!==h.slice(-4),x="of-type"===e;return 1===g&&0===v?function(e){return!!e.parentNode}:function(e,t,n){var r,i,o,a,s,u,l=y!==m?"nextSibling":"previousSibling",c=e.parentNode,f=x&&e.nodeName.toLowerCase(),p=!n&&!x,d=!1;if(c){if(y){while(l){a=e;while(a=a[l])if(x?a.nodeName.toLowerCase()===f:1===a.nodeType)return!1;u=l="only"===h&&!u&&"nextSibling"}return!0}if(u=[m?c.firstChild:c.lastChild],m&&p){d=(s=(r=(i=(o=(a=c)[k]||(a[k]={}))[a.uniqueID]||(o[a.uniqueID]={}))[h]||[])[0]===S&&r[1])&&r[2],a=s&&c.childNodes[s];while(a=++s&&a&&a[l]||(d=s=0)||u.pop())if(1===a.nodeType&&++d&&a===e){i[h]=[S,s,d];break}}else if(p&&(d=s=(r=(i=(o=(a=e)[k]||(a[k]={}))[a.uniqueID]||(o[a.uniqueID]={}))[h]||[])[0]===S&&r[1]),!1===d)while(a=++s&&a&&a[l]||(d=s=0)||u.pop())if((x?a.nodeName.toLowerCase()===f:1===a.nodeType)&&++d&&(p&&((i=(o=a[k]||(a[k]={}))[a.uniqueID]||(o[a.uniqueID]={}))[h]=[S,d]),a===e))break;return(d-=v)===g||d%g==0&&0<=d/g}}},PSEUDO:function(e,o){var t,a=b.pseudos[e]||b.setFilters[e.toLowerCase()]||se.error("unsupported pseudo: "+e);return a[k]?a(o):1<a.length?(t=[e,e,"",o],b.setFilters.hasOwnProperty(e.toLowerCase())?le(function(e,t){var n,r=a(e,o),i=r.length;while(i--)e[n=P(e,r[i])]=!(t[n]=r[i])}):function(e){return a(e,0,t)}):a}},pseudos:{not:le(function(e){var r=[],i=[],s=f(e.replace(B,"$1"));return s[k]?le(function(e,t,n,r){var i,o=s(e,null,r,[]),a=e.length;while(a--)(i=o[a])&&(e[a]=!(t[a]=i))}):function(e,t,n){return r[0]=e,s(r,null,n,i),r[0]=null,!i.pop()}}),has:le(function(t){return function(e){return 0<se(t,e).length}}),contains:le(function(t){return t=t.replace(te,ne),function(e){return-1<(e.textContent||o(e)).indexOf(t)}}),lang:le(function(n){return V.test(n||"")||se.error("unsupported lang: "+n),n=n.replace(te,ne).toLowerCase(),function(e){var t;do{if(t=E?e.lang:e.getAttribute("xml:lang")||e.getAttribute("lang"))return(t=t.toLowerCase())===n||0===t.indexOf(n+"-")}while((e=e.parentNode)&&1===e.nodeType);return!1}}),target:function(e){var t=n.location&&n.location.hash;return t&&t.slice(1)===e.id},root:function(e){return e===a},focus:function(e){return e===C.activeElement&&(!C.hasFocus||C.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},enabled:ge(!1),disabled:ge(!0),checked:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&!!e.checked||"option"===t&&!!e.selected},selected:function(e){return 
e.parentNode&&e.parentNode.selectedIndex,!0===e.selected},empty:function(e){for(e=e.firstChild;e;e=e.nextSibling)if(e.nodeType<6)return!1;return!0},parent:function(e){return!b.pseudos.empty(e)},header:function(e){return J.test(e.nodeName)},input:function(e){return Q.test(e.nodeName)},button:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&"button"===e.type||"button"===t},text:function(e){var t;return"input"===e.nodeName.toLowerCase()&&"text"===e.type&&(null==(t=e.getAttribute("type"))||"text"===t.toLowerCase())},first:ve(function(){return[0]}),last:ve(function(e,t){return[t-1]}),eq:ve(function(e,t,n){return[n<0?n+t:n]}),even:ve(function(e,t){for(var n=0;n<t;n+=2)e.push(n);return e}),odd:ve(function(e,t){for(var n=1;n<t;n+=2)e.push(n);return e}),lt:ve(function(e,t,n){for(var r=n<0?n+t:t<n?t:n;0<=--r;)e.push(r);return e}),gt:ve(function(e,t,n){for(var r=n<0?n+t:n;++r<t;)e.push(r);return e})}}).pseudos.nth=b.pseudos.eq,{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})b.pseudos[e]=de(e);for(e in{submit:!0,reset:!0})b.pseudos[e]=he(e);function me(){}function xe(e){for(var t=0,n=e.length,r="";t<n;t++)r+=e[t].value;return r}function be(s,e,t){var u=e.dir,l=e.next,c=l||u,f=t&&"parentNode"===c,p=r++;return e.first?function(e,t,n){while(e=e[u])if(1===e.nodeType||f)return s(e,t,n);return!1}:function(e,t,n){var r,i,o,a=[S,p];if(n){while(e=e[u])if((1===e.nodeType||f)&&s(e,t,n))return!0}else while(e=e[u])if(1===e.nodeType||f)if(i=(o=e[k]||(e[k]={}))[e.uniqueID]||(o[e.uniqueID]={}),l&&l===e.nodeName.toLowerCase())e=e[u]||e;else{if((r=i[c])&&r[0]===S&&r[1]===p)return a[2]=r[2];if((i[c]=a)[2]=s(e,t,n))return!0}return!1}}function we(i){return 1<i.length?function(e,t,n){var r=i.length;while(r--)if(!i[r](e,t,n))return!1;return!0}:i[0]}function Te(e,t,n,r,i){for(var o,a=[],s=0,u=e.length,l=null!=t;s<u;s++)(o=e[s])&&(n&&!n(o,r,i)||(a.push(o),l&&t.push(s)));return a}function Ce(d,h,g,v,y,e){return v&&!v[k]&&(v=Ce(v)),y&&!y[k]&&(y=Ce(y,e)),le(function(e,t,n,r){var i,o,a,s=[],u=[],l=t.length,c=e||function(e,t,n){for(var r=0,i=t.length;r<i;r++)se(e,t[r],n);return n}(h||"*",n.nodeType?[n]:n,[]),f=!d||!e&&h?c:Te(c,s,d,n,r),p=g?y||(e?d:l||v)?[]:t:f;if(g&&g(f,p,n,r),v){i=Te(p,u),v(i,[],n,r),o=i.length;while(o--)(a=i[o])&&(p[u[o]]=!(f[u[o]]=a))}if(e){if(y||d){if(y){i=[],o=p.length;while(o--)(a=p[o])&&i.push(f[o]=a);y(null,p=[],i,r)}o=p.length;while(o--)(a=p[o])&&-1<(i=y?P(e,a):s[o])&&(e[i]=!(t[i]=a))}}else p=Te(p===t?p.splice(l,p.length):p),y?y(null,t,p,r):H.apply(t,p)})}function Ee(e){for(var i,t,n,r=e.length,o=b.relative[e[0].type],a=o||b.relative[" "],s=o?1:0,u=be(function(e){return e===i},a,!0),l=be(function(e){return-1<P(i,e)},a,!0),c=[function(e,t,n){var r=!o&&(n||t!==w)||((i=t).nodeType?u(e,t,n):l(e,t,n));return i=null,r}];s<r;s++)if(t=b.relative[e[s].type])c=[be(we(c),t)];else{if((t=b.filter[e[s].type].apply(null,e[s].matches))[k]){for(n=++s;n<r;n++)if(b.relative[e[n].type])break;return Ce(1<s&&we(c),1<s&&xe(e.slice(0,s-1).concat({value:" "===e[s-2].type?"*":""})).replace(B,"$1"),t,s<n&&Ee(e.slice(s,n)),n<r&&Ee(e=e.slice(n)),n<r&&xe(e))}c.push(t)}return we(c)}return me.prototype=b.filters=b.pseudos,b.setFilters=new me,h=se.tokenize=function(e,t){var n,r,i,o,a,s,u,l=x[e+" "];if(l)return t?0:l.slice(0);a=e,s=[],u=b.preFilter;while(a){for(o in n&&!(r=_.exec(a))||(r&&(a=a.slice(r[0].length)||a),s.push(i=[])),n=!1,(r=z.exec(a))&&(n=r.shift(),i.push({value:n,type:r[0].replace(B," 
")}),a=a.slice(n.length)),b.filter)!(r=G[o].exec(a))||u[o]&&!(r=u[o](r))||(n=r.shift(),i.push({value:n,type:o,matches:r}),a=a.slice(n.length));if(!n)break}return t?a.length:a?se.error(e):x(e,s).slice(0)},f=se.compile=function(e,t){var n,v,y,m,x,r,i=[],o=[],a=N[e+" "];if(!a){t||(t=h(e)),n=t.length;while(n--)(a=Ee(t[n]))[k]?i.push(a):o.push(a);(a=N(e,(v=o,m=0<(y=i).length,x=0<v.length,r=function(e,t,n,r,i){var o,a,s,u=0,l="0",c=e&&[],f=[],p=w,d=e||x&&b.find.TAG("*",i),h=S+=null==p?1:Math.random()||.1,g=d.length;for(i&&(w=t===C||t||i);l!==g&&null!=(o=d[l]);l++){if(x&&o){a=0,t||o.ownerDocument===C||(T(o),n=!E);while(s=v[a++])if(s(o,t||C,n)){r.push(o);break}i&&(S=h)}m&&((o=!s&&o)&&u--,e&&c.push(o))}if(u+=l,m&&l!==u){a=0;while(s=y[a++])s(c,f,t,n);if(e){if(0<u)while(l--)c[l]||f[l]||(f[l]=q.call(r));f=Te(f)}H.apply(r,f),i&&!e&&0<f.length&&1<u+y.length&&se.uniqueSort(r)}return i&&(S=h,w=p),c},m?le(r):r))).selector=e}return a},g=se.select=function(e,t,n,r){var i,o,a,s,u,l="function"==typeof e&&e,c=!r&&h(e=l.selector||e);if(n=n||[],1===c.length){if(2<(o=c[0]=c[0].slice(0)).length&&"ID"===(a=o[0]).type&&9===t.nodeType&&E&&b.relative[o[1].type]){if(!(t=(b.find.ID(a.matches[0].replace(te,ne),t)||[])[0]))return n;l&&(t=t.parentNode),e=e.slice(o.shift().value.length)}i=G.needsContext.test(e)?0:o.length;while(i--){if(a=o[i],b.relative[s=a.type])break;if((u=b.find[s])&&(r=u(a.matches[0].replace(te,ne),ee.test(o[0].type)&&ye(t.parentNode)||t))){if(o.splice(i,1),!(e=r.length&&xe(o)))return H.apply(n,r),n;break}}}return(l||f(e,c))(r,t,!E,n,!t||ee.test(e)&&ye(t.parentNode)||t),n},d.sortStable=k.split("").sort(D).join("")===k,d.detectDuplicates=!!l,T(),d.sortDetached=ce(function(e){return 1&e.compareDocumentPosition(C.createElement("fieldset"))}),ce(function(e){return e.innerHTML="<a href='#'></a>","#"===e.firstChild.getAttribute("href")})||fe("type|href|height|width",function(e,t,n){if(!n)return e.getAttribute(t,"type"===t.toLowerCase()?1:2)}),d.attributes&&ce(function(e){return e.innerHTML="<input/>",e.firstChild.setAttribute("value",""),""===e.firstChild.getAttribute("value")})||fe("value",function(e,t,n){if(!n&&"input"===e.nodeName.toLowerCase())return e.defaultValue}),ce(function(e){return null==e.getAttribute("disabled")})||fe(R,function(e,t,n){var r;if(!n)return!0===e[t]?t.toLowerCase():(r=e.getAttributeNode(t))&&r.specified?r.value:null}),se}(C);k.find=h,k.expr=h.selectors,k.expr[":"]=k.expr.pseudos,k.uniqueSort=k.unique=h.uniqueSort,k.text=h.getText,k.isXMLDoc=h.isXML,k.contains=h.contains,k.escapeSelector=h.escape;var T=function(e,t,n){var r=[],i=void 0!==n;while((e=e[t])&&9!==e.nodeType)if(1===e.nodeType){if(i&&k(e).is(n))break;r.push(e)}return r},S=function(e,t){for(var n=[];e;e=e.nextSibling)1===e.nodeType&&e!==t&&n.push(e);return n},N=k.expr.match.needsContext;function A(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()}var D=/^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,n,r){return m(n)?k.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?k.grep(e,function(e){return e===n!==r}):"string"!=typeof n?k.grep(e,function(e){return-1<i.call(n,e)!==r}):k.filter(n,e,r)}k.filter=function(e,t,n){var r=t[0];return n&&(e=":not("+e+")"),1===t.length&&1===r.nodeType?k.find.matchesSelector(r,e)?[r]:[]:k.find.matches(e,k.grep(t,function(e){return 1===e.nodeType}))},k.fn.extend({find:function(e){var t,n,r=this.length,i=this;if("string"!=typeof e)return 
this.pushStack(k(e).filter(function(){for(t=0;t<r;t++)if(k.contains(i[t],this))return!0}));for(n=this.pushStack([]),t=0;t<r;t++)k.find(e,i[t],n);return 1<r?k.uniqueSort(n):n},filter:function(e){return this.pushStack(j(this,e||[],!1))},not:function(e){return this.pushStack(j(this,e||[],!0))},is:function(e){return!!j(this,"string"==typeof e&&N.test(e)?k(e):e||[],!1).length}});var q,L=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/;(k.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||q,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:L.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof k?t[0]:t,k.merge(this,k.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:E,!0)),D.test(r[1])&&k.isPlainObject(t))for(r in t)m(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=E.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):m(e)?void 0!==n.ready?n.ready(e):e(k):k.makeArray(e,this)}).prototype=k.fn,q=k(E);var H=/^(?:parents|prev(?:Until|All))/,O={children:!0,contents:!0,next:!0,prev:!0};function P(e,t){while((e=e[t])&&1!==e.nodeType);return e}k.fn.extend({has:function(e){var t=k(e,this),n=t.length;return this.filter(function(){for(var e=0;e<n;e++)if(k.contains(this,t[e]))return!0})},closest:function(e,t){var n,r=0,i=this.length,o=[],a="string"!=typeof e&&k(e);if(!N.test(e))for(;r<i;r++)for(n=this[r];n&&n!==t;n=n.parentNode)if(n.nodeType<11&&(a?-1<a.index(n):1===n.nodeType&&k.find.matchesSelector(n,e))){o.push(n);break}return this.pushStack(1<o.length?k.uniqueSort(o):o)},index:function(e){return e?"string"==typeof e?i.call(k(e),this[0]):i.call(this,e.jquery?e[0]:e):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(e,t){return this.pushStack(k.uniqueSort(k.merge(this.get(),k(e,t))))},addBack:function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}}),k.each({parent:function(e){var t=e.parentNode;return t&&11!==t.nodeType?t:null},parents:function(e){return T(e,"parentNode")},parentsUntil:function(e,t,n){return T(e,"parentNode",n)},next:function(e){return P(e,"nextSibling")},prev:function(e){return P(e,"previousSibling")},nextAll:function(e){return T(e,"nextSibling")},prevAll:function(e){return T(e,"previousSibling")},nextUntil:function(e,t,n){return T(e,"nextSibling",n)},prevUntil:function(e,t,n){return T(e,"previousSibling",n)},siblings:function(e){return S((e.parentNode||{}).firstChild,e)},children:function(e){return S(e.firstChild)},contents:function(e){return"undefined"!=typeof e.contentDocument?e.contentDocument:(A(e,"template")&&(e=e.content||e),k.merge([],e.childNodes))}},function(r,i){k.fn[r]=function(e,t){var n=k.map(this,i,e);return"Until"!==r.slice(-5)&&(t=e),t&&"string"==typeof t&&(n=k.filter(t,n)),1<this.length&&(O[r]||k.uniqueSort(n),H.test(r)&&n.reverse()),this.pushStack(n)}});var R=/[^\x20\t\r\n\f]+/g;function M(e){return e}function I(e){throw e}function W(e,t,n,r){var i;try{e&&m(i=e.promise)?i.call(e).done(t).fail(n):e&&m(i=e.then)?i.call(e,t,n):t.apply(void 0,[e].slice(r))}catch(e){n.apply(void 0,[e])}}k.Callbacks=function(r){var e,n;r="string"==typeof r?(e=r,n={},k.each(e.match(R)||[],function(e,t){n[t]=!0}),n):k.extend({},r);var i,t,o,a,s=[],u=[],l=-1,c=function(){for(a=a||r.once,o=i=!0;u.length;l=-1){t=u.shift();while(++l<s.length)!1===s[l].apply(t[0],t[1])&&r.stopOnFalse&&(l=s.length,t=!1)}r.memory||(t=!1),i=!1,a&&(s=t?[]:"")},f={add:function(){return 
s&&(t&&!i&&(l=s.length-1,u.push(t)),function n(e){k.each(e,function(e,t){m(t)?r.unique&&f.has(t)||s.push(t):t&&t.length&&"string"!==w(t)&&n(t)})}(arguments),t&&!i&&c()),this},remove:function(){return k.each(arguments,function(e,t){var n;while(-1<(n=k.inArray(t,s,n)))s.splice(n,1),n<=l&&l--}),this},has:function(e){return e?-1<k.inArray(e,s):0<s.length},empty:function(){return s&&(s=[]),this},disable:function(){return a=u=[],s=t="",this},disabled:function(){return!s},lock:function(){return a=u=[],t||i||(s=t=""),this},locked:function(){return!!a},fireWith:function(e,t){return a||(t=[e,(t=t||[]).slice?t.slice():t],u.push(t),i||c()),this},fire:function(){return f.fireWith(this,arguments),this},fired:function(){return!!o}};return f},k.extend({Deferred:function(e){var o=[["notify","progress",k.Callbacks("memory"),k.Callbacks("memory"),2],["resolve","done",k.Callbacks("once memory"),k.Callbacks("once memory"),0,"resolved"],["reject","fail",k.Callbacks("once memory"),k.Callbacks("once memory"),1,"rejected"]],i="pending",a={state:function(){return i},always:function(){return s.done(arguments).fail(arguments),this},"catch":function(e){return a.then(null,e)},pipe:function(){var i=arguments;return k.Deferred(function(r){k.each(o,function(e,t){var n=m(i[t[4]])&&i[t[4]];s[t[1]](function(){var e=n&&n.apply(this,arguments);e&&m(e.promise)?e.promise().progress(r.notify).done(r.resolve).fail(r.reject):r[t[0]+"With"](this,n?[e]:arguments)})}),i=null}).promise()},then:function(t,n,r){var u=0;function l(i,o,a,s){return function(){var n=this,r=arguments,e=function(){var e,t;if(!(i<u)){if((e=a.apply(n,r))===o.promise())throw new TypeError("Thenable self-resolution");t=e&&("object"==typeof e||"function"==typeof e)&&e.then,m(t)?s?t.call(e,l(u,o,M,s),l(u,o,I,s)):(u++,t.call(e,l(u,o,M,s),l(u,o,I,s),l(u,o,M,o.notifyWith))):(a!==M&&(n=void 0,r=[e]),(s||o.resolveWith)(n,r))}},t=s?e:function(){try{e()}catch(e){k.Deferred.exceptionHook&&k.Deferred.exceptionHook(e,t.stackTrace),u<=i+1&&(a!==I&&(n=void 0,r=[e]),o.rejectWith(n,r))}};i?t():(k.Deferred.getStackHook&&(t.stackTrace=k.Deferred.getStackHook()),C.setTimeout(t))}}return k.Deferred(function(e){o[0][3].add(l(0,e,m(r)?r:M,e.notifyWith)),o[1][3].add(l(0,e,m(t)?t:M)),o[2][3].add(l(0,e,m(n)?n:I))}).promise()},promise:function(e){return null!=e?k.extend(e,a):a}},s={};return k.each(o,function(e,t){var n=t[2],r=t[5];a[t[1]]=n.add,r&&n.add(function(){i=r},o[3-e][2].disable,o[3-e][3].disable,o[0][2].lock,o[0][3].lock),n.add(t[3].fire),s[t[0]]=function(){return s[t[0]+"With"](this===s?void 0:this,arguments),this},s[t[0]+"With"]=n.fireWith}),a.promise(s),e&&e.call(s,s),s},when:function(e){var n=arguments.length,t=n,r=Array(t),i=s.call(arguments),o=k.Deferred(),a=function(t){return function(e){r[t]=this,i[t]=1<arguments.length?s.call(arguments):e,--n||o.resolveWith(r,i)}};if(n<=1&&(W(e,o.done(a(t)).resolve,o.reject,!n),"pending"===o.state()||m(i[t]&&i[t].then)))return o.then();while(t--)W(i[t],a(t),o.reject);return o.promise()}});var $=/^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/;k.Deferred.exceptionHook=function(e,t){C.console&&C.console.warn&&e&&$.test(e.name)&&C.console.warn("jQuery.Deferred exception: "+e.message,e.stack,t)},k.readyException=function(e){C.setTimeout(function(){throw e})};var F=k.Deferred();function B(){E.removeEventListener("DOMContentLoaded",B),C.removeEventListener("load",B),k.ready()}k.fn.ready=function(e){return 
F.then(e)["catch"](function(e){k.readyException(e)}),this},k.extend({isReady:!1,readyWait:1,ready:function(e){(!0===e?--k.readyWait:k.isReady)||(k.isReady=!0)!==e&&0<--k.readyWait||F.resolveWith(E,[k])}}),k.ready.then=F.then,"complete"===E.readyState||"loading"!==E.readyState&&!E.documentElement.doScroll?C.setTimeout(k.ready):(E.addEventListener("DOMContentLoaded",B),C.addEventListener("load",B));var _=function(e,t,n,r,i,o,a){var s=0,u=e.length,l=null==n;if("object"===w(n))for(s in i=!0,n)_(e,t,s,n[s],!0,o,a);else if(void 0!==r&&(i=!0,m(r)||(a=!0),l&&(a?(t.call(e,r),t=null):(l=t,t=function(e,t,n){return l.call(k(e),n)})),t))for(;s<u;s++)t(e[s],n,a?r:r.call(e[s],s,t(e[s],n)));return i?e:l?t.call(e):u?t(e[0],n):o},z=/^-ms-/,U=/-([a-z])/g;function X(e,t){return t.toUpperCase()}function V(e){return e.replace(z,"ms-").replace(U,X)}var G=function(e){return 1===e.nodeType||9===e.nodeType||!+e.nodeType};function Y(){this.expando=k.expando+Y.uid++}Y.uid=1,Y.prototype={cache:function(e){var t=e[this.expando];return t||(t={},G(e)&&(e.nodeType?e[this.expando]=t:Object.defineProperty(e,this.expando,{value:t,configurable:!0}))),t},set:function(e,t,n){var r,i=this.cache(e);if("string"==typeof t)i[V(t)]=n;else for(r in t)i[V(r)]=t[r];return i},get:function(e,t){return void 0===t?this.cache(e):e[this.expando]&&e[this.expando][V(t)]},access:function(e,t,n){return void 0===t||t&&"string"==typeof t&&void 0===n?this.get(e,t):(this.set(e,t,n),void 0!==n?n:t)},remove:function(e,t){var n,r=e[this.expando];if(void 0!==r){if(void 0!==t){n=(t=Array.isArray(t)?t.map(V):(t=V(t))in r?[t]:t.match(R)||[]).length;while(n--)delete r[t[n]]}(void 0===t||k.isEmptyObject(r))&&(e.nodeType?e[this.expando]=void 0:delete e[this.expando])}},hasData:function(e){var t=e[this.expando];return void 0!==t&&!k.isEmptyObject(t)}};var Q=new Y,J=new Y,K=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,Z=/[A-Z]/g;function ee(e,t,n){var r,i;if(void 0===n&&1===e.nodeType)if(r="data-"+t.replace(Z,"-$&").toLowerCase(),"string"==typeof(n=e.getAttribute(r))){try{n="true"===(i=n)||"false"!==i&&("null"===i?null:i===+i+""?+i:K.test(i)?JSON.parse(i):i)}catch(e){}J.set(e,t,n)}else n=void 0;return n}k.extend({hasData:function(e){return J.hasData(e)||Q.hasData(e)},data:function(e,t,n){return J.access(e,t,n)},removeData:function(e,t){J.remove(e,t)},_data:function(e,t,n){return Q.access(e,t,n)},_removeData:function(e,t){Q.remove(e,t)}}),k.fn.extend({data:function(n,e){var t,r,i,o=this[0],a=o&&o.attributes;if(void 0===n){if(this.length&&(i=J.get(o),1===o.nodeType&&!Q.get(o,"hasDataAttrs"))){t=a.length;while(t--)a[t]&&0===(r=a[t].name).indexOf("data-")&&(r=V(r.slice(5)),ee(o,r,i[r]));Q.set(o,"hasDataAttrs",!0)}return i}return"object"==typeof n?this.each(function(){J.set(this,n)}):_(this,function(e){var t;if(o&&void 0===e)return void 0!==(t=J.get(o,n))?t:void 0!==(t=ee(o,n))?t:void 0;this.each(function(){J.set(this,n,e)})},null,e,1<arguments.length,null,!0)},removeData:function(e){return this.each(function(){J.remove(this,e)})}}),k.extend({queue:function(e,t,n){var r;if(e)return t=(t||"fx")+"queue",r=Q.get(e,t),n&&(!r||Array.isArray(n)?r=Q.access(e,t,k.makeArray(n)):r.push(n)),r||[]},dequeue:function(e,t){t=t||"fx";var n=k.queue(e,t),r=n.length,i=n.shift(),o=k._queueHooks(e,t);"inprogress"===i&&(i=n.shift(),r--),i&&("fx"===t&&n.unshift("inprogress"),delete o.stop,i.call(e,function(){k.dequeue(e,t)},o)),!r&&o&&o.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return Q.get(e,n)||Q.access(e,n,{empty:k.Callbacks("once 
memory").add(function(){Q.remove(e,[t+"queue",n])})})}}),k.fn.extend({queue:function(t,n){var e=2;return"string"!=typeof t&&(n=t,t="fx",e--),arguments.length<e?k.queue(this[0],t):void 0===n?this:this.each(function(){var e=k.queue(this,t,n);k._queueHooks(this,t),"fx"===t&&"inprogress"!==e[0]&&k.dequeue(this,t)})},dequeue:function(e){return this.each(function(){k.dequeue(this,e)})},clearQueue:function(e){return this.queue(e||"fx",[])},promise:function(e,t){var n,r=1,i=k.Deferred(),o=this,a=this.length,s=function(){--r||i.resolveWith(o,[o])};"string"!=typeof e&&(t=e,e=void 0),e=e||"fx";while(a--)(n=Q.get(o[a],e+"queueHooks"))&&n.empty&&(r++,n.empty.add(s));return s(),i.promise(t)}});var te=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,ne=new RegExp("^(?:([+-])=|)("+te+")([a-z%]*)$","i"),re=["Top","Right","Bottom","Left"],ie=E.documentElement,oe=function(e){return k.contains(e.ownerDocument,e)},ae={composed:!0};ie.getRootNode&&(oe=function(e){return k.contains(e.ownerDocument,e)||e.getRootNode(ae)===e.ownerDocument});var se=function(e,t){return"none"===(e=t||e).style.display||""===e.style.display&&oe(e)&&"none"===k.css(e,"display")},ue=function(e,t,n,r){var i,o,a={};for(o in t)a[o]=e.style[o],e.style[o]=t[o];for(o in i=n.apply(e,r||[]),t)e.style[o]=a[o];return i};function le(e,t,n,r){var i,o,a=20,s=r?function(){return r.cur()}:function(){return k.css(e,t,"")},u=s(),l=n&&n[3]||(k.cssNumber[t]?"":"px"),c=e.nodeType&&(k.cssNumber[t]||"px"!==l&&+u)&&ne.exec(k.css(e,t));if(c&&c[3]!==l){u/=2,l=l||c[3],c=+u||1;while(a--)k.style(e,t,c+l),(1-o)*(1-(o=s()/u||.5))<=0&&(a=0),c/=o;c*=2,k.style(e,t,c+l),n=n||[]}return n&&(c=+c||+u||0,i=n[1]?c+(n[1]+1)*n[2]:+n[2],r&&(r.unit=l,r.start=c,r.end=i)),i}var ce={};function fe(e,t){for(var n,r,i,o,a,s,u,l=[],c=0,f=e.length;c<f;c++)(r=e[c]).style&&(n=r.style.display,t?("none"===n&&(l[c]=Q.get(r,"display")||null,l[c]||(r.style.display="")),""===r.style.display&&se(r)&&(l[c]=(u=a=o=void 0,a=(i=r).ownerDocument,s=i.nodeName,(u=ce[s])||(o=a.body.appendChild(a.createElement(s)),u=k.css(o,"display"),o.parentNode.removeChild(o),"none"===u&&(u="block"),ce[s]=u)))):"none"!==n&&(l[c]="none",Q.set(r,"display",n)));for(c=0;c<f;c++)null!=l[c]&&(e[c].style.display=l[c]);return e}k.fn.extend({show:function(){return fe(this,!0)},hide:function(){return fe(this)},toggle:function(e){return"boolean"==typeof e?e?this.show():this.hide():this.each(function(){se(this)?k(this).show():k(this).hide()})}});var pe=/^(?:checkbox|radio)$/i,de=/<([a-z][^\/\0>\x20\t\r\n\f]*)/i,he=/^$|^module$|\/(?:java|ecma)script/i,ge={option:[1,"<select multiple='multiple'>","</select>"],thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};function ve(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?k.merge([e],n):n}function ye(e,t){for(var n=0,r=e.length;n<r;n++)Q.set(e[n],"globalEval",!t||Q.get(t[n],"globalEval"))}ge.optgroup=ge.option,ge.tbody=ge.tfoot=ge.colgroup=ge.caption=ge.thead,ge.th=ge.td;var me,xe,be=/<|&#?\w+;/;function we(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d<h;d++)if((o=e[d])||0===o)if("object"===w(o))k.merge(p,o.nodeType?[o]:o);else 
if(be.test(o)){a=a||f.appendChild(t.createElement("div")),s=(de.exec(o)||["",""])[1].toLowerCase(),u=ge[s]||ge._default,a.innerHTML=u[1]+k.htmlPrefilter(o)+u[2],c=u[0];while(c--)a=a.lastChild;k.merge(p,a.childNodes),(a=f.firstChild).textContent=""}else p.push(t.createTextNode(o));f.textContent="",d=0;while(o=p[d++])if(r&&-1<k.inArray(o,r))i&&i.push(o);else if(l=oe(o),a=ve(f.appendChild(o),"script"),l&&ye(a),n){c=0;while(o=a[c++])he.test(o.type||"")&&n.push(o)}return f}me=E.createDocumentFragment().appendChild(E.createElement("div")),(xe=E.createElement("input")).setAttribute("type","radio"),xe.setAttribute("checked","checked"),xe.setAttribute("name","t"),me.appendChild(xe),y.checkClone=me.cloneNode(!0).cloneNode(!0).lastChild.checked,me.innerHTML="<textarea>x</textarea>",y.noCloneChecked=!!me.cloneNode(!0).lastChild.defaultValue;var Te=/^key/,Ce=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,Ee=/^([^.]*)(?:\.(.+)|)/;function ke(){return!0}function Se(){return!1}function Ne(e,t){return e===function(){try{return E.activeElement}catch(e){}}()==("focus"===t)}function Ae(e,t,n,r,i,o){var a,s;if("object"==typeof t){for(s in"string"!=typeof n&&(r=r||n,n=void 0),t)Ae(e,s,n,r,t[s],o);return e}if(null==r&&null==i?(i=n,r=n=void 0):null==i&&("string"==typeof n?(i=r,r=void 0):(i=r,r=n,n=void 0)),!1===i)i=Se;else if(!i)return e;return 1===o&&(a=i,(i=function(e){return k().off(e),a.apply(this,arguments)}).guid=a.guid||(a.guid=k.guid++)),e.each(function(){k.event.add(this,t,i,r,n)})}function De(e,i,o){o?(Q.set(e,i,!1),k.event.add(e,i,{namespace:!1,handler:function(e){var t,n,r=Q.get(this,i);if(1&e.isTrigger&&this[i]){if(r.length)(k.event.special[i]||{}).delegateType&&e.stopPropagation();else if(r=s.call(arguments),Q.set(this,i,r),t=o(this,i),this[i](),r!==(n=Q.get(this,i))||t?Q.set(this,i,!1):n={},r!==n)return e.stopImmediatePropagation(),e.preventDefault(),n.value}else r.length&&(Q.set(this,i,{value:k.event.trigger(k.extend(r[0],k.Event.prototype),r.slice(1),this)}),e.stopImmediatePropagation())}})):void 0===Q.get(e,i)&&k.event.add(e,i,ke)}k.event={global:{},add:function(t,e,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=Q.get(t);if(v){n.handler&&(n=(o=n).handler,i=o.selector),i&&k.find.matchesSelector(ie,i),n.guid||(n.guid=k.guid++),(u=v.events)||(u=v.events={}),(a=v.handle)||(a=v.handle=function(e){return"undefined"!=typeof k&&k.event.triggered!==e.type?k.event.dispatch.apply(t,arguments):void 0}),l=(e=(e||"").match(R)||[""]).length;while(l--)d=g=(s=Ee.exec(e[l])||[])[1],h=(s[2]||"").split(".").sort(),d&&(f=k.event.special[d]||{},d=(i?f.delegateType:f.bindType)||d,f=k.event.special[d]||{},c=k.extend({type:d,origType:g,data:r,handler:n,guid:n.guid,selector:i,needsContext:i&&k.expr.match.needsContext.test(i),namespace:h.join(".")},o),(p=u[d])||((p=u[d]=[]).delegateCount=0,f.setup&&!1!==f.setup.call(t,r,h,a)||t.addEventListener&&t.addEventListener(d,a)),f.add&&(f.add.call(t,c),c.handler.guid||(c.handler.guid=n.guid)),i?p.splice(p.delegateCount++,0,c):p.push(c),k.event.global[d]=!0)}},remove:function(e,t,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=Q.hasData(e)&&Q.get(e);if(v&&(u=v.events)){l=(t=(t||"").match(R)||[""]).length;while(l--)if(d=g=(s=Ee.exec(t[l])||[])[1],h=(s[2]||"").split(".").sort(),d){f=k.event.special[d]||{},p=u[d=(r?f.delegateType:f.bindType)||d]||[],s=s[2]&&new 
RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"),a=o=p.length;while(o--)c=p[o],!i&&g!==c.origType||n&&n.guid!==c.guid||s&&!s.test(c.namespace)||r&&r!==c.selector&&("**"!==r||!c.selector)||(p.splice(o,1),c.selector&&p.delegateCount--,f.remove&&f.remove.call(e,c));a&&!p.length&&(f.teardown&&!1!==f.teardown.call(e,h,v.handle)||k.removeEvent(e,d,v.handle),delete u[d])}else for(d in u)k.event.remove(e,d+t[l],n,r,!0);k.isEmptyObject(u)&&Q.remove(e,"handle events")}},dispatch:function(e){var t,n,r,i,o,a,s=k.event.fix(e),u=new Array(arguments.length),l=(Q.get(this,"events")||{})[s.type]||[],c=k.event.special[s.type]||{};for(u[0]=s,t=1;t<arguments.length;t++)u[t]=arguments[t];if(s.delegateTarget=this,!c.preDispatch||!1!==c.preDispatch.call(this,s)){a=k.event.handlers.call(this,s,l),t=0;while((i=a[t++])&&!s.isPropagationStopped()){s.currentTarget=i.elem,n=0;while((o=i.handlers[n++])&&!s.isImmediatePropagationStopped())s.rnamespace&&!1!==o.namespace&&!s.rnamespace.test(o.namespace)||(s.handleObj=o,s.data=o.data,void 0!==(r=((k.event.special[o.origType]||{}).handle||o.handler).apply(i.elem,u))&&!1===(s.result=r)&&(s.preventDefault(),s.stopPropagation()))}return c.postDispatch&&c.postDispatch.call(this,s),s.result}},handlers:function(e,t){var n,r,i,o,a,s=[],u=t.delegateCount,l=e.target;if(u&&l.nodeType&&!("click"===e.type&&1<=e.button))for(;l!==this;l=l.parentNode||this)if(1===l.nodeType&&("click"!==e.type||!0!==l.disabled)){for(o=[],a={},n=0;n<u;n++)void 0===a[i=(r=t[n]).selector+" "]&&(a[i]=r.needsContext?-1<k(i,this).index(l):k.find(i,this,null,[l]).length),a[i]&&o.push(r);o.length&&s.push({elem:l,handlers:o})}return l=this,u<t.length&&s.push({elem:l,handlers:t.slice(u)}),s},addProp:function(t,e){Object.defineProperty(k.Event.prototype,t,{enumerable:!0,configurable:!0,get:m(e)?function(){if(this.originalEvent)return e(this.originalEvent)}:function(){if(this.originalEvent)return this.originalEvent[t]},set:function(e){Object.defineProperty(this,t,{enumerable:!0,configurable:!0,writable:!0,value:e})}})},fix:function(e){return e[k.expando]?e:new k.Event(e)},special:{load:{noBubble:!0},click:{setup:function(e){var t=this||e;return pe.test(t.type)&&t.click&&A(t,"input")&&De(t,"click",ke),!1},trigger:function(e){var t=this||e;return pe.test(t.type)&&t.click&&A(t,"input")&&De(t,"click"),!0},_default:function(e){var t=e.target;return pe.test(t.type)&&t.click&&A(t,"input")&&Q.get(t,"click")||A(t,"a")}},beforeunload:{postDispatch:function(e){void 0!==e.result&&e.originalEvent&&(e.originalEvent.returnValue=e.result)}}}},k.removeEvent=function(e,t,n){e.removeEventListener&&e.removeEventListener(t,n)},k.Event=function(e,t){if(!(this instanceof k.Event))return new k.Event(e,t);e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented||void 0===e.defaultPrevented&&!1===e.returnValue?ke:Se,this.target=e.target&&3===e.target.nodeType?e.target.parentNode:e.target,this.currentTarget=e.currentTarget,this.relatedTarget=e.relatedTarget):this.type=e,t&&k.extend(this,t),this.timeStamp=e&&e.timeStamp||Date.now(),this[k.expando]=!0},k.Event.prototype={constructor:k.Event,isDefaultPrevented:Se,isPropagationStopped:Se,isImmediatePropagationStopped:Se,isSimulated:!1,preventDefault:function(){var e=this.originalEvent;this.isDefaultPrevented=ke,e&&!this.isSimulated&&e.preventDefault()},stopPropagation:function(){var e=this.originalEvent;this.isPropagationStopped=ke,e&&!this.isSimulated&&e.stopPropagation()},stopImmediatePropagation:function(){var 
e=this.originalEvent;this.isImmediatePropagationStopped=ke,e&&!this.isSimulated&&e.stopImmediatePropagation(),this.stopPropagation()}},k.each({altKey:!0,bubbles:!0,cancelable:!0,changedTouches:!0,ctrlKey:!0,detail:!0,eventPhase:!0,metaKey:!0,pageX:!0,pageY:!0,shiftKey:!0,view:!0,"char":!0,code:!0,charCode:!0,key:!0,keyCode:!0,button:!0,buttons:!0,clientX:!0,clientY:!0,offsetX:!0,offsetY:!0,pointerId:!0,pointerType:!0,screenX:!0,screenY:!0,targetTouches:!0,toElement:!0,touches:!0,which:function(e){var t=e.button;return null==e.which&&Te.test(e.type)?null!=e.charCode?e.charCode:e.keyCode:!e.which&&void 0!==t&&Ce.test(e.type)?1&t?1:2&t?3:4&t?2:0:e.which}},k.event.addProp),k.each({focus:"focusin",blur:"focusout"},function(e,t){k.event.special[e]={setup:function(){return De(this,e,Ne),!1},trigger:function(){return De(this,e),!0},delegateType:t}}),k.each({mouseenter:"mouseover",mouseleave:"mouseout",pointerenter:"pointerover",pointerleave:"pointerout"},function(e,i){k.event.special[e]={delegateType:i,bindType:i,handle:function(e){var t,n=e.relatedTarget,r=e.handleObj;return n&&(n===this||k.contains(this,n))||(e.type=r.origType,t=r.handler.apply(this,arguments),e.type=i),t}}}),k.fn.extend({on:function(e,t,n,r){return Ae(this,e,t,n,r)},one:function(e,t,n,r){return Ae(this,e,t,n,r,1)},off:function(e,t,n){var r,i;if(e&&e.preventDefault&&e.handleObj)return r=e.handleObj,k(e.delegateTarget).off(r.namespace?r.origType+"."+r.namespace:r.origType,r.selector,r.handler),this;if("object"==typeof e){for(i in e)this.off(i,t,e[i]);return this}return!1!==t&&"function"!=typeof t||(n=t,t=void 0),!1===n&&(n=Se),this.each(function(){k.event.remove(this,e,n,t)})}});var je=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([a-z][^\/\0>\x20\t\r\n\f]*)[^>]*)\/>/gi,qe=/<script|<style|<link/i,Le=/checked\s*(?:[^=]|=\s*.checked.)/i,He=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g;function Oe(e,t){return A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&k(e).children("tbody")[0]||e}function Pe(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function Re(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Me(e,t){var n,r,i,o,a,s,u,l;if(1===t.nodeType){if(Q.hasData(e)&&(o=Q.access(e),a=Q.set(t,o),l=o.events))for(i in delete a.handle,a.events={},l)for(n=0,r=l[i].length;n<r;n++)k.event.add(t,i,l[i][n]);J.hasData(e)&&(s=J.access(e),u=k.extend({},s),J.set(t,u))}}function Ie(n,r,i,o){r=g.apply([],r);var e,t,a,s,u,l,c=0,f=n.length,p=f-1,d=r[0],h=m(d);if(h||1<f&&"string"==typeof d&&!y.checkClone&&Le.test(d))return n.each(function(e){var t=n.eq(e);h&&(r[0]=d.call(this,e,t.html())),Ie(t,r,i,o)});if(f&&(t=(e=we(r,n[0].ownerDocument,!1,n,o)).firstChild,1===e.childNodes.length&&(e=t),t||o)){for(s=(a=k.map(ve(e,"script"),Pe)).length;c<f;c++)u=e,c!==p&&(u=k.clone(u,!0,!0),s&&k.merge(a,ve(u,"script"))),i.call(n[c],u,c);if(s)for(l=a[a.length-1].ownerDocument,k.map(a,Re),c=0;c<s;c++)u=a[c],he.test(u.type||"")&&!Q.access(u,"globalEval")&&k.contains(l,u)&&(u.src&&"module"!==(u.type||"").toLowerCase()?k._evalUrl&&!u.noModule&&k._evalUrl(u.src,{nonce:u.nonce||u.getAttribute("nonce")}):b(u.textContent.replace(He,""),u,l))}return n}function We(e,t,n){for(var r,i=t?k.filter(t,e):e,o=0;null!=(r=i[o]);o++)n||1!==r.nodeType||k.cleanData(ve(r)),r.parentNode&&(n&&oe(r)&&ye(ve(r,"script")),r.parentNode.removeChild(r));return e}k.extend({htmlPrefilter:function(e){return e.replace(je,"<$1></$2>")},clone:function(e,t,n){var 
r,i,o,a,s,u,l,c=e.cloneNode(!0),f=oe(e);if(!(y.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||k.isXMLDoc(e)))for(a=ve(c),r=0,i=(o=ve(e)).length;r<i;r++)s=o[r],u=a[r],void 0,"input"===(l=u.nodeName.toLowerCase())&&pe.test(s.type)?u.checked=s.checked:"input"!==l&&"textarea"!==l||(u.defaultValue=s.defaultValue);if(t)if(n)for(o=o||ve(e),a=a||ve(c),r=0,i=o.length;r<i;r++)Me(o[r],a[r]);else Me(e,c);return 0<(a=ve(c,"script")).length&&ye(a,!f&&ve(e,"script")),c},cleanData:function(e){for(var t,n,r,i=k.event.special,o=0;void 0!==(n=e[o]);o++)if(G(n)){if(t=n[Q.expando]){if(t.events)for(r in t.events)i[r]?k.event.remove(n,r):k.removeEvent(n,r,t.handle);n[Q.expando]=void 0}n[J.expando]&&(n[J.expando]=void 0)}}}),k.fn.extend({detach:function(e){return We(this,e,!0)},remove:function(e){return We(this,e)},text:function(e){return _(this,function(e){return void 0===e?k.text(this):this.empty().each(function(){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||(this.textContent=e)})},null,e,arguments.length)},append:function(){return Ie(this,arguments,function(e){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||Oe(this,e).appendChild(e)})},prepend:function(){return Ie(this,arguments,function(e){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var t=Oe(this,e);t.insertBefore(e,t.firstChild)}})},before:function(){return Ie(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this)})},after:function(){return Ie(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this.nextSibling)})},empty:function(){for(var e,t=0;null!=(e=this[t]);t++)1===e.nodeType&&(k.cleanData(ve(e,!1)),e.textContent="");return this},clone:function(e,t){return e=null!=e&&e,t=null==t?e:t,this.map(function(){return k.clone(this,e,t)})},html:function(e){return _(this,function(e){var t=this[0]||{},n=0,r=this.length;if(void 0===e&&1===t.nodeType)return t.innerHTML;if("string"==typeof e&&!qe.test(e)&&!ge[(de.exec(e)||["",""])[1].toLowerCase()]){e=k.htmlPrefilter(e);try{for(;n<r;n++)1===(t=this[n]||{}).nodeType&&(k.cleanData(ve(t,!1)),t.innerHTML=e);t=0}catch(e){}}t&&this.empty().append(e)},null,e,arguments.length)},replaceWith:function(){var n=[];return Ie(this,arguments,function(e){var t=this.parentNode;k.inArray(this,n)<0&&(k.cleanData(ve(this)),t&&t.replaceChild(e,this))},n)}}),k.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(e,a){k.fn[e]=function(e){for(var t,n=[],r=k(e),i=r.length-1,o=0;o<=i;o++)t=o===i?this:this.clone(!0),k(r[o])[a](t),u.apply(n,t.get());return this.pushStack(n)}});var $e=new RegExp("^("+te+")(?!px)[a-z%]+$","i"),Fe=function(e){var t=e.ownerDocument.defaultView;return t&&t.opener||(t=C),t.getComputedStyle(e)},Be=new RegExp(re.join("|"),"i");function _e(e,t,n){var r,i,o,a,s=e.style;return(n=n||Fe(e))&&(""!==(a=n.getPropertyValue(t)||n[t])||oe(e)||(a=k.style(e,t)),!y.pixelBoxStyles()&&$e.test(a)&&Be.test(t)&&(r=s.width,i=s.minWidth,o=s.maxWidth,s.minWidth=s.maxWidth=s.width=a,a=n.width,s.width=r,s.minWidth=i,s.maxWidth=o)),void 0!==a?a+"":a}function ze(e,t){return{get:function(){if(!e())return(this.get=t).apply(this,arguments);delete this.get}}}!function(){function e(){if(u){s.style.cssText="position:absolute;left:-11111px;width:60px;margin-top:1px;padding:0;border:0",u.style.cssText="position:relative;display:block;box-sizing:border-box;overflow:scroll;margin:auto;border:1px;padding:1px;width:60%;top:1%",ie.appendChild(s).appendChild(u);var 
e=C.getComputedStyle(u);n="1%"!==e.top,a=12===t(e.marginLeft),u.style.right="60%",o=36===t(e.right),r=36===t(e.width),u.style.position="absolute",i=12===t(u.offsetWidth/3),ie.removeChild(s),u=null}}function t(e){return Math.round(parseFloat(e))}var n,r,i,o,a,s=E.createElement("div"),u=E.createElement("div");u.style&&(u.style.backgroundClip="content-box",u.cloneNode(!0).style.backgroundClip="",y.clearCloneStyle="content-box"===u.style.backgroundClip,k.extend(y,{boxSizingReliable:function(){return e(),r},pixelBoxStyles:function(){return e(),o},pixelPosition:function(){return e(),n},reliableMarginLeft:function(){return e(),a},scrollboxSize:function(){return e(),i}}))}();var Ue=["Webkit","Moz","ms"],Xe=E.createElement("div").style,Ve={};function Ge(e){var t=k.cssProps[e]||Ve[e];return t||(e in Xe?e:Ve[e]=function(e){var t=e[0].toUpperCase()+e.slice(1),n=Ue.length;while(n--)if((e=Ue[n]+t)in Xe)return e}(e)||e)}var Ye=/^(none|table(?!-c[ea]).+)/,Qe=/^--/,Je={position:"absolute",visibility:"hidden",display:"block"},Ke={letterSpacing:"0",fontWeight:"400"};function Ze(e,t,n){var r=ne.exec(t);return r?Math.max(0,r[2]-(n||0))+(r[3]||"px"):t}function et(e,t,n,r,i,o){var a="width"===t?1:0,s=0,u=0;if(n===(r?"border":"content"))return 0;for(;a<4;a+=2)"margin"===n&&(u+=k.css(e,n+re[a],!0,i)),r?("content"===n&&(u-=k.css(e,"padding"+re[a],!0,i)),"margin"!==n&&(u-=k.css(e,"border"+re[a]+"Width",!0,i))):(u+=k.css(e,"padding"+re[a],!0,i),"padding"!==n?u+=k.css(e,"border"+re[a]+"Width",!0,i):s+=k.css(e,"border"+re[a]+"Width",!0,i));return!r&&0<=o&&(u+=Math.max(0,Math.ceil(e["offset"+t[0].toUpperCase()+t.slice(1)]-o-u-s-.5))||0),u}function tt(e,t,n){var r=Fe(e),i=(!y.boxSizingReliable()||n)&&"border-box"===k.css(e,"boxSizing",!1,r),o=i,a=_e(e,t,r),s="offset"+t[0].toUpperCase()+t.slice(1);if($e.test(a)){if(!n)return a;a="auto"}return(!y.boxSizingReliable()&&i||"auto"===a||!parseFloat(a)&&"inline"===k.css(e,"display",!1,r))&&e.getClientRects().length&&(i="border-box"===k.css(e,"boxSizing",!1,r),(o=s in e)&&(a=e[s])),(a=parseFloat(a)||0)+et(e,t,n||(i?"border":"content"),o,r,a)+"px"}function nt(e,t,n,r,i){return new nt.prototype.init(e,t,n,r,i)}k.extend({cssHooks:{opacity:{get:function(e,t){if(t){var n=_e(e,"opacity");return""===n?"1":n}}}},cssNumber:{animationIterationCount:!0,columnCount:!0,fillOpacity:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,gridArea:!0,gridColumn:!0,gridColumnEnd:!0,gridColumnStart:!0,gridRow:!0,gridRowEnd:!0,gridRowStart:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{},style:function(e,t,n,r){if(e&&3!==e.nodeType&&8!==e.nodeType&&e.style){var i,o,a,s=V(t),u=Qe.test(t),l=e.style;if(u||(t=Ge(s)),a=k.cssHooks[t]||k.cssHooks[s],void 0===n)return a&&"get"in a&&void 0!==(i=a.get(e,!1,r))?i:l[t];"string"===(o=typeof n)&&(i=ne.exec(n))&&i[1]&&(n=le(e,t,i),o="number"),null!=n&&n==n&&("number"!==o||u||(n+=i&&i[3]||(k.cssNumber[s]?"":"px")),y.clearCloneStyle||""!==n||0!==t.indexOf("background")||(l[t]="inherit"),a&&"set"in a&&void 0===(n=a.set(e,n,r))||(u?l.setProperty(t,n):l[t]=n))}},css:function(e,t,n,r){var i,o,a,s=V(t);return Qe.test(t)||(t=Ge(s)),(a=k.cssHooks[t]||k.cssHooks[s])&&"get"in a&&(i=a.get(e,!0,n)),void 0===i&&(i=_e(e,t,r)),"normal"===i&&t in Ke&&(i=Ke[t]),""===n||n?(o=parseFloat(i),!0===n||isFinite(o)?o||0:i):i}}),k.each(["height","width"],function(e,u){k.cssHooks[u]={get:function(e,t,n){if(t)return!Ye.test(k.css(e,"display"))||e.getClientRects().length&&e.getBoundingClientRect().width?tt(e,u,n):ue(e,Je,function(){return 
tt(e,u,n)})},set:function(e,t,n){var r,i=Fe(e),o=!y.scrollboxSize()&&"absolute"===i.position,a=(o||n)&&"border-box"===k.css(e,"boxSizing",!1,i),s=n?et(e,u,n,a,i):0;return a&&o&&(s-=Math.ceil(e["offset"+u[0].toUpperCase()+u.slice(1)]-parseFloat(i[u])-et(e,u,"border",!1,i)-.5)),s&&(r=ne.exec(t))&&"px"!==(r[3]||"px")&&(e.style[u]=t,t=k.css(e,u)),Ze(0,t,s)}}}),k.cssHooks.marginLeft=ze(y.reliableMarginLeft,function(e,t){if(t)return(parseFloat(_e(e,"marginLeft"))||e.getBoundingClientRect().left-ue(e,{marginLeft:0},function(){return e.getBoundingClientRect().left}))+"px"}),k.each({margin:"",padding:"",border:"Width"},function(i,o){k.cssHooks[i+o]={expand:function(e){for(var t=0,n={},r="string"==typeof e?e.split(" "):[e];t<4;t++)n[i+re[t]+o]=r[t]||r[t-2]||r[0];return n}},"margin"!==i&&(k.cssHooks[i+o].set=Ze)}),k.fn.extend({css:function(e,t){return _(this,function(e,t,n){var r,i,o={},a=0;if(Array.isArray(t)){for(r=Fe(e),i=t.length;a<i;a++)o[t[a]]=k.css(e,t[a],!1,r);return o}return void 0!==n?k.style(e,t,n):k.css(e,t)},e,t,1<arguments.length)}}),((k.Tween=nt).prototype={constructor:nt,init:function(e,t,n,r,i,o){this.elem=e,this.prop=n,this.easing=i||k.easing._default,this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=o||(k.cssNumber[n]?"":"px")},cur:function(){var e=nt.propHooks[this.prop];return e&&e.get?e.get(this):nt.propHooks._default.get(this)},run:function(e){var t,n=nt.propHooks[this.prop];return this.options.duration?this.pos=t=k.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):this.pos=t=e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):nt.propHooks._default.set(this),this}}).init.prototype=nt.prototype,(nt.propHooks={_default:{get:function(e){var t;return 1!==e.elem.nodeType||null!=e.elem[e.prop]&&null==e.elem.style[e.prop]?e.elem[e.prop]:(t=k.css(e.elem,e.prop,""))&&"auto"!==t?t:0},set:function(e){k.fx.step[e.prop]?k.fx.step[e.prop](e):1!==e.elem.nodeType||!k.cssHooks[e.prop]&&null==e.elem.style[Ge(e.prop)]?e.elem[e.prop]=e.now:k.style(e.elem,e.prop,e.now+e.unit)}}}).scrollTop=nt.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},k.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2},_default:"swing"},k.fx=nt.prototype.init,k.fx.step={};var rt,it,ot,at,st=/^(?:toggle|show|hide)$/,ut=/queueHooks$/;function lt(){it&&(!1===E.hidden&&C.requestAnimationFrame?C.requestAnimationFrame(lt):C.setTimeout(lt,k.fx.interval),k.fx.tick())}function ct(){return C.setTimeout(function(){rt=void 0}),rt=Date.now()}function ft(e,t){var n,r=0,i={height:e};for(t=t?1:0;r<4;r+=2-t)i["margin"+(n=re[r])]=i["padding"+n]=e;return t&&(i.opacity=i.width=e),i}function pt(e,t,n){for(var r,i=(dt.tweeners[t]||[]).concat(dt.tweeners["*"]),o=0,a=i.length;o<a;o++)if(r=i[o].call(n,t,e))return r}function dt(o,e,t){var n,a,r=0,i=dt.prefilters.length,s=k.Deferred().always(function(){delete u.elem}),u=function(){if(a)return!1;for(var e=rt||ct(),t=Math.max(0,l.startTime+l.duration-e),n=1-(t/l.duration||0),r=0,i=l.tweens.length;r<i;r++)l.tweens[r].run(n);return s.notifyWith(o,[l,n,t]),n<1&&i?t:(i||s.notifyWith(o,[l,1,0]),s.resolveWith(o,[l]),!1)},l=s.promise({elem:o,props:k.extend({},e),opts:k.extend(!0,{specialEasing:{},easing:k.easing._default},t),originalProperties:e,originalOptions:t,startTime:rt||ct(),duration:t.duration,tweens:[],createTween:function(e,t){var 
n=k.Tween(o,l.opts,e,t,l.opts.specialEasing[e]||l.opts.easing);return l.tweens.push(n),n},stop:function(e){var t=0,n=e?l.tweens.length:0;if(a)return this;for(a=!0;t<n;t++)l.tweens[t].run(1);return e?(s.notifyWith(o,[l,1,0]),s.resolveWith(o,[l,e])):s.rejectWith(o,[l,e]),this}}),c=l.props;for(!function(e,t){var n,r,i,o,a;for(n in e)if(i=t[r=V(n)],o=e[n],Array.isArray(o)&&(i=o[1],o=e[n]=o[0]),n!==r&&(e[r]=o,delete e[n]),(a=k.cssHooks[r])&&"expand"in a)for(n in o=a.expand(o),delete e[r],o)n in e||(e[n]=o[n],t[n]=i);else t[r]=i}(c,l.opts.specialEasing);r<i;r++)if(n=dt.prefilters[r].call(l,o,c,l.opts))return m(n.stop)&&(k._queueHooks(l.elem,l.opts.queue).stop=n.stop.bind(n)),n;return k.map(c,pt,l),m(l.opts.start)&&l.opts.start.call(o,l),l.progress(l.opts.progress).done(l.opts.done,l.opts.complete).fail(l.opts.fail).always(l.opts.always),k.fx.timer(k.extend(u,{elem:o,anim:l,queue:l.opts.queue})),l}k.Animation=k.extend(dt,{tweeners:{"*":[function(e,t){var n=this.createTween(e,t);return le(n.elem,e,ne.exec(t),n),n}]},tweener:function(e,t){m(e)?(t=e,e=["*"]):e=e.match(R);for(var n,r=0,i=e.length;r<i;r++)n=e[r],dt.tweeners[n]=dt.tweeners[n]||[],dt.tweeners[n].unshift(t)},prefilters:[function(e,t,n){var r,i,o,a,s,u,l,c,f="width"in t||"height"in t,p=this,d={},h=e.style,g=e.nodeType&&se(e),v=Q.get(e,"fxshow");for(r in n.queue||(null==(a=k._queueHooks(e,"fx")).unqueued&&(a.unqueued=0,s=a.empty.fire,a.empty.fire=function(){a.unqueued||s()}),a.unqueued++,p.always(function(){p.always(function(){a.unqueued--,k.queue(e,"fx").length||a.empty.fire()})})),t)if(i=t[r],st.test(i)){if(delete t[r],o=o||"toggle"===i,i===(g?"hide":"show")){if("show"!==i||!v||void 0===v[r])continue;g=!0}d[r]=v&&v[r]||k.style(e,r)}if((u=!k.isEmptyObject(t))||!k.isEmptyObject(d))for(r in f&&1===e.nodeType&&(n.overflow=[h.overflow,h.overflowX,h.overflowY],null==(l=v&&v.display)&&(l=Q.get(e,"display")),"none"===(c=k.css(e,"display"))&&(l?c=l:(fe([e],!0),l=e.style.display||l,c=k.css(e,"display"),fe([e]))),("inline"===c||"inline-block"===c&&null!=l)&&"none"===k.css(e,"float")&&(u||(p.done(function(){h.display=l}),null==l&&(c=h.display,l="none"===c?"":c)),h.display="inline-block")),n.overflow&&(h.overflow="hidden",p.always(function(){h.overflow=n.overflow[0],h.overflowX=n.overflow[1],h.overflowY=n.overflow[2]})),u=!1,d)u||(v?"hidden"in v&&(g=v.hidden):v=Q.access(e,"fxshow",{display:l}),o&&(v.hidden=!g),g&&fe([e],!0),p.done(function(){for(r in g||fe([e]),Q.remove(e,"fxshow"),d)k.style(e,r,d[r])})),u=pt(g?v[r]:0,r,p),r in v||(v[r]=u.start,g&&(u.end=u.start,u.start=0))}],prefilter:function(e,t){t?dt.prefilters.unshift(e):dt.prefilters.push(e)}}),k.speed=function(e,t,n){var r=e&&"object"==typeof e?k.extend({},e):{complete:n||!n&&t||m(e)&&e,duration:e,easing:n&&t||t&&!m(t)&&t};return k.fx.off?r.duration=0:"number"!=typeof r.duration&&(r.duration in k.fx.speeds?r.duration=k.fx.speeds[r.duration]:r.duration=k.fx.speeds._default),null!=r.queue&&!0!==r.queue||(r.queue="fx"),r.old=r.complete,r.complete=function(){m(r.old)&&r.old.call(this),r.queue&&k.dequeue(this,r.queue)},r},k.fn.extend({fadeTo:function(e,t,n,r){return this.filter(se).css("opacity",0).show().end().animate({opacity:t},e,n,r)},animate:function(t,e,n,r){var i=k.isEmptyObject(t),o=k.speed(e,n,r),a=function(){var e=dt(this,k.extend({},t),o);(i||Q.get(this,"finish"))&&e.stop(!0)};return a.finish=a,i||!1===o.queue?this.each(a):this.queue(o.queue,a)},stop:function(i,e,o){var a=function(e){var t=e.stop;delete e.stop,t(o)};return"string"!=typeof i&&(o=e,e=i,i=void 
0),e&&!1!==i&&this.queue(i||"fx",[]),this.each(function(){var e=!0,t=null!=i&&i+"queueHooks",n=k.timers,r=Q.get(this);if(t)r[t]&&r[t].stop&&a(r[t]);else for(t in r)r[t]&&r[t].stop&&ut.test(t)&&a(r[t]);for(t=n.length;t--;)n[t].elem!==this||null!=i&&n[t].queue!==i||(n[t].anim.stop(o),e=!1,n.splice(t,1));!e&&o||k.dequeue(this,i)})},finish:function(a){return!1!==a&&(a=a||"fx"),this.each(function(){var e,t=Q.get(this),n=t[a+"queue"],r=t[a+"queueHooks"],i=k.timers,o=n?n.length:0;for(t.finish=!0,k.queue(this,a,[]),r&&r.stop&&r.stop.call(this,!0),e=i.length;e--;)i[e].elem===this&&i[e].queue===a&&(i[e].anim.stop(!0),i.splice(e,1));for(e=0;e<o;e++)n[e]&&n[e].finish&&n[e].finish.call(this);delete t.finish})}}),k.each(["toggle","show","hide"],function(e,r){var i=k.fn[r];k.fn[r]=function(e,t,n){return null==e||"boolean"==typeof e?i.apply(this,arguments):this.animate(ft(r,!0),e,t,n)}}),k.each({slideDown:ft("show"),slideUp:ft("hide"),slideToggle:ft("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(e,r){k.fn[e]=function(e,t,n){return this.animate(r,e,t,n)}}),k.timers=[],k.fx.tick=function(){var e,t=0,n=k.timers;for(rt=Date.now();t<n.length;t++)(e=n[t])()||n[t]!==e||n.splice(t--,1);n.length||k.fx.stop(),rt=void 0},k.fx.timer=function(e){k.timers.push(e),k.fx.start()},k.fx.interval=13,k.fx.start=function(){it||(it=!0,lt())},k.fx.stop=function(){it=null},k.fx.speeds={slow:600,fast:200,_default:400},k.fn.delay=function(r,e){return r=k.fx&&k.fx.speeds[r]||r,e=e||"fx",this.queue(e,function(e,t){var n=C.setTimeout(e,r);t.stop=function(){C.clearTimeout(n)}})},ot=E.createElement("input"),at=E.createElement("select").appendChild(E.createElement("option")),ot.type="checkbox",y.checkOn=""!==ot.value,y.optSelected=at.selected,(ot=E.createElement("input")).value="t",ot.type="radio",y.radioValue="t"===ot.value;var ht,gt=k.expr.attrHandle;k.fn.extend({attr:function(e,t){return _(this,k.attr,e,t,1<arguments.length)},removeAttr:function(e){return this.each(function(){k.removeAttr(this,e)})}}),k.extend({attr:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return"undefined"==typeof e.getAttribute?k.prop(e,t,n):(1===o&&k.isXMLDoc(e)||(i=k.attrHooks[t.toLowerCase()]||(k.expr.match.bool.test(t)?ht:void 0)),void 0!==n?null===n?void k.removeAttr(e,t):i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:(e.setAttribute(t,n+""),n):i&&"get"in i&&null!==(r=i.get(e,t))?r:null==(r=k.find.attr(e,t))?void 0:r)},attrHooks:{type:{set:function(e,t){if(!y.radioValue&&"radio"===t&&A(e,"input")){var n=e.value;return e.setAttribute("type",t),n&&(e.value=n),t}}}},removeAttr:function(e,t){var n,r=0,i=t&&t.match(R);if(i&&1===e.nodeType)while(n=i[r++])e.removeAttribute(n)}}),ht={set:function(e,t,n){return!1===t?k.removeAttr(e,n):e.setAttribute(n,n),n}},k.each(k.expr.match.bool.source.match(/\w+/g),function(e,t){var a=gt[t]||k.find.attr;gt[t]=function(e,t,n){var r,i,o=t.toLowerCase();return n||(i=gt[o],gt[o]=r,r=null!=a(e,t,n)?o:null,gt[o]=i),r}});var vt=/^(?:input|select|textarea|button)$/i,yt=/^(?:a|area)$/i;function mt(e){return(e.match(R)||[]).join(" ")}function xt(e){return e.getAttribute&&e.getAttribute("class")||""}function bt(e){return Array.isArray(e)?e:"string"==typeof e&&e.match(R)||[]}k.fn.extend({prop:function(e,t){return _(this,k.prop,e,t,1<arguments.length)},removeProp:function(e){return this.each(function(){delete this[k.propFix[e]||e]})}}),k.extend({prop:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return 1===o&&k.isXMLDoc(e)||(t=k.propFix[t]||t,i=k.propHooks[t]),void 
0!==n?i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:e[t]=n:i&&"get"in i&&null!==(r=i.get(e,t))?r:e[t]},propHooks:{tabIndex:{get:function(e){var t=k.find.attr(e,"tabindex");return t?parseInt(t,10):vt.test(e.nodeName)||yt.test(e.nodeName)&&e.href?0:-1}}},propFix:{"for":"htmlFor","class":"className"}}),y.optSelected||(k.propHooks.selected={get:function(e){var t=e.parentNode;return t&&t.parentNode&&t.parentNode.selectedIndex,null},set:function(e){var t=e.parentNode;t&&(t.selectedIndex,t.parentNode&&t.parentNode.selectedIndex)}}),k.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){k.propFix[this.toLowerCase()]=this}),k.fn.extend({addClass:function(t){var e,n,r,i,o,a,s,u=0;if(m(t))return this.each(function(e){k(this).addClass(t.call(this,e,xt(this)))});if((e=bt(t)).length)while(n=this[u++])if(i=xt(n),r=1===n.nodeType&&" "+mt(i)+" "){a=0;while(o=e[a++])r.indexOf(" "+o+" ")<0&&(r+=o+" ");i!==(s=mt(r))&&n.setAttribute("class",s)}return this},removeClass:function(t){var e,n,r,i,o,a,s,u=0;if(m(t))return this.each(function(e){k(this).removeClass(t.call(this,e,xt(this)))});if(!arguments.length)return this.attr("class","");if((e=bt(t)).length)while(n=this[u++])if(i=xt(n),r=1===n.nodeType&&" "+mt(i)+" "){a=0;while(o=e[a++])while(-1<r.indexOf(" "+o+" "))r=r.replace(" "+o+" "," ");i!==(s=mt(r))&&n.setAttribute("class",s)}return this},toggleClass:function(i,t){var o=typeof i,a="string"===o||Array.isArray(i);return"boolean"==typeof t&&a?t?this.addClass(i):this.removeClass(i):m(i)?this.each(function(e){k(this).toggleClass(i.call(this,e,xt(this),t),t)}):this.each(function(){var e,t,n,r;if(a){t=0,n=k(this),r=bt(i);while(e=r[t++])n.hasClass(e)?n.removeClass(e):n.addClass(e)}else void 0!==i&&"boolean"!==o||((e=xt(this))&&Q.set(this,"__className__",e),this.setAttribute&&this.setAttribute("class",e||!1===i?"":Q.get(this,"__className__")||""))})},hasClass:function(e){var t,n,r=0;t=" "+e+" ";while(n=this[r++])if(1===n.nodeType&&-1<(" "+mt(xt(n))+" ").indexOf(t))return!0;return!1}});var wt=/\r/g;k.fn.extend({val:function(n){var r,e,i,t=this[0];return arguments.length?(i=m(n),this.each(function(e){var t;1===this.nodeType&&(null==(t=i?n.call(this,e,k(this).val()):n)?t="":"number"==typeof t?t+="":Array.isArray(t)&&(t=k.map(t,function(e){return null==e?"":e+""})),(r=k.valHooks[this.type]||k.valHooks[this.nodeName.toLowerCase()])&&"set"in r&&void 0!==r.set(this,t,"value")||(this.value=t))})):t?(r=k.valHooks[t.type]||k.valHooks[t.nodeName.toLowerCase()])&&"get"in r&&void 0!==(e=r.get(t,"value"))?e:"string"==typeof(e=t.value)?e.replace(wt,""):null==e?"":e:void 0}}),k.extend({valHooks:{option:{get:function(e){var t=k.find.attr(e,"value");return null!=t?t:mt(k.text(e))}},select:{get:function(e){var t,n,r,i=e.options,o=e.selectedIndex,a="select-one"===e.type,s=a?null:[],u=a?o+1:i.length;for(r=o<0?u:a?o:0;r<u;r++)if(((n=i[r]).selected||r===o)&&!n.disabled&&(!n.parentNode.disabled||!A(n.parentNode,"optgroup"))){if(t=k(n).val(),a)return t;s.push(t)}return s},set:function(e,t){var n,r,i=e.options,o=k.makeArray(t),a=i.length;while(a--)((r=i[a]).selected=-1<k.inArray(k.valHooks.option.get(r),o))&&(n=!0);return n||(e.selectedIndex=-1),o}}}}),k.each(["radio","checkbox"],function(){k.valHooks[this]={set:function(e,t){if(Array.isArray(t))return e.checked=-1<k.inArray(k(e).val(),t)}},y.checkOn||(k.valHooks[this].get=function(e){return null===e.getAttribute("value")?"on":e.value})}),y.focusin="onfocusin"in C;var 
Tt=/^(?:focusinfocus|focusoutblur)$/,Ct=function(e){e.stopPropagation()};k.extend(k.event,{trigger:function(e,t,n,r){var i,o,a,s,u,l,c,f,p=[n||E],d=v.call(e,"type")?e.type:e,h=v.call(e,"namespace")?e.namespace.split("."):[];if(o=f=a=n=n||E,3!==n.nodeType&&8!==n.nodeType&&!Tt.test(d+k.event.triggered)&&(-1<d.indexOf(".")&&(d=(h=d.split(".")).shift(),h.sort()),u=d.indexOf(":")<0&&"on"+d,(e=e[k.expando]?e:new k.Event(d,"object"==typeof e&&e)).isTrigger=r?2:3,e.namespace=h.join("."),e.rnamespace=e.namespace?new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,e.result=void 0,e.target||(e.target=n),t=null==t?[e]:k.makeArray(t,[e]),c=k.event.special[d]||{},r||!c.trigger||!1!==c.trigger.apply(n,t))){if(!r&&!c.noBubble&&!x(n)){for(s=c.delegateType||d,Tt.test(s+d)||(o=o.parentNode);o;o=o.parentNode)p.push(o),a=o;a===(n.ownerDocument||E)&&p.push(a.defaultView||a.parentWindow||C)}i=0;while((o=p[i++])&&!e.isPropagationStopped())f=o,e.type=1<i?s:c.bindType||d,(l=(Q.get(o,"events")||{})[e.type]&&Q.get(o,"handle"))&&l.apply(o,t),(l=u&&o[u])&&l.apply&&G(o)&&(e.result=l.apply(o,t),!1===e.result&&e.preventDefault());return e.type=d,r||e.isDefaultPrevented()||c._default&&!1!==c._default.apply(p.pop(),t)||!G(n)||u&&m(n[d])&&!x(n)&&((a=n[u])&&(n[u]=null),k.event.triggered=d,e.isPropagationStopped()&&f.addEventListener(d,Ct),n[d](),e.isPropagationStopped()&&f.removeEventListener(d,Ct),k.event.triggered=void 0,a&&(n[u]=a)),e.result}},simulate:function(e,t,n){var r=k.extend(new k.Event,n,{type:e,isSimulated:!0});k.event.trigger(r,null,t)}}),k.fn.extend({trigger:function(e,t){return this.each(function(){k.event.trigger(e,t,this)})},triggerHandler:function(e,t){var n=this[0];if(n)return k.event.trigger(e,t,n,!0)}}),y.focusin||k.each({focus:"focusin",blur:"focusout"},function(n,r){var i=function(e){k.event.simulate(r,e.target,k.event.fix(e))};k.event.special[r]={setup:function(){var e=this.ownerDocument||this,t=Q.access(e,r);t||e.addEventListener(n,i,!0),Q.access(e,r,(t||0)+1)},teardown:function(){var e=this.ownerDocument||this,t=Q.access(e,r)-1;t?Q.access(e,r,t):(e.removeEventListener(n,i,!0),Q.remove(e,r))}}});var Et=C.location,kt=Date.now(),St=/\?/;k.parseXML=function(e){var t;if(!e||"string"!=typeof e)return null;try{t=(new C.DOMParser).parseFromString(e,"text/xml")}catch(e){t=void 0}return t&&!t.getElementsByTagName("parsererror").length||k.error("Invalid XML: "+e),t};var Nt=/\[\]$/,At=/\r?\n/g,Dt=/^(?:submit|button|image|reset|file)$/i,jt=/^(?:input|select|textarea|keygen)/i;function qt(n,e,r,i){var t;if(Array.isArray(e))k.each(e,function(e,t){r||Nt.test(n)?i(n,t):qt(n+"["+("object"==typeof t&&null!=t?e:"")+"]",t,r,i)});else if(r||"object"!==w(e))i(n,e);else for(t in e)qt(n+"["+t+"]",e[t],r,i)}k.param=function(e,t){var n,r=[],i=function(e,t){var n=m(t)?t():t;r[r.length]=encodeURIComponent(e)+"="+encodeURIComponent(null==n?"":n)};if(null==e)return"";if(Array.isArray(e)||e.jquery&&!k.isPlainObject(e))k.each(e,function(){i(this.name,this.value)});else for(n in e)qt(n,e[n],t,i);return r.join("&")},k.fn.extend({serialize:function(){return k.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var e=k.prop(this,"elements");return e?k.makeArray(e):this}).filter(function(){var e=this.type;return this.name&&!k(this).is(":disabled")&&jt.test(this.nodeName)&&!Dt.test(e)&&(this.checked||!pe.test(e))}).map(function(e,t){var n=k(this).val();return 
null==n?null:Array.isArray(n)?k.map(n,function(e){return{name:t.name,value:e.replace(At,"\r\n")}}):{name:t.name,value:n.replace(At,"\r\n")}}).get()}});var Lt=/%20/g,Ht=/#.*$/,Ot=/([?&])_=[^&]*/,Pt=/^(.*?):[ \t]*([^\r\n]*)$/gm,Rt=/^(?:GET|HEAD)$/,Mt=/^\/\//,It={},Wt={},$t="*/".concat("*"),Ft=E.createElement("a");function Bt(o){return function(e,t){"string"!=typeof e&&(t=e,e="*");var n,r=0,i=e.toLowerCase().match(R)||[];if(m(t))while(n=i[r++])"+"===n[0]?(n=n.slice(1)||"*",(o[n]=o[n]||[]).unshift(t)):(o[n]=o[n]||[]).push(t)}}function _t(t,i,o,a){var s={},u=t===Wt;function l(e){var r;return s[e]=!0,k.each(t[e]||[],function(e,t){var n=t(i,o,a);return"string"!=typeof n||u||s[n]?u?!(r=n):void 0:(i.dataTypes.unshift(n),l(n),!1)}),r}return l(i.dataTypes[0])||!s["*"]&&l("*")}function zt(e,t){var n,r,i=k.ajaxSettings.flatOptions||{};for(n in t)void 0!==t[n]&&((i[n]?e:r||(r={}))[n]=t[n]);return r&&k.extend(!0,e,r),e}Ft.href=Et.href,k.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:Et.href,type:"GET",isLocal:/^(?:about|app|app-storage|.+-extension|file|res|widget):$/.test(Et.protocol),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":$t,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/\bxml\b/,html:/\bhtml/,json:/\bjson\b/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":JSON.parse,"text xml":k.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(e,t){return t?zt(zt(e,k.ajaxSettings),t):zt(k.ajaxSettings,e)},ajaxPrefilter:Bt(It),ajaxTransport:Bt(Wt),ajax:function(e,t){"object"==typeof e&&(t=e,e=void 0),t=t||{};var c,f,p,n,d,r,h,g,i,o,v=k.ajaxSetup({},t),y=v.context||v,m=v.context&&(y.nodeType||y.jquery)?k(y):k.event,x=k.Deferred(),b=k.Callbacks("once memory"),w=v.statusCode||{},a={},s={},u="canceled",T={readyState:0,getResponseHeader:function(e){var t;if(h){if(!n){n={};while(t=Pt.exec(p))n[t[1].toLowerCase()+" "]=(n[t[1].toLowerCase()+" "]||[]).concat(t[2])}t=n[e.toLowerCase()+" "]}return null==t?null:t.join(", ")},getAllResponseHeaders:function(){return h?p:null},setRequestHeader:function(e,t){return null==h&&(e=s[e.toLowerCase()]=s[e.toLowerCase()]||e,a[e]=t),this},overrideMimeType:function(e){return null==h&&(v.mimeType=e),this},statusCode:function(e){var t;if(e)if(h)T.always(e[T.status]);else for(t in e)w[t]=[w[t],e[t]];return this},abort:function(e){var t=e||u;return c&&c.abort(t),l(0,t),this}};if(x.promise(T),v.url=((e||v.url||Et.href)+"").replace(Mt,Et.protocol+"//"),v.type=t.method||t.type||v.method||v.type,v.dataTypes=(v.dataType||"*").toLowerCase().match(R)||[""],null==v.crossDomain){r=E.createElement("a");try{r.href=v.url,r.href=r.href,v.crossDomain=Ft.protocol+"//"+Ft.host!=r.protocol+"//"+r.host}catch(e){v.crossDomain=!0}}if(v.data&&v.processData&&"string"!=typeof v.data&&(v.data=k.param(v.data,v.traditional)),_t(It,v,t,T),h)return T;for(i in(g=k.event&&v.global)&&0==k.active++&&k.event.trigger("ajaxStart"),v.type=v.type.toUpperCase(),v.hasContent=!Rt.test(v.type),f=v.url.replace(Ht,""),v.hasContent?v.data&&v.processData&&0===(v.contentType||"").indexOf("application/x-www-form-urlencoded")&&(v.data=v.data.replace(Lt,"+")):(o=v.url.slice(f.length),v.data&&(v.processData||"string"==typeof v.data)&&(f+=(St.test(f)?"&":"?")+v.data,delete 
v.data),!1===v.cache&&(f=f.replace(Ot,"$1"),o=(St.test(f)?"&":"?")+"_="+kt+++o),v.url=f+o),v.ifModified&&(k.lastModified[f]&&T.setRequestHeader("If-Modified-Since",k.lastModified[f]),k.etag[f]&&T.setRequestHeader("If-None-Match",k.etag[f])),(v.data&&v.hasContent&&!1!==v.contentType||t.contentType)&&T.setRequestHeader("Content-Type",v.contentType),T.setRequestHeader("Accept",v.dataTypes[0]&&v.accepts[v.dataTypes[0]]?v.accepts[v.dataTypes[0]]+("*"!==v.dataTypes[0]?", "+$t+"; q=0.01":""):v.accepts["*"]),v.headers)T.setRequestHeader(i,v.headers[i]);if(v.beforeSend&&(!1===v.beforeSend.call(y,T,v)||h))return T.abort();if(u="abort",b.add(v.complete),T.done(v.success),T.fail(v.error),c=_t(Wt,v,t,T)){if(T.readyState=1,g&&m.trigger("ajaxSend",[T,v]),h)return T;v.async&&0<v.timeout&&(d=C.setTimeout(function(){T.abort("timeout")},v.timeout));try{h=!1,c.send(a,l)}catch(e){if(h)throw e;l(-1,e)}}else l(-1,"No Transport");function l(e,t,n,r){var i,o,a,s,u,l=t;h||(h=!0,d&&C.clearTimeout(d),c=void 0,p=r||"",T.readyState=0<e?4:0,i=200<=e&&e<300||304===e,n&&(s=function(e,t,n){var r,i,o,a,s=e.contents,u=e.dataTypes;while("*"===u[0])u.shift(),void 0===r&&(r=e.mimeType||t.getResponseHeader("Content-Type"));if(r)for(i in s)if(s[i]&&s[i].test(r)){u.unshift(i);break}if(u[0]in n)o=u[0];else{for(i in n){if(!u[0]||e.converters[i+" "+u[0]]){o=i;break}a||(a=i)}o=o||a}if(o)return o!==u[0]&&u.unshift(o),n[o]}(v,T,n)),s=function(e,t,n,r){var i,o,a,s,u,l={},c=e.dataTypes.slice();if(c[1])for(a in e.converters)l[a.toLowerCase()]=e.converters[a];o=c.shift();while(o)if(e.responseFields[o]&&(n[e.responseFields[o]]=t),!u&&r&&e.dataFilter&&(t=e.dataFilter(t,e.dataType)),u=o,o=c.shift())if("*"===o)o=u;else if("*"!==u&&u!==o){if(!(a=l[u+" "+o]||l["* "+o]))for(i in l)if((s=i.split(" "))[1]===o&&(a=l[u+" "+s[0]]||l["* "+s[0]])){!0===a?a=l[i]:!0!==l[i]&&(o=s[0],c.unshift(s[1]));break}if(!0!==a)if(a&&e["throws"])t=a(t);else try{t=a(t)}catch(e){return{state:"parsererror",error:a?e:"No conversion from "+u+" to "+o}}}return{state:"success",data:t}}(v,s,T,i),i?(v.ifModified&&((u=T.getResponseHeader("Last-Modified"))&&(k.lastModified[f]=u),(u=T.getResponseHeader("etag"))&&(k.etag[f]=u)),204===e||"HEAD"===v.type?l="nocontent":304===e?l="notmodified":(l=s.state,o=s.data,i=!(a=s.error))):(a=l,!e&&l||(l="error",e<0&&(e=0))),T.status=e,T.statusText=(t||l)+"",i?x.resolveWith(y,[o,l,T]):x.rejectWith(y,[T,l,a]),T.statusCode(w),w=void 0,g&&m.trigger(i?"ajaxSuccess":"ajaxError",[T,v,i?o:a]),b.fireWith(y,[T,l]),g&&(m.trigger("ajaxComplete",[T,v]),--k.active||k.event.trigger("ajaxStop")))}return T},getJSON:function(e,t,n){return k.get(e,t,n,"json")},getScript:function(e,t){return k.get(e,void 0,t,"script")}}),k.each(["get","post"],function(e,i){k[i]=function(e,t,n,r){return m(t)&&(r=r||n,n=t,t=void 0),k.ajax(k.extend({url:e,type:i,dataType:r,data:t,success:n},k.isPlainObject(e)&&e))}}),k._evalUrl=function(e,t){return k.ajax({url:e,type:"GET",dataType:"script",cache:!0,async:!1,global:!1,converters:{"text script":function(){}},dataFilter:function(e){k.globalEval(e,t)}})},k.fn.extend({wrapAll:function(e){var t;return this[0]&&(m(e)&&(e=e.call(this[0])),t=k(e,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstElementChild)e=e.firstElementChild;return e}).append(this)),this},wrapInner:function(n){return m(n)?this.each(function(e){k(this).wrapInner(n.call(this,e))}):this.each(function(){var e=k(this),t=e.contents();t.length?t.wrapAll(n):e.append(n)})},wrap:function(t){var n=m(t);return 
this.each(function(e){k(this).wrapAll(n?t.call(this,e):t)})},unwrap:function(e){return this.parent(e).not("body").each(function(){k(this).replaceWith(this.childNodes)}),this}}),k.expr.pseudos.hidden=function(e){return!k.expr.pseudos.visible(e)},k.expr.pseudos.visible=function(e){return!!(e.offsetWidth||e.offsetHeight||e.getClientRects().length)},k.ajaxSettings.xhr=function(){try{return new C.XMLHttpRequest}catch(e){}};var Ut={0:200,1223:204},Xt=k.ajaxSettings.xhr();y.cors=!!Xt&&"withCredentials"in Xt,y.ajax=Xt=!!Xt,k.ajaxTransport(function(i){var o,a;if(y.cors||Xt&&!i.crossDomain)return{send:function(e,t){var n,r=i.xhr();if(r.open(i.type,i.url,i.async,i.username,i.password),i.xhrFields)for(n in i.xhrFields)r[n]=i.xhrFields[n];for(n in i.mimeType&&r.overrideMimeType&&r.overrideMimeType(i.mimeType),i.crossDomain||e["X-Requested-With"]||(e["X-Requested-With"]="XMLHttpRequest"),e)r.setRequestHeader(n,e[n]);o=function(e){return function(){o&&(o=a=r.onload=r.onerror=r.onabort=r.ontimeout=r.onreadystatechange=null,"abort"===e?r.abort():"error"===e?"number"!=typeof r.status?t(0,"error"):t(r.status,r.statusText):t(Ut[r.status]||r.status,r.statusText,"text"!==(r.responseType||"text")||"string"!=typeof r.responseText?{binary:r.response}:{text:r.responseText},r.getAllResponseHeaders()))}},r.onload=o(),a=r.onerror=r.ontimeout=o("error"),void 0!==r.onabort?r.onabort=a:r.onreadystatechange=function(){4===r.readyState&&C.setTimeout(function(){o&&a()})},o=o("abort");try{r.send(i.hasContent&&i.data||null)}catch(e){if(o)throw e}},abort:function(){o&&o()}}}),k.ajaxPrefilter(function(e){e.crossDomain&&(e.contents.script=!1)}),k.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/\b(?:java|ecma)script\b/},converters:{"text script":function(e){return k.globalEval(e),e}}}),k.ajaxPrefilter("script",function(e){void 0===e.cache&&(e.cache=!1),e.crossDomain&&(e.type="GET")}),k.ajaxTransport("script",function(n){var r,i;if(n.crossDomain||n.scriptAttrs)return{send:function(e,t){r=k("<script>").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),E.head.appendChild(r[0])},abort:function(){i&&i()}}});var Vt,Gt=[],Yt=/(=)\?(?=&|$)|\?\?/;k.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Gt.pop()||k.expando+"_"+kt++;return this[e]=!0,e}}),k.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Yt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Yt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=m(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Yt,"$1"+r):!1!==e.jsonp&&(e.url+=(St.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||k.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=C[r],C[r]=function(){o=arguments},n.always(function(){void 0===i?k(C).removeProp(r):C[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Gt.push(r)),o&&m(i)&&i(o[0]),o=i=void 0}),"script"}),y.createHTMLDocument=((Vt=E.implementation.createHTMLDocument("").body).innerHTML="<form></form><form></form>",2===Vt.childNodes.length),k.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof 
t&&(n=t,t=!1),t||(y.createHTMLDocument?((r=(t=E.implementation.createHTMLDocument("")).createElement("base")).href=E.location.href,t.head.appendChild(r)):t=E),o=!n&&[],(i=D.exec(e))?[t.createElement(i[1])]:(i=we([e],t,o),o&&o.length&&k(o).remove(),k.merge([],i.childNodes)));var r,i,o},k.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1<s&&(r=mt(e.slice(s)),e=e.slice(0,s)),m(t)?(n=t,t=void 0):t&&"object"==typeof t&&(i="POST"),0<a.length&&k.ajax({url:e,type:i||"GET",dataType:"html",data:t}).done(function(e){o=arguments,a.html(r?k("<div>").append(k.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},k.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){k.fn[t]=function(e){return this.on(t,e)}}),k.expr.pseudos.animated=function(t){return k.grep(k.timers,function(e){return t===e.elem}).length},k.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=k.css(e,"position"),c=k(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=k.css(e,"top"),u=k.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),m(t)&&(t=t.call(e,n,k.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},k.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){k.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===k.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===k.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=k(e).offset()).top+=k.css(e,"borderTopWidth",!0),i.left+=k.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-k.css(r,"marginTop",!0),left:t.left-i.left-k.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===k.css(e,"position"))e=e.offsetParent;return e||ie})}}),k.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;k.fn[t]=function(e){return _(this,function(e,t,n){var r;if(x(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),k.each(["top","left"],function(e,n){k.cssHooks[n]=ze(y.pixelPosition,function(e,t){if(t)return t=_e(e,n),$e.test(t)?k(e).position()[n]+"px":t})}),k.each({Height:"height",Width:"width"},function(a,s){k.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){k.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return _(this,function(e,t,n){var r;return x(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?k.css(e,t,i):k.style(e,t,n,i)},s,n?e:void 0,n)}})}),k.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup 
contextmenu".split(" "),function(e,n){k.fn[n]=function(e,t){return 0<arguments.length?this.on(n,null,e,t):this.trigger(n)}}),k.fn.extend({hover:function(e,t){return this.mouseenter(e).mouseleave(t||e)}}),k.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)}}),k.proxy=function(e,t){var n,r,i;if("string"==typeof t&&(n=e[t],t=e,e=n),m(e))return r=s.call(arguments,2),(i=function(){return e.apply(t||this,r.concat(s.call(arguments)))}).guid=e.guid=e.guid||k.guid++,i},k.holdReady=function(e){e?k.readyWait++:k.ready(!0)},k.isArray=Array.isArray,k.parseJSON=JSON.parse,k.nodeName=A,k.isFunction=m,k.isWindow=x,k.camelCase=V,k.type=w,k.now=Date.now,k.isNumeric=function(e){var t=k.type(e);return("number"===t||"string"===t)&&!isNaN(e-parseFloat(e))},"function"==typeof define&&define.amd&&define("jquery",[],function(){return k});var Qt=C.jQuery,Jt=C.$;return k.noConflict=function(e){return C.$===k&&(C.$=Jt),e&&C.jQuery===k&&(C.jQuery=Qt),k},e||(C.jQuery=C.$=k),k});
diff --git a/doc/_build/html/_static/language_data.js b/doc/_build/html/_static/language_data.js
new file mode 100644 (file)
index 0000000..5266fb1
--- /dev/null
@@ -0,0 +1,297 @@
+/*
+ * language_data.js
+ * ~~~~~~~~~~~~~~~~
+ *
+ * This script contains the language-specific data used by searchtools.js,
+ * namely the list of stopwords, stemmer, scorer and splitter.
+ *
+ * :copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+var stopwords = ["a","and","are","as","at","be","but","by","for","if","in","into","is","it","near","no","not","of","on","or","such","that","the","their","then","there","these","they","this","to","was","will","with"];
+
+
+/* The non-minified JS version of this stemmer is _stemmer.js, if that file is provided */
+/**
+ * Porter Stemmer
+ */
+var Stemmer = function() {
+
+  var step2list = {
+    ational: 'ate',
+    tional: 'tion',
+    enci: 'ence',
+    anci: 'ance',
+    izer: 'ize',
+    bli: 'ble',
+    alli: 'al',
+    entli: 'ent',
+    eli: 'e',
+    ousli: 'ous',
+    ization: 'ize',
+    ation: 'ate',
+    ator: 'ate',
+    alism: 'al',
+    iveness: 'ive',
+    fulness: 'ful',
+    ousness: 'ous',
+    aliti: 'al',
+    iviti: 'ive',
+    biliti: 'ble',
+    logi: 'log'
+  };
+
+  var step3list = {
+    icate: 'ic',
+    ative: '',
+    alize: 'al',
+    iciti: 'ic',
+    ical: 'ic',
+    ful: '',
+    ness: ''
+  };
+
+  var c = "[^aeiou]";          // consonant
+  var v = "[aeiouy]";          // vowel
+  var C = c + "[^aeiouy]*";    // consonant sequence
+  var V = v + "[aeiou]*";      // vowel sequence
+
+  var mgr0 = "^(" + C + ")?" + V + C;                      // [C]VC... is m>0
+  var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$";    // [C]VC[V] is m=1
+  var mgr1 = "^(" + C + ")?" + V + C + V + C;              // [C]VCVC... is m>1
+  var s_v   = "^(" + C + ")?" + v;                         // vowel in stem
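+  // Note: in Porter's notation every word has the form [C](VC)^m[V]; "m" counts
+  // the vowel-consonant pairs, so the three patterns above test m>0, m=1 and m>1
+  // respectively, while s_v checks that the stem contains at least one vowel.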
+
+  this.stemWord = function (w) {
+    var stem;
+    var suffix;
+    var firstch;
+    var origword = w;
+
+    if (w.length < 3)
+      return w;
+
+    var re;
+    var re2;
+    var re3;
+    var re4;
+
+    firstch = w.substr(0,1);
+    if (firstch == "y")
+      w = firstch.toUpperCase() + w.substr(1);
+
+    // Step 1a
+    re = /^(.+?)(ss|i)es$/;
+    re2 = /^(.+?)([^s])s$/;
+
+    if (re.test(w))
+      w = w.replace(re,"$1$2");
+    else if (re2.test(w))
+      w = w.replace(re2,"$1$2");
+
+    // Step 1b
+    re = /^(.+?)eed$/;
+    re2 = /^(.+?)(ed|ing)$/;
+    if (re.test(w)) {
+      var fp = re.exec(w);
+      re = new RegExp(mgr0);
+      if (re.test(fp[1])) {
+        re = /.$/;
+        w = w.replace(re,"");
+      }
+    }
+    else if (re2.test(w)) {
+      var fp = re2.exec(w);
+      stem = fp[1];
+      re2 = new RegExp(s_v);
+      if (re2.test(stem)) {
+        w = stem;
+        re2 = /(at|bl|iz)$/;
+        re3 = new RegExp("([^aeiouylsz])\\1$");
+        re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
+        if (re2.test(w))
+          w = w + "e";
+        else if (re3.test(w)) {
+          re = /.$/;
+          w = w.replace(re,"");
+        }
+        else if (re4.test(w))
+          w = w + "e";
+      }
+    }
+
+    // Step 1c
+    re = /^(.+?)y$/;
+    if (re.test(w)) {
+      var fp = re.exec(w);
+      stem = fp[1];
+      re = new RegExp(s_v);
+      if (re.test(stem))
+        w = stem + "i";
+    }
+
+    // Step 2
+    re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
+    if (re.test(w)) {
+      var fp = re.exec(w);
+      stem = fp[1];
+      suffix = fp[2];
+      re = new RegExp(mgr0);
+      if (re.test(stem))
+        w = stem + step2list[suffix];
+    }
+
+    // Step 3
+    re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
+    if (re.test(w)) {
+      var fp = re.exec(w);
+      stem = fp[1];
+      suffix = fp[2];
+      re = new RegExp(mgr0);
+      if (re.test(stem))
+        w = stem + step3list[suffix];
+    }
+
+    // Step 4
+    re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
+    re2 = /^(.+?)(s|t)(ion)$/;
+    if (re.test(w)) {
+      var fp = re.exec(w);
+      stem = fp[1];
+      re = new RegExp(mgr1);
+      if (re.test(stem))
+        w = stem;
+    }
+    else if (re2.test(w)) {
+      var fp = re2.exec(w);
+      stem = fp[1] + fp[2];
+      re2 = new RegExp(mgr1);
+      if (re2.test(stem))
+        w = stem;
+    }
+
+    // Step 5
+    re = /^(.+?)e$/;
+    if (re.test(w)) {
+      var fp = re.exec(w);
+      stem = fp[1];
+      re = new RegExp(mgr1);
+      re2 = new RegExp(meq1);
+      re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
+      if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
+        w = stem;
+    }
+    re = /ll$/;
+    re2 = new RegExp(mgr1);
+    if (re.test(w) && re2.test(w)) {
+      re = /.$/;
+      w = w.replace(re,"");
+    }
+
+    // and turn initial Y back to y
+    if (firstch == "y")
+      w = firstch.toLowerCase() + w.substr(1);
+    return w;
+  }
+}
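+
+// Usage sketch (illustrative comment, not executed): searchtools.js creates one
+// Stemmer per query and applies it word by word.
+//   var stemmer = new Stemmer();
+//   stemmer.stemWord("running");     // should yield "run"
+//   stemmer.stemWord("relational");  // should yield "relat"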
+
+
+
+
+
+var splitChars = (function() {
+    var result = {};
+    var singles = [96, 180, 187, 191, 215, 247, 749, 885, 903, 907, 909, 930, 1014, 1648,
+         1748, 1809, 2416, 2473, 2481, 2526, 2601, 2609, 2612, 2615, 2653, 2702,
+         2706, 2729, 2737, 2740, 2857, 2865, 2868, 2910, 2928, 2948, 2961, 2971,
+         2973, 3085, 3089, 3113, 3124, 3213, 3217, 3241, 3252, 3295, 3341, 3345,
+         3369, 3506, 3516, 3633, 3715, 3721, 3736, 3744, 3748, 3750, 3756, 3761,
+         3781, 3912, 4239, 4347, 4681, 4695, 4697, 4745, 4785, 4799, 4801, 4823,
+         4881, 5760, 5901, 5997, 6313, 7405, 8024, 8026, 8028, 8030, 8117, 8125,
+         8133, 8181, 8468, 8485, 8487, 8489, 8494, 8527, 11311, 11359, 11687, 11695,
+         11703, 11711, 11719, 11727, 11735, 12448, 12539, 43010, 43014, 43019, 43587,
+         43696, 43713, 64286, 64297, 64311, 64317, 64319, 64322, 64325, 65141];
+    var i, j, start, end;
+    for (i = 0; i < singles.length; i++) {
+        result[singles[i]] = true;
+    }
+    var ranges = [[0, 47], [58, 64], [91, 94], [123, 169], [171, 177], [182, 184], [706, 709],
+         [722, 735], [741, 747], [751, 879], [888, 889], [894, 901], [1154, 1161],
+         [1318, 1328], [1367, 1368], [1370, 1376], [1416, 1487], [1515, 1519], [1523, 1568],
+         [1611, 1631], [1642, 1645], [1750, 1764], [1767, 1773], [1789, 1790], [1792, 1807],
+         [1840, 1868], [1958, 1968], [1970, 1983], [2027, 2035], [2038, 2041], [2043, 2047],
+         [2070, 2073], [2075, 2083], [2085, 2087], [2089, 2307], [2362, 2364], [2366, 2383],
+         [2385, 2391], [2402, 2405], [2419, 2424], [2432, 2436], [2445, 2446], [2449, 2450],
+         [2483, 2485], [2490, 2492], [2494, 2509], [2511, 2523], [2530, 2533], [2546, 2547],
+         [2554, 2564], [2571, 2574], [2577, 2578], [2618, 2648], [2655, 2661], [2672, 2673],
+         [2677, 2692], [2746, 2748], [2750, 2767], [2769, 2783], [2786, 2789], [2800, 2820],
+         [2829, 2830], [2833, 2834], [2874, 2876], [2878, 2907], [2914, 2917], [2930, 2946],
+         [2955, 2957], [2966, 2968], [2976, 2978], [2981, 2983], [2987, 2989], [3002, 3023],
+         [3025, 3045], [3059, 3076], [3130, 3132], [3134, 3159], [3162, 3167], [3170, 3173],
+         [3184, 3191], [3199, 3204], [3258, 3260], [3262, 3293], [3298, 3301], [3312, 3332],
+         [3386, 3388], [3390, 3423], [3426, 3429], [3446, 3449], [3456, 3460], [3479, 3481],
+         [3518, 3519], [3527, 3584], [3636, 3647], [3655, 3663], [3674, 3712], [3717, 3718],
+         [3723, 3724], [3726, 3731], [3752, 3753], [3764, 3772], [3774, 3775], [3783, 3791],
+         [3802, 3803], [3806, 3839], [3841, 3871], [3892, 3903], [3949, 3975], [3980, 4095],
+         [4139, 4158], [4170, 4175], [4182, 4185], [4190, 4192], [4194, 4196], [4199, 4205],
+         [4209, 4212], [4226, 4237], [4250, 4255], [4294, 4303], [4349, 4351], [4686, 4687],
+         [4702, 4703], [4750, 4751], [4790, 4791], [4806, 4807], [4886, 4887], [4955, 4968],
+         [4989, 4991], [5008, 5023], [5109, 5120], [5741, 5742], [5787, 5791], [5867, 5869],
+         [5873, 5887], [5906, 5919], [5938, 5951], [5970, 5983], [6001, 6015], [6068, 6102],
+         [6104, 6107], [6109, 6111], [6122, 6127], [6138, 6159], [6170, 6175], [6264, 6271],
+         [6315, 6319], [6390, 6399], [6429, 6469], [6510, 6511], [6517, 6527], [6572, 6592],
+         [6600, 6607], [6619, 6655], [6679, 6687], [6741, 6783], [6794, 6799], [6810, 6822],
+         [6824, 6916], [6964, 6980], [6988, 6991], [7002, 7042], [7073, 7085], [7098, 7167],
+         [7204, 7231], [7242, 7244], [7294, 7400], [7410, 7423], [7616, 7679], [7958, 7959],
+         [7966, 7967], [8006, 8007], [8014, 8015], [8062, 8063], [8127, 8129], [8141, 8143],
+         [8148, 8149], [8156, 8159], [8173, 8177], [8189, 8303], [8306, 8307], [8314, 8318],
+         [8330, 8335], [8341, 8449], [8451, 8454], [8456, 8457], [8470, 8472], [8478, 8483],
+         [8506, 8507], [8512, 8516], [8522, 8525], [8586, 9311], [9372, 9449], [9472, 10101],
+         [10132, 11263], [11493, 11498], [11503, 11516], [11518, 11519], [11558, 11567],
+         [11622, 11630], [11632, 11647], [11671, 11679], [11743, 11822], [11824, 12292],
+         [12296, 12320], [12330, 12336], [12342, 12343], [12349, 12352], [12439, 12444],
+         [12544, 12548], [12590, 12592], [12687, 12689], [12694, 12703], [12728, 12783],
+         [12800, 12831], [12842, 12880], [12896, 12927], [12938, 12976], [12992, 13311],
+         [19894, 19967], [40908, 40959], [42125, 42191], [42238, 42239], [42509, 42511],
+         [42540, 42559], [42592, 42593], [42607, 42622], [42648, 42655], [42736, 42774],
+         [42784, 42785], [42889, 42890], [42893, 43002], [43043, 43055], [43062, 43071],
+         [43124, 43137], [43188, 43215], [43226, 43249], [43256, 43258], [43260, 43263],
+         [43302, 43311], [43335, 43359], [43389, 43395], [43443, 43470], [43482, 43519],
+         [43561, 43583], [43596, 43599], [43610, 43615], [43639, 43641], [43643, 43647],
+         [43698, 43700], [43703, 43704], [43710, 43711], [43715, 43738], [43742, 43967],
+         [44003, 44015], [44026, 44031], [55204, 55215], [55239, 55242], [55292, 55295],
+         [57344, 63743], [64046, 64047], [64110, 64111], [64218, 64255], [64263, 64274],
+         [64280, 64284], [64434, 64466], [64830, 64847], [64912, 64913], [64968, 65007],
+         [65020, 65135], [65277, 65295], [65306, 65312], [65339, 65344], [65371, 65381],
+         [65471, 65473], [65480, 65481], [65488, 65489], [65496, 65497]];
+    for (i = 0; i < ranges.length; i++) {
+        start = ranges[i][0];
+        end = ranges[i][1];
+        for (j = start; j <= end; j++) {
+            result[j] = true;
+        }
+    }
+    return result;
+})();
+
+function splitQuery(query) {
+    var result = [];
+    var start = -1;
+    for (var i = 0; i < query.length; i++) {
+        if (splitChars[query.charCodeAt(i)]) {
+            if (start !== -1) {
+                result.push(query.slice(start, i));
+                start = -1;
+            }
+        } else if (start === -1) {
+            start = i;
+        }
+    }
+    if (start !== -1) {
+        result.push(query.slice(start));
+    }
+    return result;
+}
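+
+// Usage sketch (illustrative comment): splitQuery breaks a query on the
+// punctuation/space code points collected in splitChars above, while keeping
+// identifier-friendly characters such as "_" intact.
+//   splitQuery("get_lexer_by_name, pygments");
+//   // should yield ["get_lexer_by_name", "pygments"]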
+
+
diff --git a/doc/_build/html/_static/listitem.png b/doc/_build/html/_static/listitem.png
new file mode 100644 (file)
index 0000000..e45715f
Binary files /dev/null and b/doc/_build/html/_static/listitem.png differ
diff --git a/doc/_build/html/_static/logo.png b/doc/_build/html/_static/logo.png
new file mode 100644 (file)
index 0000000..2c1a24d
Binary files /dev/null and b/doc/_build/html/_static/logo.png differ
diff --git a/doc/_build/html/_static/logo_new.png b/doc/_build/html/_static/logo_new.png
new file mode 100644 (file)
index 0000000..0ae4b20
Binary files /dev/null and b/doc/_build/html/_static/logo_new.png differ
diff --git a/doc/_build/html/_static/logo_only.png b/doc/_build/html/_static/logo_only.png
new file mode 100644 (file)
index 0000000..fdebcc4
Binary files /dev/null and b/doc/_build/html/_static/logo_only.png differ
diff --git a/doc/_build/html/_static/minus.png b/doc/_build/html/_static/minus.png
new file mode 100644 (file)
index 0000000..d96755f
Binary files /dev/null and b/doc/_build/html/_static/minus.png differ
diff --git a/doc/_build/html/_static/plus.png b/doc/_build/html/_static/plus.png
new file mode 100644 (file)
index 0000000..7107cec
Binary files /dev/null and b/doc/_build/html/_static/plus.png differ
diff --git a/doc/_build/html/_static/pocoo.png b/doc/_build/html/_static/pocoo.png
new file mode 100644 (file)
index 0000000..4174149
Binary files /dev/null and b/doc/_build/html/_static/pocoo.png differ
diff --git a/doc/_build/html/_static/pygments.css b/doc/_build/html/_static/pygments.css
new file mode 100644 (file)
index 0000000..21d9178
--- /dev/null
@@ -0,0 +1,69 @@
+.highlight .hll { background-color: #ffffcc }
+.highlight  { background: #f0f0f0; }
+.highlight .c { color: #60a0b0; font-style: italic } /* Comment */
+.highlight .err { border: 1px solid #FF0000 } /* Error */
+.highlight .k { color: #007020; font-weight: bold } /* Keyword */
+.highlight .o { color: #666666 } /* Operator */
+.highlight .ch { color: #60a0b0; font-style: italic } /* Comment.Hashbang */
+.highlight .cm { color: #60a0b0; font-style: italic } /* Comment.Multiline */
+.highlight .cp { color: #007020 } /* Comment.Preproc */
+.highlight .cpf { color: #60a0b0; font-style: italic } /* Comment.PreprocFile */
+.highlight .c1 { color: #60a0b0; font-style: italic } /* Comment.Single */
+.highlight .cs { color: #60a0b0; background-color: #fff0f0 } /* Comment.Special */
+.highlight .gd { color: #A00000 } /* Generic.Deleted */
+.highlight .ge { font-style: italic } /* Generic.Emph */
+.highlight .gr { color: #FF0000 } /* Generic.Error */
+.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
+.highlight .gi { color: #00A000 } /* Generic.Inserted */
+.highlight .go { color: #888888 } /* Generic.Output */
+.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */
+.highlight .gs { font-weight: bold } /* Generic.Strong */
+.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
+.highlight .gt { color: #0044DD } /* Generic.Traceback */
+.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */
+.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */
+.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */
+.highlight .kp { color: #007020 } /* Keyword.Pseudo */
+.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */
+.highlight .kt { color: #902000 } /* Keyword.Type */
+.highlight .m { color: #40a070 } /* Literal.Number */
+.highlight .s { color: #4070a0 } /* Literal.String */
+.highlight .na { color: #4070a0 } /* Name.Attribute */
+.highlight .nb { color: #007020 } /* Name.Builtin */
+.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */
+.highlight .no { color: #60add5 } /* Name.Constant */
+.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */
+.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */
+.highlight .ne { color: #007020 } /* Name.Exception */
+.highlight .nf { color: #06287e } /* Name.Function */
+.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */
+.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */
+.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */
+.highlight .nv { color: #bb60d5 } /* Name.Variable */
+.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */
+.highlight .w { color: #bbbbbb } /* Text.Whitespace */
+.highlight .mb { color: #40a070 } /* Literal.Number.Bin */
+.highlight .mf { color: #40a070 } /* Literal.Number.Float */
+.highlight .mh { color: #40a070 } /* Literal.Number.Hex */
+.highlight .mi { color: #40a070 } /* Literal.Number.Integer */
+.highlight .mo { color: #40a070 } /* Literal.Number.Oct */
+.highlight .sa { color: #4070a0 } /* Literal.String.Affix */
+.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */
+.highlight .sc { color: #4070a0 } /* Literal.String.Char */
+.highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */
+.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */
+.highlight .s2 { color: #4070a0 } /* Literal.String.Double */
+.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */
+.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */
+.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */
+.highlight .sx { color: #c65d09 } /* Literal.String.Other */
+.highlight .sr { color: #235388 } /* Literal.String.Regex */
+.highlight .s1 { color: #4070a0 } /* Literal.String.Single */
+.highlight .ss { color: #517918 } /* Literal.String.Symbol */
+.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */
+.highlight .fm { color: #06287e } /* Name.Function.Magic */
+.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */
+.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */
+.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */
+.highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */
+.highlight .il { color: #40a070 } /* Literal.Number.Integer.Long */
\ No newline at end of file
diff --git a/doc/_build/html/_static/pygments14.css b/doc/_build/html/_static/pygments14.css
new file mode 100644 (file)
index 0000000..0bc888c
--- /dev/null
@@ -0,0 +1,401 @@
+/*
+ * pygments14.css
+ * ~~~~~~~~~~~~~~
+ *
+ * Sphinx stylesheet -- pygments14 theme.  Heavily copied from sphinx13.
+ *
+ * :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+@import url("basic.css");
+
+/* -- page layout ----------------------------------------------------------- */
+
+body {
+    font-family: PT Sans, 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
+                 'Verdana', sans-serif;
+    font-size: 14px;
+    text-align: center;
+    background-image: url(bodybg.png);
+    background-color: #f9f9f9;
+    color: black;
+    padding: 0;
+    /*
+    border-right: 1px solid #66b55e;
+    border-left: 1px solid #66b55e;
+    */
+
+    margin: 0 auto;
+    min-width: 780px;
+    max-width: 1080px;
+}
+
+.outerwrapper {
+    background-image: url(docbg.png);
+    background-attachment: fixed;
+}
+
+.pageheader {
+    text-align: left;
+    padding: 10px 15px;
+}
+
+.pageheader ul {
+    float: right;
+    color: white;
+    list-style-type: none;
+    padding-left: 0;
+    margin-top: 40px;
+    margin-right: 10px;
+}
+
+.pageheader li {
+    float: left;
+    margin: 0 0 0 10px;
+}
+
+.pageheader li a {
+    border-radius: 3px;
+    padding: 8px 12px;
+    color: #666666;
+    text-shadow: 0 0 5px rgba(0, 0, 0, 0.2);
+}
+
+.pageheader li a:hover {
+    background-color: #f4cd00;
+    color: black;
+    text-shadow: none;
+}
+
+div.document {
+    text-align: left;
+    /*border-left: 1em solid #fffbe3;*/
+}
+
+div.bodywrapper {
+    margin: 0 12px 0 240px;
+    background-color: white;
+/*    border-right: 1px solid #66b55e; */
+}
+
+div.body {
+    margin: 0;
+    padding: 0.5em 20px 20px 20px;
+}
+
+div.related {
+    font-size: 1em;
+    color: #666666;
+}
+
+div.related ul {
+    background-image: url(relbg.png);
+    background-repeat: repeat-y;
+    background-color: #f4cd00;
+    height: 1.9em;
+    /*
+    border-top: 1px solid #66b55e;
+    border-bottom: 1px solid #66b55e;
+    */
+}
+
+div.related ul li {
+    margin: 0 5px 0 0;
+    padding: 0;
+    float: left;
+}
+
+div.related ul li.right {
+    float: right;
+    margin-right: 5px;
+}
+
+div.related ul li a {
+    margin: 0;
+    padding: 0 5px 0 5px;
+    line-height: 1.75em;
+    color: #666666;
+    /*text-shadow: 0px 0px 1px rgba(0, 0, 0, 0.5);*/
+}
+
+div.related ul li a:hover {
+    text-decoration: underline;
+    text-shadow: 0px 0px 1px rgba(255, 255, 255, 0.5);
+}
+
+div.sphinxsidebarwrapper {
+    position: relative;
+    top: 0px;
+    padding: 0;
+}
+
+div.sphinxsidebar {
+    margin: 0;
+    padding: 0 0px 15px 15px;
+    width: 210px;
+    float: left;
+    font-size: 1em;
+    text-align: left;
+}
+
+div.sphinxsidebar .logo {
+    font-size: 1.8em;
+    color: #666;
+    font-weight: 300;
+    text-align: center;
+}
+
+div.sphinxsidebar .logo img {
+    vertical-align: middle;
+}
+
+div.sphinxsidebar input {
+    border: 1px solid #aaa;
+    font-family: PT Sans, 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
+                 'Verdana', sans-serif;
+    font-size: 1em;
+}
+
+div.sphinxsidebar h3 {
+    font-size: 1.5em;
+    /* border-top: 1px solid #66b55e; */
+    margin-top: 1em;
+    margin-bottom: 0.5em;
+    padding-top: 0.5em;
+}
+
+div.sphinxsidebar h4 {
+    font-size: 1.2em;
+    margin-bottom: 0;
+}
+
+div.sphinxsidebar h3, div.sphinxsidebar h4 {
+    margin-right: -15px;
+    margin-left: -15px;
+    padding-right: 14px;
+    padding-left: 14px;
+    color: #333;
+    font-weight: 300;
+    /*text-shadow: 0px 0px 0.5px rgba(0, 0, 0, 0.4);*/
+}
+
+div.sphinxsidebarwrapper > h3:first-child {
+    margin-top: 0.5em;
+    border: none;
+}
+
+div.sphinxsidebar h3 a {
+    color: #333;
+}
+
+div.sphinxsidebar ul {
+    color: #444;
+    margin-top: 7px;
+    padding: 0;
+    line-height: 130%;
+}
+
+div.sphinxsidebar ul ul {
+    margin-left: 20px;
+    list-style-image: url(listitem.png);
+}
+
+div.footer {
+    color: #666666;
+    text-shadow: 0 0 .2px rgba(255, 255, 255, 0.8);
+    padding: 2em;
+    text-align: center;
+    clear: both;
+    font-size: 0.8em;
+}
+
+/* -- body styles ----------------------------------------------------------- */
+
+p {
+    margin: 0.8em 0 0.5em 0;
+}
+
+a {
+    color: #36852e;
+    text-decoration: none;
+}
+
+a:hover {
+    color: #d4ad00;
+}
+
+div.body a {
+    text-decoration: underline;
+}
+
+h1 {
+    margin: 10px 0 0 0;
+    font-size: 2.4em;
+    color: #666666;
+    font-weight: 300;
+}
+
+h2 {
+    margin: 1em 0 0.2em 0;
+    font-size: 1.5em;
+    font-weight: 300;
+    padding: 0;
+    color: #36852e;
+}
+
+h3 {
+    margin: 1em 0 -0.3em 0;
+    font-size: 1.3em;
+    font-weight: 300;
+}
+
+div.body h1 a, div.body h2 a, div.body h3 a, div.body h4 a, div.body h5 a, div.body h6 a {
+    text-decoration: none;
+}
+
+div.body h1 a tt, div.body h2 a tt, div.body h3 a tt, div.body h4 a tt, div.body h5 a tt, div.body h6 a tt {
+    color: #36852e !important;
+    font-size: inherit !important;
+}
+
+a.headerlink {
+    color: #66b55e !important;
+    font-size: 12px;
+    margin-left: 6px;
+    padding: 0 4px 0 4px;
+    text-decoration: none !important;
+    float: right;
+}
+
+a.headerlink:hover {
+    background-color: #ccc;
+    color: white!important;
+}
+
+cite, code, tt {
+    font-family: 'Consolas', 'DejaVu Sans Mono',
+                 'Bitstream Vera Sans Mono', monospace;
+    font-size: 14px;
+    letter-spacing: -0.02em;
+}
+
+tt {
+    background-color: #f2f2f2;
+    border: 1px solid #ddd;
+    border-radius: 2px;
+    color: #333;
+    padding: 1px;
+}
+
+tt.descname, tt.descclassname, tt.xref {
+    border: 0;
+}
+
+hr {
+    border: 1px solid #abc;
+    margin: 2em;
+}
+
+a tt {
+    border: 0;
+    color: #36852e;
+}
+
+a tt:hover {
+    color: #d4ad00;
+}
+
+pre {
+    font-family: 'Consolas', 'DejaVu Sans Mono',
+                 'Bitstream Vera Sans Mono', monospace;
+    font-size: 13px;
+    letter-spacing: 0.015em;
+    line-height: 120%;
+    padding: 0.5em;
+    border: 1px solid #ccc;
+    border-radius: 2px;
+    background-color: #f8f8f8;
+}
+
+pre a {
+    color: inherit;
+    text-decoration: underline;
+}
+
+td.linenos pre {
+    padding: 0.5em 0;
+}
+
+div.quotebar {
+    background-color: #f8f8f8;
+    max-width: 250px;
+    float: right;
+    padding: 0px 7px;
+    border: 1px solid #ccc;
+    margin-left: 1em;
+}
+
+div.topic {
+    background-color: #f8f8f8;
+}
+
+table {
+    border-collapse: collapse;
+    margin: 0 -0.5em 0 -0.5em;
+}
+
+table td, table th {
+    padding: 0.2em 0.5em 0.2em 0.5em;
+}
+
+div.admonition, div.warning {
+    font-size: 0.9em;
+    margin: 1em 0 1em 0;
+    border: 1px solid #86989B;
+    border-radius: 2px;
+    background-color: #f7f7f7;
+    padding: 0;
+}
+
+div.admonition p, div.warning p {
+    margin: 0.5em 1em 0.5em 1em;
+    padding: 0;
+}
+
+div.admonition pre, div.warning pre {
+    margin: 0.4em 1em 0.4em 1em;
+}
+
+div.admonition p.admonition-title,
+div.warning p.admonition-title {
+    margin-top: 1em;
+    padding-top: 0.5em;
+    font-weight: bold;
+}
+
+div.warning {
+    border: 1px solid #940000;
+/*    background-color: #FFCCCF;*/
+}
+
+div.warning p.admonition-title {
+}
+
+div.admonition ul, div.admonition ol,
+div.warning ul, div.warning ol {
+    margin: 0.1em 0.5em 0.5em 3em;
+    padding: 0;
+}
+
+.viewcode-back {
+    font-family: PT Sans, 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
+                 'Verdana', sans-serif;
+}
+
+div.viewcode-block:target {
+    background-color: #f4debf;
+    border-top: 1px solid #ac9;
+    border-bottom: 1px solid #ac9;
+}
\ No newline at end of file
diff --git a/doc/_build/html/_static/searchtools.js b/doc/_build/html/_static/searchtools.js
new file mode 100644 (file)
index 0000000..6031f99
--- /dev/null
@@ -0,0 +1,506 @@
+/*
+ * searchtools.js
+ * ~~~~~~~~~~~~~~
+ *
+ * Sphinx JavaScript utilities for the full-text search.
+ *
+ * :copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+if (!Scorer) {
+  /**
+   * Simple result scoring code.
+   */
+  var Scorer = {
+    // Implement the following function to further tweak the score for each result
+    // The function takes a result array [filename, title, anchor, descr, score]
+    // and returns the new score.
+    /*
+    score: function(result) {
+      return result[4];
+    },
+    */
+
+    // query matches the full name of an object
+    objNameMatch: 11,
+    // or matches in the last dotted part of the object name
+    objPartialMatch: 6,
+    // Additive scores depending on the priority of the object
+    objPrio: {0:  15,   // used to be importantResults
+              1:  5,   // used to be objectResults
+              2: -5},  // used to be unimportantResults
+    //  Used when the priority is not in the mapping.
+    objPrioDefault: 0,
+
+    // query found in title
+    title: 15,
+    partialTitle: 7,
+    // query found in terms
+    term: 5,
+    partialTerm: 2
+  };
+}
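+
+// Customization sketch (illustrative, assuming a page-provided override): a
+// theme can define `Scorer` before this file loads; the guard above then keeps
+// that object, so an override must supply every weight it relies on, e.g.:
+//   var Scorer = {
+//     score: function(result) { return result[4]; },  // result = [filename, title, anchor, descr, score]
+//     objNameMatch: 11, objPartialMatch: 6,
+//     objPrio: {0: 15, 1: 5, 2: -5}, objPrioDefault: 0,
+//     title: 15, partialTitle: 7, term: 5, partialTerm: 2
+//   };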
+
+if (!splitQuery) {
+  function splitQuery(query) {
+    return query.split(/\s+/);
+  }
+}
+
+/**
+ * Search Module
+ */
+var Search = {
+
+  _index : null,
+  _queued_query : null,
+  _pulse_status : -1,
+
+  htmlToText : function(htmlString) {
+      var htmlElement = document.createElement('span');
+      htmlElement.innerHTML = htmlString;
+      $(htmlElement).find('.headerlink').remove();
+      var docContent = $(htmlElement).find('[role=main]')[0];
+      return docContent.textContent || docContent.innerText;
+  },
+
+  init : function() {
+      var params = $.getQueryParameters();
+      if (params.q) {
+          var query = params.q[0];
+          $('input[name="q"]')[0].value = query;
+          this.performSearch(query);
+      }
+  },
+
+  loadIndex : function(url) {
+    $.ajax({type: "GET", url: url, data: null,
+            dataType: "script", cache: true,
+            complete: function(jqxhr, textstatus) {
+              if (textstatus != "success") {
+                document.getElementById("searchindexloader").src = url;
+              }
+            }});
+  },
+
+  setIndex : function(index) {
+    var q;
+    this._index = index;
+    if ((q = this._queued_query) !== null) {
+      this._queued_query = null;
+      Search.query(q);
+    }
+  },
+
+  hasIndex : function() {
+      return this._index !== null;
+  },
+
+  deferQuery : function(query) {
+      this._queued_query = query;
+  },
+
+  stopPulse : function() {
+      this._pulse_status = 0;
+  },
+
+  startPulse : function() {
+    if (this._pulse_status >= 0)
+        return;
+    function pulse() {
+      var i;
+      Search._pulse_status = (Search._pulse_status + 1) % 4;
+      var dotString = '';
+      for (i = 0; i < Search._pulse_status; i++)
+        dotString += '.';
+      Search.dots.text(dotString);
+      if (Search._pulse_status > -1)
+        window.setTimeout(pulse, 500);
+    }
+    pulse();
+  },
+
+  /**
+   * perform a search for something (or wait until index is loaded)
+   */
+  performSearch : function(query) {
+    // create the required interface elements
+    this.out = $('#search-results');
+    this.title = $('<h2>' + _('Searching') + '</h2>').appendTo(this.out);
+    this.dots = $('<span></span>').appendTo(this.title);
+    this.status = $('<p class="search-summary">&nbsp;</p>').appendTo(this.out);
+    this.output = $('<ul class="search"/>').appendTo(this.out);
+
+    $('#search-progress').text(_('Preparing search...'));
+    this.startPulse();
+
+    // index already loaded, the browser was quick!
+    if (this.hasIndex())
+      this.query(query);
+    else
+      this.deferQuery(query);
+  },
+
+  /**
+   * execute search (requires search index to be loaded)
+   */
+  query : function(query) {
+    var i;
+
+    // stem the searchterms and add them to the correct list
+    var stemmer = new Stemmer();
+    var searchterms = [];
+    var excluded = [];
+    var hlterms = [];
+    var tmp = splitQuery(query);
+    var objectterms = [];
+    for (i = 0; i < tmp.length; i++) {
+      if (tmp[i] !== "") {
+          objectterms.push(tmp[i].toLowerCase());
+      }
+
+      if ($u.indexOf(stopwords, tmp[i].toLowerCase()) != -1 || tmp[i].match(/^\d+$/) ||
+          tmp[i] === "") {
+        // skip this "word"
+        continue;
+      }
+      // stem the word
+      var word = stemmer.stemWord(tmp[i].toLowerCase());
+      // prevent the stemmer from shortening a word to fewer than three chars
+      if(word.length < 3 && tmp[i].length >= 3) {
+        word = tmp[i];
+      }
+      var toAppend;
+      // select the correct list
+      if (word[0] == '-') {
+        toAppend = excluded;
+        word = word.substr(1);
+      }
+      else {
+        toAppend = searchterms;
+        hlterms.push(tmp[i].toLowerCase());
+      }
+      // only add if not already in the list
+      if (!$u.contains(toAppend, word))
+        toAppend.push(word);
+    }
+    var highlightstring = '?highlight=' + $.urlencode(hlterms.join(" "));
+
+    // console.debug('SEARCH: searching for:');
+    // console.info('required: ', searchterms);
+    // console.info('excluded: ', excluded);
+
+    // prepare search
+    var terms = this._index.terms;
+    var titleterms = this._index.titleterms;
+
+    // array of [filename, title, anchor, descr, score]
+    var results = [];
+    $('#search-progress').empty();
+
+    // lookup as object
+    for (i = 0; i < objectterms.length; i++) {
+      var others = [].concat(objectterms.slice(0, i),
+                             objectterms.slice(i+1, objectterms.length));
+      results = results.concat(this.performObjectSearch(objectterms[i], others));
+    }
+
+    // lookup as search terms in fulltext
+    results = results.concat(this.performTermsSearch(searchterms, excluded, terms, titleterms));
+
+    // let the scorer override scores with a custom scoring function
+    if (Scorer.score) {
+      for (i = 0; i < results.length; i++)
+        results[i][4] = Scorer.score(results[i]);
+    }
+
+    // now sort the results by score (in opposite order of appearance, since the
+    // display function below uses pop() to retrieve items) and then
+    // alphabetically
+    results.sort(function(a, b) {
+      var left = a[4];
+      var right = b[4];
+      if (left > right) {
+        return 1;
+      } else if (left < right) {
+        return -1;
+      } else {
+        // same score: sort alphabetically
+        left = a[1].toLowerCase();
+        right = b[1].toLowerCase();
+        return (left > right) ? -1 : ((left < right) ? 1 : 0);
+      }
+    });
+
+    // for debugging
+    //Search.lastresults = results.slice();  // a copy
+    //console.info('search results:', Search.lastresults);
+
+    // print the results
+    var resultCount = results.length;
+    function displayNextItem() {
+      // results left, load the summary and display it
+      if (results.length) {
+        var item = results.pop();
+        var listItem = $('<li style="display:none"></li>');
+        if (DOCUMENTATION_OPTIONS.FILE_SUFFIX === '') {
+          // dirhtml builder
+          var dirname = item[0] + '/';
+          if (dirname.match(/\/index\/$/)) {
+            dirname = dirname.substring(0, dirname.length-6);
+          } else if (dirname == 'index/') {
+            dirname = '';
+          }
+          listItem.append($('<a/>').attr('href',
+            DOCUMENTATION_OPTIONS.URL_ROOT + dirname +
+            highlightstring + item[2]).html(item[1]));
+        } else {
+          // normal html builders
+          listItem.append($('<a/>').attr('href',
+            item[0] + DOCUMENTATION_OPTIONS.FILE_SUFFIX +
+            highlightstring + item[2]).html(item[1]));
+        }
+        if (item[3]) {
+          listItem.append($('<span> (' + item[3] + ')</span>'));
+          Search.output.append(listItem);
+          listItem.slideDown(5, function() {
+            displayNextItem();
+          });
+        } else if (DOCUMENTATION_OPTIONS.HAS_SOURCE) {
+          $.ajax({url: DOCUMENTATION_OPTIONS.URL_ROOT + item[0] + DOCUMENTATION_OPTIONS.FILE_SUFFIX,
+                  dataType: "text",
+                  complete: function(jqxhr, textstatus) {
+                    var data = jqxhr.responseText;
+                    if (data !== '' && data !== undefined) {
+                      listItem.append(Search.makeSearchSummary(data, searchterms, hlterms));
+                    }
+                    Search.output.append(listItem);
+                    listItem.slideDown(5, function() {
+                      displayNextItem();
+                    });
+                  }});
+        } else {
+          // no source available, just display title
+          Search.output.append(listItem);
+          listItem.slideDown(5, function() {
+            displayNextItem();
+          });
+        }
+      }
+      // search finished, update title and status message
+      else {
+        Search.stopPulse();
+        Search.title.text(_('Search Results'));
+        if (!resultCount)
+          Search.status.text(_('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.'));
+        else
+            Search.status.text(_('Search finished, found %s page(s) matching the search query.').replace('%s', resultCount));
+        Search.status.fadeIn(500);
+      }
+    }
+    displayNextItem();
+  },
+
+  /**
+   * search for object names
+   */
+  performObjectSearch : function(object, otherterms) {
+    var filenames = this._index.filenames;
+    var docnames = this._index.docnames;
+    var objects = this._index.objects;
+    var objnames = this._index.objnames;
+    var titles = this._index.titles;
+
+    var i;
+    var results = [];
+
+    for (var prefix in objects) {
+      for (var name in objects[prefix]) {
+        var fullname = (prefix ? prefix + '.' : '') + name;
+        var fullnameLower = fullname.toLowerCase()
+        if (fullnameLower.indexOf(object) > -1) {
+          var score = 0;
+          var parts = fullnameLower.split('.');
+          // check for different match types: exact matches of full name or
+          // "last name" (i.e. last dotted part)
+          if (fullnameLower == object || parts[parts.length - 1] == object) {
+            score += Scorer.objNameMatch;
+          // matches in last name
+          } else if (parts[parts.length - 1].indexOf(object) > -1) {
+            score += Scorer.objPartialMatch;
+          }
+          var match = objects[prefix][name];
+          var objname = objnames[match[1]][2];
+          var title = titles[match[0]];
+          // If more than one term searched for, we require other words to be
+          // found in the name/title/description
+          if (otherterms.length > 0) {
+            var haystack = (prefix + ' ' + name + ' ' +
+                            objname + ' ' + title).toLowerCase();
+            var allfound = true;
+            for (i = 0; i < otherterms.length; i++) {
+              if (haystack.indexOf(otherterms[i]) == -1) {
+                allfound = false;
+                break;
+              }
+            }
+            if (!allfound) {
+              continue;
+            }
+          }
+          var descr = objname + _(', in ') + title;
+
+          var anchor = match[3];
+          if (anchor === '')
+            anchor = fullname;
+          else if (anchor == '-')
+            anchor = objnames[match[1]][1] + '-' + fullname;
+          // add custom score for some objects according to scorer
+          if (Scorer.objPrio.hasOwnProperty(match[2])) {
+            score += Scorer.objPrio[match[2]];
+          } else {
+            score += Scorer.objPrioDefault;
+          }
+          results.push([docnames[match[0]], fullname, '#'+anchor, descr, score, filenames[match[0]]]);
+        }
+      }
+    }
+
+    return results;
+  },
+
+  /**
+   * search for full-text terms in the index
+   */
+  performTermsSearch : function(searchterms, excluded, terms, titleterms) {
+    var docnames = this._index.docnames;
+    var filenames = this._index.filenames;
+    var titles = this._index.titles;
+
+    var i, j, file;
+    var fileMap = {};
+    var scoreMap = {};
+    var results = [];
+
+    // perform the search on the required terms
+    for (i = 0; i < searchterms.length; i++) {
+      var word = searchterms[i];
+      var files = [];
+      var _o = [
+        {files: terms[word], score: Scorer.term},
+        {files: titleterms[word], score: Scorer.title}
+      ];
+      // add support for partial matches
+      if (word.length > 2) {
+        for (var w in terms) {
+          if (w.match(word) && !terms[word]) {
+            _o.push({files: terms[w], score: Scorer.partialTerm})
+          }
+        }
+        for (var w in titleterms) {
+          if (w.match(word) && !titleterms[word]) {
+              _o.push({files: titleterms[w], score: Scorer.partialTitle})
+          }
+        }
+      }
+
+      // no match but word was a required one
+      if ($u.every(_o, function(o){return o.files === undefined;})) {
+        break;
+      }
+      // found search word in contents
+      $u.each(_o, function(o) {
+        var _files = o.files;
+        if (_files === undefined)
+          return
+
+        if (_files.length === undefined)
+          _files = [_files];
+        files = files.concat(_files);
+
+        // set score for the word in each file to Scorer.term
+        for (j = 0; j < _files.length; j++) {
+          file = _files[j];
+          if (!(file in scoreMap))
+            scoreMap[file] = {}
+          scoreMap[file][word] = o.score;
+        }
+      });
+
+      // create the mapping
+      for (j = 0; j < files.length; j++) {
+        file = files[j];
+        if (file in fileMap)
+          fileMap[file].push(word);
+        else
+          fileMap[file] = [word];
+      }
+    }
+
+    // now check if the files don't contain excluded terms
+    for (file in fileMap) {
+      var valid = true;
+
+      // check if all requirements are matched
+      // search terms shorter than 3 chars are discarded, so ignore them here as well
+      var filteredTermCount =
+        searchterms.filter(function(term){return term.length > 2}).length;
+      if (
+        fileMap[file].length != searchterms.length &&
+        fileMap[file].length != filteredTermCount
+      ) continue;
+
+      // ensure that none of the excluded terms is in the search result
+      for (i = 0; i < excluded.length; i++) {
+        if (terms[excluded[i]] == file ||
+            titleterms[excluded[i]] == file ||
+            $u.contains(terms[excluded[i]] || [], file) ||
+            $u.contains(titleterms[excluded[i]] || [], file)) {
+          valid = false;
+          break;
+        }
+      }
+
+      // if we have still a valid result we can add it to the result list
+      if (valid) {
+        // select one (max) score for the file.
+        // for better ranking we could compute the score from word statistics such as basic tf-idf...
+        var score = $u.max($u.map(fileMap[file], function(w){return scoreMap[file][w]}));
+        results.push([docnames[file], titles[file], '', null, score, filenames[file]]);
+      }
+    }
+    return results;
+  },
+
+  /**
+   * helper function to return a node containing the
+   * search summary for a given text. keywords is a list
+   * of stemmed words, hlwords is the list of normal, unstemmed
+   * words. the first one is used to find the occurrence, the
+   * latter for highlighting it.
+   */
+  makeSearchSummary : function(htmlText, keywords, hlwords) {
+    var text = Search.htmlToText(htmlText);
+    var textLower = text.toLowerCase();
+    var start = 0;
+    $.each(keywords, function() {
+      var i = textLower.indexOf(this.toLowerCase());
+      if (i > -1)
+        start = i;
+    });
+    start = Math.max(start - 120, 0);
+    var excerpt = ((start > 0) ? '...' : '') +
+      $.trim(text.substr(start, 240)) +
+      ((start + 240 < text.length) ? '...' : '');
+    var rv = $('<div class="context"></div>').text(excerpt);
+    $.each(hlwords, function() {
+      rv = rv.highlightText(this, 'highlighted');
+    });
+    return rv;
+  }
+};
+
+$(document).ready(function() {
+  Search.init();
+});
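+
+// Loading sketch (illustrative comment, assuming the standard Sphinx search
+// page): search.html typically loads this file and then the generated index,
+// e.g. Search.loadIndex(DOCUMENTATION_OPTIONS.URL_ROOT + 'searchindex.js');
+// searchindex.js in turn calls Search.setIndex({...}), which runs any query
+// that performSearch() had deferred while the index was still loading.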
diff --git a/doc/_build/html/_static/spinner.gif b/doc/_build/html/_static/spinner.gif
new file mode 100644 (file)
index 0000000..2212db9
Binary files /dev/null and b/doc/_build/html/_static/spinner.gif differ
diff --git a/doc/_build/html/_static/underscore-1.3.1.js b/doc/_build/html/_static/underscore-1.3.1.js
new file mode 100644 (file)
index 0000000..208d4cd
--- /dev/null
@@ -0,0 +1,999 @@
+//     Underscore.js 1.3.1
+//     (c) 2009-2012 Jeremy Ashkenas, DocumentCloud Inc.
+//     Underscore is freely distributable under the MIT license.
+//     Portions of Underscore are inspired or borrowed from Prototype,
+//     Oliver Steele's Functional, and John Resig's Micro-Templating.
+//     For all details and documentation:
+//     http://documentcloud.github.com/underscore
+
+(function() {
+
+  // Baseline setup
+  // --------------
+
+  // Establish the root object, `window` in the browser, or `global` on the server.
+  var root = this;
+
+  // Save the previous value of the `_` variable.
+  var previousUnderscore = root._;
+
+  // Establish the object that gets returned to break out of a loop iteration.
+  var breaker = {};
+
+  // Save bytes in the minified (but not gzipped) version:
+  var ArrayProto = Array.prototype, ObjProto = Object.prototype, FuncProto = Function.prototype;
+
+  // Create quick reference variables for speed access to core prototypes.
+  var slice            = ArrayProto.slice,
+      unshift          = ArrayProto.unshift,
+      toString         = ObjProto.toString,
+      hasOwnProperty   = ObjProto.hasOwnProperty;
+
+  // All **ECMAScript 5** native function implementations that we hope to use
+  // are declared here.
+  var
+    nativeForEach      = ArrayProto.forEach,
+    nativeMap          = ArrayProto.map,
+    nativeReduce       = ArrayProto.reduce,
+    nativeReduceRight  = ArrayProto.reduceRight,
+    nativeFilter       = ArrayProto.filter,
+    nativeEvery        = ArrayProto.every,
+    nativeSome         = ArrayProto.some,
+    nativeIndexOf      = ArrayProto.indexOf,
+    nativeLastIndexOf  = ArrayProto.lastIndexOf,
+    nativeIsArray      = Array.isArray,
+    nativeKeys         = Object.keys,
+    nativeBind         = FuncProto.bind;
+
+  // Create a safe reference to the Underscore object for use below.
+  var _ = function(obj) { return new wrapper(obj); };
+
+  // Export the Underscore object for **Node.js**, with
+  // backwards-compatibility for the old `require()` API. If we're in
+  // the browser, add `_` as a global object via a string identifier,
+  // for Closure Compiler "advanced" mode.
+  if (typeof exports !== 'undefined') {
+    if (typeof module !== 'undefined' && module.exports) {
+      exports = module.exports = _;
+    }
+    exports._ = _;
+  } else {
+    root['_'] = _;
+  }
+
+  // Current version.
+  _.VERSION = '1.3.1';
+
+  // Collection Functions
+  // --------------------
+
+  // The cornerstone, an `each` implementation, aka `forEach`.
+  // Handles objects with the built-in `forEach`, arrays, and raw objects.
+  // Delegates to **ECMAScript 5**'s native `forEach` if available.
+  var each = _.each = _.forEach = function(obj, iterator, context) {
+    if (obj == null) return;
+    if (nativeForEach && obj.forEach === nativeForEach) {
+      obj.forEach(iterator, context);
+    } else if (obj.length === +obj.length) {
+      for (var i = 0, l = obj.length; i < l; i++) {
+        if (i in obj && iterator.call(context, obj[i], i, obj) === breaker) return;
+      }
+    } else {
+      for (var key in obj) {
+        if (_.has(obj, key)) {
+          if (iterator.call(context, obj[key], key, obj) === breaker) return;
+        }
+      }
+    }
+  };
+
+  // Return the results of applying the iterator to each element.
+  // Delegates to **ECMAScript 5**'s native `map` if available.
+  _.map = _.collect = function(obj, iterator, context) {
+    var results = [];
+    if (obj == null) return results;
+    if (nativeMap && obj.map === nativeMap) return obj.map(iterator, context);
+    each(obj, function(value, index, list) {
+      results[results.length] = iterator.call(context, value, index, list);
+    });
+    if (obj.length === +obj.length) results.length = obj.length;
+    return results;
+  };
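+
+  // Usage sketch (illustrative comment):
+  //   _.map([1, 2, 3], function(num){ return num * 3; });   // => [3, 6, 9]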
+
+  // **Reduce** builds up a single result from a list of values, aka `inject`,
+  // or `foldl`. Delegates to **ECMAScript 5**'s native `reduce` if available.
+  _.reduce = _.foldl = _.inject = function(obj, iterator, memo, context) {
+    var initial = arguments.length > 2;
+    if (obj == null) obj = [];
+    if (nativeReduce && obj.reduce === nativeReduce) {
+      if (context) iterator = _.bind(iterator, context);
+      return initial ? obj.reduce(iterator, memo) : obj.reduce(iterator);
+    }
+    each(obj, function(value, index, list) {
+      if (!initial) {
+        memo = value;
+        initial = true;
+      } else {
+        memo = iterator.call(context, memo, value, index, list);
+      }
+    });
+    if (!initial) throw new TypeError('Reduce of empty array with no initial value');
+    return memo;
+  };
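+
+  // Usage sketch (illustrative comment):
+  //   _.reduce([1, 2, 3], function(memo, num){ return memo + num; }, 0);   // => 6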
+
+  // The right-associative version of reduce, also known as `foldr`.
+  // Delegates to **ECMAScript 5**'s native `reduceRight` if available.
+  _.reduceRight = _.foldr = function(obj, iterator, memo, context) {
+    var initial = arguments.length > 2;
+    if (obj == null) obj = [];
+    if (nativeReduceRight && obj.reduceRight === nativeReduceRight) {
+      if (context) iterator = _.bind(iterator, context);
+      return initial ? obj.reduceRight(iterator, memo) : obj.reduceRight(iterator);
+    }
+    var reversed = _.toArray(obj).reverse();
+    if (context && !initial) iterator = _.bind(iterator, context);
+    return initial ? _.reduce(reversed, iterator, memo, context) : _.reduce(reversed, iterator);
+  };
+
+  // Return the first value which passes a truth test. Aliased as `detect`.
+  _.find = _.detect = function(obj, iterator, context) {
+    var result;
+    any(obj, function(value, index, list) {
+      if (iterator.call(context, value, index, list)) {
+        result = value;
+        return true;
+      }
+    });
+    return result;
+  };
+
+  // Return all the elements that pass a truth test.
+  // Delegates to **ECMAScript 5**'s native `filter` if available.
+  // Aliased as `select`.
+  _.filter = _.select = function(obj, iterator, context) {
+    var results = [];
+    if (obj == null) return results;
+    if (nativeFilter && obj.filter === nativeFilter) return obj.filter(iterator, context);
+    each(obj, function(value, index, list) {
+      if (iterator.call(context, value, index, list)) results[results.length] = value;
+    });
+    return results;
+  };
+
+  // Return all the elements for which a truth test fails.
+  _.reject = function(obj, iterator, context) {
+    var results = [];
+    if (obj == null) return results;
+    each(obj, function(value, index, list) {
+      if (!iterator.call(context, value, index, list)) results[results.length] = value;
+    });
+    return results;
+  };
+
+  // Determine whether all of the elements match a truth test.
+  // Delegates to **ECMAScript 5**'s native `every` if available.
+  // Aliased as `all`.
+  _.every = _.all = function(obj, iterator, context) {
+    var result = true;
+    if (obj == null) return result;
+    if (nativeEvery && obj.every === nativeEvery) return obj.every(iterator, context);
+    each(obj, function(value, index, list) {
+      if (!(result = result && iterator.call(context, value, index, list))) return breaker;
+    });
+    return result;
+  };
+
+  // Determine if at least one element in the object matches a truth test.
+  // Delegates to **ECMAScript 5**'s native `some` if available.
+  // Aliased as `any`.
+  var any = _.some = _.any = function(obj, iterator, context) {
+    iterator || (iterator = _.identity);
+    var result = false;
+    if (obj == null) return result;
+    if (nativeSome && obj.some === nativeSome) return obj.some(iterator, context);
+    each(obj, function(value, index, list) {
+      if (result || (result = iterator.call(context, value, index, list))) return breaker;
+    });
+    return !!result;
+  };
+
+  // Determine if a given value is included in the array or object using `===`.
+  // Aliased as `contains`.
+  _.include = _.contains = function(obj, target) {
+    var found = false;
+    if (obj == null) return found;
+    if (nativeIndexOf && obj.indexOf === nativeIndexOf) return obj.indexOf(target) != -1;
+    found = any(obj, function(value) {
+      return value === target;
+    });
+    return found;
+  };
+
+  // Invoke a method (with arguments) on every item in a collection.
+  _.invoke = function(obj, method) {
+    var args = slice.call(arguments, 2);
+    return _.map(obj, function(value) {
+      return (_.isFunction(method) ? method || value : value[method]).apply(value, args);
+    });
+  };
+
+  // Convenience version of a common use case of `map`: fetching a property.
+  _.pluck = function(obj, key) {
+    return _.map(obj, function(value){ return value[key]; });
+  };
+
+  // Return the maximum element or (element-based computation).
+  _.max = function(obj, iterator, context) {
+    if (!iterator && _.isArray(obj)) return Math.max.apply(Math, obj);
+    if (!iterator && _.isEmpty(obj)) return -Infinity;
+    var result = {computed : -Infinity};
+    each(obj, function(value, index, list) {
+      var computed = iterator ? iterator.call(context, value, index, list) : value;
+      computed >= result.computed && (result = {value : value, computed : computed});
+    });
+    return result.value;
+  };
+
+  // Return the minimum element (or element-based computation).
+  _.min = function(obj, iterator, context) {
+    if (!iterator && _.isArray(obj)) return Math.min.apply(Math, obj);
+    if (!iterator && _.isEmpty(obj)) return Infinity;
+    var result = {computed : Infinity};
+    each(obj, function(value, index, list) {
+      var computed = iterator ? iterator.call(context, value, index, list) : value;
+      computed < result.computed && (result = {value : value, computed : computed});
+    });
+    return result.value;
+  };
+
+  // Shuffle an array.
+  _.shuffle = function(obj) {
+    var shuffled = [], rand;
+    each(obj, function(value, index, list) {
+      if (index == 0) {
+        shuffled[0] = value;
+      } else {
+        rand = Math.floor(Math.random() * (index + 1));
+        shuffled[index] = shuffled[rand];
+        shuffled[rand] = value;
+      }
+    });
+    return shuffled;
+  };
+
+  // Sort the object's values by a criterion produced by an iterator.
+  _.sortBy = function(obj, iterator, context) {
+    return _.pluck(_.map(obj, function(value, index, list) {
+      return {
+        value : value,
+        criteria : iterator.call(context, value, index, list)
+      };
+    }).sort(function(left, right) {
+      var a = left.criteria, b = right.criteria;
+      return a < b ? -1 : a > b ? 1 : 0;
+    }), 'value');
+  };
+
+  // Groups the object's values by a criterion. Pass either a string attribute
+  // to group by, or a function that returns the criterion.
+  _.groupBy = function(obj, val) {
+    var result = {};
+    var iterator = _.isFunction(val) ? val : function(obj) { return obj[val]; };
+    each(obj, function(value, index) {
+      var key = iterator(value, index);
+      (result[key] || (result[key] = [])).push(value);
+    });
+    return result;
+  };
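+
+  // Usage sketch (illustrative comment):
+  //   _.groupBy([1.3, 2.1, 2.4], function(num){ return Math.floor(num); });
+  //   // => {1: [1.3], 2: [2.1, 2.4]}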
+
+  // Use a comparator function to figure out at what index an object should
+  // be inserted so as to maintain order. Uses binary search.
+  _.sortedIndex = function(array, obj, iterator) {
+    iterator || (iterator = _.identity);
+    var low = 0, high = array.length;
+    while (low < high) {
+      var mid = (low + high) >> 1;
+      iterator(array[mid]) < iterator(obj) ? low = mid + 1 : high = mid;
+    }
+    return low;
+  };
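+
+  // Usage sketch (illustrative comment; the input array is assumed to be sorted):
+  //   _.sortedIndex([10, 20, 30, 40, 50], 35);   // => 3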
+
+  // Safely convert anything iterable into a real, live array.
+  _.toArray = function(iterable) {
+    if (!iterable)                return [];
+    if (iterable.toArray)         return iterable.toArray();
+    if (_.isArray(iterable))      return slice.call(iterable);
+    if (_.isArguments(iterable))  return slice.call(iterable);
+    return _.values(iterable);
+  };
+
+  // Return the number of elements in an object.
+  _.size = function(obj) {
+    return _.toArray(obj).length;
+  };
+
+  // Array Functions
+  // ---------------
+
+  // Get the first element of an array. Passing **n** will return the first N
+  // values in the array. Aliased as `head`. The **guard** check allows it to work
+  // with `_.map`.
+  _.first = _.head = function(array, n, guard) {
+    return (n != null) && !guard ? slice.call(array, 0, n) : array[0];
+  };
+
+  // Returns everything but the last entry of the array. Especially useful on
+  // the arguments object. Passing **n** will return all the values in
+  // the array, excluding the last N. The **guard** check allows it to work with
+  // `_.map`.
+  _.initial = function(array, n, guard) {
+    return slice.call(array, 0, array.length - ((n == null) || guard ? 1 : n));
+  };
+
+  // Get the last element of an array. Passing **n** will return the last N
+  // values in the array. The **guard** check allows it to work with `_.map`.
+  _.last = function(array, n, guard) {
+    if ((n != null) && !guard) {
+      return slice.call(array, Math.max(array.length - n, 0));
+    } else {
+      return array[array.length - 1];
+    }
+  };
+
+  // Returns everything but the first entry of the array. Aliased as `tail`.
+  // Especially useful on the arguments object. Passing an **index** will return
+  // the rest of the values in the array from that index onward. The **guard**
+  // check allows it to work with `_.map`.
+  _.rest = _.tail = function(array, index, guard) {
+    return slice.call(array, (index == null) || guard ? 1 : index);
+  };
+
+  // Trim out all falsy values from an array.
+  _.compact = function(array) {
+    return _.filter(array, function(value){ return !!value; });
+  };
+
+  // Return a completely flattened version of an array.
+  _.flatten = function(array, shallow) {
+    return _.reduce(array, function(memo, value) {
+      if (_.isArray(value)) return memo.concat(shallow ? value : _.flatten(value));
+      memo[memo.length] = value;
+      return memo;
+    }, []);
+  };
+
+  // Return a version of the array that does not contain the specified value(s).
+  _.without = function(array) {
+    return _.difference(array, slice.call(arguments, 1));
+  };
+
+  // Produce a duplicate-free version of the array. If the array has already
+  // been sorted, you have the option of using a faster algorithm.
+  // Aliased as `unique`.
+  _.uniq = _.unique = function(array, isSorted, iterator) {
+    var initial = iterator ? _.map(array, iterator) : array;
+    var result = [];
+    _.reduce(initial, function(memo, el, i) {
+      if (0 == i || (isSorted === true ? _.last(memo) != el : !_.include(memo, el))) {
+        memo[memo.length] = el;
+        result[result.length] = array[i];
+      }
+      return memo;
+    }, []);
+    return result;
+  };
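+
+  // Usage sketch (illustrative comment):
+  //   _.uniq([1, 2, 1, 3, 1, 4]);   // => [1, 2, 3, 4]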
+
+  // Produce an array that contains the union: each distinct element from all of
+  // the passed-in arrays.
+  _.union = function() {
+    return _.uniq(_.flatten(arguments, true));
+  };
+
+  // Produce an array that contains every item shared between all the
+  // passed-in arrays. (Aliased as "intersect" for back-compat.)
+  _.intersection = _.intersect = function(array) {
+    var rest = slice.call(arguments, 1);
+    return _.filter(_.uniq(array), function(item) {
+      return _.every(rest, function(other) {
+        return _.indexOf(other, item) >= 0;
+      });
+    });
+  };
+
+  // Take the difference between one array and a number of other arrays.
+  // Only the elements present in just the first array will remain.
+  _.difference = function(array) {
+    var rest = _.flatten(slice.call(arguments, 1));
+    return _.filter(array, function(value){ return !_.include(rest, value); });
+  };
+
+  // Zip together multiple lists into a single array -- elements that share
+  // an index go together.
+  _.zip = function() {
+    var args = slice.call(arguments);
+    var length = _.max(_.pluck(args, 'length'));
+    var results = new Array(length);
+    for (var i = 0; i < length; i++) results[i] = _.pluck(args, "" + i);
+    return results;
+  };
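+
+  // Usage sketch (illustrative comment):
+  //   _.zip(['moe', 'larry'], [30, 40]);   // => [['moe', 30], ['larry', 40]]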
+
+  // If the browser doesn't supply us with indexOf (I'm looking at you, **MSIE**),
+  // we need this function. Return the position of the first occurrence of an
+  // item in an array, or -1 if the item is not included in the array.
+  // Delegates to **ECMAScript 5**'s native `indexOf` if available.
+  // If the array is large and already in sort order, pass `true`
+  // for **isSorted** to use binary search.
+  _.indexOf = function(array, item, isSorted) {
+    if (array == null) return -1;
+    var i, l;
+    if (isSorted) {
+      i = _.sortedIndex(array, item);
+      return array[i] === item ? i : -1;
+    }
+    if (nativeIndexOf && array.indexOf === nativeIndexOf) return array.indexOf(item);
+    for (i = 0, l = array.length; i < l; i++) if (i in array && array[i] === item) return i;
+    return -1;
+  };
+
+  // Delegates to **ECMAScript 5**'s native `lastIndexOf` if available.
+  _.lastIndexOf = function(array, item) {
+    if (array == null) return -1;
+    if (nativeLastIndexOf && array.lastIndexOf === nativeLastIndexOf) return array.lastIndexOf(item);
+    var i = array.length;
+    while (i--) if (i in array && array[i] === item) return i;
+    return -1;
+  };
+
+  // Generate an integer Array containing an arithmetic progression. A port of
+  // the native Python `range()` function. See
+  // [the Python documentation](http://docs.python.org/library/functions.html#range).
+  _.range = function(start, stop, step) {
+    if (arguments.length <= 1) {
+      stop = start || 0;
+      start = 0;
+    }
+    step = arguments[2] || 1;
+
+    var len = Math.max(Math.ceil((stop - start) / step), 0);
+    var idx = 0;
+    var range = new Array(len);
+
+    while(idx < len) {
+      range[idx++] = start;
+      start += step;
+    }
+
+    return range;
+  };
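+
+  // Usage sketch (illustrative comment):
+  //   _.range(4);          // => [0, 1, 2, 3]
+  //   _.range(0, 30, 5);   // => [0, 5, 10, 15, 20, 25]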
+
+  // Function (ahem) Functions
+  // ------------------
+
+  // Reusable constructor function for prototype setting.
+  var ctor = function(){};
+
+  // Create a function bound to a given object (assigning `this`, and arguments,
+  // optionally). Binding with arguments is also known as `curry`.
+  // Delegates to **ECMAScript 5**'s native `Function.bind` if available.
+  // We check for `func.bind` first, to fail fast when `func` is undefined.
+  _.bind = function bind(func, context) {
+    var bound, args;
+    if (func.bind === nativeBind && nativeBind) return nativeBind.apply(func, slice.call(arguments, 1));
+    if (!_.isFunction(func)) throw new TypeError;
+    args = slice.call(arguments, 2);
+    return bound = function() {
+      if (!(this instanceof bound)) return func.apply(context, args.concat(slice.call(arguments)));
+      ctor.prototype = func.prototype;
+      var self = new ctor;
+      var result = func.apply(self, args.concat(slice.call(arguments)));
+      if (Object(result) === result) return result;
+      return self;
+    };
+  };
+
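+  // A usage sketch (the bound `this` value and the partially applied
+  // argument are illustrative):
+  //
+  //     var greet = function(greeting) { return greeting + ': ' + this.name; };
+  //     var hi = _.bind(greet, {name: 'moe'}, 'hi');
+  //     hi();   => 'hi: moe'
+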
+  // Bind all of an object's methods to that object. Useful for ensuring that
+  // all callbacks defined on an object belong to it.
+  _.bindAll = function(obj) {
+    var funcs = slice.call(arguments, 1);
+    if (funcs.length == 0) funcs = _.functions(obj);
+    each(funcs, function(f) { obj[f] = _.bind(obj[f], obj); });
+    return obj;
+  };
+
+  // Memoize an expensive function by storing its results.
+  _.memoize = function(func, hasher) {
+    var memo = {};
+    hasher || (hasher = _.identity);
+    return function() {
+      var key = hasher.apply(this, arguments);
+      return _.has(memo, key) ? memo[key] : (memo[key] = func.apply(this, arguments));
+    };
+  };
+
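+  // A usage sketch -- memoizing a recursive Fibonacci so each value is
+  // computed only once:
+  //
+  //     var fibonacci = _.memoize(function(n) {
+  //       return n < 2 ? n : fibonacci(n - 1) + fibonacci(n - 2);
+  //     });
+  //     fibonacci(10);   => 55
+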
+  // Delays a function for the given number of milliseconds, and then calls
+  // it with the arguments supplied.
+  _.delay = function(func, wait) {
+    var args = slice.call(arguments, 2);
+    return setTimeout(function(){ return func.apply(func, args); }, wait);
+  };
+
+  // Defers a function, scheduling it to run after the current call stack has
+  // cleared.
+  _.defer = function(func) {
+    return _.delay.apply(_, [func, 1].concat(slice.call(arguments, 1)));
+  };
+
+  // Returns a function, that, when invoked, will only be triggered at most once
+  // during a given window of time.
+  _.throttle = function(func, wait) {
+    var context, args, timeout, throttling, more;
+    var whenDone = _.debounce(function(){ more = throttling = false; }, wait);
+    return function() {
+      context = this; args = arguments;
+      var later = function() {
+        timeout = null;
+        if (more) func.apply(context, args);
+        whenDone();
+      };
+      if (!timeout) timeout = setTimeout(later, wait);
+      if (throttling) {
+        more = true;
+      } else {
+        func.apply(context, args);
+      }
+      whenDone();
+      throttling = true;
+    };
+  };
+
+  // Returns a function, that, as long as it continues to be invoked, will not
+  // be triggered. The function will be called after it stops being called for
+  // N milliseconds.
+  _.debounce = function(func, wait) {
+    var timeout;
+    return function() {
+      var context = this, args = arguments;
+      var later = function() {
+        timeout = null;
+        func.apply(context, args);
+      };
+      clearTimeout(timeout);
+      timeout = setTimeout(later, wait);
+    };
+  };
+
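+  // A usage sketch for the two rate limiters above, assuming jQuery and
+  // user-supplied `updatePosition` / `calculateLayout` handlers:
+  //
+  //     $(window).scroll(_.throttle(updatePosition, 100));   // at most once every 100ms
+  //     $(window).resize(_.debounce(calculateLayout, 300));  // 300ms after the last resize
+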
+  // Returns a function that will be executed at most one time, no matter how
+  // often you call it. Useful for lazy initialization.
+  _.once = function(func) {
+    var ran = false, memo;
+    return function() {
+      if (ran) return memo;
+      ran = true;
+      return memo = func.apply(this, arguments);
+    };
+  };
+
+  // Returns the first function passed as an argument to the second,
+  // allowing you to adjust arguments, run code before and after, and
+  // conditionally execute the original function.
+  _.wrap = function(func, wrapper) {
+    return function() {
+      var args = [func].concat(slice.call(arguments, 0));
+      return wrapper.apply(this, args);
+    };
+  };
+
+  // Returns a function that is the composition of a list of functions, each
+  // consuming the return value of the function that follows.
+  _.compose = function() {
+    var funcs = arguments;
+    return function() {
+      var args = arguments;
+      for (var i = funcs.length - 1; i >= 0; i--) {
+        args = [funcs[i].apply(this, args)];
+      }
+      return args[0];
+    };
+  };
+
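+  // For example, composing right-to-left:
+  //
+  //     var greet   = function(name) { return 'hi: ' + name; };
+  //     var exclaim = function(statement) { return statement + '!'; };
+  //     _.compose(exclaim, greet)('moe');   => 'hi: moe!'
+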
+  // Returns a function that will only be executed after being called N times.
+  _.after = function(times, func) {
+    if (times <= 0) return func();
+    return function() {
+      if (--times < 1) { return func.apply(this, arguments); }
+    };
+  };
+
+  // Object Functions
+  // ----------------
+
+  // Retrieve the names of an object's properties.
+  // Delegates to **ECMAScript 5**'s native `Object.keys`
+  _.keys = nativeKeys || function(obj) {
+    if (obj !== Object(obj)) throw new TypeError('Invalid object');
+    var keys = [];
+    for (var key in obj) if (_.has(obj, key)) keys[keys.length] = key;
+    return keys;
+  };
+
+  // Retrieve the values of an object's properties.
+  _.values = function(obj) {
+    return _.map(obj, _.identity);
+  };
+
+  // Return a sorted list of the function names available on the object.
+  // Aliased as `methods`
+  _.functions = _.methods = function(obj) {
+    var names = [];
+    for (var key in obj) {
+      if (_.isFunction(obj[key])) names.push(key);
+    }
+    return names.sort();
+  };
+
+  // Extend a given object with all the properties in passed-in object(s).
+  _.extend = function(obj) {
+    each(slice.call(arguments, 1), function(source) {
+      for (var prop in source) {
+        obj[prop] = source[prop];
+      }
+    });
+    return obj;
+  };
+
+  // Fill in a given object with default properties.
+  _.defaults = function(obj) {
+    each(slice.call(arguments, 1), function(source) {
+      for (var prop in source) {
+        if (obj[prop] == null) obj[prop] = source[prop];
+      }
+    });
+    return obj;
+  };
+
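+  // A quick sketch of the difference between the two helpers above:
+  //
+  //     _.extend({name: 'moe'}, {age: 50});
+  //         => {name: 'moe', age: 50}
+  //     _.defaults({flavor: 'chocolate'}, {flavor: 'vanilla', sprinkles: 'lots'});
+  //         => {flavor: 'chocolate', sprinkles: 'lots'}
+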
+  // Create a (shallow-cloned) duplicate of an object.
+  _.clone = function(obj) {
+    if (!_.isObject(obj)) return obj;
+    return _.isArray(obj) ? obj.slice() : _.extend({}, obj);
+  };
+
+  // Invokes interceptor with the obj, and then returns obj.
+  // The primary purpose of this method is to "tap into" a method chain, in
+  // order to perform operations on intermediate results within the chain.
+  _.tap = function(obj, interceptor) {
+    interceptor(obj);
+    return obj;
+  };
+
+  // Internal recursive comparison function.
+  function eq(a, b, stack) {
+    // Identical objects are equal. `0 === -0`, but they aren't identical.
+    // See the Harmony `egal` proposal: http://wiki.ecmascript.org/doku.php?id=harmony:egal.
+    if (a === b) return a !== 0 || 1 / a == 1 / b;
+    // A strict comparison is necessary because `null == undefined`.
+    if (a == null || b == null) return a === b;
+    // Unwrap any wrapped objects.
+    if (a._chain) a = a._wrapped;
+    if (b._chain) b = b._wrapped;
+    // Invoke a custom `isEqual` method if one is provided.
+    if (a.isEqual && _.isFunction(a.isEqual)) return a.isEqual(b);
+    if (b.isEqual && _.isFunction(b.isEqual)) return b.isEqual(a);
+    // Compare `[[Class]]` names.
+    var className = toString.call(a);
+    if (className != toString.call(b)) return false;
+    switch (className) {
+      // Strings, numbers, dates, and booleans are compared by value.
+      case '[object String]':
+        // Primitives and their corresponding object wrappers are equivalent; thus, `"5"` is
+        // equivalent to `new String("5")`.
+        return a == String(b);
+      case '[object Number]':
+        // `NaN`s are equivalent, but non-reflexive. An `egal` comparison is performed for
+        // other numeric values.
+        return a != +a ? b != +b : (a == 0 ? 1 / a == 1 / b : a == +b);
+      case '[object Date]':
+      case '[object Boolean]':
+        // Coerce dates and booleans to numeric primitive values. Dates are compared by their
+        // millisecond representations. Note that invalid dates with millisecond representations
+        // of `NaN` are not equivalent.
+        return +a == +b;
+      // RegExps are compared by their source patterns and flags.
+      case '[object RegExp]':
+        return a.source == b.source &&
+               a.global == b.global &&
+               a.multiline == b.multiline &&
+               a.ignoreCase == b.ignoreCase;
+    }
+    if (typeof a != 'object' || typeof b != 'object') return false;
+    // Assume equality for cyclic structures. The algorithm for detecting cyclic
+    // structures is adapted from ES 5.1 section 15.12.3, abstract operation `JO`.
+    var length = stack.length;
+    while (length--) {
+      // Linear search. Performance is inversely proportional to the number of
+      // unique nested structures.
+      if (stack[length] == a) return true;
+    }
+    // Add the first object to the stack of traversed objects.
+    stack.push(a);
+    var size = 0, result = true;
+    // Recursively compare objects and arrays.
+    if (className == '[object Array]') {
+      // Compare array lengths to determine if a deep comparison is necessary.
+      size = a.length;
+      result = size == b.length;
+      if (result) {
+        // Deep compare the contents, ignoring non-numeric properties.
+        while (size--) {
+          // Ensure commutative equality for sparse arrays.
+          if (!(result = size in a == size in b && eq(a[size], b[size], stack))) break;
+        }
+      }
+    } else {
+      // Objects with different constructors are not equivalent.
+      if ('constructor' in a != 'constructor' in b || a.constructor != b.constructor) return false;
+      // Deep compare objects.
+      for (var key in a) {
+        if (_.has(a, key)) {
+          // Count the expected number of properties.
+          size++;
+          // Deep compare each member.
+          if (!(result = _.has(b, key) && eq(a[key], b[key], stack))) break;
+        }
+      }
+      // Ensure that both objects contain the same number of properties.
+      if (result) {
+        for (key in b) {
+          if (_.has(b, key) && !(size--)) break;
+        }
+        result = !size;
+      }
+    }
+    // Remove the first object from the stack of traversed objects.
+    stack.pop();
+    return result;
+  }
+
+  // Perform a deep comparison to check if two objects are equal.
+  _.isEqual = function(a, b) {
+    return eq(a, b, []);
+  };
+
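+  // For example (structurally equal objects compare as equal, even though
+  // `==` does not consider them so):
+  //
+  //     var moe   = {name: 'moe', luckyNumbers: [13, 27, 34]};
+  //     var clone = {name: 'moe', luckyNumbers: [13, 27, 34]};
+  //     moe == clone;           => false
+  //     _.isEqual(moe, clone);  => true
+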
+  // Is a given array, string, or object empty?
+  // An "empty" object has no enumerable own-properties.
+  _.isEmpty = function(obj) {
+    if (_.isArray(obj) || _.isString(obj)) return obj.length === 0;
+    for (var key in obj) if (_.has(obj, key)) return false;
+    return true;
+  };
+
+  // Is a given value a DOM element?
+  _.isElement = function(obj) {
+    return !!(obj && obj.nodeType == 1);
+  };
+
+  // Is a given value an array?
+  // Delegates to ECMA5's native Array.isArray
+  _.isArray = nativeIsArray || function(obj) {
+    return toString.call(obj) == '[object Array]';
+  };
+
+  // Is a given variable an object?
+  _.isObject = function(obj) {
+    return obj === Object(obj);
+  };
+
+  // Is a given variable an arguments object?
+  _.isArguments = function(obj) {
+    return toString.call(obj) == '[object Arguments]';
+  };
+  if (!_.isArguments(arguments)) {
+    _.isArguments = function(obj) {
+      return !!(obj && _.has(obj, 'callee'));
+    };
+  }
+
+  // Is a given value a function?
+  _.isFunction = function(obj) {
+    return toString.call(obj) == '[object Function]';
+  };
+
+  // Is a given value a string?
+  _.isString = function(obj) {
+    return toString.call(obj) == '[object String]';
+  };
+
+  // Is a given value a number?
+  _.isNumber = function(obj) {
+    return toString.call(obj) == '[object Number]';
+  };
+
+  // Is the given value `NaN`?
+  _.isNaN = function(obj) {
+    // `NaN` is the only value for which `===` is not reflexive.
+    return obj !== obj;
+  };
+
+  // Is a given value a boolean?
+  _.isBoolean = function(obj) {
+    return obj === true || obj === false || toString.call(obj) == '[object Boolean]';
+  };
+
+  // Is a given value a date?
+  _.isDate = function(obj) {
+    return toString.call(obj) == '[object Date]';
+  };
+
+  // Is the given value a regular expression?
+  _.isRegExp = function(obj) {
+    return toString.call(obj) == '[object RegExp]';
+  };
+
+  // Is a given value equal to null?
+  _.isNull = function(obj) {
+    return obj === null;
+  };
+
+  // Is a given variable undefined?
+  _.isUndefined = function(obj) {
+    return obj === void 0;
+  };
+
+  // Has own property?
+  _.has = function(obj, key) {
+    return hasOwnProperty.call(obj, key);
+  };
+
+  // Utility Functions
+  // -----------------
+
+  // Run Underscore.js in *noConflict* mode, returning the `_` variable to its
+  // previous owner. Returns a reference to the Underscore object.
+  _.noConflict = function() {
+    root._ = previousUnderscore;
+    return this;
+  };
+
+  // Keep the identity function around for default iterators.
+  _.identity = function(value) {
+    return value;
+  };
+
+  // Run a function **n** times.
+  _.times = function (n, iterator, context) {
+    for (var i = 0; i < n; i++) iterator.call(context, i);
+  };
+
+  // Escape a string for HTML interpolation.
+  _.escape = function(string) {
+    return (''+string).replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/"/g, '&quot;').replace(/'/g, '&#x27;').replace(/\//g,'&#x2F;');
+  };
+
+  // Add your own custom functions to the Underscore object, ensuring that
+  // they're correctly added to the OOP wrapper as well.
+  _.mixin = function(obj) {
+    each(_.functions(obj), function(name){
+      addToWrapper(name, _[name] = obj[name]);
+    });
+  };
+
+  // Generate a unique integer id (unique within the entire client session).
+  // Useful for temporary DOM ids.
+  var idCounter = 0;
+  _.uniqueId = function(prefix) {
+    var id = idCounter++;
+    return prefix ? prefix + id : id;
+  };
+
+  // By default, Underscore uses ERB-style template delimiters; change the
+  // following template settings to use alternative delimiters.
+  _.templateSettings = {
+    evaluate    : /<%([\s\S]+?)%>/g,
+    interpolate : /<%=([\s\S]+?)%>/g,
+    escape      : /<%-([\s\S]+?)%>/g
+  };
+
+  // When customizing `templateSettings`, if you don't want to define an
+  // interpolation, evaluation or escaping regex, we need one that is
+  // guaranteed not to match.
+  var noMatch = /.^/;
+
+  // Within an interpolation, evaluation, or escaping, remove HTML escaping
+  // that had been previously added.
+  var unescape = function(code) {
+    return code.replace(/\\\\/g, '\\').replace(/\\'/g, "'");
+  };
+
+  // JavaScript micro-templating, similar to John Resig's implementation.
+  // Underscore templating handles arbitrary delimiters, preserves whitespace,
+  // and correctly escapes quotes within interpolated code.
+  _.template = function(str, data) {
+    var c  = _.templateSettings;
+    var tmpl = 'var __p=[],print=function(){__p.push.apply(__p,arguments);};' +
+      'with(obj||{}){__p.push(\'' +
+      str.replace(/\\/g, '\\\\')
+         .replace(/'/g, "\\'")
+         .replace(c.escape || noMatch, function(match, code) {
+           return "',_.escape(" + unescape(code) + "),'";
+         })
+         .replace(c.interpolate || noMatch, function(match, code) {
+           return "'," + unescape(code) + ",'";
+         })
+         .replace(c.evaluate || noMatch, function(match, code) {
+           return "');" + unescape(code).replace(/[\r\n\t]/g, ' ') + ";__p.push('";
+         })
+         .replace(/\r/g, '\\r')
+         .replace(/\n/g, '\\n')
+         .replace(/\t/g, '\\t')
+         + "');}return __p.join('');";
+    var func = new Function('obj', '_', tmpl);
+    if (data) return func(data, _);
+    return function(data) {
+      return func.call(this, data, _);
+    };
+  };
+
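+  // A usage sketch with the default ERB-style delimiters (expected output
+  // for the implementation above):
+  //
+  //     _.template('hello: <%= name %>')({name: 'moe'});         => 'hello: moe'
+  //     _.template('<b><%- value %></b>', {value: '<script>'});  => '<b>&lt;script&gt;</b>'
+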
+  // Add a "chain" function, which will delegate to the wrapper.
+  _.chain = function(obj) {
+    return _(obj).chain();
+  };
+
+  // The OOP Wrapper
+  // ---------------
+
+  // If Underscore is called as a function, it returns a wrapped object that
+  // can be used OO-style. This wrapper holds altered versions of all the
+  // underscore functions. Wrapped objects may be chained.
+  var wrapper = function(obj) { this._wrapped = obj; };
+
+  // Expose `wrapper.prototype` as `_.prototype`
+  _.prototype = wrapper.prototype;
+
+  // Helper function to continue chaining intermediate results.
+  var result = function(obj, chain) {
+    return chain ? _(obj).chain() : obj;
+  };
+
+  // A method to easily add functions to the OOP wrapper.
+  var addToWrapper = function(name, func) {
+    wrapper.prototype[name] = function() {
+      var args = slice.call(arguments);
+      unshift.call(args, this._wrapped);
+      return result(func.apply(_, args), this._chain);
+    };
+  };
+
+  // Add all of the Underscore functions to the wrapper object.
+  _.mixin(_);
+
+  // Add all mutator Array functions to the wrapper.
+  each(['pop', 'push', 'reverse', 'shift', 'sort', 'splice', 'unshift'], function(name) {
+    var method = ArrayProto[name];
+    wrapper.prototype[name] = function() {
+      var wrapped = this._wrapped;
+      method.apply(wrapped, arguments);
+      var length = wrapped.length;
+      if ((name == 'shift' || name == 'splice') && length === 0) delete wrapped[0];
+      return result(wrapped, this._chain);
+    };
+  });
+
+  // Add all accessor Array functions to the wrapper.
+  each(['concat', 'join', 'slice'], function(name) {
+    var method = ArrayProto[name];
+    wrapper.prototype[name] = function() {
+      return result(method.apply(this._wrapped, arguments), this._chain);
+    };
+  });
+
+  // Start chaining a wrapped Underscore object.
+  wrapper.prototype.chain = function() {
+    this._chain = true;
+    return this;
+  };
+
+  // Extracts the result from a wrapped and chained object.
+  wrapper.prototype.value = function() {
+    return this._wrapped;
+  };
+
+}).call(this);
diff --git a/doc/_build/html/_static/underscore.js b/doc/_build/html/_static/underscore.js
new file mode 100644 (file)
index 0000000..5b55f32
--- /dev/null
@@ -0,0 +1,31 @@
+// Underscore.js 1.3.1
+// (c) 2009-2012 Jeremy Ashkenas, DocumentCloud Inc.
+// Underscore is freely distributable under the MIT license.
+// Portions of Underscore are inspired or borrowed from Prototype,
+// Oliver Steele's Functional, and John Resig's Micro-Templating.
+// For all details and documentation:
+// http://documentcloud.github.com/underscore
+(function(){function q(a,c,d){if(a===c)return a!==0||1/a==1/c;if(a==null||c==null)return a===c;if(a._chain)a=a._wrapped;if(c._chain)c=c._wrapped;if(a.isEqual&&b.isFunction(a.isEqual))return a.isEqual(c);if(c.isEqual&&b.isFunction(c.isEqual))return c.isEqual(a);var e=l.call(a);if(e!=l.call(c))return false;switch(e){case "[object String]":return a==String(c);case "[object Number]":return a!=+a?c!=+c:a==0?1/a==1/c:a==+c;case "[object Date]":case "[object Boolean]":return+a==+c;case "[object RegExp]":return a.source==
+c.source&&a.global==c.global&&a.multiline==c.multiline&&a.ignoreCase==c.ignoreCase}if(typeof a!="object"||typeof c!="object")return false;for(var f=d.length;f--;)if(d[f]==a)return true;d.push(a);var f=0,g=true;if(e=="[object Array]"){if(f=a.length,g=f==c.length)for(;f--;)if(!(g=f in a==f in c&&q(a[f],c[f],d)))break}else{if("constructor"in a!="constructor"in c||a.constructor!=c.constructor)return false;for(var h in a)if(b.has(a,h)&&(f++,!(g=b.has(c,h)&&q(a[h],c[h],d))))break;if(g){for(h in c)if(b.has(c,
+h)&&!f--)break;g=!f}}d.pop();return g}var r=this,G=r._,n={},k=Array.prototype,o=Object.prototype,i=k.slice,H=k.unshift,l=o.toString,I=o.hasOwnProperty,w=k.forEach,x=k.map,y=k.reduce,z=k.reduceRight,A=k.filter,B=k.every,C=k.some,p=k.indexOf,D=k.lastIndexOf,o=Array.isArray,J=Object.keys,s=Function.prototype.bind,b=function(a){return new m(a)};if(typeof exports!=="undefined"){if(typeof module!=="undefined"&&module.exports)exports=module.exports=b;exports._=b}else r._=b;b.VERSION="1.3.1";var j=b.each=
+b.forEach=function(a,c,d){if(a!=null)if(w&&a.forEach===w)a.forEach(c,d);else if(a.length===+a.length)for(var e=0,f=a.length;e<f;e++){if(e in a&&c.call(d,a[e],e,a)===n)break}else for(e in a)if(b.has(a,e)&&c.call(d,a[e],e,a)===n)break};b.map=b.collect=function(a,c,b){var e=[];if(a==null)return e;if(x&&a.map===x)return a.map(c,b);j(a,function(a,g,h){e[e.length]=c.call(b,a,g,h)});if(a.length===+a.length)e.length=a.length;return e};b.reduce=b.foldl=b.inject=function(a,c,d,e){var f=arguments.length>2;a==
+null&&(a=[]);if(y&&a.reduce===y)return e&&(c=b.bind(c,e)),f?a.reduce(c,d):a.reduce(c);j(a,function(a,b,i){f?d=c.call(e,d,a,b,i):(d=a,f=true)});if(!f)throw new TypeError("Reduce of empty array with no initial value");return d};b.reduceRight=b.foldr=function(a,c,d,e){var f=arguments.length>2;a==null&&(a=[]);if(z&&a.reduceRight===z)return e&&(c=b.bind(c,e)),f?a.reduceRight(c,d):a.reduceRight(c);var g=b.toArray(a).reverse();e&&!f&&(c=b.bind(c,e));return f?b.reduce(g,c,d,e):b.reduce(g,c)};b.find=b.detect=
+function(a,c,b){var e;E(a,function(a,g,h){if(c.call(b,a,g,h))return e=a,true});return e};b.filter=b.select=function(a,c,b){var e=[];if(a==null)return e;if(A&&a.filter===A)return a.filter(c,b);j(a,function(a,g,h){c.call(b,a,g,h)&&(e[e.length]=a)});return e};b.reject=function(a,c,b){var e=[];if(a==null)return e;j(a,function(a,g,h){c.call(b,a,g,h)||(e[e.length]=a)});return e};b.every=b.all=function(a,c,b){var e=true;if(a==null)return e;if(B&&a.every===B)return a.every(c,b);j(a,function(a,g,h){if(!(e=
+e&&c.call(b,a,g,h)))return n});return e};var E=b.some=b.any=function(a,c,d){c||(c=b.identity);var e=false;if(a==null)return e;if(C&&a.some===C)return a.some(c,d);j(a,function(a,b,h){if(e||(e=c.call(d,a,b,h)))return n});return!!e};b.include=b.contains=function(a,c){var b=false;if(a==null)return b;return p&&a.indexOf===p?a.indexOf(c)!=-1:b=E(a,function(a){return a===c})};b.invoke=function(a,c){var d=i.call(arguments,2);return b.map(a,function(a){return(b.isFunction(c)?c||a:a[c]).apply(a,d)})};b.pluck=
+function(a,c){return b.map(a,function(a){return a[c]})};b.max=function(a,c,d){if(!c&&b.isArray(a))return Math.max.apply(Math,a);if(!c&&b.isEmpty(a))return-Infinity;var e={computed:-Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b>=e.computed&&(e={value:a,computed:b})});return e.value};b.min=function(a,c,d){if(!c&&b.isArray(a))return Math.min.apply(Math,a);if(!c&&b.isEmpty(a))return Infinity;var e={computed:Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b<e.computed&&(e={value:a,computed:b})});
+return e.value};b.shuffle=function(a){var b=[],d;j(a,function(a,f){f==0?b[0]=a:(d=Math.floor(Math.random()*(f+1)),b[f]=b[d],b[d]=a)});return b};b.sortBy=function(a,c,d){return b.pluck(b.map(a,function(a,b,g){return{value:a,criteria:c.call(d,a,b,g)}}).sort(function(a,b){var c=a.criteria,d=b.criteria;return c<d?-1:c>d?1:0}),"value")};b.groupBy=function(a,c){var d={},e=b.isFunction(c)?c:function(a){return a[c]};j(a,function(a,b){var c=e(a,b);(d[c]||(d[c]=[])).push(a)});return d};b.sortedIndex=function(a,
+c,d){d||(d=b.identity);for(var e=0,f=a.length;e<f;){var g=e+f>>1;d(a[g])<d(c)?e=g+1:f=g}return e};b.toArray=function(a){return!a?[]:a.toArray?a.toArray():b.isArray(a)?i.call(a):b.isArguments(a)?i.call(a):b.values(a)};b.size=function(a){return b.toArray(a).length};b.first=b.head=function(a,b,d){return b!=null&&!d?i.call(a,0,b):a[0]};b.initial=function(a,b,d){return i.call(a,0,a.length-(b==null||d?1:b))};b.last=function(a,b,d){return b!=null&&!d?i.call(a,Math.max(a.length-b,0)):a[a.length-1]};b.rest=
+b.tail=function(a,b,d){return i.call(a,b==null||d?1:b)};b.compact=function(a){return b.filter(a,function(a){return!!a})};b.flatten=function(a,c){return b.reduce(a,function(a,e){if(b.isArray(e))return a.concat(c?e:b.flatten(e));a[a.length]=e;return a},[])};b.without=function(a){return b.difference(a,i.call(arguments,1))};b.uniq=b.unique=function(a,c,d){var d=d?b.map(a,d):a,e=[];b.reduce(d,function(d,g,h){if(0==h||(c===true?b.last(d)!=g:!b.include(d,g)))d[d.length]=g,e[e.length]=a[h];return d},[]);
+return e};b.union=function(){return b.uniq(b.flatten(arguments,true))};b.intersection=b.intersect=function(a){var c=i.call(arguments,1);return b.filter(b.uniq(a),function(a){return b.every(c,function(c){return b.indexOf(c,a)>=0})})};b.difference=function(a){var c=b.flatten(i.call(arguments,1));return b.filter(a,function(a){return!b.include(c,a)})};b.zip=function(){for(var a=i.call(arguments),c=b.max(b.pluck(a,"length")),d=Array(c),e=0;e<c;e++)d[e]=b.pluck(a,""+e);return d};b.indexOf=function(a,c,
+d){if(a==null)return-1;var e;if(d)return d=b.sortedIndex(a,c),a[d]===c?d:-1;if(p&&a.indexOf===p)return a.indexOf(c);for(d=0,e=a.length;d<e;d++)if(d in a&&a[d]===c)return d;return-1};b.lastIndexOf=function(a,b){if(a==null)return-1;if(D&&a.lastIndexOf===D)return a.lastIndexOf(b);for(var d=a.length;d--;)if(d in a&&a[d]===b)return d;return-1};b.range=function(a,b,d){arguments.length<=1&&(b=a||0,a=0);for(var d=arguments[2]||1,e=Math.max(Math.ceil((b-a)/d),0),f=0,g=Array(e);f<e;)g[f++]=a,a+=d;return g};
+var F=function(){};b.bind=function(a,c){var d,e;if(a.bind===s&&s)return s.apply(a,i.call(arguments,1));if(!b.isFunction(a))throw new TypeError;e=i.call(arguments,2);return d=function(){if(!(this instanceof d))return a.apply(c,e.concat(i.call(arguments)));F.prototype=a.prototype;var b=new F,g=a.apply(b,e.concat(i.call(arguments)));return Object(g)===g?g:b}};b.bindAll=function(a){var c=i.call(arguments,1);c.length==0&&(c=b.functions(a));j(c,function(c){a[c]=b.bind(a[c],a)});return a};b.memoize=function(a,
+c){var d={};c||(c=b.identity);return function(){var e=c.apply(this,arguments);return b.has(d,e)?d[e]:d[e]=a.apply(this,arguments)}};b.delay=function(a,b){var d=i.call(arguments,2);return setTimeout(function(){return a.apply(a,d)},b)};b.defer=function(a){return b.delay.apply(b,[a,1].concat(i.call(arguments,1)))};b.throttle=function(a,c){var d,e,f,g,h,i=b.debounce(function(){h=g=false},c);return function(){d=this;e=arguments;var b;f||(f=setTimeout(function(){f=null;h&&a.apply(d,e);i()},c));g?h=true:
+a.apply(d,e);i();g=true}};b.debounce=function(a,b){var d;return function(){var e=this,f=arguments;clearTimeout(d);d=setTimeout(function(){d=null;a.apply(e,f)},b)}};b.once=function(a){var b=false,d;return function(){if(b)return d;b=true;return d=a.apply(this,arguments)}};b.wrap=function(a,b){return function(){var d=[a].concat(i.call(arguments,0));return b.apply(this,d)}};b.compose=function(){var a=arguments;return function(){for(var b=arguments,d=a.length-1;d>=0;d--)b=[a[d].apply(this,b)];return b[0]}};
+b.after=function(a,b){return a<=0?b():function(){if(--a<1)return b.apply(this,arguments)}};b.keys=J||function(a){if(a!==Object(a))throw new TypeError("Invalid object");var c=[],d;for(d in a)b.has(a,d)&&(c[c.length]=d);return c};b.values=function(a){return b.map(a,b.identity)};b.functions=b.methods=function(a){var c=[],d;for(d in a)b.isFunction(a[d])&&c.push(d);return c.sort()};b.extend=function(a){j(i.call(arguments,1),function(b){for(var d in b)a[d]=b[d]});return a};b.defaults=function(a){j(i.call(arguments,
+1),function(b){for(var d in b)a[d]==null&&(a[d]=b[d])});return a};b.clone=function(a){return!b.isObject(a)?a:b.isArray(a)?a.slice():b.extend({},a)};b.tap=function(a,b){b(a);return a};b.isEqual=function(a,b){return q(a,b,[])};b.isEmpty=function(a){if(b.isArray(a)||b.isString(a))return a.length===0;for(var c in a)if(b.has(a,c))return false;return true};b.isElement=function(a){return!!(a&&a.nodeType==1)};b.isArray=o||function(a){return l.call(a)=="[object Array]"};b.isObject=function(a){return a===Object(a)};
+b.isArguments=function(a){return l.call(a)=="[object Arguments]"};if(!b.isArguments(arguments))b.isArguments=function(a){return!(!a||!b.has(a,"callee"))};b.isFunction=function(a){return l.call(a)=="[object Function]"};b.isString=function(a){return l.call(a)=="[object String]"};b.isNumber=function(a){return l.call(a)=="[object Number]"};b.isNaN=function(a){return a!==a};b.isBoolean=function(a){return a===true||a===false||l.call(a)=="[object Boolean]"};b.isDate=function(a){return l.call(a)=="[object Date]"};
+b.isRegExp=function(a){return l.call(a)=="[object RegExp]"};b.isNull=function(a){return a===null};b.isUndefined=function(a){return a===void 0};b.has=function(a,b){return I.call(a,b)};b.noConflict=function(){r._=G;return this};b.identity=function(a){return a};b.times=function(a,b,d){for(var e=0;e<a;e++)b.call(d,e)};b.escape=function(a){return(""+a).replace(/&/g,"&amp;").replace(/</g,"&lt;").replace(/>/g,"&gt;").replace(/"/g,"&quot;").replace(/'/g,"&#x27;").replace(/\//g,"&#x2F;")};b.mixin=function(a){j(b.functions(a),
+function(c){K(c,b[c]=a[c])})};var L=0;b.uniqueId=function(a){var b=L++;return a?a+b:b};b.templateSettings={evaluate:/<%([\s\S]+?)%>/g,interpolate:/<%=([\s\S]+?)%>/g,escape:/<%-([\s\S]+?)%>/g};var t=/.^/,u=function(a){return a.replace(/\\\\/g,"\\").replace(/\\'/g,"'")};b.template=function(a,c){var d=b.templateSettings,d="var __p=[],print=function(){__p.push.apply(__p,arguments);};with(obj||{}){__p.push('"+a.replace(/\\/g,"\\\\").replace(/'/g,"\\'").replace(d.escape||t,function(a,b){return"',_.escape("+
+u(b)+"),'"}).replace(d.interpolate||t,function(a,b){return"',"+u(b)+",'"}).replace(d.evaluate||t,function(a,b){return"');"+u(b).replace(/[\r\n\t]/g," ")+";__p.push('"}).replace(/\r/g,"\\r").replace(/\n/g,"\\n").replace(/\t/g,"\\t")+"');}return __p.join('');",e=new Function("obj","_",d);return c?e(c,b):function(a){return e.call(this,a,b)}};b.chain=function(a){return b(a).chain()};var m=function(a){this._wrapped=a};b.prototype=m.prototype;var v=function(a,c){return c?b(a).chain():a},K=function(a,c){m.prototype[a]=
+function(){var a=i.call(arguments);H.call(a,this._wrapped);return v(c.apply(b,a),this._chain)}};b.mixin(b);j("pop,push,reverse,shift,sort,splice,unshift".split(","),function(a){var b=k[a];m.prototype[a]=function(){var d=this._wrapped;b.apply(d,arguments);var e=d.length;(a=="shift"||a=="splice")&&e===0&&delete d[0];return v(d,this._chain)}});j(["concat","join","slice"],function(a){var b=k[a];m.prototype[a]=function(){return v(b.apply(this._wrapped,arguments),this._chain)}});m.prototype.chain=function(){this._chain=
+true;return this};m.prototype.value=function(){return this._wrapped}}).call(this);
diff --git a/doc/_build/html/docs/api.html b/doc/_build/html/docs/api.html
new file mode 100644 (file)
index 0000000..5797458
--- /dev/null
@@ -0,0 +1,519 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>The full Pygments API &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Write your own lexer" href="lexerdevelopment.html" />
+    <link rel="prev" title="Builtin Tokens" href="tokens.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="../index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">The full Pygments API</a><ul>
+<li><a class="reference internal" href="#module-pygments">High-level API</a></li>
+<li><a class="reference internal" href="#lexers">Lexers</a></li>
+<li><a class="reference internal" href="#formatters">Formatters</a></li>
+<li><a class="reference internal" href="#option-processing">Option processing</a></li>
+</ul>
+</li>
+</ul>
+
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="tokens.html"
+                        title="previous chapter">Builtin Tokens</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="lexerdevelopment.html"
+                        title="next chapter">Write your own lexer</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/api.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="the-full-pygments-api">
+<h1>The full Pygments API<a class="headerlink" href="#the-full-pygments-api" title="Permalink to this headline">¶</a></h1>
+<p>This page describes the Pygments API.</p>
+<div class="section" id="module-pygments">
+<span id="high-level-api"></span><h2>High-level API<a class="headerlink" href="#module-pygments" title="Permalink to this headline">¶</a></h2>
+<p>Functions from the <a class="reference internal" href="#module-pygments" title="pygments"><code class="xref py py-mod docutils literal notranslate"><span class="pre">pygments</span></code></a> module:</p>
+<dl class="function">
+<dt id="pygments.lex">
+<code class="sig-prename descclassname">pygments.</code><code class="sig-name descname">lex</code><span class="sig-paren">(</span><em class="sig-param">code</em>, <em class="sig-param">lexer</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.lex" title="Permalink to this definition">¶</a></dt>
+<dd><p>Lex <cite>code</cite> with the <cite>lexer</cite> (must be a <cite>Lexer</cite> instance)
+and return an iterable of tokens. Currently, this only calls
+<cite>lexer.get_tokens()</cite>.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.format">
+<code class="sig-prename descclassname">pygments.</code><code class="sig-name descname">format</code><span class="sig-paren">(</span><em class="sig-param">tokens</em>, <em class="sig-param">formatter</em>, <em class="sig-param">outfile=None</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.format" title="Permalink to this definition">¶</a></dt>
+<dd><p>Format a token stream (iterable of tokens) <cite>tokens</cite> with the
+<cite>formatter</cite> (must be a <cite>Formatter</cite> instance). The result is
+written to <cite>outfile</cite>, or if that is <code class="docutils literal notranslate"><span class="pre">None</span></code>, returned as a
+string.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.highlight">
+<code class="sig-prename descclassname">pygments.</code><code class="sig-name descname">highlight</code><span class="sig-paren">(</span><em class="sig-param">code</em>, <em class="sig-param">lexer</em>, <em class="sig-param">formatter</em>, <em class="sig-param">outfile=None</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.highlight" title="Permalink to this definition">¶</a></dt>
+<dd><p>This is the most high-level highlighting function.
+It combines <cite>lex</cite> and <cite>format</cite> in one function.</p>
+</dd></dl>
+
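+<!-- A minimal usage sketch of the high-level API above (assumes Pygments is
+     importable); highlight() combines lex() and format() in one call:
+
+         from pygments import highlight
+         from pygments.lexers import get_lexer_by_name
+         from pygments.formatters import HtmlFormatter
+
+         html = highlight('print("hi")', get_lexer_by_name('python'), HtmlFormatter())
+-->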
+<span class="target" id="module-pygments.lexers"></span><p>Functions from <a class="reference internal" href="#module-pygments.lexers" title="pygments.lexers"><code class="xref py py-mod docutils literal notranslate"><span class="pre">pygments.lexers</span></code></a>:</p>
+<dl class="function">
+<dt id="pygments.lexers.get_lexer_by_name">
+<code class="sig-prename descclassname">pygments.lexers.</code><code class="sig-name descname">get_lexer_by_name</code><span class="sig-paren">(</span><em class="sig-param">alias</em>, <em class="sig-param">**options</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.lexers.get_lexer_by_name" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return an instance of a <cite>Lexer</cite> subclass that has <cite>alias</cite> in its
+aliases list. The lexer is given the <cite>options</cite> at its
+instantiation.</p>
+<p>Will raise <code class="xref py py-exc docutils literal notranslate"><span class="pre">pygments.util.ClassNotFound</span></code> if no lexer with that alias is
+found.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.lexers.get_lexer_for_filename">
+<code class="sig-prename descclassname">pygments.lexers.</code><code class="sig-name descname">get_lexer_for_filename</code><span class="sig-paren">(</span><em class="sig-param">fn</em>, <em class="sig-param">**options</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.lexers.get_lexer_for_filename" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a <cite>Lexer</cite> subclass instance that has a filename pattern
+matching <cite>fn</cite>. The lexer is given the <cite>options</cite> at its
+instantiation.</p>
+<p>Will raise <code class="xref py py-exc docutils literal notranslate"><span class="pre">pygments.util.ClassNotFound</span></code> if no lexer for that filename
+is found.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.lexers.get_lexer_for_mimetype">
+<code class="sig-prename descclassname">pygments.lexers.</code><code class="sig-name descname">get_lexer_for_mimetype</code><span class="sig-paren">(</span><em class="sig-param">mime</em>, <em class="sig-param">**options</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.lexers.get_lexer_for_mimetype" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a <cite>Lexer</cite> subclass instance that has <cite>mime</cite> in its mimetype
+list. The lexer is given the <cite>options</cite> at its instantiation.</p>
+<p>Will raise <code class="xref py py-exc docutils literal notranslate"><span class="pre">pygments.util.ClassNotFound</span></code> if no lexer for that mimetype
+is found.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.lexers.load_lexer_from_file">
+<code class="sig-prename descclassname">pygments.lexers.</code><code class="sig-name descname">load_lexer_from_file</code><span class="sig-paren">(</span><em class="sig-param">filename</em>, <em class="sig-param">lexername=&quot;CustomLexer&quot;</em>, <em class="sig-param">**options</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.lexers.load_lexer_from_file" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a <cite>Lexer</cite> subclass instance loaded from the provided file, relative
+to the current directory. The file is expected to contain a Lexer class
+named <cite>lexername</cite> (by default, CustomLexer). Users should be very careful with
+the input, because this method is equivalent to running eval on the input file.
+The lexer is given the <cite>options</cite> at its instantiation.</p>
+<p><code class="xref py py-exc docutils literal notranslate"><span class="pre">ClassNotFound</span></code> is raised if there are any errors loading the Lexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.lexers.guess_lexer">
+<code class="sig-prename descclassname">pygments.lexers.</code><code class="sig-name descname">guess_lexer</code><span class="sig-paren">(</span><em class="sig-param">text</em>, <em class="sig-param">**options</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.lexers.guess_lexer" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a <cite>Lexer</cite> subclass instance that’s guessed from the text in
+<cite>text</cite>. For that, the <a class="reference internal" href="#pygments.lexer.Lexer.analyse_text" title="pygments.lexer.Lexer.analyse_text"><code class="xref py py-meth docutils literal notranslate"><span class="pre">analyse_text()</span></code></a> method of every known lexer
+class is called with the text as argument, and the lexer which returned the
+highest value will be instantiated and returned.</p>
+<p><code class="xref py py-exc docutils literal notranslate"><span class="pre">pygments.util.ClassNotFound</span></code> is raised if no lexer thinks it can
+handle the content.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.lexers.guess_lexer_for_filename">
+<code class="sig-prename descclassname">pygments.lexers.</code><code class="sig-name descname">guess_lexer_for_filename</code><span class="sig-paren">(</span><em class="sig-param">filename</em>, <em class="sig-param">text</em>, <em class="sig-param">**options</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.lexers.guess_lexer_for_filename" title="Permalink to this definition">¶</a></dt>
+<dd><p>As <a class="reference internal" href="#pygments.lexers.guess_lexer" title="pygments.lexers.guess_lexer"><code class="xref py py-func docutils literal notranslate"><span class="pre">guess_lexer()</span></code></a>, but only lexers which have a pattern in <cite>filenames</cite>
+or <cite>alias_filenames</cite> that matches <cite>filename</cite> are taken into consideration.</p>
+<p><code class="xref py py-exc docutils literal notranslate"><span class="pre">pygments.util.ClassNotFound</span></code> is raised if no lexer thinks it can
+handle the content.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.lexers.get_all_lexers">
+<code class="sig-prename descclassname">pygments.lexers.</code><code class="sig-name descname">get_all_lexers</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" href="#pygments.lexers.get_all_lexers" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return an iterable over all registered lexers, yielding tuples in the
+format:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="p">(</span><span class="n">longname</span><span class="p">,</span> <span class="nb">tuple</span> <span class="n">of</span> <span class="n">aliases</span><span class="p">,</span> <span class="nb">tuple</span> <span class="n">of</span> <span class="n">filename</span> <span class="n">patterns</span><span class="p">,</span> <span class="nb">tuple</span> <span class="n">of</span> <span class="n">mimetypes</span><span class="p">)</span>
+</pre></div>
+</div>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.lexers.find_lexer_class_by_name">
+<code class="sig-prename descclassname">pygments.lexers.</code><code class="sig-name descname">find_lexer_class_by_name</code><span class="sig-paren">(</span><em class="sig-param">alias</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.lexers.find_lexer_class_by_name" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the <cite>Lexer</cite> subclass that has <cite>alias</cite> in its aliases list, without
+instantiating it.</p>
+<p>Will raise <code class="xref py py-exc docutils literal notranslate"><span class="pre">pygments.util.ClassNotFound</span></code> if no lexer with that alias is
+found.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.lexers.find_lexer_class">
+<code class="sig-prename descclassname">pygments.lexers.</code><code class="sig-name descname">find_lexer_class</code><span class="sig-paren">(</span><em class="sig-param">name</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.lexers.find_lexer_class" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return the <cite>Lexer</cite> subclass whose <em>name</em> attribute matches the given
+<em>name</em> argument.</p>
+</dd></dl>
+
+<span class="target" id="module-pygments.formatters"></span><p>Functions from <a class="reference internal" href="#module-pygments.formatters" title="pygments.formatters"><code class="xref py py-mod docutils literal notranslate"><span class="pre">pygments.formatters</span></code></a>:</p>
+<dl class="function">
+<dt id="pygments.formatters.get_formatter_by_name">
+<code class="sig-prename descclassname">pygments.formatters.</code><code class="sig-name descname">get_formatter_by_name</code><span class="sig-paren">(</span><em class="sig-param">alias</em>, <em class="sig-param">**options</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.formatters.get_formatter_by_name" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return an instance of a <a class="reference internal" href="#pygments.formatter.Formatter" title="pygments.formatter.Formatter"><code class="xref py py-class docutils literal notranslate"><span class="pre">Formatter</span></code></a> subclass that has <cite>alias</cite> in its
+aliases list. The formatter is given the <cite>options</cite> at its instantiation.</p>
+<p>Will raise <code class="xref py py-exc docutils literal notranslate"><span class="pre">pygments.util.ClassNotFound</span></code> if no formatter with that
+alias is found.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.formatters.get_formatter_for_filename">
+<code class="sig-prename descclassname">pygments.formatters.</code><code class="sig-name descname">get_formatter_for_filename</code><span class="sig-paren">(</span><em class="sig-param">fn</em>, <em class="sig-param">**options</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.formatters.get_formatter_for_filename" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a <a class="reference internal" href="#pygments.formatter.Formatter" title="pygments.formatter.Formatter"><code class="xref py py-class docutils literal notranslate"><span class="pre">Formatter</span></code></a> subclass instance that has a filename pattern
+matching <cite>fn</cite>. The formatter is given the <cite>options</cite> at its instantiation.</p>
+<p>Will raise <code class="xref py py-exc docutils literal notranslate"><span class="pre">pygments.util.ClassNotFound</span></code> if no formatter for that filename
+is found.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.formatters.load_formatter_from_file">
+<code class="sig-prename descclassname">pygments.formatters.</code><code class="sig-name descname">load_formatter_from_file</code><span class="sig-paren">(</span><em class="sig-param">filename</em>, <em class="sig-param">formattername=&quot;CustomFormatter&quot;</em>, <em class="sig-param">**options</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.formatters.load_formatter_from_file" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a <cite>Formatter</cite> subclass instance loaded from the provided file, relative
+to the current directory. The file is expected to contain a Formatter class
+named <code class="docutils literal notranslate"><span class="pre">formattername</span></code> (by default, CustomFormatter). Users should be very
+careful with the input, because this method is equivalent to running eval
+on the input file. The formatter is given the <cite>options</cite> at its instantiation.</p>
+<p><code class="xref py py-exc docutils literal notranslate"><span class="pre">ClassNotFound</span></code> is raised if there are any errors loading the Formatter.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.styles"></span><p>Functions from <a class="reference internal" href="#module-pygments.styles" title="pygments.styles"><code class="xref py py-mod docutils literal notranslate"><span class="pre">pygments.styles</span></code></a>:</p>
+<dl class="function">
+<dt id="pygments.styles.get_style_by_name">
+<code class="sig-prename descclassname">pygments.styles.</code><code class="sig-name descname">get_style_by_name</code><span class="sig-paren">(</span><em class="sig-param">name</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.styles.get_style_by_name" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return a style class by its short name. The names of the builtin styles
+are listed in <code class="xref py py-data docutils literal notranslate"><span class="pre">pygments.styles.STYLE_MAP</span></code>.</p>
+<p>Will raise <code class="xref py py-exc docutils literal notranslate"><span class="pre">pygments.util.ClassNotFound</span></code> if no style of that name is
+found.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.styles.get_all_styles">
+<code class="sig-prename descclassname">pygments.styles.</code><code class="sig-name descname">get_all_styles</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" href="#pygments.styles.get_all_styles" title="Permalink to this definition">¶</a></dt>
+<dd><p>Return an iterable over all registered styles, yielding their names.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexer"></span></div>
+<div class="section" id="lexers">
+<h2>Lexers<a class="headerlink" href="#lexers" title="Permalink to this headline">¶</a></h2>
+<p>The base lexer class from which all lexers are derived is:</p>
+<dl class="class">
+<dt id="pygments.lexer.Lexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexer.</code><code class="sig-name descname">Lexer</code><span class="sig-paren">(</span><em class="sig-param">**options</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.lexer.Lexer" title="Permalink to this definition">¶</a></dt>
+<dd><p>The constructor takes a **keywords dictionary of options.
+Every subclass must first process its own options and then call
+the <cite>Lexer</cite> constructor, since it processes the <cite>stripnl</cite>,
+<cite>stripall</cite> and <cite>tabsize</cite> options.</p>
+<p>An example looks like this:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">**</span><span class="n">options</span><span class="p">):</span>
+    <span class="bp">self</span><span class="o">.</span><span class="n">compress</span> <span class="o">=</span> <span class="n">options</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">&#39;compress&#39;</span><span class="p">,</span> <span class="s1">&#39;&#39;</span><span class="p">)</span>
+    <span class="n">Lexer</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">**</span><span class="n">options</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>As these options must all be specifiable as strings (due to the
+command line usage), there are various utility functions
+available to help with that; see <a class="reference internal" href="#option-processing">Option processing</a>.</p>
+<dl class="method">
+<dt id="pygments.lexer.Lexer.get_tokens">
+<code class="sig-name descname">get_tokens</code><span class="sig-paren">(</span><em class="sig-param">text</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.lexer.Lexer.get_tokens" title="Permalink to this definition">¶</a></dt>
+<dd><p>This method is the basic interface of a lexer. It is called by
+the <cite>highlight()</cite> function. It must process the text and return an
+iterable of <code class="docutils literal notranslate"><span class="pre">(tokentype,</span> <span class="pre">value)</span></code> pairs from <cite>text</cite>.</p>
+<p>Normally, you don’t need to override this method. The default
+implementation processes the <cite>stripnl</cite>, <cite>stripall</cite> and <cite>tabsize</cite>
+options and then yields all tokens from <cite>get_tokens_unprocessed()</cite>,
+with the <code class="docutils literal notranslate"><span class="pre">index</span></code> dropped.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="pygments.lexer.Lexer.get_tokens_unprocessed">
+<code class="sig-name descname">get_tokens_unprocessed</code><span class="sig-paren">(</span><em class="sig-param">text</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.lexer.Lexer.get_tokens_unprocessed" title="Permalink to this definition">¶</a></dt>
+<dd><p>This method should process the text and return an iterable of
+<code class="docutils literal notranslate"><span class="pre">(index,</span> <span class="pre">tokentype,</span> <span class="pre">value)</span></code> tuples where <code class="docutils literal notranslate"><span class="pre">index</span></code> is the starting
+position of the token within the input text.</p>
+<p>This method must be overridden by subclasses.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="pygments.lexer.Lexer.analyse_text">
+<em class="property">static </em><code class="sig-name descname">analyse_text</code><span class="sig-paren">(</span><em class="sig-param">text</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.lexer.Lexer.analyse_text" title="Permalink to this definition">¶</a></dt>
+<dd><p>A static method which is called for lexer guessing. It should analyse
+the text and return a float in the range from <code class="docutils literal notranslate"><span class="pre">0.0</span></code> to <code class="docutils literal notranslate"><span class="pre">1.0</span></code>.
+If it returns <code class="docutils literal notranslate"><span class="pre">0.0</span></code>, the lexer will not be selected as the most
+probable one; if it returns <code class="docutils literal notranslate"><span class="pre">1.0</span></code>, it will be selected immediately.</p>
+<div class="admonition note">
+<p class="admonition-title">Note</p>
+<p>You don’t have to add <code class="docutils literal notranslate"><span class="pre">&#64;staticmethod</span></code> to the definition of
+this method, this will be taken care of by the Lexer’s metaclass.</p>
+</div>
+</dd></dl>
+
+<p>For a list of known tokens, have a look at the <a class="reference internal" href="tokens.html"><span class="doc">Builtin Tokens</span></a> page.</p>
+<p>A lexer can also have the following attributes (in fact, all of them except
+<cite>alias_filenames</cite> are mandatory); they are used by the builtin lookup mechanism.</p>
+<dl class="attribute">
+<dt id="pygments.lexer.Lexer.name">
+<code class="sig-name descname">name</code><a class="headerlink" href="#pygments.lexer.Lexer.name" title="Permalink to this definition">¶</a></dt>
+<dd><p>Full name for the lexer, in human-readable form.</p>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="pygments.lexer.Lexer.aliases">
+<code class="sig-name descname">aliases</code><a class="headerlink" href="#pygments.lexer.Lexer.aliases" title="Permalink to this definition">¶</a></dt>
+<dd><p>A list of short, unique identifiers that can be used to look up
+the lexer from a list, e.g. using <cite>get_lexer_by_name()</cite>.</p>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="pygments.lexer.Lexer.filenames">
+<code class="sig-name descname">filenames</code><a class="headerlink" href="#pygments.lexer.Lexer.filenames" title="Permalink to this definition">¶</a></dt>
+<dd><p>A list of <cite>fnmatch</cite> patterns that match filenames which contain
+content for this lexer. The patterns in this list should be unique among
+all lexers.</p>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="pygments.lexer.Lexer.alias_filenames">
+<code class="sig-name descname">alias_filenames</code><a class="headerlink" href="#pygments.lexer.Lexer.alias_filenames" title="Permalink to this definition">¶</a></dt>
+<dd><p>A list of <cite>fnmatch</cite> patterns that match filenames which may or may not
+contain content for this lexer. This list is used by the
+<a class="reference internal" href="#pygments.lexers.guess_lexer_for_filename" title="pygments.lexers.guess_lexer_for_filename"><code class="xref py py-func docutils literal notranslate"><span class="pre">guess_lexer_for_filename()</span></code></a> function, to determine which lexers
+are then included in guessing the correct one. That means that
+e.g. every lexer for HTML and a template language should include
+<code class="docutils literal notranslate"><span class="pre">\*.html</span></code> in this list.</p>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="pygments.lexer.Lexer.mimetypes">
+<code class="sig-name descname">mimetypes</code><a class="headerlink" href="#pygments.lexer.Lexer.mimetypes" title="Permalink to this definition">¶</a></dt>
+<dd><p>A list of MIME types for content that can be lexed with this
+lexer.</p>
+</dd></dl>
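+<p>Putting these attributes together, a registered lexer might declare them as
+follows (the names and patterns below are hypothetical):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from pygments.lexer import RegexLexer
+from pygments.token import Text
+
+class FooLexer(RegexLexer):
+    """Hypothetical lexer, shown only to illustrate the lookup attributes."""
+    name = 'Foo'
+    aliases = ['foo']
+    filenames = ['*.foo']
+    alias_filenames = ['*.html']
+    mimetypes = ['text/x-foo']
+
+    tokens = {
+        'root': [
+            (r'[^\n]+', Text),
+            (r'\n', Text),
+        ],
+    }
+</pre></div>
+</div>
+<p>Once such a class is registered, for example as a plugin, it can be found
+through its alias with <cite>get_lexer_by_name('foo')</cite>.</p>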
+
+</dd></dl>
+
+<span class="target" id="module-pygments.formatter"></span></div>
+<div class="section" id="formatters">
+<h2>Formatters<a class="headerlink" href="#formatters" title="Permalink to this headline">¶</a></h2>
+<p>A formatter is derived from this class:</p>
+<dl class="class">
+<dt id="pygments.formatter.Formatter">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.formatter.</code><code class="sig-name descname">Formatter</code><span class="sig-paren">(</span><em class="sig-param">**options</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.formatter.Formatter" title="Permalink to this definition">¶</a></dt>
+<dd><p>As with lexers, this constructor processes options and then must call the
+base class <code class="xref py py-meth docutils literal notranslate"><span class="pre">__init__()</span></code>.</p>
+<p>The <a class="reference internal" href="#pygments.formatter.Formatter" title="pygments.formatter.Formatter"><code class="xref py py-class docutils literal notranslate"><span class="pre">Formatter</span></code></a> class recognizes the options <cite>style</cite>, <cite>full</cite> and
+<cite>title</cite>.  It is up to the formatter class whether it uses them.</p>
+<dl class="method">
+<dt id="pygments.formatter.Formatter.get_style_defs">
+<code class="sig-name descname">get_style_defs</code><span class="sig-paren">(</span><em class="sig-param">arg=''</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.formatter.Formatter.get_style_defs" title="Permalink to this definition">¶</a></dt>
+<dd><p>This method must return statements or declarations suitable to define
+the current style for subsequent highlighted text (e.g. CSS classes
+in the <cite>HTMLFormatter</cite>).</p>
+<p>The optional argument <cite>arg</cite> can be used to modify the generation and
+is formatter dependent (it is standardized because it can be given on
+the command line).</p>
+<p>This method is called by the <code class="docutils literal notranslate"><span class="pre">-S</span></code> <a class="reference internal" href="cmdline.html"><span class="doc">command-line option</span></a>;
+the <cite>arg</cite> is then given by the <code class="docutils literal notranslate"><span class="pre">-a</span></code> option.</p>
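+<p>For example, with the builtin HTML formatter the argument serves as a CSS
+selector prefix (a minimal sketch):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from pygments.formatters import HtmlFormatter
+
+# print CSS rules scoped to elements below .highlight
+print(HtmlFormatter(style='default').get_style_defs('.highlight'))
+</pre></div>
+</div>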
+</dd></dl>
+
+<dl class="method">
+<dt id="pygments.formatter.Formatter.format">
+<code class="sig-name descname">format</code><span class="sig-paren">(</span><em class="sig-param">tokensource</em>, <em class="sig-param">outfile</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.formatter.Formatter.format" title="Permalink to this definition">¶</a></dt>
+<dd><p>This method must format the tokens from the <cite>tokensource</cite> iterable and
+write the formatted version to the file object <cite>outfile</cite>.</p>
+<p>Formatter options can control how exactly the tokens are converted.</p>
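+<p>A short sketch of driving a formatter by hand; the token stream comes from a
+lexer, and any object with a <cite>write()</cite> method can serve as <cite>outfile</cite>:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>import io
+
+from pygments.formatters import HtmlFormatter
+from pygments.lexers import get_lexer_by_name
+
+lexer = get_lexer_by_name('python')
+outfile = io.StringIO()
+HtmlFormatter().format(lexer.get_tokens('x = 1\n'), outfile)
+print(outfile.getvalue())
+</pre></div>
+</div>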
+</dd></dl>
+
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7: </span>A formatter must have the following attributes that are used by the
+builtin lookup mechanism.</p>
+</div>
+<dl class="attribute">
+<dt id="pygments.formatter.Formatter.name">
+<code class="sig-name descname">name</code><a class="headerlink" href="#pygments.formatter.Formatter.name" title="Permalink to this definition">¶</a></dt>
+<dd><p>Full name for the formatter, in human-readable form.</p>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="pygments.formatter.Formatter.aliases">
+<code class="sig-name descname">aliases</code><a class="headerlink" href="#pygments.formatter.Formatter.aliases" title="Permalink to this definition">¶</a></dt>
+<dd><p>A list of short, unique identifiers that can be used to look up
+the formatter from a list, e.g. using <a class="reference internal" href="#pygments.formatters.get_formatter_by_name" title="pygments.formatters.get_formatter_by_name"><code class="xref py py-func docutils literal notranslate"><span class="pre">get_formatter_by_name()</span></code></a>.</p>
+</dd></dl>
+
+<dl class="attribute">
+<dt id="pygments.formatter.Formatter.filenames">
+<code class="sig-name descname">filenames</code><a class="headerlink" href="#pygments.formatter.Formatter.filenames" title="Permalink to this definition">¶</a></dt>
+<dd><p>A list of <code class="xref py py-mod docutils literal notranslate"><span class="pre">fnmatch</span></code> patterns that match filenames for which this
+formatter can produce output. The patterns in this list should be unique
+among all formatters.</p>
+</dd></dl>
+
+</dd></dl>
+
+<span class="target" id="module-pygments.util"></span></div>
+<div class="section" id="option-processing">
+<h2>Option processing<a class="headerlink" href="#option-processing" title="Permalink to this headline">¶</a></h2>
+<p>The <a class="reference internal" href="#module-pygments.util" title="pygments.util"><code class="xref py py-mod docutils literal notranslate"><span class="pre">pygments.util</span></code></a> module has some utility functions usable for option
+processing:</p>
+<dl class="exception">
+<dt id="pygments.util.OptionError">
+<em class="property">exception </em><code class="sig-prename descclassname">pygments.util.</code><code class="sig-name descname">OptionError</code><a class="headerlink" href="#pygments.util.OptionError" title="Permalink to this definition">¶</a></dt>
+<dd><p>This exception will be raised by all option processing functions if
+the type or value of the argument is not correct.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.util.get_bool_opt">
+<code class="sig-prename descclassname">pygments.util.</code><code class="sig-name descname">get_bool_opt</code><span class="sig-paren">(</span><em class="sig-param">options</em>, <em class="sig-param">optname</em>, <em class="sig-param">default=None</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.util.get_bool_opt" title="Permalink to this definition">¶</a></dt>
+<dd><p>Interpret the key <cite>optname</cite> from the dictionary <cite>options</cite> as a boolean and
+return it. Return <cite>default</cite> if <cite>optname</cite> is not in <cite>options</cite>.</p>
+<p>The valid string values for <code class="docutils literal notranslate"><span class="pre">True</span></code> are <code class="docutils literal notranslate"><span class="pre">1</span></code>, <code class="docutils literal notranslate"><span class="pre">yes</span></code>, <code class="docutils literal notranslate"><span class="pre">true</span></code> and
+<code class="docutils literal notranslate"><span class="pre">on</span></code>, the ones for <code class="docutils literal notranslate"><span class="pre">False</span></code> are <code class="docutils literal notranslate"><span class="pre">0</span></code>, <code class="docutils literal notranslate"><span class="pre">no</span></code>, <code class="docutils literal notranslate"><span class="pre">false</span></code> and <code class="docutils literal notranslate"><span class="pre">off</span></code>
+(matched case-insensitively).</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.util.get_int_opt">
+<code class="sig-prename descclassname">pygments.util.</code><code class="sig-name descname">get_int_opt</code><span class="sig-paren">(</span><em class="sig-param">options</em>, <em class="sig-param">optname</em>, <em class="sig-param">default=None</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.util.get_int_opt" title="Permalink to this definition">¶</a></dt>
+<dd><p>As <a class="reference internal" href="#pygments.util.get_bool_opt" title="pygments.util.get_bool_opt"><code class="xref py py-func docutils literal notranslate"><span class="pre">get_bool_opt()</span></code></a>, but interpret the value as an integer.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.util.get_list_opt">
+<code class="sig-prename descclassname">pygments.util.</code><code class="sig-name descname">get_list_opt</code><span class="sig-paren">(</span><em class="sig-param">options</em>, <em class="sig-param">optname</em>, <em class="sig-param">default=None</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.util.get_list_opt" title="Permalink to this definition">¶</a></dt>
+<dd><p>If the key <cite>optname</cite> from the dictionary <cite>options</cite> is a string,
+split it at whitespace and return the resulting list. If it is already a list
+or a tuple, return it as a list.</p>
+</dd></dl>
+
+<dl class="function">
+<dt id="pygments.util.get_choice_opt">
+<code class="sig-prename descclassname">pygments.util.</code><code class="sig-name descname">get_choice_opt</code><span class="sig-paren">(</span><em class="sig-param">options</em>, <em class="sig-param">optname</em>, <em class="sig-param">allowed</em>, <em class="sig-param">default=None</em><span class="sig-paren">)</span><a class="headerlink" href="#pygments.util.get_choice_opt" title="Permalink to this definition">¶</a></dt>
+<dd><p>If the value of the key <cite>optname</cite> from the dictionary <cite>options</cite> is not in the
+sequence <cite>allowed</cite>, raise an error; otherwise, return it.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.8.</span></p>
+</div>
+</dd></dl>
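+<p>Taken together, a constructor typically uses these helpers roughly as follows
+(a sketch; the lexer and its option names are invented for illustration):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from pygments.lexer import Lexer
+from pygments.util import (get_bool_opt, get_choice_opt,
+                           get_int_opt, get_list_opt)
+
+class DemoLexer(Lexer):
+    """Hypothetical lexer; only the option handling is shown."""
+    name = 'Demo'
+    aliases = ['demo']
+
+    def __init__(self, **options):
+        # all of these accept the string forms used on the command line
+        self.strict = get_bool_opt(options, 'strict', False)
+        self.depth = get_int_opt(options, 'depth', 2)
+        self.extra_keywords = get_list_opt(options, 'extrakeywords', [])
+        self.dialect = get_choice_opt(options, 'dialect',
+                                      ['plain', 'extended'], 'plain')
+        Lexer.__init__(self, **options)
+</pre></div>
+</div>
+<p>With this, passing <cite>strict='yes'</cite> when instantiating the lexer (or via the
+command line) results in <cite>self.strict</cite> being <cite>True</cite>.</p>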
+
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/authors.html b/doc/_build/html/docs/authors.html
new file mode 100644 (file)
index 0000000..0058940
--- /dev/null
@@ -0,0 +1,349 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Full contributor list &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="prev" title="Pygments changelog" href="changelog.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="changelog.html"
+                        title="previous chapter">Pygments changelog</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/authors.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="full-contributor-list">
+<h1>Full contributor list<a class="headerlink" href="#full-contributor-list" title="Permalink to this headline">¶</a></h1>
+<p>Pygments is written and maintained by Georg Brandl &lt;<a class="reference external" href="mailto:georg&#37;&#52;&#48;python&#46;org">georg<span>&#64;</span>python<span>&#46;</span>org</a>&gt;.</p>
+<p>Major developers are Tim Hatch &lt;<a class="reference external" href="mailto:tim&#37;&#52;&#48;timhatch&#46;com">tim<span>&#64;</span>timhatch<span>&#46;</span>com</a>&gt; and Armin Ronacher
+&lt;<a class="reference external" href="mailto:armin&#46;ronacher&#37;&#52;&#48;active-4&#46;com">armin<span>&#46;</span>ronacher<span>&#64;</span>active-4<span>&#46;</span>com</a>&gt;.</p>
+<p>Other contributors, listed alphabetically, are:</p>
+<ul class="simple">
+<li><p>Sam Aaron – Ioke lexer</p></li>
+<li><p>Ali Afshar – image formatter</p></li>
+<li><p>Thomas Aglassinger – Easytrieve, JCL, Rexx, Transact-SQL and VBScript
+lexers</p></li>
+<li><p>Muthiah Annamalai – Ezhil lexer</p></li>
+<li><p>Kumar Appaiah – Debian control lexer</p></li>
+<li><p>Andreas Amann – AppleScript lexer</p></li>
+<li><p>Timothy Armstrong – Dart lexer fixes</p></li>
+<li><p>Jeffrey Arnold – R/S, Rd, BUGS, Jags, and Stan lexers</p></li>
+<li><p>Jeremy Ashkenas – CoffeeScript lexer</p></li>
+<li><p>José Joaquín Atria – Praat lexer</p></li>
+<li><p>Stefan Matthias Aust – Smalltalk lexer</p></li>
+<li><p>Lucas Bajolet – Nit lexer</p></li>
+<li><p>Ben Bangert – Mako lexers</p></li>
+<li><p>Max Battcher – Darcs patch lexer</p></li>
+<li><p>Thomas Baruchel – APL lexer</p></li>
+<li><p>Tim Baumann – (Literate) Agda lexer</p></li>
+<li><p>Paul Baumgart, 280 North, Inc. – Objective-J lexer</p></li>
+<li><p>Michael Bayer – Myghty lexers</p></li>
+<li><p>Thomas Beale – Archetype lexers</p></li>
+<li><p>John Benediktsson – Factor lexer</p></li>
+<li><p>Trevor Bergeron – mIRC formatter</p></li>
+<li><p>Vincent Bernat – LessCSS lexer</p></li>
+<li><p>Christopher Bertels – Fancy lexer</p></li>
+<li><p>Sébastien Bigaret – QVT Operational lexer</p></li>
+<li><p>Jarrett Billingsley – MiniD lexer</p></li>
+<li><p>Adam Blinkinsop – Haskell, Redcode lexers</p></li>
+<li><p>Stéphane Blondon – SGF lexer</p></li>
+<li><p>Frits van Bommel – assembler lexers</p></li>
+<li><p>Pierre Bourdon – bugfixes</p></li>
+<li><p>Matthias Bussonnier – ANSI style handling for terminal-256 formatter</p></li>
+<li><p>chebee7i – Python traceback lexer improvements</p></li>
+<li><p>Hiram Chirino – Scaml and Jade lexers</p></li>
+<li><p>Mauricio Caceres – SAS and Stata lexers.</p></li>
+<li><p>Ian Cooper – VGL lexer</p></li>
+<li><p>David Corbett – Inform, Jasmin, JSGF, Snowball, and TADS 3 lexers</p></li>
+<li><p>Leaf Corcoran – MoonScript lexer</p></li>
+<li><p>Christopher Creutzig – MuPAD lexer</p></li>
+<li><p>Daniël W. Crompton – Pike lexer</p></li>
+<li><p>Pete Curry – bugfixes</p></li>
+<li><p>Bryan Davis – EBNF lexer</p></li>
+<li><p>Bruno Deferrari – Shen lexer</p></li>
+<li><p>Giedrius Dubinskas – HTML formatter improvements</p></li>
+<li><p>Owen Durni – Haxe lexer</p></li>
+<li><p>Alexander Dutton, Oxford University Computing Services – SPARQL lexer</p></li>
+<li><p>James Edwards – Terraform lexer</p></li>
+<li><p>Nick Efford – Python 3 lexer</p></li>
+<li><p>Sven Efftinge – Xtend lexer</p></li>
+<li><p>Artem Egorkine – terminal256 formatter</p></li>
+<li><p>Matthew Fernandez – CAmkES lexer</p></li>
+<li><p>Michael Ficarra – CPSA lexer</p></li>
+<li><p>James H. Fisher – PostScript lexer</p></li>
+<li><p>William S. Fulton – SWIG lexer</p></li>
+<li><p>Carlos Galdino – Elixir and Elixir Console lexers</p></li>
+<li><p>Michael Galloy – IDL lexer</p></li>
+<li><p>Naveen Garg – Autohotkey lexer</p></li>
+<li><p>Laurent Gautier – R/S lexer</p></li>
+<li><p>Alex Gaynor – PyPy log lexer</p></li>
+<li><p>Richard Gerkin – Igor Pro lexer</p></li>
+<li><p>Alain Gilbert – TypeScript lexer</p></li>
+<li><p>Alex Gilding – BlitzBasic lexer</p></li>
+<li><p>Bertrand Goetzmann – Groovy lexer</p></li>
+<li><p>Krzysiek Goj – Scala lexer</p></li>
+<li><p>Andrey Golovizin – BibTeX lexers</p></li>
+<li><p>Matt Good – Genshi, Cheetah lexers</p></li>
+<li><p>Michał Górny – vim modeline support</p></li>
+<li><p>Alex Gosse – TrafficScript lexer</p></li>
+<li><p>Patrick Gotthardt – PHP namespaces support</p></li>
+<li><p>Olivier Guibe – Asymptote lexer</p></li>
+<li><p>Phil Hagelberg – Fennel lexer</p></li>
+<li><p>Florian Hahn – Boogie lexer</p></li>
+<li><p>Martin Harriman – SNOBOL lexer</p></li>
+<li><p>Matthew Harrison – SVG formatter</p></li>
+<li><p>Steven Hazel – Tcl lexer</p></li>
+<li><p>Dan Michael Heggø – Turtle lexer</p></li>
+<li><p>Aslak Hellesøy – Gherkin lexer</p></li>
+<li><p>Greg Hendershott – Racket lexer</p></li>
+<li><p>Justin Hendrick – ParaSail lexer</p></li>
+<li><p>Jordi Gutiérrez Hermoso – Octave lexer</p></li>
+<li><p>David Hess, Fish Software, Inc. – Objective-J lexer</p></li>
+<li><p>Varun Hiremath – Debian control lexer</p></li>
+<li><p>Rob Hoelz – Perl 6 lexer</p></li>
+<li><p>Doug Hogan – Mscgen lexer</p></li>
+<li><p>Ben Hollis – Mason lexer</p></li>
+<li><p>Max Horn – GAP lexer</p></li>
+<li><p>Alastair Houghton – Lexer inheritance facility</p></li>
+<li><p>Tim Howard – BlitzMax lexer</p></li>
+<li><p>Dustin Howett – Logos lexer</p></li>
+<li><p>Ivan Inozemtsev – Fantom lexer</p></li>
+<li><p>Hiroaki Itoh – Shell console rewrite, Lexers for PowerShell session,
+MSDOS session, BC, WDiff</p></li>
+<li><p>Brian R. Jackson – Tea lexer</p></li>
+<li><p>Christian Jann – ShellSession lexer</p></li>
+<li><p>Dennis Kaarsemaker – sources.list lexer</p></li>
+<li><p>Dmitri Kabak – Inferno Limbo lexer</p></li>
+<li><p>Igor Kalnitsky – vhdl lexer</p></li>
+<li><p>Alexander Kit – MaskJS lexer</p></li>
+<li><p>Pekka Klärck – Robot Framework lexer</p></li>
+<li><p>Gerwin Klein – Isabelle lexer</p></li>
+<li><p>Eric Knibbe – Lasso lexer</p></li>
+<li><p>Stepan Koltsov – Clay lexer</p></li>
+<li><p>Adam Koprowski – Opa lexer</p></li>
+<li><p>Benjamin Kowarsch – Modula-2 lexer</p></li>
+<li><p>Domen Kožar – Nix lexer</p></li>
+<li><p>Oleh Krekel – Emacs Lisp lexer</p></li>
+<li><p>Alexander Kriegisch – Kconfig and AspectJ lexers</p></li>
+<li><p>Marek Kubica – Scheme lexer</p></li>
+<li><p>Jochen Kupperschmidt – Markdown processor</p></li>
+<li><p>Gerd Kurzbach – Modelica lexer</p></li>
+<li><p>Jon Larimer, Google Inc. – Smali lexer</p></li>
+<li><p>Olov Lassus – Dart lexer</p></li>
+<li><p>Matt Layman – TAP lexer</p></li>
+<li><p>Kristian Lyngstøl – Varnish lexers</p></li>
+<li><p>Sylvestre Ledru – Scilab lexer</p></li>
+<li><p>Chee Sing Lee – Flatline lexer</p></li>
+<li><p>Mark Lee – Vala lexer</p></li>
+<li><p>Valentin Lorentz – C++ lexer improvements</p></li>
+<li><p>Ben Mabey – Gherkin lexer</p></li>
+<li><p>Angus MacArthur – QML lexer</p></li>
+<li><p>Louis Mandel – X10 lexer</p></li>
+<li><p>Louis Marchand – Eiffel lexer</p></li>
+<li><p>Simone Margaritelli – Hybris lexer</p></li>
+<li><p>Kirk McDonald – D lexer</p></li>
+<li><p>Gordon McGregor – SystemVerilog lexer</p></li>
+<li><p>Stephen McKamey – Duel/JBST lexer</p></li>
+<li><p>Brian McKenna – F# lexer</p></li>
+<li><p>Charles McLaughlin – Puppet lexer</p></li>
+<li><p>Kurt McKee – Tera Term macro lexer</p></li>
+<li><p>Lukas Meuser – BBCode formatter, Lua lexer</p></li>
+<li><p>Cat Miller – Pig lexer</p></li>
+<li><p>Paul Miller – LiveScript lexer</p></li>
+<li><p>Hong Minhee – HTTP lexer</p></li>
+<li><p>Michael Mior – Awk lexer</p></li>
+<li><p>Bruce Mitchener – Dylan lexer rewrite</p></li>
+<li><p>Reuben Morais – SourcePawn lexer</p></li>
+<li><p>Jon Morton – Rust lexer</p></li>
+<li><p>Paulo Moura – Logtalk lexer</p></li>
+<li><p>Mher Movsisyan – DTD lexer</p></li>
+<li><p>Dejan Muhamedagic – Crmsh lexer</p></li>
+<li><p>Ana Nelson – Ragel, ANTLR, R console lexers</p></li>
+<li><p>Kurt Neufeld – Markdown lexer</p></li>
+<li><p>Nam T. Nguyen – Monokai style</p></li>
+<li><p>Jesper Noehr – HTML formatter “anchorlinenos”</p></li>
+<li><p>Mike Nolta – Julia lexer</p></li>
+<li><p>Jonas Obrist – BBCode lexer</p></li>
+<li><p>Edward O’Callaghan – Cryptol lexer</p></li>
+<li><p>David Oliva – Rebol lexer</p></li>
+<li><p>Pat Pannuto – nesC lexer</p></li>
+<li><p>Jon Parise – Protocol buffers and Thrift lexers</p></li>
+<li><p>Benjamin Peterson – Test suite refactoring</p></li>
+<li><p>Ronny Pfannschmidt – BBCode lexer</p></li>
+<li><p>Dominik Picheta – Nimrod lexer</p></li>
+<li><p>Andrew Pinkham – RTF Formatter Refactoring</p></li>
+<li><p>Clément Prévost – UrbiScript lexer</p></li>
+<li><p>Tanner Prynn – cmdline -x option and loading lexers from files</p></li>
+<li><p>Oleh Prypin – Crystal lexer (based on Ruby lexer)</p></li>
+<li><p>Elias Rabel – Fortran fixed form lexer</p></li>
+<li><p>raichoo – Idris lexer</p></li>
+<li><p>Kashif Rasul – CUDA lexer</p></li>
+<li><p>Nathan Reed – HLSL lexer</p></li>
+<li><p>Justin Reidy – MXML lexer</p></li>
+<li><p>Norman Richards – JSON lexer</p></li>
+<li><p>Corey Richardson – Rust lexer updates</p></li>
+<li><p>Lubomir Rintel – GoodData MAQL and CL lexers</p></li>
+<li><p>Andre Roberge – Tango style</p></li>
+<li><p>Georg Rollinger – HSAIL lexer</p></li>
+<li><p>Michiel Roos – TypoScript lexer</p></li>
+<li><p>Konrad Rudolph – LaTeX formatter enhancements</p></li>
+<li><p>Mario Ruggier – Evoque lexers</p></li>
+<li><p>Miikka Salminen – Lovelace style, Hexdump lexer, lexer enhancements</p></li>
+<li><p>Stou Sandalski – NumPy, FORTRAN, tcsh and XSLT lexers</p></li>
+<li><p>Matteo Sasso – Common Lisp lexer</p></li>
+<li><p>Joe Schafer – Ada lexer</p></li>
+<li><p>Ken Schutte – Matlab lexers</p></li>
+<li><p>René Schwaiger – Rainbow Dash style</p></li>
+<li><p>Sebastian Schweizer – Whiley lexer</p></li>
+<li><p>Tassilo Schweyer – Io, MOOCode lexers</p></li>
+<li><p>Ted Shaw – AutoIt lexer</p></li>
+<li><p>Joerg Sieker – ABAP lexer</p></li>
+<li><p>Robert Simmons – Standard ML lexer</p></li>
+<li><p>Kirill Simonov – YAML lexer</p></li>
+<li><p>Corbin Simpson – Monte lexer</p></li>
+<li><p>Alexander Smishlajev – Visual FoxPro lexer</p></li>
+<li><p>Steve Spigarelli – XQuery lexer</p></li>
+<li><p>Jerome St-Louis – eC lexer</p></li>
+<li><p>Camil Staps – Clean and NuSMV lexers; Solarized style</p></li>
+<li><p>James Strachan – Kotlin lexer</p></li>
+<li><p>Tom Stuart – Treetop lexer</p></li>
+<li><p>Colin Sullivan – SuperCollider lexer</p></li>
+<li><p>Ben Swift – Extempore lexer</p></li>
+<li><p>Edoardo Tenani – Arduino lexer</p></li>
+<li><p>Tiberius Teng – default style overhaul</p></li>
+<li><p>Jeremy Thurgood – Erlang, Squid config lexers</p></li>
+<li><p>Brian Tiffin – OpenCOBOL lexer</p></li>
+<li><p>Bob Tolbert – Hy lexer</p></li>
+<li><p>Matthias Trute – Forth lexer</p></li>
+<li><p>Erick Tryzelaar – Felix lexer</p></li>
+<li><p>Alexander Udalov – Kotlin lexer improvements</p></li>
+<li><p>Thomas Van Doren – Chapel lexer</p></li>
+<li><p>Daniele Varrazzo – PostgreSQL lexers</p></li>
+<li><p>Abe Voelker – OpenEdge ABL lexer</p></li>
+<li><p>Pepijn de Vos – HTML formatter CTags support</p></li>
+<li><p>Matthias Vallentin – Bro lexer</p></li>
+<li><p>Benoît Vinot – AMPL lexer</p></li>
+<li><p>Linh Vu Hong – RSL lexer</p></li>
+<li><p>Nathan Weizenbaum – Haml and Sass lexers</p></li>
+<li><p>Nathan Whetsell – Csound lexers</p></li>
+<li><p>Dietmar Winkler – Modelica lexer</p></li>
+<li><p>Nils Winter – Smalltalk lexer</p></li>
+<li><p>Davy Wybiral – Clojure lexer</p></li>
+<li><p>Whitney Young – ObjectiveC lexer</p></li>
+<li><p>Diego Zamboni – CFengine3 lexer</p></li>
+<li><p>Enrique Zamudio – Ceylon lexer</p></li>
+<li><p>Alex Zimin – Nemerle lexer</p></li>
+<li><p>Rob Zimmerman – Kal lexer</p></li>
+<li><p>Vincent Zurczak – Roboconf lexer</p></li>
+<li><p>Rostyslav Golda – FloScript lexer</p></li>
+<li><p>GitHub, Inc – DASM16, Augeas, TOML, and Slash lexers</p></li>
+<li><p>Simon Garnotel – FreeFem++ lexer</p></li>
+</ul>
+<p>Many thanks for all contributions!</p>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/changelog.html b/doc/_build/html/docs/changelog.html
new file mode 100644 (file)
index 0000000..d75d677
--- /dev/null
@@ -0,0 +1,1333 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Pygments changelog &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Full contributor list" href="authors.html" />
+    <link rel="prev" title="Using Pygments in various scenarios" href="integrate.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="../index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">Pygments changelog</a><ul>
+<li><a class="reference internal" href="#version-2-5-0">Version 2.5.0</a></li>
+<li><a class="reference internal" href="#version-2-4-2">Version 2.4.2</a></li>
+<li><a class="reference internal" href="#version-2-4-1">Version 2.4.1</a></li>
+<li><a class="reference internal" href="#version-2-4-0">Version 2.4.0</a></li>
+<li><a class="reference internal" href="#version-2-3-1">Version 2.3.1</a></li>
+<li><a class="reference internal" href="#version-2-3-0">Version 2.3.0</a></li>
+<li><a class="reference internal" href="#version-2-2-0">Version 2.2.0</a></li>
+<li><a class="reference internal" href="#version-2-1-3">Version 2.1.3</a></li>
+<li><a class="reference internal" href="#version-2-1-2">Version 2.1.2</a></li>
+<li><a class="reference internal" href="#version-2-1-1">Version 2.1.1</a></li>
+<li><a class="reference internal" href="#version-2-1">Version 2.1</a></li>
+<li><a class="reference internal" href="#version-2-0-2">Version 2.0.2</a></li>
+<li><a class="reference internal" href="#version-2-0-1">Version 2.0.1</a></li>
+<li><a class="reference internal" href="#version-2-0">Version 2.0</a></li>
+<li><a class="reference internal" href="#version-2-0rc1">Version 2.0rc1</a></li>
+<li><a class="reference internal" href="#version-1-6">Version 1.6</a></li>
+<li><a class="reference internal" href="#version-1-6rc1">Version 1.6rc1</a></li>
+<li><a class="reference internal" href="#version-1-5">Version 1.5</a></li>
+<li><a class="reference internal" href="#version-1-4">Version 1.4</a></li>
+<li><a class="reference internal" href="#version-1-3-1">Version 1.3.1</a></li>
+<li><a class="reference internal" href="#version-1-3">Version 1.3</a></li>
+<li><a class="reference internal" href="#version-1-2-2">Version 1.2.2</a></li>
+<li><a class="reference internal" href="#version-1-2-1">Version 1.2.1</a></li>
+<li><a class="reference internal" href="#version-1-2">Version 1.2</a></li>
+<li><a class="reference internal" href="#version-1-1-1">Version 1.1.1</a></li>
+<li><a class="reference internal" href="#version-1-1">Version 1.1</a></li>
+<li><a class="reference internal" href="#version-1-0">Version 1.0</a></li>
+<li><a class="reference internal" href="#version-0-11-1">Version 0.11.1</a></li>
+<li><a class="reference internal" href="#version-0-11">Version 0.11</a></li>
+<li><a class="reference internal" href="#version-0-10">Version 0.10</a></li>
+<li><a class="reference internal" href="#version-0-9">Version 0.9</a></li>
+<li><a class="reference internal" href="#version-0-8-1">Version 0.8.1</a></li>
+<li><a class="reference internal" href="#version-0-8">Version 0.8</a></li>
+<li><a class="reference internal" href="#version-0-7-1">Version 0.7.1</a></li>
+<li><a class="reference internal" href="#version-0-7">Version 0.7</a></li>
+<li><a class="reference internal" href="#version-0-6">Version 0.6</a></li>
+<li><a class="reference internal" href="#version-0-5-1">Version 0.5.1</a></li>
+<li><a class="reference internal" href="#version-0-5">Version 0.5</a></li>
+</ul>
+</li>
+</ul>
+
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="integrate.html"
+                        title="previous chapter">Using Pygments in various scenarios</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="authors.html"
+                        title="next chapter">Full contributor list</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/changelog.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="pygments-changelog">
+<h1>Pygments changelog<a class="headerlink" href="#pygments-changelog" title="Permalink to this headline">¶</a></h1>
+<p>Since 2.5.0, issue numbers refer to the tracker at
+&lt;<a class="reference external" href="https://github.com/pygments/pygments/issues">https://github.com/pygments/pygments/issues</a>&gt;,
+and pull request numbers to the pull requests at
+&lt;<a class="reference external" href="https://github.com/pygments/pygments/pulls">https://github.com/pygments/pygments/pulls</a>&gt;.</p>
+<div class="section" id="version-2-5-0">
+<h2>Version 2.5.0<a class="headerlink" href="#version-2-5-0" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><p>Added lexers:</p>
+<ul>
+<li><p>Email (PR#1246)</p></li>
+<li><p>Erlang, Elixir shells (PR#823, #1521)</p></li>
+<li><p>Notmuch (PR#1264)</p></li>
+<li><p><a class="reference external" href="https://git.sr.ht/~sircmpwn/scdoc">Scdoc</a> (PR#1268)</p></li>
+<li><p><a class="reference external" href="https://solidity.readthedocs.io/">Solidity</a> (#1214)</p></li>
+<li><p><a class="reference external" href="https://www.zeek.org">Zeek</a> (new name for Bro) (PR#1269)</p></li>
+<li><p><a class="reference external" href="https://ziglang.org/">Zig</a> (PR#820)</p></li>
+</ul>
+</li>
+<li><p>Updated lexers:</p>
+<ul>
+<li><p>Apache2 Configuration (PR#1251)</p></li>
+<li><p>Bash sessions (#1253)</p></li>
+<li><p>CSound (PR#1250)</p></li>
+<li><p>Dart</p></li>
+<li><p>Dockerfile</p></li>
+<li><p>Emacs Lisp</p></li>
+<li><p>Handlebars (PR#773)</p></li>
+<li><p>Java (#1101, #987)</p></li>
+<li><p>Logtalk (PR#1261)</p></li>
+<li><p>Matlab (PR#1271)</p></li>
+<li><p>Praat (PR#1277)</p></li>
+<li><p>Python3 (PR#1255)</p></li>
+<li><p>Ruby</p></li>
+<li><p>YAML (#1528)</p></li>
+<li><p>Velocity</p></li>
+</ul>
+</li>
+<li><p>Added styles:</p>
+<ul>
+<li><p>Inkpot (PR#1276)</p></li>
+</ul>
+</li>
+<li><p>The <code class="docutils literal notranslate"><span class="pre">PythonLexer</span></code> class is now an alias for the former <code class="docutils literal notranslate"><span class="pre">Python3Lexer</span></code>.
+The old <code class="docutils literal notranslate"><span class="pre">PythonLexer</span></code> is available as <code class="docutils literal notranslate"><span class="pre">Python2Lexer</span></code>.  The same change has
+been made for the <code class="docutils literal notranslate"><span class="pre">PythonTracebackLexer</span></code>.  The <code class="docutils literal notranslate"><span class="pre">python3</span></code> option for
+the <code class="docutils literal notranslate"><span class="pre">PythonConsoleLexer</span></code> is now true by default.</p></li>
+<li><p>Bump <code class="docutils literal notranslate"><span class="pre">NasmLexer</span></code> priority over <code class="docutils literal notranslate"><span class="pre">TasmLexer</span></code> for <code class="docutils literal notranslate"><span class="pre">.asm</span></code> files
+(fixes #1326)</p></li>
+<li><p>Default font in the <code class="docutils literal notranslate"><span class="pre">ImageFormatter</span></code> has been updated (#928, PR#1245)</p></li>
+<li><p>Test suite switched to py.test, removed nose dependency (#1490)</p></li>
+<li><p>Reduce <code class="docutils literal notranslate"><span class="pre">TeraTerm</span></code> lexer score – it used to match nearly all languages
+(#1256)</p></li>
+<li><p>Treat <code class="docutils literal notranslate"><span class="pre">Skylark</span></code>/<code class="docutils literal notranslate"><span class="pre">Starlark</span></code> files as Python files (PR#1259)</p></li>
+<li><p>Image formatter: actually respect <code class="docutils literal notranslate"><span class="pre">line_number_separator</span></code> option</p></li>
+<li><p>Add LICENSE file to wheel builds</p></li>
+<li><p>Agda: fix lambda highlighting</p></li>
+<li><p>Dart: support <code class="docutils literal notranslate"><span class="pre">&#64;</span></code> annotations</p></li>
+<li><p>Dockerfile: accept <code class="docutils literal notranslate"><span class="pre">FROM</span> <span class="pre">...</span> <span class="pre">AS</span></code> syntax</p></li>
+<li><p>Emacs Lisp: add more string functions</p></li>
+<li><p>GAS: accept registers in directive arguments</p></li>
+<li><p>Java: make structural punctuation (braces, parens, colon, comma) <code class="docutils literal notranslate"><span class="pre">Punctuation</span></code>, not <code class="docutils literal notranslate"><span class="pre">Operator</span></code> (#987)</p></li>
+<li><p>Java: support <code class="docutils literal notranslate"><span class="pre">var</span></code> contextual keyword (#1101)</p></li>
+<li><p>Matlab: Fix recognition of <code class="docutils literal notranslate"><span class="pre">function</span></code> keyword (PR#1271)</p></li>
+<li><p>Python: recognize <code class="docutils literal notranslate"><span class="pre">.jy</span></code> filenames (#976)</p></li>
+<li><p>Python: recognize <code class="docutils literal notranslate"><span class="pre">f</span></code> string prefix (#1156)</p></li>
+<li><p>Ruby: support squiggly heredocs</p></li>
+<li><p>Shell sessions: recognize Virtualenv prompt (PR#1266)</p></li>
+<li><p>Velocity: support silent reference syntax</p></li>
+</ul>
+</div>
+<div class="section" id="version-2-4-2">
+<h2>Version 2.4.2<a class="headerlink" href="#version-2-4-2" title="Permalink to this headline">¶</a></h2>
+<p>(released May 28, 2019)</p>
+<ul class="simple">
+<li><p>Fix encoding error when guessing lexer with given <code class="docutils literal notranslate"><span class="pre">encoding</span></code> option
+(#1438)</p></li>
+</ul>
+</div>
+<div class="section" id="version-2-4-1">
+<h2>Version 2.4.1<a class="headerlink" href="#version-2-4-1" title="Permalink to this headline">¶</a></h2>
+<p>(released May 24, 2019)</p>
+<ul class="simple">
+<li><p>Updated lexers:</p>
+<ul>
+<li><p>Coq (#1430)</p></li>
+<li><p>MSDOS Session (PR#734)</p></li>
+<li><p>NASM (#1517)</p></li>
+<li><p>Objective-C (PR#813, #1508)</p></li>
+<li><p>Prolog (#1511)</p></li>
+<li><p>TypeScript (#1515)</p></li>
+</ul>
+</li>
+<li><p>Support CSS variables in stylesheets (PR#814, #1356)</p></li>
+<li><p>Fix F# lexer name (PR#709)</p></li>
+<li><p>Fix <code class="docutils literal notranslate"><span class="pre">TerminalFormatter</span></code> using bold for bright text (#1480)</p></li>
+</ul>
+</div>
+<div class="section" id="version-2-4-0">
+<h2>Version 2.4.0<a class="headerlink" href="#version-2-4-0" title="Permalink to this headline">¶</a></h2>
+<p>(released May 8, 2019)</p>
+<ul class="simple">
+<li><p>Added lexers:</p>
+<ul>
+<li><p>Augeas (PR#807)</p></li>
+<li><p>BBC Basic (PR#806)</p></li>
+<li><p>Boa (PR#756)</p></li>
+<li><p>Charm++ CI (PR#788)</p></li>
+<li><p>DASM16 (PR#807)</p></li>
+<li><p>FloScript (PR#750)</p></li>
+<li><p>FreeFem++ (PR#785)</p></li>
+<li><p>Hspec (PR#790)</p></li>
+<li><p>Pony (PR#627)</p></li>
+<li><p>SGF (PR#780)</p></li>
+<li><p>Slash (PR#807)</p></li>
+<li><p>Slurm (PR#760)</p></li>
+<li><p>Tera Term Language (PR#749)</p></li>
+<li><p>TOML (PR#807)</p></li>
+<li><p>Unicon (PR#731)</p></li>
+<li><p>VBScript (PR#673)</p></li>
+</ul>
+</li>
+<li><p>Updated lexers:</p>
+<ul>
+<li><p>Apache2 (PR#766)</p></li>
+<li><p>Cypher (PR#746)</p></li>
+<li><p>LLVM (PR#792)</p></li>
+<li><p>Makefiles (PR#766)</p></li>
+<li><p>PHP (#1482)</p></li>
+<li><p>Rust</p></li>
+<li><p>SQL (PR#672)</p></li>
+<li><p>Stan (PR#774)</p></li>
+<li><p>Stata (PR#800)</p></li>
+<li><p>Terraform (PR#787)</p></li>
+<li><p>YAML</p></li>
+</ul>
+</li>
+<li><p>Add solarized style (PR#708)</p></li>
+<li><p>Add support for Markdown reference-style links (PR#753)</p></li>
+<li><p>Add license information to generated HTML/CSS files (#1496)</p></li>
+<li><p>Change ANSI color names (PR#777)</p></li>
+<li><p>Fix catastrophic backtracking in the bash lexer (#1494)</p></li>
+<li><p>Fix documentation failing to build using Sphinx 2.0 (#1501)</p></li>
+<li><p>Fix incorrect links in the Lisp and R lexer documentation (PR#775)</p></li>
+<li><p>Fix rare unicode errors on Python 2.7 (PR#798, #1492)</p></li>
+<li><p>Fix lexers popping from an empty stack (#1506)</p></li>
+<li><p>TypoScript uses <code class="docutils literal notranslate"><span class="pre">.typoscript</span></code> now (#1498)</p></li>
+<li><p>Updated Trove classifiers and <code class="docutils literal notranslate"><span class="pre">pip</span></code> requirements (PR#799)</p></li>
+</ul>
+</div>
+<div class="section" id="version-2-3-1">
+<h2>Version 2.3.1<a class="headerlink" href="#version-2-3-1" title="Permalink to this headline">¶</a></h2>
+<p>(released Dec 16, 2018)</p>
+<ul class="simple">
+<li><p>Updated lexers:</p>
+<ul>
+<li><p>ASM (PR#784)</p></li>
+<li><p>Chapel (PR#735)</p></li>
+<li><p>Clean (PR#621)</p></li>
+<li><p>CSound (PR#684)</p></li>
+<li><p>Elm (PR#744)</p></li>
+<li><p>Fortran (PR#747)</p></li>
+<li><p>GLSL (PR#740)</p></li>
+<li><p>Haskell (PR#745)</p></li>
+<li><p>Hy (PR#754)</p></li>
+<li><p>Igor Pro (PR#764)</p></li>
+<li><p>PowerShell (PR#705)</p></li>
+<li><p>Python (PR#720, #1299, PR#715)</p></li>
+<li><p>SLexer (PR#680)</p></li>
+<li><p>YAML (PR#762, PR#724)</p></li>
+</ul>
+</li>
+<li><p>Fix invalid string escape sequences</p></li>
+<li><p>Fix <cite>FutureWarning</cite> introduced by regex changes in Python 3.7</p></li>
+</ul>
+</div>
+<div class="section" id="version-2-3-0">
+<h2>Version 2.3.0<a class="headerlink" href="#version-2-3-0" title="Permalink to this headline">¶</a></h2>
+<p>(released Nov 25, 2018)</p>
+<ul class="simple">
+<li><p>Added lexers:</p>
+<ul>
+<li><p>Fennel (PR#783)</p></li>
+<li><p>HLSL (PR#675)</p></li>
+</ul>
+</li>
+<li><p>Updated lexers:</p>
+<ul>
+<li><p>Dockerfile (PR#714)</p></li>
+</ul>
+</li>
+<li><p>Minimum Python versions changed to 2.7 and 3.5</p></li>
+<li><p>Added support for Python 3.7 generator changes (PR#772)</p></li>
+<li><p>Fix incorrect token type in SCSS for single-quote strings (#1322)</p></li>
+<li><p>Use <cite>terminal256</cite> formatter if <cite>TERM</cite> contains <cite>256</cite> (PR#666)</p></li>
+<li><p>Fix incorrect handling of GitHub style fences in Markdown (PR#741, #1389)</p></li>
+<li><p>Fix <cite>%a</cite> not being highlighted in Python3 strings (PR#727)</p></li>
+</ul>
+</div>
+<div class="section" id="version-2-2-0">
+<h2>Version 2.2.0<a class="headerlink" href="#version-2-2-0" title="Permalink to this headline">¶</a></h2>
+<p>(released Jan 22, 2017)</p>
+<ul class="simple">
+<li><p>Added lexers:</p>
+<ul>
+<li><p>AMPL</p></li>
+<li><p>TypoScript (#1173)</p></li>
+<li><p>Varnish config (PR#554)</p></li>
+<li><p>Clean (PR#503)</p></li>
+<li><p>WDiff (PR#513)</p></li>
+<li><p>Flatline (PR#551)</p></li>
+<li><p>Silver (PR#537)</p></li>
+<li><p>HSAIL (PR#518)</p></li>
+<li><p>JSGF (PR#546)</p></li>
+<li><p>NCAR command language (PR#536)</p></li>
+<li><p>Extempore (PR#530)</p></li>
+<li><p>Cap’n Proto (PR#595)</p></li>
+<li><p>Whiley (PR#573)</p></li>
+<li><p>Monte (PR#592)</p></li>
+<li><p>Crystal (PR#576)</p></li>
+<li><p>Snowball (PR#589)</p></li>
+<li><p>CapDL (PR#579)</p></li>
+<li><p>NuSMV (PR#564)</p></li>
+<li><p>SAS, Stata (PR#593)</p></li>
+</ul>
+</li>
+<li><p>Added the ability to load lexer and formatter classes directly from files
+with the <cite>-x</cite> command line option and the <cite>lexers.load_lexer_from_file()</cite>
+and <cite>formatters.load_formatter_from_file()</cite> functions. (PR#559)</p></li>
+<li><p>Added <cite>lexers.find_lexer_class_by_name()</cite>. (#1203)</p></li>
+<li><p>Added new token types and lexing for magic methods and variables in Python
+and PHP.</p></li>
+<li><p>Added a new token type for string affixes and lexing for them in Python, C++
+and Postgresql lexers.</p></li>
+<li><p>Added a new token type for heredoc (and similar) string delimiters and
+lexing for them in C++, Perl, PHP, Postgresql and Ruby lexers.</p></li>
+<li><p>Styles can now define colors with ANSI colors for use in the 256-color
+terminal formatter. (PR#531)</p></li>
+<li><p>Improved the CSS lexer. (#1083, #1130)</p></li>
+<li><p>Added “Rainbow Dash” style. (PR#623)</p></li>
+<li><p>Delay loading <cite>pkg_resources</cite>, which takes a long while to import. (PR#690)</p></li>
+</ul>
+</div>
+<div class="section" id="version-2-1-3">
+<h2>Version 2.1.3<a class="headerlink" href="#version-2-1-3" title="Permalink to this headline">¶</a></h2>
+<p>(released Mar 2, 2016)</p>
+<ul class="simple">
+<li><p>Fixed regression in Bash lexer (PR#563)</p></li>
+</ul>
+</div>
+<div class="section" id="version-2-1-2">
+<h2>Version 2.1.2<a class="headerlink" href="#version-2-1-2" title="Permalink to this headline">¶</a></h2>
+<p>(released Feb 29, 2016)</p>
+<ul class="simple">
+<li><p>Fixed Python 3 regression in image formatter (#1215)</p></li>
+<li><p>Fixed regression in Bash lexer (PR#562)</p></li>
+</ul>
+</div>
+<div class="section" id="version-2-1-1">
+<h2>Version 2.1.1<a class="headerlink" href="#version-2-1-1" title="Permalink to this headline">¶</a></h2>
+<p>(released Feb 14, 2016)</p>
+<ul class="simple">
+<li><p>Fixed Jython compatibility (#1205)</p></li>
+<li><p>Fixed HTML formatter output with leading empty lines (#1111)</p></li>
+<li><p>Added a mapping table for LaTeX encodings and added utf8 (#1152)</p></li>
+<li><p>Fixed image formatter font searching on Macs (#1188)</p></li>
+<li><p>Fixed deepcopy-ing of Token instances (#1168)</p></li>
+<li><p>Fixed Julia string interpolation (#1170)</p></li>
+<li><p>Fixed statefulness of HttpLexer between get_tokens calls</p></li>
+<li><p>Many smaller fixes to various lexers</p></li>
+</ul>
+</div>
+<div class="section" id="version-2-1">
+<h2>Version 2.1<a class="headerlink" href="#version-2-1" title="Permalink to this headline">¶</a></h2>
+<p>(released Jan 17, 2016)</p>
+<ul class="simple">
+<li><p>Added lexers:</p>
+<ul>
+<li><p>Emacs Lisp (PR#431)</p></li>
+<li><p>Arduino (PR#442)</p></li>
+<li><p>Modula-2 with multi-dialect support (#1090)</p></li>
+<li><p>Fortran fixed format (PR#213)</p></li>
+<li><p>Archetype Definition language (PR#483)</p></li>
+<li><p>Terraform (PR#432)</p></li>
+<li><p>Jcl, Easytrieve (PR#208)</p></li>
+<li><p>ParaSail (PR#381)</p></li>
+<li><p>Boogie (PR#420)</p></li>
+<li><p>Turtle (PR#425)</p></li>
+<li><p>Fish Shell (PR#422)</p></li>
+<li><p>Roboconf (PR#449)</p></li>
+<li><p>Test Anything Protocol (PR#428)</p></li>
+<li><p>Shen (PR#385)</p></li>
+<li><p>Component Pascal (PR#437)</p></li>
+<li><p>SuperCollider (PR#472)</p></li>
+<li><p>Shell consoles (Tcsh, PowerShell, MSDOS) (PR#479)</p></li>
+<li><p>Elm and J (PR#452)</p></li>
+<li><p>Crmsh (PR#440)</p></li>
+<li><p>Praat (PR#492)</p></li>
+<li><p>CSound (PR#494)</p></li>
+<li><p>Ezhil (PR#443)</p></li>
+<li><p>Thrift (PR#469)</p></li>
+<li><p>QVT Operational (PR#204)</p></li>
+<li><p>Hexdump (PR#508)</p></li>
+<li><p>CAmkES Configuration (PR#462)</p></li>
+</ul>
+</li>
+<li><p>Added styles:</p>
+<ul>
+<li><p>Lovelace (PR#456)</p></li>
+<li><p>Algol and Algol-nu (#1090)</p></li>
+</ul>
+</li>
+<li><p>Added formatters:</p>
+<ul>
+<li><p>IRC (PR#458)</p></li>
+<li><p>True color (24-bit) terminal ANSI sequences (#1142)
+(formatter alias: “16m”)</p></li>
+</ul>
+</li>
+<li><p>New “filename” option for HTML formatter (PR#527).</p></li>
+<li><p>Improved performance of the HTML formatter for long lines (PR#504).</p></li>
+<li><p>Updated autopygmentize script (PR#445).</p></li>
+<li><p>Fixed style inheritance for non-standard token types in HTML output.</p></li>
+<li><p>Added support for async/await to Python 3 lexer.</p></li>
+<li><p>Rewrote linenos option for TerminalFormatter (it’s better, but slightly
+different output than before) (#1147).</p></li>
+<li><p>Javascript lexer now supports most of ES6 (#1100).</p></li>
+<li><p>Cocoa builtins updated for iOS 8.1 (PR#433).</p></li>
+<li><p>Combined BashSessionLexer and ShellSessionLexer; the new version should support
+the prompt styles of either.</p></li>
+<li><p>Added option to pygmentize to show a full traceback on exceptions.</p></li>
+<li><p>Fixed incomplete output on Windows and Python 3 (e.g. when using IPython
+Notebook) (#1153).</p></li>
+<li><p>Allowed more traceback styles in Python console lexer (PR#253).</p></li>
+<li><p>Added decorators to TypeScript (PR#509).</p></li>
+<li><p>Fix highlighting of certain IRC logs formats (#1076).</p></li>
+</ul>
+</div>
+<div class="section" id="version-2-0-2">
+<h2>Version 2.0.2<a class="headerlink" href="#version-2-0-2" title="Permalink to this headline">¶</a></h2>
+<p>(released Jan 20, 2015)</p>
+<ul class="simple">
+<li><p>Fix Python tracebacks getting duplicated in the console lexer (#1068).</p></li>
+<li><p>Backquote-delimited identifiers are now recognized in F# (#1062).</p></li>
+</ul>
+</div>
+<div class="section" id="version-2-0-1">
+<h2>Version 2.0.1<a class="headerlink" href="#version-2-0-1" title="Permalink to this headline">¶</a></h2>
+<p>(released Nov 10, 2014)</p>
+<ul class="simple">
+<li><p>Fix an encoding issue when using <code class="docutils literal notranslate"><span class="pre">pygmentize</span></code> with the <code class="docutils literal notranslate"><span class="pre">-o</span></code> option.</p></li>
+</ul>
+</div>
+<div class="section" id="version-2-0">
+<h2>Version 2.0<a class="headerlink" href="#version-2-0" title="Permalink to this headline">¶</a></h2>
+<p>(released Nov 9, 2014)</p>
+<ul class="simple">
+<li><p>Default lexer encoding is now “guess”, i.e. UTF-8 / Locale / Latin1 is
+tried in that order.</p></li>
+<li><p>Major update to Swift lexer (PR#410).</p></li>
+<li><p>Multiple fixes to lexer guessing in conflicting cases:</p>
+<ul>
+<li><p>recognize HTML5 by doctype</p></li>
+<li><p>recognize XML by XML declaration</p></li>
+<li><p>don’t recognize C/C++ as SystemVerilog</p></li>
+</ul>
+</li>
+<li><p>Simplified regexes and builtin lists.</p></li>
+</ul>
+</div>
+<div class="section" id="version-2-0rc1">
+<h2>Version 2.0rc1<a class="headerlink" href="#version-2-0rc1" title="Permalink to this headline">¶</a></h2>
+<p>(released Oct 16, 2014)</p>
+<ul class="simple">
+<li><p>Dropped Python 2.4 and 2.5 compatibility in favor of single-source
+compatibility between Python 2.6, 2.7 and 3.3+.</p></li>
+<li><p>New website and documentation based on Sphinx (finally!)</p></li>
+<li><p>Lexers added:</p>
+<ul>
+<li><p>APL (#969)</p></li>
+<li><p>Agda and Literate Agda (PR#203)</p></li>
+<li><p>Alloy (PR#355)</p></li>
+<li><p>AmbientTalk</p></li>
+<li><p>BlitzBasic (PR#197)</p></li>
+<li><p>ChaiScript (PR#24)</p></li>
+<li><p>Chapel (PR#256)</p></li>
+<li><p>Cirru (PR#275)</p></li>
+<li><p>Clay (PR#184)</p></li>
+<li><p>ColdFusion CFC (PR#283)</p></li>
+<li><p>Cryptol and Literate Cryptol (PR#344)</p></li>
+<li><p>Cypher (PR#257)</p></li>
+<li><p>Docker config files</p></li>
+<li><p>EBNF (PR#193)</p></li>
+<li><p>Eiffel (PR#273)</p></li>
+<li><p>GAP (PR#311)</p></li>
+<li><p>Golo (PR#309)</p></li>
+<li><p>Handlebars (PR#186)</p></li>
+<li><p>Hy (PR#238)</p></li>
+<li><p>Idris and Literate Idris (PR#210)</p></li>
+<li><p>Igor Pro (PR#172)</p></li>
+<li><p>Inform 6/7 (PR#281)</p></li>
+<li><p>Intel objdump (PR#279)</p></li>
+<li><p>Isabelle (PR#386)</p></li>
+<li><p>Jasmin (PR#349)</p></li>
+<li><p>JSON-LD (PR#289)</p></li>
+<li><p>Kal (PR#233)</p></li>
+<li><p>Lean (PR#399)</p></li>
+<li><p>LSL (PR#296)</p></li>
+<li><p>Limbo (PR#291)</p></li>
+<li><p>Liquid (#977)</p></li>
+<li><p>MQL (PR#285)</p></li>
+<li><p>MaskJS (PR#280)</p></li>
+<li><p>Mozilla preprocessors</p></li>
+<li><p>Mathematica (PR#245)</p></li>
+<li><p>NesC (PR#166)</p></li>
+<li><p>Nit (PR#375)</p></li>
+<li><p>Nix (PR#267)</p></li>
+<li><p>Pan</p></li>
+<li><p>Pawn (PR#211)</p></li>
+<li><p>Perl 6 (PR#181)</p></li>
+<li><p>Pig (PR#304)</p></li>
+<li><p>Pike (PR#237)</p></li>
+<li><p>QBasic (PR#182)</p></li>
+<li><p>Red (PR#341)</p></li>
+<li><p>ResourceBundle (#1038)</p></li>
+<li><p>Rexx (PR#199)</p></li>
+<li><p>Rql (PR#251)</p></li>
+<li><p>Rsl</p></li>
+<li><p>SPARQL (PR#78)</p></li>
+<li><p>Slim (PR#366)</p></li>
+<li><p>Swift (PR#371)</p></li>
+<li><p>Swig (PR#168)</p></li>
+<li><p>TADS 3 (PR#407)</p></li>
+<li><p>Todo.txt todo lists</p></li>
+<li><p>Twig (PR#404)</p></li>
+</ul>
+</li>
+<li><p>Added a helper to “optimize” regular expressions that match one of many
+literal words; this can save 20% or more of the lexing time with lexers that
+highlight many keywords or builtins.</p></li>
+<li><p>New styles: “xcode” and “igor”, similar to the default highlighting of
+the respective IDEs.</p></li>
+<li><p>The command-line “pygmentize” tool now tries a little harder to find the
+correct encoding for files and the terminal (#979).</p></li>
+<li><p>Added “inencoding” option for lexers to override “encoding” analogous
+to “outencoding” (#800).</p></li>
+<li><p>Added a line-by-line “streaming” mode for pygmentize with the “-s” option
+(PR#165).  It only fully works for lexers that have no constructs spanning
+lines!</p></li>
+<li><p>Added an “envname” option to the LaTeX formatter to select a replacement
+verbatim environment (PR#235).</p></li>
+<li><p>Updated the Makefile lexer to yield a little more useful highlighting.</p></li>
+<li><p>Lexer aliases passed to <code class="docutils literal notranslate"><span class="pre">get_lexer_by_name()</span></code> are now case-insensitive.</p></li>
+<li><p>File name matching in lexers and formatters will now use a regex cache
+for speed (PR#205).</p></li>
+<li><p>Pygments will now recognize “vim” modelines when guessing the lexer for
+a file based on content (PR#118).</p></li>
+<li><p>Major restructure of the <code class="docutils literal notranslate"><span class="pre">pygments.lexers</span></code> module namespace.  There are now
+many more modules with fewer lexers per module.  Old modules are still around
+and re-export the lexers they previously contained.</p></li>
+<li><p>The NameHighlightFilter now works with any Name.* token type (#790).</p></li>
+<li><p>Python 3 lexer: add new exceptions from PEP 3151.</p></li>
+<li><p>Opa lexer: add new keywords (PR#170).</p></li>
+<li><p>Julia lexer: add keywords and underscore-separated number
+literals (PR#176).</p></li>
+<li><p>Lasso lexer: fix method highlighting, update builtins. Fix
+guessing so that plain XML isn’t always taken as Lasso (PR#163).</p></li>
+<li><p>Objective C/C++ lexers: allow “&#64;” prefixing any expression (#871).</p></li>
+<li><p>Ruby lexer: fix lexing of Name::Space tokens (#860) and of symbols
+in hashes (#873).</p></li>
+<li><p>Stan lexer: update for version 2.4.0 of the language (PR#162, PR#255, PR#377).</p></li>
+<li><p>JavaScript lexer: add the “yield” keyword (PR#196).</p></li>
+<li><p>HTTP lexer: support for PATCH method (PR#190).</p></li>
+<li><p>Koka lexer: update to newest language spec (PR#201).</p></li>
+<li><p>Haxe lexer: rewrite and support for Haxe 3 (PR#174).</p></li>
+<li><p>Prolog lexer: add different kinds of numeric literals (#864).</p></li>
+<li><p>F# lexer: rewrite with newest spec for F# 3.0 (#842), fix a bug with
+dotted chains (#948).</p></li>
+<li><p>Kotlin lexer: general update (PR#271).</p></li>
+<li><p>Rebol lexer: fix comment detection and analyse_text (PR#261).</p></li>
+<li><p>LLVM lexer: update keywords to v3.4 (PR#258).</p></li>
+<li><p>PHP lexer: add new keywords and binary literals (PR#222).</p></li>
+<li><p>external/markdown-processor.py updated to newest python-markdown (PR#221).</p></li>
+<li><p>CSS lexer: some highlighting order fixes (PR#231).</p></li>
+<li><p>Ceylon lexer: fix parsing of nested multiline comments (#915).</p></li>
+<li><p>C family lexers: fix parsing of indented preprocessor directives (#944).</p></li>
+<li><p>Rust lexer: update to 0.9 language version (PR#270, PR#388).</p></li>
+<li><p>Elixir lexer: update to 0.15 language version (PR#392).</p></li>
+<li><p>Fix swallowing incomplete tracebacks in Python console lexer (#874).</p></li>
+</ul>
+</div>
+<div class="section" id="version-1-6">
+<h2>Version 1.6<a class="headerlink" href="#version-1-6" title="Permalink to this headline">¶</a></h2>
+<p>(released Feb 3, 2013)</p>
+<ul class="simple">
+<li><p>Lexers added:</p>
+<ul>
+<li><p>Dylan console (PR#149)</p></li>
+<li><p>Logos (PR#150)</p></li>
+<li><p>Shell sessions (PR#158)</p></li>
+</ul>
+</li>
+<li><p>Fix guessed lexers not receiving lexer options (#838).</p></li>
+<li><p>Fix unquoted HTML attribute lexing in Opa (#841).</p></li>
+<li><p>Fixes to the Dart lexer (PR#160).</p></li>
+</ul>
+</div>
+<div class="section" id="version-1-6rc1">
+<h2>Version 1.6rc1<a class="headerlink" href="#version-1-6rc1" title="Permalink to this headline">¶</a></h2>
+<p>(released Jan 9, 2013)</p>
+<ul class="simple">
+<li><p>Lexers added:</p>
+<ul>
+<li><p>AspectJ (PR#90)</p></li>
+<li><p>AutoIt (PR#122)</p></li>
+<li><p>BUGS-like languages (PR#89)</p></li>
+<li><p>Ceylon (PR#86)</p></li>
+<li><p>Croc (new name for MiniD)</p></li>
+<li><p>CUDA (PR#75)</p></li>
+<li><p>Dg (PR#116)</p></li>
+<li><p>IDL (PR#115)</p></li>
+<li><p>Jags (PR#89)</p></li>
+<li><p>Julia (PR#61)</p></li>
+<li><p>Kconfig (#711)</p></li>
+<li><p>Lasso (PR#95, PR#113)</p></li>
+<li><p>LiveScript (PR#84)</p></li>
+<li><p>Monkey (PR#117)</p></li>
+<li><p>Mscgen (PR#80)</p></li>
+<li><p>NSIS scripts (PR#136)</p></li>
+<li><p>OpenCOBOL (PR#72)</p></li>
+<li><p>QML (PR#123)</p></li>
+<li><p>Puppet (PR#133)</p></li>
+<li><p>Racket (PR#94)</p></li>
+<li><p>Rdoc (PR#99)</p></li>
+<li><p>Robot Framework (PR#137)</p></li>
+<li><p>RPM spec files (PR#124)</p></li>
+<li><p>Rust (PR#67)</p></li>
+<li><p>Smali (Dalvik assembly)</p></li>
+<li><p>SourcePawn (PR#39)</p></li>
+<li><p>Stan (PR#89)</p></li>
+<li><p>Treetop (PR#125)</p></li>
+<li><p>TypeScript (PR#114)</p></li>
+<li><p>VGL (PR#12)</p></li>
+<li><p>Visual FoxPro (#762)</p></li>
+<li><p>Windows Registry (#819)</p></li>
+<li><p>Xtend (PR#68)</p></li>
+</ul>
+</li>
+<li><p>The HTML formatter now supports linking to tags using CTags files, when the
+python-ctags package is installed (PR#87).</p></li>
+<li><p>The HTML formatter now has a “linespans” option that wraps every line in a
+&lt;span&gt; tag with a specific id (PR#82).</p></li>
+<li><p>When deriving a lexer from another lexer with token definitions, definitions
+for states not in the child lexer are now inherited.  If you override a state
+in the child lexer, an “inherit” keyword has been added to insert the base
+state at that position (PR#141).</p></li>
+<li><p>The C family lexers now inherit token definitions from a common base class,
+removing code duplication (PR#141).</p></li>
+<li><p>Use “colorama” on Windows for console color output (PR#142).</p></li>
+<li><p>Fix Template Haskell highlighting (PR#63).</p></li>
+<li><p>Fix some S/R lexer errors (PR#91).</p></li>
+<li><p>Fix a bug in the Prolog lexer with names that start with ‘is’ (#810).</p></li>
+<li><p>Rewrite Dylan lexer, add Dylan LID lexer (PR#147).</p></li>
+<li><p>Add a Java quickstart document (PR#146).</p></li>
+<li><p>Add a “external/autopygmentize” file that can be used as .lessfilter (#802).</p></li>
+</ul>
+</div>
+<div class="section" id="version-1-5">
+<h2>Version 1.5<a class="headerlink" href="#version-1-5" title="Permalink to this headline">¶</a></h2>
+<p>(codename Zeitdilatation, released Mar 10, 2012)</p>
+<ul class="simple">
+<li><p>Lexers added:</p>
+<ul>
+<li><p>Awk (#630)</p></li>
+<li><p>Fancy (#633)</p></li>
+<li><p>PyPy Log</p></li>
+<li><p>eC</p></li>
+<li><p>Nimrod</p></li>
+<li><p>Nemerle (#667)</p></li>
+<li><p>F# (#353)</p></li>
+<li><p>Groovy (#501)</p></li>
+<li><p>PostgreSQL (#660)</p></li>
+<li><p>DTD</p></li>
+<li><p>Gosu (#634)</p></li>
+<li><p>Octave (PR#22)</p></li>
+<li><p>Standard ML (PR#14)</p></li>
+<li><p>CFengine3 (#601)</p></li>
+<li><p>Opa (PR#37)</p></li>
+<li><p>HTTP sessions (PR#42)</p></li>
+<li><p>JSON (PR#31)</p></li>
+<li><p>SNOBOL (PR#30)</p></li>
+<li><p>MoonScript (PR#43)</p></li>
+<li><p>ECL (PR#29)</p></li>
+<li><p>Urbiscript (PR#17)</p></li>
+<li><p>OpenEdge ABL (PR#27)</p></li>
+<li><p>SystemVerilog (PR#35)</p></li>
+<li><p>Coq (#734)</p></li>
+<li><p>PowerShell (#654)</p></li>
+<li><p>Dart (#715)</p></li>
+<li><p>Fantom (PR#36)</p></li>
+<li><p>Bro (PR#5)</p></li>
+<li><p>NewLISP (PR#26)</p></li>
+<li><p>VHDL (PR#45)</p></li>
+<li><p>Scilab (#740)</p></li>
+<li><p>Elixir (PR#57)</p></li>
+<li><p>Tea (PR#56)</p></li>
+<li><p>Kotlin (PR#58)</p></li>
+</ul>
+</li>
+<li><p>Fix Python 3 terminal highlighting with pygmentize (#691).</p></li>
+<li><p>In the LaTeX formatter, escape special &amp;, &lt; and &gt; chars (#648).</p></li>
+<li><p>In the LaTeX formatter, fix display problems for styles with token
+background colors (#670).</p></li>
+<li><p>Enhancements to the Squid conf lexer (#664).</p></li>
+<li><p>Several fixes to the reStructuredText lexer (#636).</p></li>
+<li><p>Recognize methods in the ObjC lexer (#638).</p></li>
+<li><p>Fix Lua “class” highlighting: it does not have classes (#665).</p></li>
+<li><p>Fix degenerate regex in Scala lexer (#671) and highlighting bugs (#713, #708).</p></li>
+<li><p>Fix number pattern order in Ocaml lexer (#647).</p></li>
+<li><p>Fix generic type highlighting in ActionScript 3 (#666).</p></li>
+<li><p>Fixes to the Clojure lexer (PR#9).</p></li>
+<li><p>Fix degenerate regex in Nemerle lexer (#706).</p></li>
+<li><p>Fix infinite looping in CoffeeScript lexer (#729).</p></li>
+<li><p>Fix crashes and analysis with ObjectiveC lexer (#693, #696).</p></li>
+<li><p>Add some Fortran 2003 keywords.</p></li>
+<li><p>Fix Boo string regexes (#679).</p></li>
+<li><p>Add “rrt” style (#727).</p></li>
+<li><p>Fix infinite looping in Darcs Patch lexer.</p></li>
+<li><p>Lots of misc fixes to character-eating bugs and ordering problems in many
+different lexers.</p></li>
+</ul>
+</div>
+<div class="section" id="version-1-4">
+<h2>Version 1.4<a class="headerlink" href="#version-1-4" title="Permalink to this headline">¶</a></h2>
+<p>(codename Unschärfe, released Jan 03, 2011)</p>
+<ul class="simple">
+<li><p>Lexers added:</p>
+<ul>
+<li><p>Factor (#520)</p></li>
+<li><p>PostScript (#486)</p></li>
+<li><p>Verilog (#491)</p></li>
+<li><p>BlitzMax Basic (#478)</p></li>
+<li><p>Ioke (#465)</p></li>
+<li><p>Java properties, split out of the INI lexer (#445)</p></li>
+<li><p>Scss (#509)</p></li>
+<li><p>Duel/JBST</p></li>
+<li><p>XQuery (#617)</p></li>
+<li><p>Mason (#615)</p></li>
+<li><p>GoodData (#609)</p></li>
+<li><p>SSP (#473)</p></li>
+<li><p>Autohotkey (#417)</p></li>
+<li><p>Google Protocol Buffers</p></li>
+<li><p>Hybris (#506)</p></li>
+</ul>
+</li>
+<li><p>Do not fail in analyse_text methods (#618).</p></li>
+<li><p>Performance improvements in the HTML formatter (#523).</p></li>
+<li><p>With the <code class="docutils literal notranslate"><span class="pre">noclasses</span></code> option in the HTML formatter, some styles
+present in the stylesheet were not added as inline styles.</p></li>
+<li><p>Four fixes to the Lua lexer (#480, #481, #482, #497).</p></li>
+<li><p>More context-sensitive Gherkin lexer with support for more i18n translations.</p></li>
+<li><p>Support new OO keywords in Matlab lexer (#521).</p></li>
+<li><p>Small fix in the CoffeeScript lexer (#519).</p></li>
+<li><p>A bugfix for backslashes in ocaml strings (#499).</p></li>
+<li><p>Fix unicode/raw docstrings in the Python lexer (#489).</p></li>
+<li><p>Allow PIL to work without PIL.pth (#502).</p></li>
+<li><p>Allow seconds as a unit in CSS (#496).</p></li>
+<li><p>Support <code class="docutils literal notranslate"><span class="pre">application/javascript</span></code> as a JavaScript mime type (#504).</p></li>
+<li><p>Support <a class="reference external" href="http://offload.codeplay.com">Offload</a> C++ Extensions as
+keywords in the C++ lexer (#484).</p></li>
+<li><p>Escape more characters in LaTeX output (#505).</p></li>
+<li><p>Update Haml/Sass lexers to version 3 (#509).</p></li>
+<li><p>Small PHP lexer string escaping fix (#515).</p></li>
+<li><p>Support comments before preprocessor directives, and unsigned/
+long long literals in C/C++ (#613, #616).</p></li>
+<li><p>Support line continuations in the INI lexer (#494).</p></li>
+<li><p>Fix lexing of Dylan string and char literals (#628).</p></li>
+<li><p>Fix class/procedure name highlighting in VB.NET lexer (#624).</p></li>
+</ul>
+</div>
+<div class="section" id="version-1-3-1">
+<h2>Version 1.3.1<a class="headerlink" href="#version-1-3-1" title="Permalink to this headline">¶</a></h2>
+<p>(bugfix release, released Mar 05, 2010)</p>
+<ul class="simple">
+<li><p>The <code class="docutils literal notranslate"><span class="pre">pygmentize</span></code> script was missing from the distribution.</p></li>
+</ul>
+</div>
+<div class="section" id="version-1-3">
+<h2>Version 1.3<a class="headerlink" href="#version-1-3" title="Permalink to this headline">¶</a></h2>
+<p>(codename Schneeglöckchen, released Mar 01, 2010)</p>
+<ul class="simple">
+<li><p>Added the <code class="docutils literal notranslate"><span class="pre">ensurenl</span></code> lexer option, which can be used to suppress the
+automatic addition of a newline to the lexer input.</p></li>
+<li><p>Lexers added:</p>
+<ul>
+<li><p>Ada</p></li>
+<li><p>Coldfusion</p></li>
+<li><p>Modula-2</p></li>
+<li><p>Haxe</p></li>
+<li><p>R console</p></li>
+<li><p>Objective-J</p></li>
+<li><p>Haml and Sass</p></li>
+<li><p>CoffeeScript</p></li>
+</ul>
+</li>
+<li><p>Enhanced reStructuredText highlighting.</p></li>
+<li><p>Added support for PHP 5.3 namespaces in the PHP lexer.</p></li>
+<li><p>Added a bash completion script for <cite>pygmentize</cite> to the external/
+directory (#466).</p></li>
+<li><p>Fixed a bug in <cite>do_insertions()</cite> used for multi-lexer languages.</p></li>
+<li><p>Fixed a Ruby regex highlighting bug (#476).</p></li>
+<li><p>Fixed regex highlighting bugs in Perl lexer (#258).</p></li>
+<li><p>Add small enhancements to the C lexer (#467) and Bash lexer (#469).</p></li>
+<li><p>Small fixes for the Tcl, Debian control file, Nginx config,
+Smalltalk, Objective-C, Clojure, Lua lexers.</p></li>
+<li><p>Gherkin lexer: Fixed single apostrophe bug and added new i18n keywords.</p></li>
+</ul>
+</div>
+<div class="section" id="version-1-2-2">
+<h2>Version 1.2.2<a class="headerlink" href="#version-1-2-2" title="Permalink to this headline">¶</a></h2>
+<p>(bugfix release, released Jan 02, 2010)</p>
+<ul class="simple">
+<li><p>Removed a backwards incompatibility in the LaTeX formatter that caused
+Sphinx to produce invalid commands when writing LaTeX output (#463).</p></li>
+<li><p>Fixed a forever-backtracking regex in the BashLexer (#462).</p></li>
+</ul>
+</div>
+<div class="section" id="version-1-2-1">
+<h2>Version 1.2.1<a class="headerlink" href="#version-1-2-1" title="Permalink to this headline">¶</a></h2>
+<p>(bugfix release, released Jan 02, 2010)</p>
+<ul class="simple">
+<li><p>Fixed mishandling of an ellipsis in place of the frames in a Python
+console traceback, resulting in clobbered output.</p></li>
+</ul>
+</div>
+<div class="section" id="version-1-2">
+<h2>Version 1.2<a class="headerlink" href="#version-1-2" title="Permalink to this headline">¶</a></h2>
+<p>(codename Neujahr, released Jan 01, 2010)</p>
+<ul class="simple">
+<li><p>Dropped Python 2.3 compatibility.</p></li>
+<li><p>Lexers added:</p>
+<ul>
+<li><p>Asymptote</p></li>
+<li><p>Go</p></li>
+<li><p>Gherkin (Cucumber)</p></li>
+<li><p>CMake</p></li>
+<li><p>Ooc</p></li>
+<li><p>Coldfusion</p></li>
+<li><p>Haxe</p></li>
+<li><p>R console</p></li>
+</ul>
+</li>
+<li><p>Added options for rendering LaTeX in source code comments in the
+LaTeX formatter (#461).</p></li>
+<li><p>Updated the Logtalk lexer.</p></li>
+<li><p>Added <cite>line_number_start</cite> option to image formatter (#456).</p></li>
+<li><p>Added <cite>hl_lines</cite> and <cite>hl_color</cite> options to image formatter (#457).</p></li>
+<li><p>Fixed the HtmlFormatter’s handling of noclasses=True to not output any
+classes (#427).</p></li>
+<li><p>Added the Monokai style (#453).</p></li>
+<li><p>Fixed LLVM lexer identifier syntax and added new keywords (#442).</p></li>
+<li><p>Fixed the PythonTracebackLexer to handle non-traceback data in header or
+trailer, and support more partial tracebacks that start on line 2 (#437).</p></li>
+<li><p>Fixed the CLexer to not highlight ternary statements as labels.</p></li>
+<li><p>Fixed lexing of some Ruby quoting peculiarities (#460).</p></li>
+<li><p>A few ASM lexer fixes (#450).</p></li>
+</ul>
+</div>
+<div class="section" id="version-1-1-1">
+<h2>Version 1.1.1<a class="headerlink" href="#version-1-1-1" title="Permalink to this headline">¶</a></h2>
+<p>(bugfix release, released Sep 15, 2009)</p>
+<ul class="simple">
+<li><p>Fixed the BBCode lexer (#435).</p></li>
+<li><p>Added support for new Jinja2 keywords.</p></li>
+<li><p>Fixed test suite failures.</p></li>
+<li><p>Added Gentoo-specific suffixes to Bash lexer.</p></li>
+</ul>
+</div>
+<div class="section" id="version-1-1">
+<h2>Version 1.1<a class="headerlink" href="#version-1-1" title="Permalink to this headline">¶</a></h2>
+<p>(codename Brillouin, released Sep 11, 2009)</p>
+<ul class="simple">
+<li><p>Ported Pygments to Python 3.  This needed a few changes in the way
+encodings are handled; they may affect corner cases when used with
+Python 2 as well.</p></li>
+<li><p>Lexers added:</p>
+<ul>
+<li><p>Antlr/Ragel, thanks to Ana Nelson</p></li>
+<li><p>(Ba)sh shell</p></li>
+<li><p>Erlang shell</p></li>
+<li><p>GLSL</p></li>
+<li><p>Prolog</p></li>
+<li><p>Evoque</p></li>
+<li><p>Modelica</p></li>
+<li><p>Rebol</p></li>
+<li><p>MXML</p></li>
+<li><p>Cython</p></li>
+<li><p>ABAP</p></li>
+<li><p>ASP.net (VB/C#)</p></li>
+<li><p>Vala</p></li>
+<li><p>Newspeak</p></li>
+</ul>
+</li>
+<li><p>Fixed the LaTeX formatter’s output so that output generated for one style
+can be used with the style definitions of another (#384).</p></li>
+<li><p>Added “anchorlinenos” and “noclobber_cssfile” (#396) options to HTML
+formatter.</p></li>
+<li><p>Support multiline strings in Lua lexer.</p></li>
+<li><p>Rewrite of the JavaScript lexer by Pumbaa80 to better support regular
+expression literals (#403).</p></li>
+<li><p>When pygmentize is asked to highlight a file for which multiple lexers
+match the filename, use the analyse_text guessing engine to determine the
+winner (#355).</p></li>
+<li><p>Fixed minor bugs in the JavaScript lexer (#383), the Matlab lexer (#378),
+the Scala lexer (#392), the INI lexer (#391), the Clojure lexer (#387)
+and the AS3 lexer (#389).</p></li>
+<li><p>Fixed three Perl heredoc lexing bugs (#379, #400, #422).</p></li>
+<li><p>Fixed a bug in the image formatter which misdetected lines (#380).</p></li>
+<li><p>Fixed bugs lexing extended Ruby strings and regexes.</p></li>
+<li><p>Fixed a bug when lexing git diffs.</p></li>
+<li><p>Fixed a bug lexing the empty commit in the PHP lexer (#405).</p></li>
+<li><p>Fixed a bug causing Python numbers to be mishighlighted as floats (#397).</p></li>
+<li><p>Fixed a bug when backslashes are used in odd locations in Python (#395).</p></li>
+<li><p>Fixed various bugs in Matlab and S-Plus lexers, thanks to Winston Chang (#410,
+#411, #413, #414) and fmarc (#419).</p></li>
+<li><p>Fixed a bug in Haskell single-line comment detection (#426).</p></li>
+<li><p>Added new-style reStructuredText directive for docutils 0.5+ (#428).</p></li>
+</ul>
+</div>
+<div class="section" id="version-1-0">
+<h2>Version 1.0<a class="headerlink" href="#version-1-0" title="Permalink to this headline">¶</a></h2>
+<p>(codename Dreiundzwanzig, released Nov 23, 2008)</p>
+<ul>
+<li><p>Don’t use join(splitlines()) when converting newlines to <code class="docutils literal notranslate"><span class="pre">\n</span></code>,
+because that doesn’t keep all newlines at the end when the
+<code class="docutils literal notranslate"><span class="pre">stripnl</span></code> lexer option is False.</p></li>
+<li><p>Added <code class="docutils literal notranslate"><span class="pre">-N</span></code> option to command-line interface to get a lexer name
+for a given filename.</p></li>
+<li><p>Added Tango style, written by Andre Roberge for the Crunchy project.</p></li>
+<li><p>Added Python3TracebackLexer and <code class="docutils literal notranslate"><span class="pre">python3</span></code> option to
+PythonConsoleLexer.</p></li>
+<li><p>Fixed a few bugs in the Haskell lexer.</p></li>
+<li><p>Fixed PythonTracebackLexer to be able to recognize SyntaxError and
+KeyboardInterrupt (#360).</p></li>
+<li><p>Provide one formatter class per image format, so that surprises like:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="n">pygmentize</span> <span class="o">-</span><span class="n">f</span> <span class="n">gif</span> <span class="o">-</span><span class="n">o</span> <span class="n">foo</span><span class="o">.</span><span class="n">gif</span> <span class="n">foo</span><span class="o">.</span><span class="n">py</span>
+</pre></div>
+</div>
+<p>creating a PNG file are avoided.</p>
+</li>
+<li><p>Actually use the <cite>font_size</cite> option of the image formatter.</p></li>
+<li><p>Fixed the numpy lexer so that it no longer registers for <cite>*.py</cite> files.</p></li>
+<li><p>Fixed HTML formatter so that text options can be Unicode
+strings (#371).</p></li>
+<li><p>Unified Diff lexer supports the “udiff” alias now.</p></li>
+<li><p>Fixed a few issues in Scala lexer (#367).</p></li>
+<li><p>RubyConsoleLexer now supports simple prompt mode (#363).</p></li>
+<li><p>JavascriptLexer is smarter about what constitutes a regex (#356).</p></li>
+<li><p>Add Applescript lexer, thanks to Andreas Amann (#330).</p></li>
+<li><p>Make the codetags more strict about matching words (#368).</p></li>
+<li><p>NginxConfLexer is a little more accurate on mimetypes and
+variables (#370).</p></li>
+</ul>
+</div>
+<div class="section" id="version-0-11-1">
+<h2>Version 0.11.1<a class="headerlink" href="#version-0-11-1" title="Permalink to this headline">¶</a></h2>
+<p>(released Aug 24, 2008)</p>
+<ul class="simple">
+<li><p>Fixed a Jython compatibility issue in pygments.unistring (#358).</p></li>
+</ul>
+</div>
+<div class="section" id="version-0-11">
+<h2>Version 0.11<a class="headerlink" href="#version-0-11" title="Permalink to this headline">¶</a></h2>
+<p>(codename Straußenei, released Aug 23, 2008)</p>
+<p>Many thanks go to Tim Hatch for writing or integrating most of the bug
+fixes and new features.</p>
+<ul class="simple">
+<li><p>Lexers added:</p>
+<ul>
+<li><p>Nasm-style assembly language, thanks to delroth</p></li>
+<li><p>YAML, thanks to Kirill Simonov</p></li>
+<li><p>ActionScript 3, thanks to Pierre Bourdon</p></li>
+<li><p>Cheetah/Spitfire templates, thanks to Matt Good</p></li>
+<li><p>Lighttpd config files</p></li>
+<li><p>Nginx config files</p></li>
+<li><p>Gnuplot plotting scripts</p></li>
+<li><p>Clojure</p></li>
+<li><p>POV-Ray scene files</p></li>
+<li><p>Sqlite3 interactive console sessions</p></li>
+<li><p>Scala source files, thanks to Krzysiek Goj</p></li>
+</ul>
+</li>
+<li><p>Lexers improved:</p>
+<ul>
+<li><p>C lexer highlights standard library functions now and supports C99
+types.</p></li>
+<li><p>Bash lexer now correctly highlights heredocs without preceding
+whitespace.</p></li>
+<li><p>Vim lexer now highlights hex colors properly and knows a couple
+more keywords.</p></li>
+<li><p>Irc logs lexer now handles xchat’s default time format (#340) and
+correctly highlights lines ending in <code class="docutils literal notranslate"><span class="pre">&gt;</span></code>.</p></li>
+<li><p>Support more delimiters for Perl regular expressions (#258).</p></li>
+<li><p>ObjectiveC lexer now supports 2.0 features.</p></li>
+</ul>
+</li>
+<li><p>Added “Visual Studio” style.</p></li>
+<li><p>Updated markdown processor to Markdown 1.7.</p></li>
+<li><p>Support roman/sans/mono style defs and use them in the LaTeX
+formatter.</p></li>
+<li><p>The RawTokenFormatter is no longer registered to <code class="docutils literal notranslate"><span class="pre">*.raw</span></code> and it’s
+documented that tokenization with this lexer may raise exceptions.</p></li>
+<li><p>New option <code class="docutils literal notranslate"><span class="pre">hl_lines</span></code> to HTML formatter, to highlight certain
+lines.</p></li>
+<li><p>New option <code class="docutils literal notranslate"><span class="pre">prestyles</span></code> to HTML formatter.</p></li>
+<li><p>New option <em>-g</em> to pygmentize, to allow lexer guessing based on
+filetext (can be slowish, so file extensions are still checked
+first).</p></li>
+<li><p><code class="docutils literal notranslate"><span class="pre">guess_lexer()</span></code> now makes its decision much faster due to a cache
+of whether data is xml-like (a check which is used in several
+versions of <code class="docutils literal notranslate"><span class="pre">analyse_text()</span></code>).  Several lexers also have more
+accurate <code class="docutils literal notranslate"><span class="pre">analyse_text()</span></code> now.</p></li>
+</ul>
+</div>
+<div class="section" id="version-0-10">
+<h2>Version 0.10<a class="headerlink" href="#version-0-10" title="Permalink to this headline">¶</a></h2>
+<p>(codename Malzeug, released May 06, 2008)</p>
+<ul class="simple">
+<li><p>Lexers added:</p>
+<ul>
+<li><p>Io</p></li>
+<li><p>Smalltalk</p></li>
+<li><p>Darcs patches</p></li>
+<li><p>Tcl</p></li>
+<li><p>Matlab</p></li>
+<li><p>Matlab sessions</p></li>
+<li><p>FORTRAN</p></li>
+<li><p>XSLT</p></li>
+<li><p>tcsh</p></li>
+<li><p>NumPy</p></li>
+<li><p>Python 3</p></li>
+<li><p>S, S-plus, R statistics languages</p></li>
+<li><p>Logtalk</p></li>
+</ul>
+</li>
+<li><p>In the LatexFormatter, the <em>commandprefix</em> option is now by default
+‘PY’ instead of ‘C’, since the latter resulted in several collisions
+with other packages.  Also, the special meaning of the <em>arg</em>
+argument to <code class="docutils literal notranslate"><span class="pre">get_style_defs()</span></code> was removed.</p></li>
+<li><p>Added ImageFormatter, to format code as PNG, JPG, GIF or BMP.
+(Needs the Python Imaging Library.)</p></li>
+<li><p>Support doc comments in the PHP lexer.</p></li>
+<li><p>Handle format specifications in the Perl lexer.</p></li>
+<li><p>Fix comment handling in the Batch lexer.</p></li>
+<li><p>Add more file name extensions for the C++, INI and XML lexers.</p></li>
+<li><p>Fixes in the IRC and MuPad lexers.</p></li>
+<li><p>Fix function and interface name highlighting in the Java lexer.</p></li>
+<li><p>Fix at-rule handling in the CSS lexer.</p></li>
+<li><p>Handle KeyboardInterrupts gracefully in pygmentize.</p></li>
+<li><p>Added BlackWhiteStyle.</p></li>
+<li><p>Bash lexer now correctly highlights math, does not require
+whitespace after semicolons, and correctly highlights boolean
+operators.</p></li>
+<li><p>Makefile lexer is now capable of handling BSD and GNU make syntax.</p></li>
+</ul>
+</div>
+<div class="section" id="version-0-9">
+<h2>Version 0.9<a class="headerlink" href="#version-0-9" title="Permalink to this headline">¶</a></h2>
+<p>(codename Herbstzeitlose, released Oct 14, 2007)</p>
+<ul class="simple">
+<li><p>Lexers added:</p>
+<ul>
+<li><p>Erlang</p></li>
+<li><p>ActionScript</p></li>
+<li><p>Literate Haskell</p></li>
+<li><p>Common Lisp</p></li>
+<li><p>Various assembly languages</p></li>
+<li><p>Gettext catalogs</p></li>
+<li><p>Squid configuration</p></li>
+<li><p>Debian control files</p></li>
+<li><p>MySQL-style SQL</p></li>
+<li><p>MOOCode</p></li>
+</ul>
+</li>
+<li><p>Lexers improved:</p>
+<ul>
+<li><p>Greatly improved the Haskell and OCaml lexers.</p></li>
+<li><p>Improved the Bash lexer’s handling of nested constructs.</p></li>
+<li><p>The C# and Java lexers exhibited abysmal performance with some
+input code; this should now be fixed.</p></li>
+<li><p>The IRC logs lexer is now able to colorize weechat logs too.</p></li>
+<li><p>The Lua lexer now recognizes multi-line comments.</p></li>
+<li><p>Fixed bugs in the D and MiniD lexer.</p></li>
+</ul>
+</li>
+<li><p>The encoding handling of the command line mode (pygmentize) was
+enhanced. You shouldn’t get UnicodeErrors from it anymore if you
+don’t give an encoding option.</p></li>
+<li><p>Added a <code class="docutils literal notranslate"><span class="pre">-P</span></code> option to the command line mode which can be used to
+give options whose values contain commas or equals signs.</p></li>
+<li><p>Added 256-color terminal formatter.</p></li>
+<li><p>Added an experimental SVG formatter.</p></li>
+<li><p>Added the <code class="docutils literal notranslate"><span class="pre">lineanchors</span></code> option to the HTML formatter, thanks to
+Ian Charnas for the idea.</p></li>
+<li><p>Gave the line numbers table a CSS class in the HTML formatter.</p></li>
+<li><p>Added a Vim 7-like style.</p></li>
+</ul>
+</div>
+<div class="section" id="version-0-8-1">
+<h2>Version 0.8.1<a class="headerlink" href="#version-0-8-1" title="Permalink to this headline">¶</a></h2>
+<p>(released Jun 27, 2007)</p>
+<ul class="simple">
+<li><p>Fixed POD highlighting in the Ruby lexer.</p></li>
+<li><p>Fixed Unicode class and namespace name highlighting in the C# lexer.</p></li>
+<li><p>Fixed Unicode string prefix highlighting in the Python lexer.</p></li>
+<li><p>Fixed a bug in the D and MiniD lexers.</p></li>
+<li><p>Fixed the included MoinMoin parser.</p></li>
+</ul>
+</div>
+<div class="section" id="version-0-8">
+<h2>Version 0.8<a class="headerlink" href="#version-0-8" title="Permalink to this headline">¶</a></h2>
+<p>(codename Maikäfer, released May 30, 2007)</p>
+<ul class="simple">
+<li><p>Lexers added:</p>
+<ul>
+<li><p>Haskell, thanks to Adam Blinkinsop</p></li>
+<li><p>Redcode, thanks to Adam Blinkinsop</p></li>
+<li><p>D, thanks to Kirk McDonald</p></li>
+<li><p>MuPad, thanks to Christopher Creutzig</p></li>
+<li><p>MiniD, thanks to Jarrett Billingsley</p></li>
+<li><p>Vim Script, by Tim Hatch</p></li>
+</ul>
+</li>
+<li><p>The HTML formatter now has a second line-numbers mode in which it
+will just integrate the numbers in the same <code class="docutils literal notranslate"><span class="pre">&lt;pre&gt;</span></code> tag as the
+code.</p></li>
+<li><p>The <cite>CSharpLexer</cite> now is Unicode-aware, which means that it has an
+option that can be set so that it correctly lexes Unicode
+identifiers allowed by the C# specs.</p></li>
+<li><p>Added a <cite>RaiseOnErrorTokenFilter</cite> that raises an exception when the
+lexer generates an error token, and a <cite>VisibleWhitespaceFilter</cite> that
+converts whitespace (spaces, tabs, newlines) into visible
+characters.</p></li>
+<li><p>Fixed the <cite>do_insertions()</cite> helper function to yield correct
+indices.</p></li>
+<li><p>The ReST lexer now automatically highlights source code blocks in
+“.. sourcecode:: language” and “.. code:: language” directive
+blocks.</p></li>
+<li><p>Improved the default style (thanks to Tiberius Teng). The old
+default is still available as the “emacs” style (which was an alias
+before).</p></li>
+<li><p>The <cite>get_style_defs</cite> method of HTML formatters now uses the
+<cite>cssclass</cite> option as the default selector if it was given.</p></li>
+<li><p>Improved the ReST and Bash lexers a bit.</p></li>
+<li><p>Fixed a few bugs in the Makefile and Bash lexers, thanks to Tim
+Hatch.</p></li>
+<li><p>Fixed a bug in the command line code that disallowed <code class="docutils literal notranslate"><span class="pre">-O</span></code> options
+when using the <code class="docutils literal notranslate"><span class="pre">-S</span></code> option.</p></li>
+<li><p>Fixed a bug in the <cite>RawTokenFormatter</cite>.</p></li>
+</ul>
+</div>
+<div class="section" id="version-0-7-1">
+<h2>Version 0.7.1<a class="headerlink" href="#version-0-7-1" title="Permalink to this headline">¶</a></h2>
+<p>(released Feb 15, 2007)</p>
+<ul class="simple">
+<li><p>Fixed little highlighting bugs in the Python, Java, Scheme and
+Apache Config lexers.</p></li>
+<li><p>Updated the included manpage.</p></li>
+<li><p>Included a built version of the documentation in the source tarball.</p></li>
+</ul>
+</div>
+<div class="section" id="version-0-7">
+<h2>Version 0.7<a class="headerlink" href="#version-0-7" title="Permalink to this headline">¶</a></h2>
+<p>(codename Faschingskrapfn, released Feb 14, 2007)</p>
+<ul class="simple">
+<li><p>Added a MoinMoin parser that uses Pygments. With it, you get
+Pygments highlighting in Moin Wiki pages.</p></li>
+<li><p>Changed the exception raised if no suitable lexer, formatter etc. is
+found in one of the <cite>get_*_by_*</cite> functions to a custom exception,
+<cite>pygments.util.ClassNotFound</cite>. It is, however, a subclass of
+<cite>ValueError</cite> in order to retain backwards compatibility.</p></li>
+<li><p>Added a <cite>-H</cite> command line option which can be used to get the
+docstring of a lexer, formatter or filter.</p></li>
+<li><p>Made the handling of lexers and formatters more consistent. The
+aliases and filename patterns of formatters are now attributes on
+them.</p></li>
+<li><p>Added an OCaml lexer, thanks to Adam Blinkinsop.</p></li>
+<li><p>Made the HTML formatter more flexible, and easily subclassable in
+order to make it easy to implement custom wrappers, e.g. alternate
+line number markup. See the documentation.</p></li>
+<li><p>Added an <cite>outencoding</cite> option to all formatters, making it possible
+to override the <cite>encoding</cite> (which is used by lexers and formatters)
+when using the command line interface. Also, if using the terminal
+formatter and the output file is a terminal and has an encoding
+attribute, use it if no encoding is given.</p></li>
+<li><p>Made it possible to just drop style modules into the <cite>styles</cite>
+subpackage of the Pygments installation.</p></li>
+<li><p>Added a “state” keyword argument to the <cite>using</cite> helper.</p></li>
+<li><p>Added a <cite>commandprefix</cite> option to the <cite>LatexFormatter</cite> which allows
+you to control how the command names are constructed.</p></li>
+<li><p>Added quite a few new lexers, thanks to Tim Hatch:</p>
+<ul>
+<li><p>Java Server Pages</p></li>
+<li><p>Windows batch files</p></li>
+<li><p>Trac Wiki markup</p></li>
+<li><p>Python tracebacks</p></li>
+<li><p>ReStructuredText</p></li>
+<li><p>Dylan</p></li>
+<li><p>and the Befunge esoteric programming language (yay!)</p></li>
+</ul>
+</li>
+<li><p>Added Mako lexers by Ben Bangert.</p></li>
+<li><p>Added “fruity” style, another dark background originally vim-based
+theme.</p></li>
+<li><p>Added sources.list lexer by Dennis Kaarsemaker.</p></li>
+<li><p>Added token stream filters, and a pygmentize option to use them.</p></li>
+<li><p>Changed behavior of the <cite>in</cite> operator for tokens.</p></li>
+<li><p>Added mimetypes for all lexers.</p></li>
+<li><p>Fixed some problems lexing Python strings.</p></li>
+<li><p>Fixed tickets: #167, #178, #179, #180, #185, #201.</p></li>
+</ul>
+</div>
+<div class="section" id="version-0-6">
+<h2>Version 0.6<a class="headerlink" href="#version-0-6" title="Permalink to this headline">¶</a></h2>
+<p>(codename Zimtstern, released Dec 20, 2006)</p>
+<ul class="simple">
+<li><p>Added option for the HTML formatter to write the CSS to an external
+file in “full document” mode.</p></li>
+<li><p>Added RTF formatter.</p></li>
+<li><p>Added Bash and Apache configuration lexers (thanks to Tim Hatch).</p></li>
+<li><p>Improved guessing methods for various lexers.</p></li>
+<li><p>Added <cite>&#64;media</cite> support to CSS lexer (thanks to Tim Hatch).</p></li>
+<li><p>Added a Groff lexer (thanks to Tim Hatch).</p></li>
+<li><p>License change to BSD.</p></li>
+<li><p>Added lexers for the Myghty template language.</p></li>
+<li><p>Added a Scheme lexer (thanks to Marek Kubica).</p></li>
+<li><p>Added some functions to iterate over existing lexers, formatters and
+filters.</p></li>
+<li><p>The HtmlFormatter’s <cite>get_style_defs()</cite> can now take a list as an
+argument to generate CSS with multiple prefixes.</p></li>
+<li><p>Support for guessing input encoding added.</p></li>
+<li><p>Encoding support added: all processing is now done with Unicode
+strings, input and output are converted from and optionally to byte
+strings (see the <code class="docutils literal notranslate"><span class="pre">encoding</span></code> option of lexers and formatters).</p></li>
+<li><p>Some improvements in the C(++) lexers’ handling of comments and line
+continuations.</p></li>
+</ul>
+</div>
+<div class="section" id="version-0-5-1">
+<h2>Version 0.5.1<a class="headerlink" href="#version-0-5-1" title="Permalink to this headline">¶</a></h2>
+<p>(released Oct 30, 2006)</p>
+<ul class="simple">
+<li><p>Fixed traceback in <code class="docutils literal notranslate"><span class="pre">pygmentize</span> <span class="pre">-L</span></code> (thanks to Piotr Ozarowski).</p></li>
+</ul>
+</div>
+<div class="section" id="version-0-5">
+<h2>Version 0.5<a class="headerlink" href="#version-0-5" title="Permalink to this headline">¶</a></h2>
+<p>(codename PyKleur, released Oct 30, 2006)</p>
+<ul class="simple">
+<li><p>Initial public release.</p></li>
+</ul>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/cmdline.html b/doc/_build/html/docs/cmdline.html
new file mode 100644 (file)
index 0000000..5f08230
--- /dev/null
@@ -0,0 +1,282 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Command Line Interface &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Available lexers" href="lexers.html" />
+    <link rel="prev" title="Introduction and Quickstart" href="quickstart.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="../index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">Command Line Interface</a><ul>
+<li><a class="reference internal" href="#options-and-filters">Options and filters</a></li>
+<li><a class="reference internal" href="#generating-styles">Generating styles</a></li>
+<li><a class="reference internal" href="#getting-lexer-names">Getting lexer names</a></li>
+<li><a class="reference internal" href="#custom-lexers-and-formatters">Custom Lexers and Formatters</a></li>
+<li><a class="reference internal" href="#getting-help">Getting help</a></li>
+<li><a class="reference internal" href="#a-note-on-encodings">A note on encodings</a></li>
+</ul>
+</li>
+</ul>
+
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="quickstart.html"
+                        title="previous chapter">Introduction and Quickstart</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="lexers.html"
+                        title="next chapter">Available lexers</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/cmdline.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="command-line-interface">
+<h1>Command Line Interface<a class="headerlink" href="#command-line-interface" title="Permalink to this headline">¶</a></h1>
+<p>You can use Pygments from the shell, provided you installed the
+<strong class="program">pygmentize</strong> script:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize test.py
+print &quot;Hello World&quot;
+</pre></div>
+</div>
+<p>will print the file test.py to standard output, using the Python lexer
+(inferred from the file name extension) and the terminal formatter (because
+you didn’t give an explicit formatter name).</p>
+<p>If you want HTML output:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -f html -l python -o test.html test.py
+</pre></div>
+</div>
+<p>As you can see, the <code class="docutils literal notranslate"><span class="pre">-l</span></code> option explicitly selects a lexer. As seen above,
+if you give an input file name with an extension that Pygments recognizes, you can
+omit this option.</p>
+<p>The <code class="docutils literal notranslate"><span class="pre">-o</span></code> option gives an output file name. If it is not given, output is
+written to stdout.</p>
+<p>The <code class="docutils literal notranslate"><span class="pre">-f</span></code> option selects a formatter (as with <code class="docutils literal notranslate"><span class="pre">-l</span></code>, it can also be omitted
+if an output file name is given and has a supported extension).
+If no output file name is given and <code class="docutils literal notranslate"><span class="pre">-f</span></code> is omitted, the
+<a class="reference internal" href="formatters.html#TerminalFormatter" title="TerminalFormatter"><code class="xref py py-class docutils literal notranslate"><span class="pre">TerminalFormatter</span></code></a> is used.</p>
+<p>The above command could therefore also be given as:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -o test.html test.py
+</pre></div>
+</div>
+<p>To create a full HTML document, including line numbers and stylesheet (using the
+“emacs” style), highlighting the Python file <code class="docutils literal notranslate"><span class="pre">test.py</span></code> to <code class="docutils literal notranslate"><span class="pre">test.html</span></code>:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -O full,style=emacs -o test.html test.py
+</pre></div>
+</div>
+<div class="section" id="options-and-filters">
+<h2>Options and filters<a class="headerlink" href="#options-and-filters" title="Permalink to this headline">¶</a></h2>
+<p>Lexer and formatter options can be given using the <code class="docutils literal notranslate"><span class="pre">-O</span></code> option:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -f html -O style=colorful,linenos=1 -l python test.py
+</pre></div>
+</div>
+<p>Be sure to enclose the option string in quotes if it contains any special shell
+characters, such as spaces or expansion wildcards like <code class="docutils literal notranslate"><span class="pre">*</span></code>. If an option
+expects a list value, separate the list entries with spaces (you’ll have to
+quote the option value in this case too, so that the shell doesn’t split it).</p>
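+<p>For instance, the HTML formatter’s <code class="docutils literal notranslate"><span class="pre">hl_lines</span></code> option takes a list of
+line numbers; a quoted option string such as the following (the line numbers are
+purely illustrative) keeps the shell from splitting the value:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -f html -O &quot;full,hl_lines=3 5&quot; -l python -o test.html test.py
+</pre></div>
+</div>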
+<p>Since the <code class="docutils literal notranslate"><span class="pre">-O</span></code> option argument is split at commas and expects the split values
+to be of the form <code class="docutils literal notranslate"><span class="pre">name=value</span></code>, you can’t give an option value that contains
+commas or equals signs.  Therefore, an option <code class="docutils literal notranslate"><span class="pre">-P</span></code> is provided (as of Pygments
+0.9) that works like <code class="docutils literal notranslate"><span class="pre">-O</span></code> but can only pass one option per <code class="docutils literal notranslate"><span class="pre">-P</span></code>. Its value
+can then contain all characters:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -P &quot;heading=Pygments, the Python highlighter&quot; ...
+</pre></div>
+</div>
+<p>Filters are added to the token stream using the <code class="docutils literal notranslate"><span class="pre">-F</span></code> option:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -f html -l pascal -F keywordcase:case=upper main.pas
+</pre></div>
+</div>
+<p>As you see, options for the filter are given after a colon. As for <code class="docutils literal notranslate"><span class="pre">-O</span></code>, the
+filter name and options must be one shell word, so there may not be any spaces
+around the colon.</p>
+</div>
+<div class="section" id="generating-styles">
+<h2>Generating styles<a class="headerlink" href="#generating-styles" title="Permalink to this headline">¶</a></h2>
+<p>Formatters normally don’t output full style information.  For example, the HTML
+formatter by default only outputs <code class="docutils literal notranslate"><span class="pre">&lt;span&gt;</span></code> tags with <code class="docutils literal notranslate"><span class="pre">class</span></code> attributes.
+Therefore, there’s a special <code class="docutils literal notranslate"><span class="pre">-S</span></code> option for generating style definitions.
+Usage is as follows:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -f html -S colorful -a .syntax
+</pre></div>
+</div>
+<p>generates a CSS style sheet (because you selected the HTML formatter) for
+the “colorful” style, prepending a “.syntax” selector to all style rules.</p>
+<p>For an explanation of what <code class="docutils literal notranslate"><span class="pre">-a</span></code> means for <a class="reference internal" href="formatters.html"><span class="doc">a particular formatter</span></a>, look for the <cite>arg</cite> argument of the formatter’s
+<a class="reference internal" href="api.html#pygments.formatter.Formatter.get_style_defs" title="pygments.formatter.Formatter.get_style_defs"><code class="xref py py-meth docutils literal notranslate"><span class="pre">get_style_defs()</span></code></a> method.</p>
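+<p>The generated style definitions are written to standard output, so you would
+typically redirect them into a stylesheet (the file name below is just an example):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -f html -S colorful -a .syntax &gt; syntax.css
+</pre></div>
+</div>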
+</div>
+<div class="section" id="getting-lexer-names">
+<h2>Getting lexer names<a class="headerlink" href="#getting-lexer-names" title="Permalink to this headline">¶</a></h2>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.0.</span></p>
+</div>
+<p>The <code class="docutils literal notranslate"><span class="pre">-N</span></code> option guesses a lexer name for a given filename, so that</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -N setup.py
+</pre></div>
+</div>
+<p>will print out <code class="docutils literal notranslate"><span class="pre">python</span></code>.  It won’t highlight anything yet.  If no specific
+lexer is known for that filename, <code class="docutils literal notranslate"><span class="pre">text</span></code> is printed.</p>
+</div>
+<div class="section" id="custom-lexers-and-formatters">
+<h2>Custom Lexers and Formatters<a class="headerlink" href="#custom-lexers-and-formatters" title="Permalink to this headline">¶</a></h2>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+<p>The <code class="docutils literal notranslate"><span class="pre">-x</span></code> flag enables custom lexers and formatters to be loaded
+from files relative to the current directory. Create a file with a class named
+CustomLexer or CustomFormatter, then specify it on the command line:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -l your_lexer.py -f your_formatter.py -x
+</pre></div>
+</div>
+<p>You can also specify the name of your class with a colon:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -l your_lexer.py:SomeLexer -x
+</pre></div>
+</div>
+<p>For more information, see <a class="reference internal" href="lexerdevelopment.html"><span class="doc">the Pygments documentation on Lexer development</span></a>.</p>
+</div>
+<div class="section" id="getting-help">
+<h2>Getting help<a class="headerlink" href="#getting-help" title="Permalink to this headline">¶</a></h2>
+<p>The <code class="docutils literal notranslate"><span class="pre">-L</span></code> option lists lexers, formatters, styles and filters, along with
+their short names and supported file name extensions. If you want to see
+only one category, give it as an argument:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -L filters
+</pre></div>
+</div>
+<p>will list only the installed filters.</p>
+<p>The <code class="docutils literal notranslate"><span class="pre">-H</span></code> option will give you detailed information (the same that can be found
+in this documentation) about a lexer, formatter or filter. Usage is as follows:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -H formatter html
+</pre></div>
+</div>
+<p>will print the help for the HTML formatter, while</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -H lexer python
+</pre></div>
+</div>
+<p>will print the help for the Python lexer, etc.</p>
+</div>
+<div class="section" id="a-note-on-encodings">
+<h2>A note on encodings<a class="headerlink" href="#a-note-on-encodings" title="Permalink to this headline">¶</a></h2>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.9.</span></p>
+</div>
+<p>Pygments tries to be smart regarding encodings in the formatting process:</p>
+<ul class="simple">
+<li><p>If you give an <code class="docutils literal notranslate"><span class="pre">encoding</span></code> option, it will be used as the input and
+output encoding.</p></li>
+<li><p>If you give an <code class="docutils literal notranslate"><span class="pre">outencoding</span></code> option, it will override <code class="docutils literal notranslate"><span class="pre">encoding</span></code>
+as the output encoding.</p></li>
+<li><p>If you give an <code class="docutils literal notranslate"><span class="pre">inencoding</span></code> option, it will override <code class="docutils literal notranslate"><span class="pre">encoding</span></code>
+as the input encoding.</p></li>
+<li><p>If you don’t give an encoding and have given an output file, the default
+encoding for lexer and formatter is the terminal encoding or the default
+locale encoding of the system.  As a last resort, <code class="docutils literal notranslate"><span class="pre">latin1</span></code> is used (which
+will pass through all non-ASCII characters).</p></li>
+<li><p>If you don’t give an encoding and haven’t given an output file (that means
+output is written to the console), the default encoding for lexer and
+formatter is the terminal encoding (<code class="docutils literal notranslate"><span class="pre">sys.stdout.encoding</span></code>).</p></li>
+</ul>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/filterdevelopment.html b/doc/_build/html/docs/filterdevelopment.html
new file mode 100644 (file)
index 0000000..29e9c97
--- /dev/null
@@ -0,0 +1,194 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Write your own filter &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Register Plugins" href="plugins.html" />
+    <link rel="prev" title="Write your own formatter" href="formatterdevelopment.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="../index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">Write your own filter</a><ul>
+<li><a class="reference internal" href="#subclassing-filters">Subclassing Filters</a></li>
+<li><a class="reference internal" href="#using-a-decorator">Using a decorator</a></li>
+</ul>
+</li>
+</ul>
+
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="formatterdevelopment.html"
+                        title="previous chapter">Write your own formatter</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="plugins.html"
+                        title="next chapter">Register Plugins</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/filterdevelopment.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="write-your-own-filter">
+<h1>Write your own filter<a class="headerlink" href="#write-your-own-filter" title="Permalink to this headline">¶</a></h1>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+<p>Writing your own filters is very easy. All you have to do is subclass
+the <cite>Filter</cite> class and override its <cite>filter</cite> method. Additionally, a
+filter is instantiated with some keyword arguments that you can use to
+adjust its behavior.</p>
+<div class="section" id="subclassing-filters">
+<h2>Subclassing Filters<a class="headerlink" href="#subclassing-filters" title="Permalink to this headline">¶</a></h2>
+<p>As an example, we write a filter that converts all <cite>Name.Function</cite> tokens
+to normal <cite>Name</cite> tokens to make the output less colorful.</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.util</span> <span class="kn">import</span> <span class="n">get_bool_opt</span>
+<span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="n">Name</span>
+<span class="kn">from</span> <span class="nn">pygments.filter</span> <span class="kn">import</span> <span class="n">Filter</span>
+
+<span class="k">class</span> <span class="nc">UncolorFilter</span><span class="p">(</span><span class="n">Filter</span><span class="p">):</span>
+
+    <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">**</span><span class="n">options</span><span class="p">):</span>
+        <span class="n">Filter</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">**</span><span class="n">options</span><span class="p">)</span>
+        <span class="bp">self</span><span class="o">.</span><span class="n">class_too</span> <span class="o">=</span> <span class="n">get_bool_opt</span><span class="p">(</span><span class="n">options</span><span class="p">,</span> <span class="s1">&#39;classtoo&#39;</span><span class="p">)</span>
+
+    <span class="k">def</span> <span class="nf">filter</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">lexer</span><span class="p">,</span> <span class="n">stream</span><span class="p">):</span>
+        <span class="k">for</span> <span class="n">ttype</span><span class="p">,</span> <span class="n">value</span> <span class="ow">in</span> <span class="n">stream</span><span class="p">:</span>
+            <span class="k">if</span> <span class="n">ttype</span> <span class="ow">is</span> <span class="n">Name</span><span class="o">.</span><span class="n">Function</span> <span class="ow">or</span> <span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">class_too</span> <span class="ow">and</span>
+                                          <span class="n">ttype</span> <span class="ow">is</span> <span class="n">Name</span><span class="o">.</span><span class="n">Class</span><span class="p">):</span>
+                <span class="n">ttype</span> <span class="o">=</span> <span class="n">Name</span>
+            <span class="k">yield</span> <span class="n">ttype</span><span class="p">,</span> <span class="n">value</span>
+</pre></div>
+</div>
+<p>A note on the <cite>lexer</cite> argument: it can be confusing because it does not
+have to be a lexer instance. If a filter was added with the <cite>add_filter()</cite>
+method of a lexer, that lexer is registered for the filter, and <cite>lexer</cite>
+will refer to it. It <em>can</em> be used to access options passed to that lexer.
+However, because it may also be <cite>None</cite>, you always have to check for that
+case before accessing it.</p>
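+<p>A minimal sketch of such a check (the filter and the <cite>tabwidth</cite> option it
+reads are made up for illustration):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.filter import Filter
+
+class TabWidthFilter(Filter):
+    # hypothetical filter that expands tabs using an option of the
+    # registering lexer, falling back to a default when lexer is None
+    def filter(self, lexer, stream):
+        width = lexer.options.get('tabwidth', 8) if lexer is not None else 8
+        for ttype, value in stream:
+            yield ttype, value.replace('\t', ' ' * width)
+</pre></div>
+</div>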
+</div>
+<div class="section" id="using-a-decorator">
+<h2>Using a decorator<a class="headerlink" href="#using-a-decorator" title="Permalink to this headline">¶</a></h2>
+<p>You can also use the <cite>simplefilter</cite> decorator from the <cite>pygments.filter</cite> module:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.util</span> <span class="kn">import</span> <span class="n">get_bool_opt</span>
+<span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="n">Name</span>
+<span class="kn">from</span> <span class="nn">pygments.filter</span> <span class="kn">import</span> <span class="n">simplefilter</span>
+
+
+<span class="nd">@simplefilter</span>
+<span class="k">def</span> <span class="nf">uncolor</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">lexer</span><span class="p">,</span> <span class="n">stream</span><span class="p">,</span> <span class="n">options</span><span class="p">):</span>
+    <span class="n">class_too</span> <span class="o">=</span> <span class="n">get_bool_opt</span><span class="p">(</span><span class="n">options</span><span class="p">,</span> <span class="s1">&#39;classtoo&#39;</span><span class="p">)</span>
+    <span class="k">for</span> <span class="n">ttype</span><span class="p">,</span> <span class="n">value</span> <span class="ow">in</span> <span class="n">stream</span><span class="p">:</span>
+        <span class="k">if</span> <span class="n">ttype</span> <span class="ow">is</span> <span class="n">Name</span><span class="o">.</span><span class="n">Function</span> <span class="ow">or</span> <span class="p">(</span><span class="n">class_too</span> <span class="ow">and</span>
+                                      <span class="n">ttype</span> <span class="ow">is</span> <span class="n">Name</span><span class="o">.</span><span class="n">Class</span><span class="p">):</span>
+            <span class="n">ttype</span> <span class="o">=</span> <span class="n">Name</span>
+        <span class="k">yield</span> <span class="n">ttype</span><span class="p">,</span> <span class="n">value</span>
+</pre></div>
+</div>
+<p>The decorator automatically subclasses an internal filter class and uses the
+decorated function as a method for filtering.  (That’s why there is a <cite>self</cite>
+argument that you probably won’t end up using in the method.)</p>
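+<p>The decorated function then acts as a factory for filter instances, so a
+usage sketch (with the <cite>uncolor</cite> filter from above) could look like this:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.lexers import PythonLexer
+
+lexer = PythonLexer()
+# calling the decorated function builds a filter with the given options
+lexer.add_filter(uncolor(classtoo=True))
+</pre></div>
+</div>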
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/filters.html b/doc/_build/html/docs/filters.html
new file mode 100644 (file)
index 0000000..fd2b579
--- /dev/null
@@ -0,0 +1,324 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Filters &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Available formatters" href="formatters.html" />
+    <link rel="prev" title="Available lexers" href="lexers.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="../index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">Filters</a><ul>
+<li><a class="reference internal" href="#builtin-filters">Builtin Filters</a></li>
+</ul>
+</li>
+</ul>
+
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="lexers.html"
+                        title="previous chapter">Available lexers</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="formatters.html"
+                        title="next chapter">Available formatters</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/filters.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="filters">
+<h1>Filters<a class="headerlink" href="#filters" title="Permalink to this headline">¶</a></h1>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+<p>You can filter token streams coming from lexers to improve or annotate the
+output. For example, you can highlight special words in comments, convert
+keywords to upper- or lowercase to enforce a style guide, and so on.</p>
+<p>To apply a filter, you can use the <cite>add_filter()</cite> method of a lexer:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.lexers</span> <span class="kn">import</span> <span class="n">PythonLexer</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">l</span> <span class="o">=</span> <span class="n">PythonLexer</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="c1"># add a filter given by a string and options</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">l</span><span class="o">.</span><span class="n">add_filter</span><span class="p">(</span><span class="s1">&#39;codetagify&#39;</span><span class="p">,</span> <span class="n">case</span><span class="o">=</span><span class="s1">&#39;lower&#39;</span><span class="p">)</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">l</span><span class="o">.</span><span class="n">filters</span>
+<span class="go">[&lt;pygments.filters.CodeTagFilter object at 0xb785decc&gt;]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.filters</span> <span class="kn">import</span> <span class="n">KeywordCaseFilter</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="c1"># or give an instance</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">l</span><span class="o">.</span><span class="n">add_filter</span><span class="p">(</span><span class="n">KeywordCaseFilter</span><span class="p">(</span><span class="n">case</span><span class="o">=</span><span class="s1">&#39;lower&#39;</span><span class="p">))</span>
+</pre></div>
+</div>
+<p>The <cite>add_filter()</cite> method takes keyword arguments which are forwarded to
+the constructor of the filter.</p>
+<p>To get a list of all registered filters by name, you can use the
+<cite>get_all_filters()</cite> function from the <cite>pygments.filters</cite> module, which returns
+an iterable over the names of all known filters.</p>
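+<p>For example (the exact set of names depends on the Pygments version and on
+installed plugins):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.filters import get_all_filters
+
+# prints names such as 'codetagify', 'keywordcase', 'whitespace', ...
+print(sorted(get_all_filters()))
+</pre></div>
+</div>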
+<p>If you want to write your own filter, have a look at <a class="reference internal" href="filterdevelopment.html"><span class="doc">Write your own filter</span></a>.</p>
+<div class="section" id="builtin-filters">
+<h2>Builtin Filters<a class="headerlink" href="#builtin-filters" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="CodeTagFilter">
+<em class="property">class </em><code class="sig-name descname">CodeTagFilter</code><a class="headerlink" href="#CodeTagFilter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Name</dt>
+<dd class="field-odd"><p>codetagify</p>
+</dd>
+</dl>
+<p>Highlight special code tags in comments and docstrings.</p>
+<p>Options accepted:</p>
+<dl class="simple">
+<dt><cite>codetags</cite><span class="classifier">list of strings</span></dt><dd><p>A list of strings that are flagged as code tags.  The default is to
+highlight <code class="docutils literal notranslate"><span class="pre">XXX</span></code>, <code class="docutils literal notranslate"><span class="pre">TODO</span></code>, <code class="docutils literal notranslate"><span class="pre">BUG</span></code> and <code class="docutils literal notranslate"><span class="pre">NOTE</span></code>.</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="class">
+<dt id="KeywordCaseFilter">
+<em class="property">class </em><code class="sig-name descname">KeywordCaseFilter</code><a class="headerlink" href="#KeywordCaseFilter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Name</dt>
+<dd class="field-odd"><p>keywordcase</p>
+</dd>
+</dl>
+<p>Convert keywords to lowercase, uppercase, or capitalized form (first
+letter uppercase, the rest lowercase).</p>
+<p>This can be useful, e.g., if you highlight Pascal code and want to adapt the
+code to your style guide.</p>
+<p>Options accepted:</p>
+<dl class="simple">
+<dt><cite>case</cite><span class="classifier">string</span></dt><dd><p>The casing to convert keywords to. Must be one of <code class="docutils literal notranslate"><span class="pre">'lower'</span></code>,
+<code class="docutils literal notranslate"><span class="pre">'upper'</span></code> or <code class="docutils literal notranslate"><span class="pre">'capitalize'</span></code>.  The default is <code class="docutils literal notranslate"><span class="pre">'lower'</span></code>.</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="class">
+<dt id="NameHighlightFilter">
+<em class="property">class </em><code class="sig-name descname">NameHighlightFilter</code><a class="headerlink" href="#NameHighlightFilter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Name</dt>
+<dd class="field-odd"><p>highlight</p>
+</dd>
+</dl>
+<p>Highlight a normal Name (and Name.*) token with a different token type.</p>
+<p>Example:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="nb">filter</span> <span class="o">=</span> <span class="n">NameHighlightFilter</span><span class="p">(</span>
+    <span class="n">names</span><span class="o">=</span><span class="p">[</span><span class="s1">&#39;foo&#39;</span><span class="p">,</span> <span class="s1">&#39;bar&#39;</span><span class="p">,</span> <span class="s1">&#39;baz&#39;</span><span class="p">],</span>
+    <span class="n">tokentype</span><span class="o">=</span><span class="n">Name</span><span class="o">.</span><span class="n">Function</span><span class="p">,</span>
+<span class="p">)</span>
+</pre></div>
+</div>
+<p>This would highlight the names “foo”, “bar” and “baz”
+as functions. <cite>Name.Function</cite> is the default token type.</p>
+<p>Options accepted:</p>
+<dl class="simple">
+<dt><cite>names</cite><span class="classifier">list of strings</span></dt><dd><p>A list of names that should be given the different token type.
+There is no default.</p>
+</dd>
+<dt><cite>tokentype</cite><span class="classifier">TokenType or string</span></dt><dd><p>A token type or a string containing a token type name that is
+used for highlighting the strings in <cite>names</cite>.  The default is
+<cite>Name.Function</cite>.</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="class">
+<dt id="RaiseOnErrorTokenFilter">
+<em class="property">class </em><code class="sig-name descname">RaiseOnErrorTokenFilter</code><a class="headerlink" href="#RaiseOnErrorTokenFilter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Name</dt>
+<dd class="field-odd"><p>raiseonerror</p>
+</dd>
+</dl>
+<p>Raise an exception when the lexer generates an error token.</p>
+<p>Options accepted:</p>
+<dl class="simple">
+<dt><cite>excclass</cite><span class="classifier">Exception class</span></dt><dd><p>The exception class to raise.
+The default is <cite>pygments.filters.ErrorToken</cite>.</p>
+</dd>
+</dl>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.8.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="VisibleWhitespaceFilter">
+<em class="property">class </em><code class="sig-name descname">VisibleWhitespaceFilter</code><a class="headerlink" href="#VisibleWhitespaceFilter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Name</dt>
+<dd class="field-odd"><p>whitespace</p>
+</dd>
+</dl>
+<p>Convert tabs, newlines and/or spaces to visible characters.</p>
+<p>Options accepted:</p>
+<dl class="simple">
+<dt><cite>spaces</cite><span class="classifier">string or bool</span></dt><dd><p>If this is a one-character string, spaces will be replaced by this string.
+If it is another true value, spaces will be replaced by <code class="docutils literal notranslate"><span class="pre">·</span></code> (unicode
+MIDDLE DOT).  If it is a false value, spaces will not be replaced.  The
+default is <code class="docutils literal notranslate"><span class="pre">False</span></code>.</p>
+</dd>
+<dt><cite>tabs</cite><span class="classifier">string or bool</span></dt><dd><p>The same as for <cite>spaces</cite>, but the default replacement character is <code class="docutils literal notranslate"><span class="pre">»</span></code>
+(unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK).  The default value
+is <code class="docutils literal notranslate"><span class="pre">False</span></code>.  Note: this will not work if the <cite>tabsize</cite> option for the
+lexer is nonzero, as tabs will already have been expanded then.</p>
+</dd>
+<dt><cite>tabsize</cite><span class="classifier">int</span></dt><dd><p>If tabs are to be replaced by this filter (see the <cite>tabs</cite> option), this
+is the total number of characters that a tab should be expanded to.
+The default is <code class="docutils literal notranslate"><span class="pre">8</span></code>.</p>
+</dd>
+<dt><cite>newlines</cite><span class="classifier">string or bool</span></dt><dd><p>The same as for <cite>spaces</cite>, but the default replacement character is <code class="docutils literal notranslate"><span class="pre">¶</span></code>
+(unicode PILCROW SIGN).  The default value is <code class="docutils literal notranslate"><span class="pre">False</span></code>.</p>
+</dd>
+<dt><cite>wstokentype</cite><span class="classifier">bool</span></dt><dd><p>If true, give whitespace the special <cite>Whitespace</cite> token type.  This allows
+styling the visible whitespace differently (e.g. greyed out), but it can
+disrupt background colors.  The default is <code class="docutils literal notranslate"><span class="pre">True</span></code>.</p>
+</dd>
+</dl>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.8.</span></p>
+</div>
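+<p>A small usage sketch combining some of these options (the values are chosen
+only for illustration):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.lexers import PythonLexer
+
+lexer = PythonLexer()
+# make spaces and tabs visible, using the default replacement characters
+lexer.add_filter('whitespace', spaces=True, tabs=True)
+</pre></div>
+</div>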
+</dd></dl>
+
+<dl class="class">
+<dt id="GobbleFilter">
+<em class="property">class </em><code class="sig-name descname">GobbleFilter</code><a class="headerlink" href="#GobbleFilter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Name</dt>
+<dd class="field-odd"><p>gobble</p>
+</dd>
+</dl>
+<p>Gobbles source code lines (eats initial characters).</p>
+<p>This filter drops the first <code class="docutils literal notranslate"><span class="pre">n</span></code> characters off every line of code.  This
+may be useful when the source code fed to the lexer is indented by a fixed
+amount of space that isn’t desired in the output.</p>
+<p>Options accepted:</p>
+<dl class="simple">
+<dt><cite>n</cite><span class="classifier">int</span></dt><dd><p>The number of characters to gobble.</p>
+</dd>
+</dl>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="TokenMergeFilter">
+<em class="property">class </em><code class="sig-name descname">TokenMergeFilter</code><a class="headerlink" href="#TokenMergeFilter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Name</dt>
+<dd class="field-odd"><p>tokenmerge</p>
+</dd>
+</dl>
+<p>Merges consecutive tokens with the same token type in the output
+stream of a lexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.2.</span></p>
+</div>
+</dd></dl>
+
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/formatterdevelopment.html b/doc/_build/html/docs/formatterdevelopment.html
new file mode 100644 (file)
index 0000000..43bbd36
--- /dev/null
@@ -0,0 +1,281 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Write your own formatter &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Write your own filter" href="filterdevelopment.html" />
+    <link rel="prev" title="Write your own lexer" href="lexerdevelopment.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="../index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">Write your own formatter</a><ul>
+<li><a class="reference internal" href="#quickstart">Quickstart</a></li>
+<li><a class="reference internal" href="#styles">Styles</a></li>
+<li><a class="reference internal" href="#html-3-2-formatter">HTML 3.2 Formatter</a></li>
+<li><a class="reference internal" href="#generating-style-definitions">Generating Style Definitions</a></li>
+</ul>
+</li>
+</ul>
+
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="lexerdevelopment.html"
+                        title="previous chapter">Write your own lexer</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="filterdevelopment.html"
+                        title="next chapter">Write your own filter</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/formatterdevelopment.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="write-your-own-formatter">
+<h1>Write your own formatter<a class="headerlink" href="#write-your-own-formatter" title="Permalink to this headline">¶</a></h1>
+<p>As well as creating <a class="reference internal" href="lexerdevelopment.html"><span class="doc">your own lexer</span></a>, writing a new
+formatter for Pygments is easy and straightforward.</p>
+<p>A formatter is a class that is initialized with some keyword arguments (the
+formatter options) and that must provide a <cite>format()</cite> method.
+Additionally, a formatter should provide a <cite>get_style_defs()</cite> method that
+returns the style definitions from the style in a form usable for the
+formatter’s output format.</p>
+<div class="section" id="quickstart">
+<h2>Quickstart<a class="headerlink" href="#quickstart" title="Permalink to this headline">¶</a></h2>
+<p>The most basic formatter shipped with Pygments is the <cite>NullFormatter</cite>. It just
+sends the value of a token to the output stream:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.formatter</span> <span class="kn">import</span> <span class="n">Formatter</span>
+
+<span class="k">class</span> <span class="nc">NullFormatter</span><span class="p">(</span><span class="n">Formatter</span><span class="p">):</span>
+    <span class="k">def</span> <span class="nf">format</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">tokensource</span><span class="p">,</span> <span class="n">outfile</span><span class="p">):</span>
+        <span class="k">for</span> <span class="n">ttype</span><span class="p">,</span> <span class="n">value</span> <span class="ow">in</span> <span class="n">tokensource</span><span class="p">:</span>
+            <span class="n">outfile</span><span class="o">.</span><span class="n">write</span><span class="p">(</span><span class="n">value</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>As you can see, the <cite>format()</cite> method is passed two parameters: <cite>tokensource</cite>
+and <cite>outfile</cite>. The first is an iterable of <code class="docutils literal notranslate"><span class="pre">(token_type,</span> <span class="pre">value)</span></code> tuples,
+the second is a file-like object with a <cite>write()</cite> method.</p>
+<p>Because this formatter is so basic, it doesn’t override the <cite>get_style_defs()</cite>
+method.</p>
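+<p>A formatter like this is used by passing an instance to <cite>highlight()</cite>; a
+minimal sketch using the <cite>NullFormatter</cite> (which is also shipped with Pygments):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.lexers import PythonLexer
+from pygments.formatters import NullFormatter
+
+# the output is just the input code, because every token value
+# is written through unchanged
+print(highlight('print("hello")', PythonLexer(), NullFormatter()))
+</pre></div>
+</div>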
+</div>
+<div class="section" id="styles">
+<h2>Styles<a class="headerlink" href="#styles" title="Permalink to this headline">¶</a></h2>
+<p>Styles aren’t instantiated but their metaclass provides some class functions
+so that you can access the style definitions easily.</p>
+<p>Styles are iterable and yield tuples in the form <code class="docutils literal notranslate"><span class="pre">(ttype,</span> <span class="pre">d)</span></code> where <cite>ttype</cite>
+is a token and <cite>d</cite> is a dict with the following keys:</p>
+<dl class="simple">
+<dt><code class="docutils literal notranslate"><span class="pre">'color'</span></code></dt><dd><p>Hexadecimal color value (eg: <code class="docutils literal notranslate"><span class="pre">'ff0000'</span></code> for red) or <cite>None</cite> if not
+defined.</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">'bold'</span></code></dt><dd><p><cite>True</cite> if the value should be bold</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">'italic'</span></code></dt><dd><p><cite>True</cite> if the value should be italic</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">'underline'</span></code></dt><dd><p><cite>True</cite> if the value should be underlined</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">'bgcolor'</span></code></dt><dd><p>Hexadecimal color value for the background (eg: <code class="docutils literal notranslate"><span class="pre">'eeeeeee'</span></code> for light
+gray) or <cite>None</cite> if not defined.</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">'border'</span></code></dt><dd><p>Hexadecimal color value for the border (eg: <code class="docutils literal notranslate"><span class="pre">'0000aa'</span></code> for a dark
+blue) or <cite>None</cite> for no border.</p>
+</dd>
+</dl>
+<p>Additional keys might appear in the future; formatters should ignore all keys
+they don’t support.</p>
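+<p>For example, iterating over a builtin style could look like this (the actual
+values depend on the chosen style):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.styles import get_style_by_name
+from pygments.token import Comment
+
+style = get_style_by_name('default')
+for ttype, d in style:
+    if ttype is Comment:
+        # d is the dict described above
+        print(d['color'], d['bold'], d['italic'])
+</pre></div>
+</div>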
+</div>
+<div class="section" id="html-3-2-formatter">
+<h2>HTML 3.2 Formatter<a class="headerlink" href="#html-3-2-formatter" title="Permalink to this headline">¶</a></h2>
+<p>For a more complex example, let’s implement an HTML 3.2 formatter. We don’t
+use CSS but inline markup (<code class="docutils literal notranslate"><span class="pre">&lt;u&gt;</span></code>, <code class="docutils literal notranslate"><span class="pre">&lt;font&gt;</span></code>, etc.). Because this isn’t good
+style, this formatter isn’t in the standard library ;-)</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.formatter</span> <span class="kn">import</span> <span class="n">Formatter</span>
+
+<span class="k">class</span> <span class="nc">OldHtmlFormatter</span><span class="p">(</span><span class="n">Formatter</span><span class="p">):</span>
+
+    <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">**</span><span class="n">options</span><span class="p">):</span>
+        <span class="n">Formatter</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">**</span><span class="n">options</span><span class="p">)</span>
+
+        <span class="c1"># create a dict of (start, end) tuples that wrap the</span>
+        <span class="c1"># value of a token so that we can use it in the format</span>
+        <span class="c1"># method later</span>
+        <span class="bp">self</span><span class="o">.</span><span class="n">styles</span> <span class="o">=</span> <span class="p">{}</span>
+
+        <span class="c1"># we iterate over the `_styles` attribute of a style item</span>
+        <span class="c1"># that contains the parsed style values.</span>
+        <span class="k">for</span> <span class="n">token</span><span class="p">,</span> <span class="n">style</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">style</span><span class="p">:</span>
+            <span class="n">start</span> <span class="o">=</span> <span class="n">end</span> <span class="o">=</span> <span class="s1">&#39;&#39;</span>
+            <span class="c1"># a style item is a tuple in the following form:</span>
+            <span class="c1"># colors are readily specified in hex: &#39;RRGGBB&#39;</span>
+            <span class="k">if</span> <span class="n">style</span><span class="p">[</span><span class="s1">&#39;color&#39;</span><span class="p">]:</span>
+                <span class="n">start</span> <span class="o">+=</span> <span class="s1">&#39;&lt;font color=&quot;#</span><span class="si">%s</span><span class="s1">&quot;&gt;&#39;</span> <span class="o">%</span> <span class="n">style</span><span class="p">[</span><span class="s1">&#39;color&#39;</span><span class="p">]</span>
+                <span class="n">end</span> <span class="o">=</span> <span class="s1">&#39;&lt;/font&gt;&#39;</span> <span class="o">+</span> <span class="n">end</span>
+            <span class="k">if</span> <span class="n">style</span><span class="p">[</span><span class="s1">&#39;bold&#39;</span><span class="p">]:</span>
+                <span class="n">start</span> <span class="o">+=</span> <span class="s1">&#39;&lt;b&gt;&#39;</span>
+                <span class="n">end</span> <span class="o">=</span> <span class="s1">&#39;&lt;/b&gt;&#39;</span> <span class="o">+</span> <span class="n">end</span>
+            <span class="k">if</span> <span class="n">style</span><span class="p">[</span><span class="s1">&#39;italic&#39;</span><span class="p">]:</span>
+                <span class="n">start</span> <span class="o">+=</span> <span class="s1">&#39;&lt;i&gt;&#39;</span>
+                <span class="n">end</span> <span class="o">=</span> <span class="s1">&#39;&lt;/i&gt;&#39;</span> <span class="o">+</span> <span class="n">end</span>
+            <span class="k">if</span> <span class="n">style</span><span class="p">[</span><span class="s1">&#39;underline&#39;</span><span class="p">]:</span>
+                <span class="n">start</span> <span class="o">+=</span> <span class="s1">&#39;&lt;u&gt;&#39;</span>
+                <span class="n">end</span> <span class="o">=</span> <span class="s1">&#39;&lt;/u&gt;&#39;</span> <span class="o">+</span> <span class="n">end</span>
+            <span class="bp">self</span><span class="o">.</span><span class="n">styles</span><span class="p">[</span><span class="n">token</span><span class="p">]</span> <span class="o">=</span> <span class="p">(</span><span class="n">start</span><span class="p">,</span> <span class="n">end</span><span class="p">)</span>
+
+    <span class="k">def</span> <span class="nf">format</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">tokensource</span><span class="p">,</span> <span class="n">outfile</span><span class="p">):</span>
+        <span class="c1"># lastval is a string we use for caching</span>
+        <span class="c1"># because it&#39;s possible that an lexer yields a number</span>
+        <span class="c1"># of consecutive tokens with the same token type.</span>
+        <span class="c1"># to minimize the size of the generated html markup we</span>
+        <span class="c1"># try to join the values of same-type tokens here</span>
+        <span class="n">lastval</span> <span class="o">=</span> <span class="s1">&#39;&#39;</span>
+        <span class="n">lasttype</span> <span class="o">=</span> <span class="kc">None</span>
+
+        <span class="c1"># wrap the whole output with &lt;pre&gt;</span>
+        <span class="n">outfile</span><span class="o">.</span><span class="n">write</span><span class="p">(</span><span class="s1">&#39;&lt;pre&gt;&#39;</span><span class="p">)</span>
+
+        <span class="k">for</span> <span class="n">ttype</span><span class="p">,</span> <span class="n">value</span> <span class="ow">in</span> <span class="n">tokensource</span><span class="p">:</span>
+            <span class="c1"># if the token type doesn&#39;t exist in the stylemap</span>
+            <span class="c1"># we try it with the parent of the token type</span>
+            <span class="c1"># eg: parent of Token.Literal.String.Double is</span>
+            <span class="c1"># Token.Literal.String</span>
+            <span class="k">while</span> <span class="n">ttype</span> <span class="ow">not</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">styles</span><span class="p">:</span>
+                <span class="n">ttype</span> <span class="o">=</span> <span class="n">ttype</span><span class="o">.</span><span class="n">parent</span>
+            <span class="k">if</span> <span class="n">ttype</span> <span class="o">==</span> <span class="n">lasttype</span><span class="p">:</span>
+                <span class="c1"># the current token type is the same of the last</span>
+                <span class="c1"># iteration. cache it</span>
+                <span class="n">lastval</span> <span class="o">+=</span> <span class="n">value</span>
+            <span class="k">else</span><span class="p">:</span>
+                <span class="c1"># not the same token as last iteration, but we</span>
+                <span class="c1"># have some data in the buffer. wrap it with the</span>
+                <span class="c1"># defined style and write it to the output file</span>
+                <span class="k">if</span> <span class="n">lastval</span><span class="p">:</span>
+                    <span class="n">stylebegin</span><span class="p">,</span> <span class="n">styleend</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">styles</span><span class="p">[</span><span class="n">lasttype</span><span class="p">]</span>
+                    <span class="n">outfile</span><span class="o">.</span><span class="n">write</span><span class="p">(</span><span class="n">stylebegin</span> <span class="o">+</span> <span class="n">lastval</span> <span class="o">+</span> <span class="n">styleend</span><span class="p">)</span>
+                <span class="c1"># set lastval/lasttype to current values</span>
+                <span class="n">lastval</span> <span class="o">=</span> <span class="n">value</span>
+                <span class="n">lasttype</span> <span class="o">=</span> <span class="n">ttype</span>
+
+        <span class="c1"># if something is left in the buffer, write it to the</span>
+        <span class="c1"># output file, then close the opened &lt;pre&gt; tag</span>
+        <span class="k">if</span> <span class="n">lastval</span><span class="p">:</span>
+            <span class="n">stylebegin</span><span class="p">,</span> <span class="n">styleend</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">styles</span><span class="p">[</span><span class="n">lasttype</span><span class="p">]</span>
+            <span class="n">outfile</span><span class="o">.</span><span class="n">write</span><span class="p">(</span><span class="n">stylebegin</span> <span class="o">+</span> <span class="n">lastval</span> <span class="o">+</span> <span class="n">styleend</span><span class="p">)</span>
+        <span class="n">outfile</span><span class="o">.</span><span class="n">write</span><span class="p">(</span><span class="s1">&#39;&lt;/pre&gt;</span><span class="se">\n</span><span class="s1">&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>The comments should explain it. Again, this formatter doesn’t override the
+<cite>get_style_defs()</cite> method. If we had used CSS classes instead of
+inline HTML markup, we would need to generate the CSS first. That is what
+the <cite>get_style_defs()</cite> method is for:</p>
+</div>
+<div class="section" id="generating-style-definitions">
+<h2>Generating Style Definitions<a class="headerlink" href="#generating-style-definitions" title="Permalink to this headline">¶</a></h2>
+<p>Some formatters, like the <cite>LatexFormatter</cite> and the <cite>HtmlFormatter</cite>, don’t
+output inline markup but reference either macros or CSS classes. Because
+the definitions of those are not part of the output, the <cite>get_style_defs()</cite>
+method exists. It is passed one parameter (whether and how it is used
+is up to the formatter) and has to return a string or <code class="docutils literal notranslate"><span class="pre">None</span></code>.</p>
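+<p>For example, the <cite>HtmlFormatter</cite> interprets the parameter as a CSS selector
+prefix for the generated rules:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.formatters import HtmlFormatter
+
+# print the CSS rules for the default style, prefixed with '.highlight'
+print(HtmlFormatter(style='default').get_style_defs('.highlight'))
+</pre></div>
+</div>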
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/formatters.html b/doc/_build/html/docs/formatters.html
new file mode 100644 (file)
index 0000000..5266a3d
--- /dev/null
@@ -0,0 +1,976 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Available formatters &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Styles" href="styles.html" />
+    <link rel="prev" title="Filters" href="filters.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="../index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">Available formatters</a><ul>
+<li><a class="reference internal" href="#common-options">Common options</a></li>
+<li><a class="reference internal" href="#formatter-classes">Formatter classes</a></li>
+</ul>
+</li>
+</ul>
+
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="filters.html"
+                        title="previous chapter">Filters</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="styles.html"
+                        title="next chapter">Styles</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/formatters.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="available-formatters">
+<h1>Available formatters<a class="headerlink" href="#available-formatters" title="Permalink to this headline">¶</a></h1>
+<p>This page lists all builtin formatters.</p>
+<div class="section" id="common-options">
+<h2>Common options<a class="headerlink" href="#common-options" title="Permalink to this headline">¶</a></h2>
+<p>All formatters support these options:</p>
+<dl>
+<dt><cite>encoding</cite></dt><dd><p>If given, must be an encoding name (such as <code class="docutils literal notranslate"><span class="pre">&quot;utf-8&quot;</span></code>). This will
+be used to convert the token strings (which are Unicode strings)
+to byte strings in the output (default: <code class="docutils literal notranslate"><span class="pre">None</span></code>).
+It will also be written in an encoding declaration suitable for the
+document format if the <cite>full</cite> option is given (e.g. a <code class="docutils literal notranslate"><span class="pre">meta</span>
+<span class="pre">content-type</span></code> directive in HTML or an invocation of the <cite>inputenc</cite>
+package in LaTeX).</p>
+<p>If this is <code class="docutils literal notranslate"><span class="pre">&quot;&quot;</span></code> or <code class="docutils literal notranslate"><span class="pre">None</span></code>, Unicode strings will be written
+to the output file, which most file-like objects do not support.
+For example, <cite>pygments.highlight()</cite> will return a Unicode string if
+called with no <cite>outfile</cite> argument and a formatter that has <cite>encoding</cite>
+set to <code class="docutils literal notranslate"><span class="pre">None</span></code> because it uses a <cite>StringIO.StringIO</cite> object that
+supports Unicode arguments to <cite>write()</cite>. Using a regular file object
+wouldn’t work.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+</dd>
+<dt><cite>outencoding</cite></dt><dd><p>When using Pygments from the command line, any <cite>encoding</cite> option given is
+passed to the lexer and the formatter. This is sometimes not desirable,
+for example if you want to set the input encoding to <code class="docutils literal notranslate"><span class="pre">&quot;guess&quot;</span></code>.
+Therefore, <cite>outencoding</cite> has been introduced which overrides <cite>encoding</cite>
+for the formatter if given.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+</dd>
+</dl>
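+<p>In practice, the <cite>encoding</cite> option decides whether the highlighting functions
+return text or bytes; a minimal sketch:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.lexers import PythonLexer
+from pygments.formatters import HtmlFormatter
+
+code = 'print("hi")'
+
+# no encoding: the result is a Unicode string
+text = highlight(code, PythonLexer(), HtmlFormatter())
+
+# with an encoding, the result is encoded to bytes
+data = highlight(code, PythonLexer(), HtmlFormatter(encoding='utf-8'))
+</pre></div>
+</div>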
+</div>
+<div class="section" id="formatter-classes">
+<h2>Formatter classes<a class="headerlink" href="#formatter-classes" title="Permalink to this headline">¶</a></h2>
+<p>All these classes are importable from <a class="reference internal" href="api.html#module-pygments.formatters" title="pygments.formatters"><code class="xref py py-mod docutils literal notranslate"><span class="pre">pygments.formatters</span></code></a>.</p>
+<dl class="class">
+<dt id="BBCodeFormatter">
+<em class="property">class </em><code class="sig-name descname">BBCodeFormatter</code><a class="headerlink" href="#BBCodeFormatter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>bbcode, bb</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+</dl>
+<p>Format tokens with BBcodes. These formatting codes are used by many
+bulletin boards, so you can highlight your source code with Pygments before
+posting it there.</p>
+<p>This formatter has no support for background colors and borders, as there
+are no common BBcode tags for that.</p>
+<p>Some board systems (e.g. phpBB) don’t support colors in their [code] tag,
+so you can’t use the highlighting together with that tag.
+Text in a [code] tag is usually shown with a monospace font (which this
+formatter can request with the <code class="docutils literal notranslate"><span class="pre">monofont</span></code> option), and no spaces (which you
+need for indentation) are removed.</p>
+<p>Additional options accepted:</p>
+<dl class="simple">
+<dt><cite>style</cite></dt><dd><p>The style to use, can be a string or a Style subclass (default:
+<code class="docutils literal notranslate"><span class="pre">'default'</span></code>).</p>
+</dd>
+<dt><cite>codetag</cite></dt><dd><p>If set to true, put the output into <code class="docutils literal notranslate"><span class="pre">[code]</span></code> tags (default:
+<code class="docutils literal notranslate"><span class="pre">false</span></code>)</p>
+</dd>
+<dt><cite>monofont</cite></dt><dd><p>If set to true, add a tag to show the code with a monospace font
+(default: <code class="docutils literal notranslate"><span class="pre">false</span></code>).</p>
+</dd>
+</dl>
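+<p>A minimal usage sketch (assuming the usual <cite>highlight()</cite> call; the option
+values are just examples):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.lexers import PythonLexer
+from pygments.formatters import BBCodeFormatter
+
+# wrap the output in [code] tags and request a monospace font tag
+formatter = BBCodeFormatter(codetag=True, monofont=True)
+print(highlight('def foo(bar):\n    pass\n', PythonLexer(), formatter))
+</pre></div>
+</div>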
+</dd></dl>
+
+<dl class="class">
+<dt id="BmpImageFormatter">
+<em class="property">class </em><code class="sig-name descname">BmpImageFormatter</code><a class="headerlink" href="#BmpImageFormatter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>bmp, bitmap</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.bmp</p>
+</dd>
+</dl>
+<p>Create a bitmap image from source code. This uses the Python Imaging Library to
+generate a pixmap from the source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="GifImageFormatter">
+<em class="property">class </em><code class="sig-name descname">GifImageFormatter</code><a class="headerlink" href="#GifImageFormatter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>gif</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.gif</p>
+</dd>
+</dl>
+<p>Create a GIF image from source code. This uses the Python Imaging Library to
+generate a pixmap from the source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="HtmlFormatter">
+<em class="property">class </em><code class="sig-name descname">HtmlFormatter</code><a class="headerlink" href="#HtmlFormatter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>html</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.html, *.htm</p>
+</dd>
+</dl>
+<p>Format tokens as HTML 4 <code class="docutils literal notranslate"><span class="pre">&lt;span&gt;</span></code> tags within a <code class="docutils literal notranslate"><span class="pre">&lt;pre&gt;</span></code> tag, wrapped
+in a <code class="docutils literal notranslate"><span class="pre">&lt;div&gt;</span></code> tag. The <code class="docutils literal notranslate"><span class="pre">&lt;div&gt;</span></code>’s CSS class can be set by the <cite>cssclass</cite>
+option.</p>
+<p>If the <cite>linenos</cite> option is set to <code class="docutils literal notranslate"><span class="pre">&quot;table&quot;</span></code>, the <code class="docutils literal notranslate"><span class="pre">&lt;pre&gt;</span></code> is
+additionally wrapped inside a <code class="docutils literal notranslate"><span class="pre">&lt;table&gt;</span></code> which has one row and two
+cells: one containing the line numbers and one containing the code.
+Example:</p>
+<div class="highlight-html notranslate"><div class="highlight"><pre><span></span><span class="p">&lt;</span><span class="nt">div</span> <span class="na">class</span><span class="o">=</span><span class="s">&quot;highlight&quot;</span> <span class="p">&gt;</span>
+<span class="p">&lt;</span><span class="nt">table</span><span class="p">&gt;&lt;</span><span class="nt">tr</span><span class="p">&gt;</span>
+  <span class="p">&lt;</span><span class="nt">td</span> <span class="na">class</span><span class="o">=</span><span class="s">&quot;linenos&quot;</span> <span class="na">title</span><span class="o">=</span><span class="s">&quot;click to toggle&quot;</span>
+    <span class="na">onclick</span><span class="o">=</span><span class="s">&quot;with (this.firstChild.style)</span>
+<span class="s">             { display = (display == &#39;&#39;) ? &#39;none&#39; : &#39;&#39; }&quot;</span><span class="p">&gt;</span>
+    <span class="p">&lt;</span><span class="nt">pre</span><span class="p">&gt;</span>1
+    2<span class="p">&lt;/</span><span class="nt">pre</span><span class="p">&gt;</span>
+  <span class="p">&lt;/</span><span class="nt">td</span><span class="p">&gt;</span>
+  <span class="p">&lt;</span><span class="nt">td</span> <span class="na">class</span><span class="o">=</span><span class="s">&quot;code&quot;</span><span class="p">&gt;</span>
+    <span class="p">&lt;</span><span class="nt">pre</span><span class="p">&gt;&lt;</span><span class="nt">span</span> <span class="na">class</span><span class="o">=</span><span class="s">&quot;Ke&quot;</span><span class="p">&gt;</span>def <span class="p">&lt;/</span><span class="nt">span</span><span class="p">&gt;&lt;</span><span class="nt">span</span> <span class="na">class</span><span class="o">=</span><span class="s">&quot;NaFu&quot;</span><span class="p">&gt;</span>foo<span class="p">&lt;/</span><span class="nt">span</span><span class="p">&gt;</span>(bar):
+      <span class="p">&lt;</span><span class="nt">span</span> <span class="na">class</span><span class="o">=</span><span class="s">&quot;Ke&quot;</span><span class="p">&gt;</span>pass<span class="p">&lt;/</span><span class="nt">span</span><span class="p">&gt;</span>
+    <span class="p">&lt;/</span><span class="nt">pre</span><span class="p">&gt;</span>
+  <span class="p">&lt;/</span><span class="nt">td</span><span class="p">&gt;</span>
+<span class="p">&lt;/</span><span class="nt">tr</span><span class="p">&gt;&lt;/</span><span class="nt">table</span><span class="p">&gt;&lt;/</span><span class="nt">div</span><span class="p">&gt;</span>
+</pre></div>
+</div>
+<p>(whitespace added to improve clarity).</p>
+<p>Wrapping can be disabled using the <cite>nowrap</cite> option.</p>
+<p>A list of lines can be specified using the <cite>hl_lines</cite> option to make these
+lines highlighted (as of Pygments 0.11).</p>
+<p>With the <cite>full</cite> option, a complete HTML 4 document is output, including
+the style definitions inside a <code class="docutils literal notranslate"><span class="pre">&lt;style&gt;</span></code> tag, or in a separate file if
+the <cite>cssfile</cite> option is given.</p>
+<p>When <cite>tagsfile</cite> is set to the path of a ctags index file, it is used to
+generate hyperlinks from names to their definition.  You must enable
+<cite>lineanchors</cite> and run ctags with the <cite>-n</cite> option for this to work.  The
+<cite>python-ctags</cite> module from PyPI must be installed to use this feature;
+otherwise a <cite>RuntimeError</cite> will be raised.</p>
+<p>The <cite>get_style_defs(arg=’’)</cite> method of a <cite>HtmlFormatter</cite> returns a string
+containing CSS rules for the CSS classes used by the formatter. The
+argument <cite>arg</cite> can be used to specify additional CSS selectors that
+are prepended to the classes. A call <cite>fmter.get_style_defs(‘td .code’)</cite>
+would result in the following CSS classes:</p>
+<div class="highlight-css notranslate"><div class="highlight"><pre><span></span><span class="nt">td</span> <span class="p">.</span><span class="nc">code</span> <span class="p">.</span><span class="nc">kw</span> <span class="p">{</span> <span class="k">font-weight</span><span class="p">:</span> <span class="kc">bold</span><span class="p">;</span> <span class="k">color</span><span class="p">:</span> <span class="mh">#00FF00</span> <span class="p">}</span>
+<span class="nt">td</span> <span class="p">.</span><span class="nc">code</span> <span class="p">.</span><span class="nc">cm</span> <span class="p">{</span> <span class="k">color</span><span class="p">:</span> <span class="mh">#999999</span> <span class="p">}</span>
+<span class="o">...</span>
+</pre></div>
+</div>
+<p>If you have Pygments 0.6 or higher, you can also pass a list or tuple to the
+<cite>get_style_defs()</cite> method to request multiple prefixes for the tokens:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="n">formatter</span><span class="o">.</span><span class="n">get_style_defs</span><span class="p">([</span><span class="s1">&#39;div.syntax pre&#39;</span><span class="p">,</span> <span class="s1">&#39;pre.syntax&#39;</span><span class="p">])</span>
+</pre></div>
+</div>
+<p>The output would then look like this:</p>
+<div class="highlight-css notranslate"><div class="highlight"><pre><span></span><span class="nt">div</span><span class="p">.</span><span class="nc">syntax</span> <span class="nt">pre</span> <span class="p">.</span><span class="nc">kw</span><span class="o">,</span>
+<span class="nt">pre</span><span class="p">.</span><span class="nc">syntax</span> <span class="p">.</span><span class="nc">kw</span> <span class="p">{</span> <span class="k">font-weight</span><span class="p">:</span> <span class="kc">bold</span><span class="p">;</span> <span class="k">color</span><span class="p">:</span> <span class="mh">#00FF00</span> <span class="p">}</span>
+<span class="nt">div</span><span class="p">.</span><span class="nc">syntax</span> <span class="nt">pre</span> <span class="p">.</span><span class="nc">cm</span><span class="o">,</span>
+<span class="nt">pre</span><span class="p">.</span><span class="nc">syntax</span> <span class="p">.</span><span class="nc">cm</span> <span class="p">{</span> <span class="k">color</span><span class="p">:</span> <span class="mh">#999999</span> <span class="p">}</span>
+<span class="o">...</span>
+</pre></div>
+</div>
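+<p>Putting it together, a small sketch that writes the stylesheet and the
+highlighted markup to separate files (the file names and the <cite>cssclass</cite>
+value are arbitrary):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.lexers import PythonLexer
+from pygments.formatters import HtmlFormatter
+
+formatter = HtmlFormatter(linenos='table', cssclass='syntax')
+
+# CSS rules scoped to the chosen cssclass
+with open('style.css', 'w') as f:
+    f.write(formatter.get_style_defs('.syntax'))
+
+# the highlighted markup fragment shown above
+with open('example.html', 'w') as f:
+    f.write(highlight('def foo(bar):\n    pass\n', PythonLexer(), formatter))
+</pre></div>
+</div>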
+<p>Additional options accepted:</p>
+<dl>
+<dt><cite>nowrap</cite></dt><dd><p>If set to <code class="docutils literal notranslate"><span class="pre">True</span></code>, don’t wrap the tokens at all, not even inside a <code class="docutils literal notranslate"><span class="pre">&lt;pre&gt;</span></code>
+tag. This disables most other options (default: <code class="docutils literal notranslate"><span class="pre">False</span></code>).</p>
+</dd>
+<dt><cite>full</cite></dt><dd><p>Tells the formatter to output a “full” document, i.e. a complete
+self-contained document (default: <code class="docutils literal notranslate"><span class="pre">False</span></code>).</p>
+</dd>
+<dt><cite>title</cite></dt><dd><p>If <cite>full</cite> is true, the title that should be used to caption the
+document (default: <code class="docutils literal notranslate"><span class="pre">''</span></code>).</p>
+</dd>
+<dt><cite>style</cite></dt><dd><p>The style to use, can be a string or a Style subclass (default:
+<code class="docutils literal notranslate"><span class="pre">'default'</span></code>). This option has no effect if the <cite>cssfile</cite>
+and <cite>noclobber_cssfile</cite> option are given and the file specified in
+<cite>cssfile</cite> exists.</p>
+</dd>
+<dt><cite>noclasses</cite></dt><dd><p>If set to true, token <code class="docutils literal notranslate"><span class="pre">&lt;span&gt;</span></code> tags will not use CSS classes, but
+inline styles. This is not recommended for larger pieces of code since
+it increases output size by quite a bit (default: <code class="docutils literal notranslate"><span class="pre">False</span></code>).</p>
+</dd>
+<dt><cite>classprefix</cite></dt><dd><p>Since the token types use relatively short class names, they may clash
+with some of your own class names. In this case you can use the
+<cite>classprefix</cite> option to give a string to prepend to all Pygments-generated
+CSS class names for token types.
+Note that this option also affects the output of <cite>get_style_defs()</cite>.</p>
+</dd>
+<dt><cite>cssclass</cite></dt><dd><p>CSS class for the wrapping <code class="docutils literal notranslate"><span class="pre">&lt;div&gt;</span></code> tag (default: <code class="docutils literal notranslate"><span class="pre">'highlight'</span></code>).
+If you set this option, the default selector for <cite>get_style_defs()</cite>
+will be this class.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.9: </span>If you select the <code class="docutils literal notranslate"><span class="pre">'table'</span></code> line numbers, the wrapping table will
+have a CSS class of this string plus <code class="docutils literal notranslate"><span class="pre">'table'</span></code>, the default is
+accordingly <code class="docutils literal notranslate"><span class="pre">'highlighttable'</span></code>.</p>
+</div>
+</dd>
+<dt><cite>cssstyles</cite></dt><dd><p>Inline CSS styles for the wrapping <code class="docutils literal notranslate"><span class="pre">&lt;div&gt;</span></code> tag (default: <code class="docutils literal notranslate"><span class="pre">''</span></code>).</p>
+</dd>
+<dt><cite>prestyles</cite></dt><dd><p>Inline CSS styles for the <code class="docutils literal notranslate"><span class="pre">&lt;pre&gt;</span></code> tag (default: <code class="docutils literal notranslate"><span class="pre">''</span></code>).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.11.</span></p>
+</div>
+</dd>
+<dt><cite>cssfile</cite></dt><dd><p>If the <cite>full</cite> option is true and this option is given, it must be the
+name of an external file. If the filename does not include an absolute
+path, the file’s path will be assumed to be relative to the main output
+file’s path, if the latter can be found. The stylesheet is then written
+to this file instead of the HTML file.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+</dd>
+<dt><cite>noclobber_cssfile</cite></dt><dd><p>If <cite>cssfile</cite> is given and the specified file exists, the css file will
+not be overwritten. This allows the use of the <cite>full</cite> option in
+combination with a user specified css file. Default is <code class="docutils literal notranslate"><span class="pre">False</span></code>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd>
+<dt><cite>linenos</cite></dt><dd><p>If set to <code class="docutils literal notranslate"><span class="pre">'table'</span></code>, output line numbers as a table with two cells,
+one containing the line numbers, the other the whole code.  This is
+copy-and-paste-friendly, but may cause alignment problems with some
+browsers or fonts.  If set to <code class="docutils literal notranslate"><span class="pre">'inline'</span></code>, the line numbers will be
+integrated in the <code class="docutils literal notranslate"><span class="pre">&lt;pre&gt;</span></code> tag that contains the code (that setting
+is <em>new in Pygments 0.8</em>).</p>
+<p>For compatibility with Pygments 0.7 and earlier, every true value
+except <code class="docutils literal notranslate"><span class="pre">'inline'</span></code> means the same as <code class="docutils literal notranslate"><span class="pre">'table'</span></code> (in particular, this
+includes <code class="docutils literal notranslate"><span class="pre">True</span></code>).</p>
+<p>The default value is <code class="docutils literal notranslate"><span class="pre">False</span></code>, which means no line numbers at all.</p>
+<p><strong>Note:</strong> with the default (“table”) line number mechanism, the line
+numbers and code can have different line heights in Internet Explorer
+unless you give the enclosing <code class="docutils literal notranslate"><span class="pre">&lt;pre&gt;</span></code> tags an explicit <code class="docutils literal notranslate"><span class="pre">line-height</span></code>
+CSS property (you get the default line spacing with <code class="docutils literal notranslate"><span class="pre">line-height:</span>
+<span class="pre">125%</span></code>).</p>
+</dd>
+<dt><cite>hl_lines</cite></dt><dd><p>Specify a list of lines to be highlighted.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.11.</span></p>
+</div>
+</dd>
+<dt><cite>linenostart</cite></dt><dd><p>The line number for the first line (default: <code class="docutils literal notranslate"><span class="pre">1</span></code>).</p>
+</dd>
+<dt><cite>linenostep</cite></dt><dd><p>If set to a number n &gt; 1, only every nth line number is printed.</p>
+</dd>
+<dt><cite>linenospecial</cite></dt><dd><p>If set to a number n &gt; 0, every nth line number is given the CSS
+class <code class="docutils literal notranslate"><span class="pre">&quot;special&quot;</span></code> (default: <code class="docutils literal notranslate"><span class="pre">0</span></code>).</p>
+</dd>
+<dt><cite>nobackground</cite></dt><dd><p>If set to <code class="docutils literal notranslate"><span class="pre">True</span></code>, the formatter won’t output the background color
+for the wrapping element (this automatically defaults to <code class="docutils literal notranslate"><span class="pre">False</span></code>
+when there is no wrapping element, e.g. when no argument is given to the
+<cite>get_style_defs</cite> method) (default: <code class="docutils literal notranslate"><span class="pre">False</span></code>).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+</dd>
+<dt><cite>lineseparator</cite></dt><dd><p>This string is output between lines of code. It defaults to <code class="docutils literal notranslate"><span class="pre">&quot;\n&quot;</span></code>,
+which is enough to break a line inside <code class="docutils literal notranslate"><span class="pre">&lt;pre&gt;</span></code> tags, but you can
+e.g. set it to <code class="docutils literal notranslate"><span class="pre">&quot;&lt;br&gt;&quot;</span></code> to get HTML line breaks.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+</dd>
+<dt><cite>lineanchors</cite></dt><dd><p>If set to a nonempty string, e.g. <code class="docutils literal notranslate"><span class="pre">foo</span></code>, the formatter will wrap each
+output line in an anchor tag with a <code class="docutils literal notranslate"><span class="pre">name</span></code> of <code class="docutils literal notranslate"><span class="pre">foo-linenumber</span></code>.
+This allows easy linking to certain lines.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.9.</span></p>
+</div>
+</dd>
+<dt><cite>linespans</cite></dt><dd><p>If set to a nonempty string, e.g. <code class="docutils literal notranslate"><span class="pre">foo</span></code>, the formatter will wrap each
+output line in a span tag with an <code class="docutils literal notranslate"><span class="pre">id</span></code> of <code class="docutils literal notranslate"><span class="pre">foo-linenumber</span></code>.
+This allows easy access to lines via javascript.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd>
+<dt><cite>anchorlinenos</cite></dt><dd><p>If set to <cite>True</cite>, will wrap line numbers in &lt;a&gt; tags. Used in
+combination with <cite>linenos</cite> and <cite>lineanchors</cite>.</p>
+</dd>
+<dt><cite>tagsfile</cite></dt><dd><p>If set to the path of a ctags file, wrap names in anchor tags that
+link to their definitions. <cite>lineanchors</cite> should be used, and the
+tags file should specify line numbers (see the <cite>-n</cite> option to ctags).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd>
+<dt><cite>tagurlformat</cite></dt><dd><p>A string formatting pattern used to generate links to ctags definitions.
+Available variables are <cite>%(path)s</cite>, <cite>%(fname)s</cite> and <cite>%(fext)s</cite>.
+Defaults to an empty string, resulting in just <cite>#prefix-number</cite> links.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd>
+<dt><cite>filename</cite></dt><dd><p>A string used to generate a filename when rendering <code class="docutils literal notranslate"><span class="pre">&lt;pre&gt;</span></code> blocks,
+for example if displaying source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd>
+<dt><cite>wrapcode</cite></dt><dd><p>Wrap the code inside <code class="docutils literal notranslate"><span class="pre">&lt;pre&gt;</span></code> blocks using <code class="docutils literal notranslate"><span class="pre">&lt;code&gt;</span></code>, as recommended
+by the HTML5 specification.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd>
+</dl>
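+<p>For illustration, a short sketch combining a few of the options above
+(the chosen values are arbitrary):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.lexers import PythonLexer
+from pygments.formatters import HtmlFormatter
+
+formatter = HtmlFormatter(
+    linenos='inline',        # line numbers inside the pre tag
+    hl_lines=[2],            # highlight the second source line
+    lineanchors='example',   # anchors named example-1, example-2, ...
+)
+html = highlight('def foo(bar):\n    pass\n', PythonLexer(), formatter)
+</pre></div>
+</div>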
+<p><strong>Subclassing the HTML formatter</strong></p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+<p>The HTML formatter is now built in a way that allows easy subclassing, thus
+customizing the output HTML code. The <cite>format()</cite> method calls
+<cite>self._format_lines()</cite> which returns a generator that yields tuples of <code class="docutils literal notranslate"><span class="pre">(1,</span>
+<span class="pre">line)</span></code>, where the <code class="docutils literal notranslate"><span class="pre">1</span></code> indicates that the <code class="docutils literal notranslate"><span class="pre">line</span></code> is a line of the
+formatted source code.</p>
+<p>If the <cite>nowrap</cite> option is set, the generator is simply iterated over and the
+resulting HTML is output.</p>
+<p>Otherwise, <cite>format()</cite> calls <cite>self.wrap()</cite>, which wraps the generator with
+other generators. These may add some HTML code to the one generated by
+<cite>_format_lines()</cite>, either by modifying the lines generated by the latter,
+then yielding them again with <code class="docutils literal notranslate"><span class="pre">(1,</span> <span class="pre">line)</span></code>, and/or by yielding other HTML
+code before or after the lines, with <code class="docutils literal notranslate"><span class="pre">(0,</span> <span class="pre">html)</span></code>. The distinction between
+source lines and other code makes it possible to wrap the generator multiple
+times.</p>
+<p>The default <cite>wrap()</cite> implementation adds a <code class="docutils literal notranslate"><span class="pre">&lt;div&gt;</span></code> and a <code class="docutils literal notranslate"><span class="pre">&lt;pre&gt;</span></code> tag.</p>
+<p>A custom <cite>HtmlFormatter</cite> subclass could look like this:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="k">class</span> <span class="nc">CodeHtmlFormatter</span><span class="p">(</span><span class="n">HtmlFormatter</span><span class="p">):</span>
+
+    <span class="k">def</span> <span class="nf">wrap</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">source</span><span class="p">,</span> <span class="n">outfile</span><span class="p">):</span>
+        <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_wrap_code</span><span class="p">(</span><span class="n">source</span><span class="p">)</span>
+
+    <span class="k">def</span> <span class="nf">_wrap_code</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">source</span><span class="p">):</span>
+        <span class="k">yield</span> <span class="mi">0</span><span class="p">,</span> <span class="s1">&#39;&lt;code&gt;&#39;</span>
+        <span class="k">for</span> <span class="n">i</span><span class="p">,</span> <span class="n">t</span> <span class="ow">in</span> <span class="n">source</span><span class="p">:</span>
+            <span class="k">if</span> <span class="n">i</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
+                <span class="c1"># it&#39;s a line of formatted code</span>
+                <span class="n">t</span> <span class="o">+=</span> <span class="s1">&#39;&lt;br&gt;&#39;</span>
+            <span class="k">yield</span> <span class="n">i</span><span class="p">,</span> <span class="n">t</span>
+        <span class="k">yield</span> <span class="mi">0</span><span class="p">,</span> <span class="s1">&#39;&lt;/code&gt;&#39;</span>
+</pre></div>
+</div>
+<p>This results in wrapping the formatted lines with a <code class="docutils literal notranslate"><span class="pre">&lt;code&gt;</span></code> tag, where the
+source lines are broken using <code class="docutils literal notranslate"><span class="pre">&lt;br&gt;</span></code> tags.</p>
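+<p>Such a subclass is then used like any other formatter; a minimal sketch
+(reusing the <cite>CodeHtmlFormatter</cite> example defined above):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.lexers import PythonLexer
+
+# CodeHtmlFormatter is the example subclass shown above
+print(highlight('def foo(bar):\n    pass\n', PythonLexer(), CodeHtmlFormatter()))
+</pre></div>
+</div>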
+<p>After calling <cite>wrap()</cite>, the <cite>format()</cite> method also adds the “line numbers”
+and/or “full document” wrappers if the respective options are set. Then, all
+HTML yielded by the wrapped generator is output.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="IRCFormatter">
+<em class="property">class </em><code class="sig-name descname">IRCFormatter</code><a class="headerlink" href="#IRCFormatter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>irc, IRC</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+</dl>
+<p>Format tokens with IRC color sequences.</p>
+<p>The <cite>get_style_defs()</cite> method doesn’t do anything special since there is
+no support for common styles.</p>
+<p>Options accepted:</p>
+<dl class="simple">
+<dt><cite>bg</cite></dt><dd><p>Set to <code class="docutils literal notranslate"><span class="pre">&quot;light&quot;</span></code> or <code class="docutils literal notranslate"><span class="pre">&quot;dark&quot;</span></code> depending on the terminal’s background
+(default: <code class="docutils literal notranslate"><span class="pre">&quot;light&quot;</span></code>).</p>
+</dd>
+<dt><cite>colorscheme</cite></dt><dd><p>A dictionary mapping token types to (lightbg, darkbg) color names or
+<code class="docutils literal notranslate"><span class="pre">None</span></code> (default: <code class="docutils literal notranslate"><span class="pre">None</span></code> = use builtin colorscheme).</p>
+</dd>
+<dt><cite>linenos</cite></dt><dd><p>Set to <code class="docutils literal notranslate"><span class="pre">True</span></code> to have line numbers in the output as well
+(default: <code class="docutils literal notranslate"><span class="pre">False</span></code> = no line numbers).</p>
+</dd>
+</dl>
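+<p>A minimal sketch for a dark-background IRC client (the option values are
+just examples):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.lexers import PythonLexer
+from pygments.formatters import IRCFormatter
+
+colored = highlight('def foo(bar):\n    pass\n', PythonLexer(),
+                    IRCFormatter(bg='dark', linenos=True))
+</pre></div>
+</div>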
+</dd></dl>
+
+<dl class="class">
+<dt id="ImageFormatter">
+<em class="property">class </em><code class="sig-name descname">ImageFormatter</code><a class="headerlink" href="#ImageFormatter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>img, IMG, png</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.png</p>
+</dd>
+</dl>
+<p>Create a PNG image from source code. This uses the Python Imaging Library to
+generate a pixmap from the source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.10.</span></p>
+</div>
+<p>Additional options accepted:</p>
+<dl>
+<dt><cite>image_format</cite></dt><dd><p>An image format to output to that is recognised by PIL; these include:</p>
+<ul class="simple">
+<li><p>“PNG” (default)</p></li>
+<li><p>“JPEG”</p></li>
+<li><p>“BMP”</p></li>
+<li><p>“GIF”</p></li>
+</ul>
+</dd>
+<dt><cite>line_pad</cite></dt><dd><p>The extra spacing (in pixels) between each line of text.</p>
+<p>Default: 2</p>
+</dd>
+<dt><cite>font_name</cite></dt><dd><p>The font name to be used as the base font, from which other variants,
+such as bold and italic fonts, will be generated.  This really should be
+a monospace font to look sane.</p>
+<p>Default: “Courier New” on Windows, “Menlo” on Mac OS, and
+“DejaVu Sans Mono” on *nix.</p>
+</dd>
+<dt><cite>font_size</cite></dt><dd><p>The font size in points to be used.</p>
+<p>Default: 14</p>
+</dd>
+<dt><cite>image_pad</cite></dt><dd><p>The padding, in pixels, to be used at each edge of the resulting image.</p>
+<p>Default: 10</p>
+</dd>
+<dt><cite>line_numbers</cite></dt><dd><p>Whether line numbers should be shown: True/False</p>
+<p>Default: True</p>
+</dd>
+<dt><cite>line_number_start</cite></dt><dd><p>The line number of the first line.</p>
+<p>Default: 1</p>
+</dd>
+<dt><cite>line_number_step</cite></dt><dd><p>The step used when printing line numbers.</p>
+<p>Default: 1</p>
+</dd>
+<dt><cite>line_number_bg</cite></dt><dd><p>The background colour (in “#123456” format) of the line number bar, or
+None to use the style background color.</p>
+<p>Default: “#eed”</p>
+</dd>
+<dt><cite>line_number_fg</cite></dt><dd><p>The text color of the line numbers (in “#123456”-like format).</p>
+<p>Default: “#886”</p>
+</dd>
+<dt><cite>line_number_chars</cite></dt><dd><p>The number of columns of line numbers allowable in the line number
+margin.</p>
+<p>Default: 2</p>
+</dd>
+<dt><cite>line_number_bold</cite></dt><dd><p>Whether line numbers will be bold: True/False</p>
+<p>Default: False</p>
+</dd>
+<dt><cite>line_number_italic</cite></dt><dd><p>Whether line numbers will be italicized: True/False</p>
+<p>Default: False</p>
+</dd>
+<dt><cite>line_number_separator</cite></dt><dd><p>Whether a line will be drawn between the line number area and the
+source code area: True/False</p>
+<p>Default: True</p>
+</dd>
+<dt><cite>line_number_pad</cite></dt><dd><p>The horizontal padding (in pixels) between the line number margin, and
+the source code area.</p>
+<p>Default: 6</p>
+</dd>
+<dt><cite>hl_lines</cite></dt><dd><p>Specify a list of lines to be highlighted.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.2.</span></p>
+</div>
+<p>Default: empty list</p>
+</dd>
+<dt><cite>hl_color</cite></dt><dd><p>Specify the color for highlighting lines.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.2.</span></p>
+</div>
+<p>Default: highlight color of the selected style</p>
+</dd>
+</dl>
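+<p>A minimal sketch that renders a PNG to disk (this assumes the Python Imaging
+Library / Pillow is installed; the file name and option values are arbitrary):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.lexers import PythonLexer
+from pygments.formatters import ImageFormatter
+
+formatter = ImageFormatter(font_size=14, line_numbers=True)
+
+# image output is binary, so write it to a file opened in binary mode
+with open('code.png', 'wb') as f:
+    highlight('def foo(bar):\n    pass\n', PythonLexer(), formatter, outfile=f)
+</pre></div>
+</div>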
+</dd></dl>
+
+<dl class="class">
+<dt id="JpgImageFormatter">
+<em class="property">class </em><code class="sig-name descname">JpgImageFormatter</code><a class="headerlink" href="#JpgImageFormatter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>jpg, jpeg</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.jpg</p>
+</dd>
+</dl>
+<p>Create a JPEG image from source code. This uses the Python Imaging Library to
+generate a pixmap from the source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="LatexFormatter">
+<em class="property">class </em><code class="sig-name descname">LatexFormatter</code><a class="headerlink" href="#LatexFormatter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>latex, tex</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.tex</p>
+</dd>
+</dl>
+<p>Format tokens as LaTeX code. This needs the <cite>fancyvrb</cite> and <cite>color</cite>
+standard packages.</p>
+<p>Without the <cite>full</cite> option, code is formatted as one <code class="docutils literal notranslate"><span class="pre">Verbatim</span></code>
+environment, like this:</p>
+<div class="highlight-latex notranslate"><div class="highlight"><pre><span></span><span class="k">\begin</span><span class="nb">{</span>Verbatim<span class="nb">}</span>[commandchars=<span class="k">\\\{\}</span>]
+<span class="k">\PY</span><span class="nb">{</span>k<span class="nb">}{</span>def <span class="nb">}</span><span class="k">\PY</span><span class="nb">{</span>n+nf<span class="nb">}{</span>foo<span class="nb">}</span>(<span class="k">\PY</span><span class="nb">{</span>n<span class="nb">}{</span>bar<span class="nb">}</span>):
+    <span class="k">\PY</span><span class="nb">{</span>k<span class="nb">}{</span>pass<span class="nb">}</span>
+<span class="k">\end</span><span class="nb">{</span>Verbatim<span class="nb">}</span>
+</pre></div>
+</div>
+<p>The special command used here (<code class="docutils literal notranslate"><span class="pre">\PY</span></code>) and all the other macros it needs
+are output by the <cite>get_style_defs</cite> method.</p>
+<p>With the <cite>full</cite> option, a complete LaTeX document is output, including
+the command definitions in the preamble.</p>
+<p>The <cite>get_style_defs()</cite> method of a <cite>LatexFormatter</cite> returns a string
+containing <code class="docutils literal notranslate"><span class="pre">\def</span></code> commands defining the macros needed inside the
+<code class="docutils literal notranslate"><span class="pre">Verbatim</span></code> environments.</p>
+<p>Additional options accepted:</p>
+<dl>
+<dt><cite>style</cite></dt><dd><p>The style to use, can be a string or a Style subclass (default:
+<code class="docutils literal notranslate"><span class="pre">'default'</span></code>).</p>
+</dd>
+<dt><cite>full</cite></dt><dd><p>Tells the formatter to output a “full” document, i.e. a complete
+self-contained document (default: <code class="docutils literal notranslate"><span class="pre">False</span></code>).</p>
+</dd>
+<dt><cite>title</cite></dt><dd><p>If <cite>full</cite> is true, the title that should be used to caption the
+document (default: <code class="docutils literal notranslate"><span class="pre">''</span></code>).</p>
+</dd>
+<dt><cite>docclass</cite></dt><dd><p>If the <cite>full</cite> option is enabled, this is the document class to use
+(default: <code class="docutils literal notranslate"><span class="pre">'article'</span></code>).</p>
+</dd>
+<dt><cite>preamble</cite></dt><dd><p>If the <cite>full</cite> option is enabled, this can be further preamble commands,
+e.g. <code class="docutils literal notranslate"><span class="pre">\usepackage</span></code> (default: <code class="docutils literal notranslate"><span class="pre">''</span></code>).</p>
+</dd>
+<dt><cite>linenos</cite></dt><dd><p>If set to <code class="docutils literal notranslate"><span class="pre">True</span></code>, output line numbers (default: <code class="docutils literal notranslate"><span class="pre">False</span></code>).</p>
+</dd>
+<dt><cite>linenostart</cite></dt><dd><p>The line number for the first line (default: <code class="docutils literal notranslate"><span class="pre">1</span></code>).</p>
+</dd>
+<dt><cite>linenostep</cite></dt><dd><p>If set to a number n &gt; 1, only every nth line number is printed.</p>
+</dd>
+<dt><cite>verboptions</cite></dt><dd><p>Additional options given to the Verbatim environment (see the <em>fancyvrb</em>
+docs for possible values) (default: <code class="docutils literal notranslate"><span class="pre">''</span></code>).</p>
+</dd>
+<dt><cite>commandprefix</cite></dt><dd><p>The LaTeX commands used to produce colored output are constructed
+using this prefix and some letters (default: <code class="docutils literal notranslate"><span class="pre">'PY'</span></code>).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+<div class="versionchanged">
+<p><span class="versionmodified changed">Changed in version 0.10: </span>The default is now <code class="docutils literal notranslate"><span class="pre">'PY'</span></code> instead of <code class="docutils literal notranslate"><span class="pre">'C'</span></code>.</p>
+</div>
+</dd>
+<dt><cite>texcomments</cite></dt><dd><p>If set to <code class="docutils literal notranslate"><span class="pre">True</span></code>, enables LaTeX comment lines.  That is, LaTeX markup
+in comment tokens is not escaped so that LaTeX can render it (default:
+<code class="docutils literal notranslate"><span class="pre">False</span></code>).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.2.</span></p>
+</div>
+</dd>
+<dt><cite>mathescape</cite></dt><dd><p>If set to <code class="docutils literal notranslate"><span class="pre">True</span></code>, enables LaTeX math mode escape in comments. That
+is, <code class="docutils literal notranslate"><span class="pre">'$...$'</span></code> inside a comment will trigger math mode (default:
+<code class="docutils literal notranslate"><span class="pre">False</span></code>).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.2.</span></p>
+</div>
+</dd>
+<dt><cite>escapeinside</cite></dt><dd><p>If set to a string of length 2, enables escaping to LaTeX. Text
+delimited by these 2 characters is read as LaTeX code and
+typeset accordingly. It has no effect in string literals. It has
+no effect in comments if <cite>texcomments</cite> or <cite>mathescape</cite> is
+set. (default: <code class="docutils literal notranslate"><span class="pre">''</span></code>).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd>
+<dt><cite>envname</cite></dt><dd><p>Allows you to pick an alternative environment name replacing Verbatim.
+The alternate environment still has to support Verbatim’s option syntax.
+(default: <code class="docutils literal notranslate"><span class="pre">'Verbatim'</span></code>).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="class">
+<dt id="NullFormatter">
+<em class="property">class </em><code class="sig-name descname">NullFormatter</code><a class="headerlink" href="#NullFormatter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>text, null</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.txt</p>
+</dd>
+</dl>
+<p>Output the text unchanged without any formatting.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="RawTokenFormatter">
+<em class="property">class </em><code class="sig-name descname">RawTokenFormatter</code><a class="headerlink" href="#RawTokenFormatter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>raw, tokens</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.raw</p>
+</dd>
+</dl>
+<p>Format tokens as a raw representation for storing token streams.</p>
+<p>The format is <code class="docutils literal notranslate"><span class="pre">tokentype&lt;TAB&gt;repr(tokenstring)\n</span></code>. The output can later
+be converted to a token stream with the <cite>RawTokenLexer</cite>, described in the
+<a class="reference internal" href="lexers.html"><span class="doc">lexer list</span></a>.</p>
+<p>Only two options are accepted:</p>
+<dl>
+<dt><cite>compress</cite></dt><dd><p>If set to <code class="docutils literal notranslate"><span class="pre">'gz'</span></code> or <code class="docutils literal notranslate"><span class="pre">'bz2'</span></code>, compress the output with the given
+compression algorithm after encoding (default: <code class="docutils literal notranslate"><span class="pre">''</span></code>).</p>
+</dd>
+<dt><cite>error_color</cite></dt><dd><p>If set to a color name, highlight error tokens using that color.  If
+set but with no value, defaults to <code class="docutils literal notranslate"><span class="pre">'red'</span></code>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.11.</span></p>
+</div>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="class">
+<dt id="RtfFormatter">
+<em class="property">class </em><code class="sig-name descname">RtfFormatter</code><a class="headerlink" href="#RtfFormatter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>rtf</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rtf</p>
+</dd>
+</dl>
+<p>Format tokens as RTF markup. This formatter automatically outputs full RTF
+documents with color information and other useful stuff. Perfect for Copy and
+Paste into Microsoft(R) Word(R) documents.</p>
+<p>Please note that <code class="docutils literal notranslate"><span class="pre">encoding</span></code> and <code class="docutils literal notranslate"><span class="pre">outencoding</span></code> options are ignored.
+The RTF format is ASCII natively, but handles unicode characters correctly
+thanks to escape sequences.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+<p>Additional options accepted:</p>
+<dl>
+<dt><cite>style</cite></dt><dd><p>The style to use, can be a string or a Style subclass (default:
+<code class="docutils literal notranslate"><span class="pre">'default'</span></code>).</p>
+</dd>
+<dt><cite>fontface</cite></dt><dd><p>The font family to use, for example <code class="docutils literal notranslate"><span class="pre">Bitstream</span> <span class="pre">Vera</span> <span class="pre">Sans</span></code>. Defaults to
+a generic font which is supposed to have a fixed width.</p>
+</dd>
+<dt><cite>fontsize</cite></dt><dd><p>Size of the font used. Size is specified in half points. The
+default is 24 half-points, giving a size 12 font.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="class">
+<dt id="SvgFormatter">
+<em class="property">class </em><code class="sig-name descname">SvgFormatter</code><a class="headerlink" href="#SvgFormatter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>svg</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.svg</p>
+</dd>
+</dl>
+<p>Format tokens as an SVG graphics file.  This formatter is still experimental.
+Each line of code is a <code class="docutils literal notranslate"><span class="pre">&lt;text&gt;</span></code> element with explicit <code class="docutils literal notranslate"><span class="pre">x</span></code> and <code class="docutils literal notranslate"><span class="pre">y</span></code>
+coordinates containing <code class="docutils literal notranslate"><span class="pre">&lt;tspan&gt;</span></code> elements with the individual token styles.</p>
+<p>By default, this formatter outputs a full SVG document including doctype
+declaration and the <code class="docutils literal notranslate"><span class="pre">&lt;svg&gt;</span></code> root element.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.9.</span></p>
+</div>
+<p>Additional options accepted:</p>
+<dl class="simple">
+<dt><cite>nowrap</cite></dt><dd><p>Don’t wrap the SVG <code class="docutils literal notranslate"><span class="pre">&lt;text&gt;</span></code> elements in <code class="docutils literal notranslate"><span class="pre">&lt;svg&gt;&lt;g&gt;</span></code> elements and
+don’t add an XML declaration and a doctype.  If true, the <cite>fontfamily</cite>
+and <cite>fontsize</cite> options are ignored.  Defaults to <code class="docutils literal notranslate"><span class="pre">False</span></code>.</p>
+</dd>
+<dt><cite>fontfamily</cite></dt><dd><p>The value to give the wrapping <code class="docutils literal notranslate"><span class="pre">&lt;g&gt;</span></code> element’s <code class="docutils literal notranslate"><span class="pre">font-family</span></code>
+attribute, defaults to <code class="docutils literal notranslate"><span class="pre">&quot;monospace&quot;</span></code>.</p>
+</dd>
+<dt><cite>fontsize</cite></dt><dd><p>The value to give the wrapping <code class="docutils literal notranslate"><span class="pre">&lt;g&gt;</span></code> element’s <code class="docutils literal notranslate"><span class="pre">font-size</span></code>
+attribute, defaults to <code class="docutils literal notranslate"><span class="pre">&quot;14px&quot;</span></code>.</p>
+</dd>
+<dt><cite>xoffset</cite></dt><dd><p>Starting offset in X direction, defaults to <code class="docutils literal notranslate"><span class="pre">0</span></code>.</p>
+</dd>
+<dt><cite>yoffset</cite></dt><dd><p>Starting offset in Y direction, defaults to the font size if it is given
+in pixels, or <code class="docutils literal notranslate"><span class="pre">20</span></code> otherwise.  (This is necessary since text coordinates
+refer to the text baseline, not the top edge.)</p>
+</dd>
+<dt><cite>ystep</cite></dt><dd><p>Offset to add to the Y coordinate for each subsequent line.  This should
+roughly be the text size plus 5.  It defaults to that value if the text
+size is given in pixels, or <code class="docutils literal notranslate"><span class="pre">25</span></code> otherwise.</p>
+</dd>
+<dt><cite>spacehack</cite></dt><dd><p>Convert spaces in the source to <code class="docutils literal notranslate"><span class="pre">&amp;#160;</span></code>, which are non-breaking
+spaces.  SVG provides the <code class="docutils literal notranslate"><span class="pre">xml:space</span></code> attribute to control how
+whitespace inside tags is handled; in theory, the <code class="docutils literal notranslate"><span class="pre">preserve</span></code> value
+could be used to keep all whitespace as-is.  However, many current SVG
+viewers don’t obey that rule, so this option is provided as a workaround
+and defaults to <code class="docutils literal notranslate"><span class="pre">True</span></code>.</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="class">
+<dt id="Terminal256Formatter">
+<em class="property">class </em><code class="sig-name descname">Terminal256Formatter</code><a class="headerlink" href="#Terminal256Formatter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>terminal256, console256, 256</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+</dl>
+<p>Format tokens with ANSI color sequences, for output in a 256-color
+terminal or console.  Like in <cite>TerminalFormatter</cite>, color sequences
+are terminated at newlines, so that paging the output works correctly.</p>
+<p>The formatter takes colors from a style defined by the <cite>style</cite> option
+and converts them to nearest ANSI 256-color escape sequences. Bold and
+underline attributes from the style are preserved (and displayed).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.9.</span></p>
+</div>
+<div class="versionchanged">
+<p><span class="versionmodified changed">Changed in version 2.2: </span>If the used style defines foreground colors in the form <code class="docutils literal notranslate"><span class="pre">#ansi*</span></code>, then
+<cite>Terminal256Formatter</cite> will map these to the non-extended foreground colors.
+See <a class="reference internal" href="styles.html#ansiterminalstyle"><span class="std std-ref">Terminal Styles</span></a> for more information.</p>
+</div>
+<div class="versionchanged">
+<p><span class="versionmodified changed">Changed in version 2.4: </span>The ANSI color names have been updated with names that are easier to
+understand and align with colornames of other projects and terminals.
+See <a class="reference internal" href="styles.html#new-ansi-color-names"><span class="std std-ref">this table</span></a> for more information.</p>
+</div>
+<p>Options accepted:</p>
+<dl class="simple">
+<dt><cite>style</cite></dt><dd><p>The style to use, can be a string or a Style subclass (default:
+<code class="docutils literal notranslate"><span class="pre">'default'</span></code>).</p>
+</dd>
+</dl>
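+<p>A minimal sketch for printing to a 256-color terminal (the chosen style name
+is just an example):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.lexers import PythonLexer
+from pygments.formatters import Terminal256Formatter
+
+print(highlight('def foo(bar):\n    pass\n', PythonLexer(),
+                Terminal256Formatter(style='monokai')))
+</pre></div>
+</div>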
+</dd></dl>
+
+<dl class="class">
+<dt id="TerminalFormatter">
+<em class="property">class </em><code class="sig-name descname">TerminalFormatter</code><a class="headerlink" href="#TerminalFormatter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>terminal, console</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+</dl>
+<p>Format tokens with ANSI color sequences, for output in a text console.
+Color sequences are terminated at newlines, so that paging the output
+works correctly.</p>
+<p>The <cite>get_style_defs()</cite> method doesn’t do anything special since there is
+no support for common styles.</p>
+<p>Options accepted:</p>
+<dl class="simple">
+<dt><cite>bg</cite></dt><dd><p>Set to <code class="docutils literal notranslate"><span class="pre">&quot;light&quot;</span></code> or <code class="docutils literal notranslate"><span class="pre">&quot;dark&quot;</span></code> depending on the terminal’s background
+(default: <code class="docutils literal notranslate"><span class="pre">&quot;light&quot;</span></code>).</p>
+</dd>
+<dt><cite>colorscheme</cite></dt><dd><p>A dictionary mapping token types to (lightbg, darkbg) color names or
+<code class="docutils literal notranslate"><span class="pre">None</span></code> (default: <code class="docutils literal notranslate"><span class="pre">None</span></code> = use builtin colorscheme).</p>
+</dd>
+<dt><cite>linenos</cite></dt><dd><p>Set to <code class="docutils literal notranslate"><span class="pre">True</span></code> to have line numbers on the terminal output as well
+(default: <code class="docutils literal notranslate"><span class="pre">False</span></code> = no line numbers).</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="class">
+<dt id="TerminalTrueColorFormatter">
+<em class="property">class </em><code class="sig-name descname">TerminalTrueColorFormatter</code><a class="headerlink" href="#TerminalTrueColorFormatter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>terminal16m, console16m, 16m</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+</dl>
+<p>Format tokens with ANSI color sequences, for output in a true-color
+terminal or console.  Like in <cite>TerminalFormatter</cite>, color sequences
+are terminated at newlines, so that paging the output works correctly.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+<p>Options accepted:</p>
+<dl class="simple">
+<dt><cite>style</cite></dt><dd><p>The style to use, can be a string or a Style subclass (default:
+<code class="docutils literal notranslate"><span class="pre">'default'</span></code>).</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="class">
+<dt id="TestcaseFormatter">
+<em class="property">class </em><code class="sig-name descname">TestcaseFormatter</code><a class="headerlink" href="#TestcaseFormatter" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>testcase</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+</dl>
+<p>Format tokens as appropriate for a new testcase.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/index.html b/doc/_build/html/docs/index.html
new file mode 100644 (file)
index 0000000..23e0745
--- /dev/null
@@ -0,0 +1,179 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Pygments documentation &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Download and installation" href="../download.html" />
+    <link rel="prev" title="Welcome!" href="../index.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="#">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="../index.html"
+                        title="previous chapter">Welcome!</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="../download.html"
+                        title="next chapter">Download and installation</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/index.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="pygments-documentation">
+<h1>Pygments documentation<a class="headerlink" href="#pygments-documentation" title="Permalink to this headline">¶</a></h1>
+<p><strong>Starting with Pygments</strong></p>
+<div class="toctree-wrapper compound">
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="../download.html">Download and installation</a></li>
+<li class="toctree-l1"><a class="reference internal" href="quickstart.html">Introduction and Quickstart</a></li>
+<li class="toctree-l1"><a class="reference internal" href="cmdline.html">Command Line Interface</a></li>
+</ul>
+</div>
+<p><strong>Builtin components</strong></p>
+<div class="toctree-wrapper compound">
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="lexers.html">Available lexers</a></li>
+<li class="toctree-l1"><a class="reference internal" href="filters.html">Filters</a></li>
+<li class="toctree-l1"><a class="reference internal" href="formatters.html">Available formatters</a></li>
+<li class="toctree-l1"><a class="reference internal" href="styles.html">Styles</a></li>
+</ul>
+</div>
+<p><strong>Reference</strong></p>
+<div class="toctree-wrapper compound">
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="unicode.html">Unicode and Encodings</a></li>
+<li class="toctree-l1"><a class="reference internal" href="tokens.html">Builtin Tokens</a></li>
+<li class="toctree-l1"><a class="reference internal" href="api.html">The full Pygments API</a></li>
+</ul>
+</div>
+<p><strong>Hacking for Pygments</strong></p>
+<div class="toctree-wrapper compound">
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="lexerdevelopment.html">Write your own lexer</a></li>
+<li class="toctree-l1"><a class="reference internal" href="formatterdevelopment.html">Write your own formatter</a></li>
+<li class="toctree-l1"><a class="reference internal" href="filterdevelopment.html">Write your own filter</a></li>
+<li class="toctree-l1"><a class="reference internal" href="plugins.html">Register Plugins</a></li>
+</ul>
+</div>
+<p><strong>Hints and tricks</strong></p>
+<div class="toctree-wrapper compound">
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="rstdirective.html">Using Pygments in ReST documents</a></li>
+<li class="toctree-l1"><a class="reference internal" href="moinmoin.html">Using Pygments with MoinMoin</a></li>
+<li class="toctree-l1"><a class="reference internal" href="java.html">Use Pygments in Java</a></li>
+<li class="toctree-l1"><a class="reference internal" href="integrate.html">Using Pygments in various scenarios</a></li>
+</ul>
+</div>
+<p><strong>About Pygments</strong></p>
+<div class="toctree-wrapper compound">
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="changelog.html">Pygments changelog</a></li>
+<li class="toctree-l1"><a class="reference internal" href="authors.html">Full contributor list</a></li>
+</ul>
+</div>
+<p>If you find bugs or have suggestions for the documentation, please submit them
+on <a class="reference external" href="https://github.com/pygments/pygments">GitHub</a>.</p>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/integrate.html b/doc/_build/html/docs/integrate.html
new file mode 100644 (file)
index 0000000..22ac261
--- /dev/null
@@ -0,0 +1,166 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Using Pygments in various scenarios &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Pygments changelog" href="changelog.html" />
+    <link rel="prev" title="Use Pygments in Java" href="java.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="../index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">Using Pygments in various scenarios</a><ul>
+<li><a class="reference internal" href="#markdown">Markdown</a></li>
+<li><a class="reference internal" href="#textmate">TextMate</a></li>
+<li><a class="reference internal" href="#bash-completion">Bash completion</a></li>
+<li><a class="reference internal" href="#wrappers-for-other-languages">Wrappers for other languages</a></li>
+</ul>
+</li>
+</ul>
+
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="java.html"
+                        title="previous chapter">Use Pygments in Java</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="changelog.html"
+                        title="next chapter">Pygments changelog</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/integrate.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="using-pygments-in-various-scenarios">
+<h1>Using Pygments in various scenarios<a class="headerlink" href="#using-pygments-in-various-scenarios" title="Permalink to this headline">¶</a></h1>
+<div class="section" id="markdown">
+<h2>Markdown<a class="headerlink" href="#markdown" title="Permalink to this headline">¶</a></h2>
+<p>Since Pygments 0.9, the distribution ships sample code for a <a class="reference external" href="http://www.freewisdom.org/projects/python-markdown/">Markdown</a> preprocessor
+that uses Pygments to render source code, in
+<code class="file docutils literal notranslate"><span class="pre">external/markdown-processor.py</span></code>.  You can copy and adapt it to your
+liking.</p>
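+<p>A minimal sketch of the same idea, assuming the Python-Markdown package and its
+<code class="docutils literal notranslate"><span class="pre">codehilite</span></code> extension (which uses Pygments when it is installed) rather than the
+shipped preprocessor; the sample input is made up:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span># Sketch only: Python-Markdown's codehilite extension hands code blocks to Pygments.
+import markdown
+
+source = """
+    :::python
+    print("Hello World")
+"""
+
+# the ":::python" line names the lexer for codehilite
+print(markdown.markdown(source, extensions=["codehilite"]))
+</pre></div>
+</div>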
+</div>
+<div class="section" id="textmate">
+<h2>TextMate<a class="headerlink" href="#textmate" title="Permalink to this headline">¶</a></h2>
+<p>Antonio Cangiano has created a Pygments bundle for TextMate that allows you to
+colorize code via a simple menu option.  It can be found <a class="reference external" href="http://antoniocangiano.com/2008/10/28/pygments-textmate-bundle/">here</a>.</p>
+</div>
+<div class="section" id="bash-completion">
+<h2>Bash completion<a class="headerlink" href="#bash-completion" title="Permalink to this headline">¶</a></h2>
+<p>The source distribution contains a file <code class="docutils literal notranslate"><span class="pre">external/pygments.bashcomp</span></code> that
+sets up completion for the <code class="docutils literal notranslate"><span class="pre">pygmentize</span></code> command in bash.</p>
+</div>
+<div class="section" id="wrappers-for-other-languages">
+<h2>Wrappers for other languages<a class="headerlink" href="#wrappers-for-other-languages" title="Permalink to this headline">¶</a></h2>
+<p>These libraries provide Pygments highlighting for users of languages other
+than Python:</p>
+<ul class="simple">
+<li><p><a class="reference external" href="https://github.com/tmm1/pygments.rb">pygments.rb</a>, a pygments wrapper for Ruby</p></li>
+<li><p><a class="reference external" href="https://github.com/bfontaine/clygments">Clygments</a>, a pygments wrapper for
+Clojure</p></li>
+<li><p><a class="reference external" href="https://github.com/capynet/PHPygments">PHPygments</a>, a pygments wrapper for PHP</p></li>
+</ul>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/java.html b/doc/_build/html/docs/java.html
new file mode 100644 (file)
index 0000000..4dbb483
--- /dev/null
@@ -0,0 +1,184 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Use Pygments in Java &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Using Pygments in various scenarios" href="integrate.html" />
+    <link rel="prev" title="Using Pygments with MoinMoin" href="moinmoin.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="moinmoin.html"
+                        title="previous chapter">Using Pygments with MoinMoin</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="integrate.html"
+                        title="next chapter">Using Pygments in various scenarios</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/java.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="use-pygments-in-java">
+<h1>Use Pygments in Java<a class="headerlink" href="#use-pygments-in-java" title="Permalink to this headline">¶</a></h1>
+<p>Thanks to <a class="reference external" href="http://www.jython.org">Jython</a> it is possible to use Pygments in
+Java.</p>
+<p>This page is a simple tutorial to give you an idea of how this works. You can
+then look at the <a class="reference external" href="http://www.jython.org/docs/">Jython documentation</a> for more
+advanced uses.</p>
+<p>Since version 1.5, Pygments is deployed on <a class="reference external" href="http://repo1.maven.org/maven2/org/pygments/pygments/">Maven Central</a> as a JAR, as is Jython,
+which makes it a lot easier to create a Java project.</p>
+<p>Here is an example of a <a class="reference external" href="http://www.maven.org">Maven</a> <code class="docutils literal notranslate"><span class="pre">pom.xml</span></code> file for a
+project running Pygments:</p>
+<div class="highlight-xml notranslate"><div class="highlight"><pre><span></span><span class="cp">&lt;?xml version=&quot;1.0&quot; encoding=&quot;UTF-8&quot;?&gt;</span>
+
+<span class="nt">&lt;project</span> <span class="na">xmlns=</span><span class="s">&quot;http://maven.apache.org/POM/4.0.0&quot;</span>
+         <span class="na">xmlns:xsi=</span><span class="s">&quot;http://www.w3.org/2001/XMLSchema-instance&quot;</span>
+         <span class="na">xsi:schemaLocation=</span><span class="s">&quot;http://maven.apache.org/POM/4.0.0</span>
+<span class="s">                             http://maven.apache.org/maven-v4_0_0.xsd&quot;</span><span class="nt">&gt;</span>
+  <span class="nt">&lt;modelVersion&gt;</span>4.0.0<span class="nt">&lt;/modelVersion&gt;</span>
+  <span class="nt">&lt;groupId&gt;</span>example<span class="nt">&lt;/groupId&gt;</span>
+  <span class="nt">&lt;artifactId&gt;</span>example<span class="nt">&lt;/artifactId&gt;</span>
+  <span class="nt">&lt;version&gt;</span>1.0-SNAPSHOT<span class="nt">&lt;/version&gt;</span>
+  <span class="nt">&lt;dependencies&gt;</span>
+    <span class="nt">&lt;dependency&gt;</span>
+      <span class="nt">&lt;groupId&gt;</span>org.python<span class="nt">&lt;/groupId&gt;</span>
+      <span class="nt">&lt;artifactId&gt;</span>jython-standalone<span class="nt">&lt;/artifactId&gt;</span>
+      <span class="nt">&lt;version&gt;</span>2.5.3<span class="nt">&lt;/version&gt;</span>
+    <span class="nt">&lt;/dependency&gt;</span>
+    <span class="nt">&lt;dependency&gt;</span>
+      <span class="nt">&lt;groupId&gt;</span>org.pygments<span class="nt">&lt;/groupId&gt;</span>
+      <span class="nt">&lt;artifactId&gt;</span>pygments<span class="nt">&lt;/artifactId&gt;</span>
+      <span class="nt">&lt;version&gt;</span>1.5<span class="nt">&lt;/version&gt;</span>
+      <span class="nt">&lt;scope&gt;</span>runtime<span class="nt">&lt;/scope&gt;</span>
+    <span class="nt">&lt;/dependency&gt;</span>
+  <span class="nt">&lt;/dependencies&gt;</span>
+<span class="nt">&lt;/project&gt;</span>
+</pre></div>
+</div>
+<p>The following Java example:</p>
+<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="n">PythonInterpreter</span> <span class="n">interpreter</span> <span class="o">=</span> <span class="k">new</span> <span class="n">PythonInterpreter</span><span class="p">();</span>
+
+<span class="c1">// Set a variable with the content you want to work with</span>
+<span class="n">interpreter</span><span class="p">.</span><span class="na">set</span><span class="p">(</span><span class="s">&quot;code&quot;</span><span class="p">,</span> <span class="n">code</span><span class="p">);</span>
+
+<span class="c1">// Simple use Pygments as you would in Python</span>
+<span class="n">interpreter</span><span class="p">.</span><span class="na">exec</span><span class="p">(</span><span class="s">&quot;from pygments import highlight\n&quot;</span>
+    <span class="o">+</span> <span class="s">&quot;from pygments.lexers import PythonLexer\n&quot;</span>
+    <span class="o">+</span> <span class="s">&quot;from pygments.formatters import HtmlFormatter\n&quot;</span>
+    <span class="o">+</span> <span class="s">&quot;\nresult = highlight(code, PythonLexer(), HtmlFormatter())&quot;</span><span class="p">);</span>
+
+<span class="c1">// Get the result that has been set in a variable</span>
+<span class="n">System</span><span class="p">.</span><span class="na">out</span><span class="p">.</span><span class="na">println</span><span class="p">(</span><span class="n">interpreter</span><span class="p">.</span><span class="na">get</span><span class="p">(</span><span class="s">&quot;result&quot;</span><span class="p">,</span> <span class="n">String</span><span class="p">.</span><span class="na">class</span><span class="p">));</span>
+</pre></div>
+</div>
+<p>will print something like:</p>
+<div class="highlight-html notranslate"><div class="highlight"><pre><span></span><span class="p">&lt;</span><span class="nt">div</span> <span class="na">class</span><span class="o">=</span><span class="s">&quot;highlight&quot;</span><span class="p">&gt;</span>
+<span class="p">&lt;</span><span class="nt">pre</span><span class="p">&gt;&lt;</span><span class="nt">span</span> <span class="na">class</span><span class="o">=</span><span class="s">&quot;k&quot;</span><span class="p">&gt;</span>print<span class="p">&lt;/</span><span class="nt">span</span><span class="p">&gt;</span> <span class="p">&lt;</span><span class="nt">span</span> <span class="na">class</span><span class="o">=</span><span class="s">&quot;s&quot;</span><span class="p">&gt;</span><span class="ni">&amp;quot;</span>Hello World<span class="ni">&amp;quot;</span><span class="p">&lt;/</span><span class="nt">span</span><span class="p">&gt;&lt;/</span><span class="nt">pre</span><span class="p">&gt;</span>
+<span class="p">&lt;/</span><span class="nt">div</span><span class="p">&gt;</span>
+</pre></div>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/lexerdevelopment.html b/doc/_build/html/docs/lexerdevelopment.html
new file mode 100644 (file)
index 0000000..75ede3b
--- /dev/null
@@ -0,0 +1,774 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Write your own lexer &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Write your own formatter" href="formatterdevelopment.html" />
+    <link rel="prev" title="The full Pygments API" href="api.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="../index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">Write your own lexer</a><ul>
+<li><a class="reference internal" href="#regexlexer">RegexLexer</a></li>
+<li><a class="reference internal" href="#adding-and-testing-a-new-lexer">Adding and testing a new lexer</a></li>
+<li><a class="reference internal" href="#regex-flags">Regex Flags</a></li>
+<li><a class="reference internal" href="#scanning-multiple-tokens-at-once">Scanning multiple tokens at once</a></li>
+<li><a class="reference internal" href="#changing-states">Changing states</a></li>
+<li><a class="reference internal" href="#advanced-state-tricks">Advanced state tricks</a></li>
+<li><a class="reference internal" href="#subclassing-lexers-derived-from-regexlexer">Subclassing lexers derived from RegexLexer</a></li>
+<li><a class="reference internal" href="#using-multiple-lexers">Using multiple lexers</a></li>
+<li><a class="reference internal" href="#delegating-lexer">Delegating Lexer</a></li>
+<li><a class="reference internal" href="#callbacks">Callbacks</a></li>
+<li><a class="reference internal" href="#the-extendedregexlexer-class">The ExtendedRegexLexer class</a></li>
+<li><a class="reference internal" href="#handling-lists-of-keywords">Handling Lists of Keywords</a></li>
+<li><a class="reference internal" href="#modifying-token-streams">Modifying Token Streams</a></li>
+</ul>
+</li>
+</ul>
+
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="api.html"
+                        title="previous chapter">The full Pygments API</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="formatterdevelopment.html"
+                        title="next chapter">Write your own formatter</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/lexerdevelopment.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="write-your-own-lexer">
+<h1>Write your own lexer<a class="headerlink" href="#write-your-own-lexer" title="Permalink to this headline">¶</a></h1>
+<p>If a lexer for your favorite language is missing in the Pygments package, you
+can easily write your own and extend Pygments.</p>
+<p>All you need can be found inside the <a class="reference internal" href="api.html#module-pygments.lexer" title="pygments.lexer"><code class="xref py py-mod docutils literal notranslate"><span class="pre">pygments.lexer</span></code></a> module.  As you can
+read in the <a class="reference internal" href="api.html"><span class="doc">API documentation</span></a>, a lexer is a class that is
+initialized with some keyword arguments (the lexer options) and that provides a
+<a class="reference internal" href="api.html#pygments.lexer.Lexer.get_tokens_unprocessed" title="pygments.lexer.Lexer.get_tokens_unprocessed"><code class="xref py py-meth docutils literal notranslate"><span class="pre">get_tokens_unprocessed()</span></code></a> method which is given a string or unicode
+object with the data to lex.</p>
+<p>The <a class="reference internal" href="api.html#pygments.lexer.Lexer.get_tokens_unprocessed" title="pygments.lexer.Lexer.get_tokens_unprocessed"><code class="xref py py-meth docutils literal notranslate"><span class="pre">get_tokens_unprocessed()</span></code></a> method must return an iterator or iterable
+containing tuples in the form <code class="docutils literal notranslate"><span class="pre">(index,</span> <span class="pre">token,</span> <span class="pre">value)</span></code>.  Normally you don’t
+need to do this since there are base lexers that do most of the work and that
+you can subclass.</p>
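+<p>As a small sketch of that tuple format, using a lexer that ships with Pygments
+(the input string is arbitrary):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span># Sketch: inspect the (index, token, value) tuples produced by an existing lexer.
+from pygments.lexers import PythonLexer
+
+for index, token, value in PythonLexer().get_tokens_unprocessed('print("hi")\n'):
+    print(index, token, repr(value))
+</pre></div>
+</div>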
+<div class="section" id="regexlexer">
+<h2>RegexLexer<a class="headerlink" href="#regexlexer" title="Permalink to this headline">¶</a></h2>
+<p>The lexer base class used by almost all of Pygments’ lexers is the
+<code class="xref py py-class docutils literal notranslate"><span class="pre">RegexLexer</span></code>.  This class allows you to define lexing rules in terms of
+<em>regular expressions</em> for different <em>states</em>.</p>
+<p>States are groups of regular expressions that are matched against the input
+string at the <em>current position</em>.  If one of these expressions matches, a
+corresponding action is performed (such as yielding a token with a specific
+type, or changing state), the current position is set to where the last match
+ended and the matching process continues with the first regex of the current
+state.</p>
+<p>Lexer states are kept on a stack: each time a new state is entered, the new
+state is pushed onto the stack.  The most basic lexers (like the <cite>DiffLexer</cite>)
+just need one state.</p>
+<p>Each state is defined as a list of tuples in the form (<cite>regex</cite>, <cite>action</cite>,
+<cite>new_state</cite>) where the last item is optional.  In the most basic form, <cite>action</cite>
+is a token type (like <cite>Name.Builtin</cite>).  That means: When <cite>regex</cite> matches, emit a
+token with the match text and type <cite>tokentype</cite> and push <cite>new_state</cite> on the state
+stack.  If the new state is <code class="docutils literal notranslate"><span class="pre">'#pop'</span></code>, the topmost state is popped from the
+stack instead.  To pop more than one state, use <code class="docutils literal notranslate"><span class="pre">'#pop:2'</span></code> and so on.
+<code class="docutils literal notranslate"><span class="pre">'#push'</span></code> is a synonym for pushing the current state on the stack.</p>
+<p>The following example shows the <cite>DiffLexer</cite> from the builtin lexers.  Note that
+it contains some additional attributes <cite>name</cite>, <cite>aliases</cite> and <cite>filenames</cite> which
+aren’t required for a lexer.  They are used by the builtin lexer lookup
+functions.</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.lexer</span> <span class="kn">import</span> <span class="n">RegexLexer</span>
+<span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="o">*</span>
+
+<span class="k">class</span> <span class="nc">DiffLexer</span><span class="p">(</span><span class="n">RegexLexer</span><span class="p">):</span>
+    <span class="n">name</span> <span class="o">=</span> <span class="s1">&#39;Diff&#39;</span>
+    <span class="n">aliases</span> <span class="o">=</span> <span class="p">[</span><span class="s1">&#39;diff&#39;</span><span class="p">]</span>
+    <span class="n">filenames</span> <span class="o">=</span> <span class="p">[</span><span class="s1">&#39;*.diff&#39;</span><span class="p">]</span>
+
+    <span class="n">tokens</span> <span class="o">=</span> <span class="p">{</span>
+        <span class="s1">&#39;root&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39; .*\n&#39;</span><span class="p">,</span> <span class="n">Text</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;\+.*\n&#39;</span><span class="p">,</span> <span class="n">Generic</span><span class="o">.</span><span class="n">Inserted</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;-.*\n&#39;</span><span class="p">,</span> <span class="n">Generic</span><span class="o">.</span><span class="n">Deleted</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;@.*\n&#39;</span><span class="p">,</span> <span class="n">Generic</span><span class="o">.</span><span class="n">Subheading</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;Index.*\n&#39;</span><span class="p">,</span> <span class="n">Generic</span><span class="o">.</span><span class="n">Heading</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;=.*\n&#39;</span><span class="p">,</span> <span class="n">Generic</span><span class="o">.</span><span class="n">Heading</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;.*\n&#39;</span><span class="p">,</span> <span class="n">Text</span><span class="p">),</span>
+        <span class="p">]</span>
+    <span class="p">}</span>
+</pre></div>
+</div>
+<p>As you can see, this lexer only uses one state.  When the lexer starts scanning
+the text, it first checks if the current character is a space.  If this is true
+it scans everything until newline and returns the data as a <cite>Text</cite> token (which
+is the “no special highlighting” token).</p>
+<p>If this rule doesn’t match, it checks if the current char is a plus sign.  And
+so on.</p>
+<p>If no rule matches at the current position, the current char is emitted as an
+<cite>Error</cite> token that indicates a lexing error, and the position is increased by
+one.</p>
+</div>
+<div class="section" id="adding-and-testing-a-new-lexer">
+<h2>Adding and testing a new lexer<a class="headerlink" href="#adding-and-testing-a-new-lexer" title="Permalink to this headline">¶</a></h2>
+<p>The easiest way to use a new lexer is to use Pygments’ support for loading
+the lexer from a file relative to your current directory.</p>
+<p>First, change the name of your lexer class to CustomLexer:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.lexer</span> <span class="kn">import</span> <span class="n">RegexLexer</span>
+<span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="o">*</span>
+
+<span class="k">class</span> <span class="nc">CustomLexer</span><span class="p">(</span><span class="n">RegexLexer</span><span class="p">):</span>
+    <span class="sd">&quot;&quot;&quot;All your lexer code goes here!&quot;&quot;&quot;</span>
+</pre></div>
+</div>
+<p>Then you can load the lexer from the command line with the additional
+flag <code class="docutils literal notranslate"><span class="pre">-x</span></code>:</p>
+<div class="highlight-console notranslate"><div class="highlight"><pre><span></span><span class="gp">$</span> pygmentize -l your_lexer_file.py -x
+</pre></div>
+</div>
+<p>To specify a class name other than CustomLexer, append it with a colon:</p>
+<div class="highlight-console notranslate"><div class="highlight"><pre><span></span><span class="gp">$</span> pygmentize -l your_lexer.py:SomeLexer -x
+</pre></div>
+</div>
+<p>Or, using the Python API:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="c1"># For a lexer named CustomLexer</span>
+<span class="n">your_lexer</span> <span class="o">=</span> <span class="n">load_lexer_from_file</span><span class="p">(</span><span class="n">filename</span><span class="p">,</span> <span class="o">**</span><span class="n">options</span><span class="p">)</span>
+
+<span class="c1"># For a lexer named MyNewLexer</span>
+<span class="n">your_named_lexer</span> <span class="o">=</span> <span class="n">load_lexer_from_file</span><span class="p">(</span><span class="n">filename</span><span class="p">,</span> <span class="s2">&quot;MyNewLexer&quot;</span><span class="p">,</span> <span class="o">**</span><span class="n">options</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>When loading custom lexers and formatters, be extremely careful to use only
+trusted files; Pygments will perform the equivalent of <code class="docutils literal notranslate"><span class="pre">eval</span></code> on them.</p>
+<p>If you only want to use your lexer with the Pygments API, you can import and
+instantiate the lexer yourself, then pass it to <a class="reference internal" href="api.html#pygments.highlight" title="pygments.highlight"><code class="xref py py-func docutils literal notranslate"><span class="pre">pygments.highlight()</span></code></a>.</p>
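+<p>A minimal sketch of that approach; the module name <code class="docutils literal notranslate"><span class="pre">your_lexer_file</span></code>, the class name
+<code class="docutils literal notranslate"><span class="pre">CustomLexer</span></code> and the sample input are placeholders for your own code:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span># Sketch: use a custom lexer directly through the Pygments API.
+from pygments import highlight
+from pygments.formatters import HtmlFormatter
+
+from your_lexer_file import CustomLexer   # placeholder for your own module
+
+code = 'some input in your language'      # placeholder input
+print(highlight(code, CustomLexer(), HtmlFormatter()))
+</pre></div>
+</div>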
+<p>To prepare your new lexer for inclusion in the Pygments distribution, so that it
+will be found when passing filenames or lexer aliases from the command line, you
+have to perform the following steps.</p>
+<p>First, change to the directory containing the Pygments source code.  You
+will need to have either an unpacked source tarball, or (preferably) a copy
+cloned from GitHub.</p>
+<div class="highlight-console notranslate"><div class="highlight"><pre><span></span><span class="gp">$</span> <span class="nb">cd</span> .../pygments-main
+</pre></div>
+</div>
+<p>Select a matching module under <code class="docutils literal notranslate"><span class="pre">pygments/lexers</span></code>, or create a new module for
+your lexer class.</p>
+<p>Next, make sure the lexer is known from outside of the module.  All modules in
+the <code class="docutils literal notranslate"><span class="pre">pygments.lexers</span></code> package specify <code class="docutils literal notranslate"><span class="pre">__all__</span></code>. For example,
+<code class="docutils literal notranslate"><span class="pre">esoteric.py</span></code> sets:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="n">__all__</span> <span class="o">=</span> <span class="p">[</span><span class="s1">&#39;BrainfuckLexer&#39;</span><span class="p">,</span> <span class="s1">&#39;BefungeLexer&#39;</span><span class="p">,</span> <span class="o">...</span><span class="p">]</span>
+</pre></div>
+</div>
+<p>Add the name of your lexer class to this list (or create the list if your lexer
+is the only class in the module).</p>
+<p>Finally the lexer can be made publicly known by rebuilding the lexer mapping:</p>
+<div class="highlight-console notranslate"><div class="highlight"><pre><span></span><span class="gp">$</span> make mapfiles
+</pre></div>
+</div>
+<p>To test the new lexer, store an example file with the proper extension in
+<code class="docutils literal notranslate"><span class="pre">tests/examplefiles</span></code>.  For example, to test your <code class="docutils literal notranslate"><span class="pre">DiffLexer</span></code>, add a
+<code class="docutils literal notranslate"><span class="pre">tests/examplefiles/example.diff</span></code> containing a sample diff output.</p>
+<p>Now you can use pygmentize to render your example to HTML:</p>
+<div class="highlight-console notranslate"><div class="highlight"><pre><span></span><span class="gp">$</span> ./pygmentize -O full -f html -o /tmp/example.html tests/examplefiles/example.diff
+</pre></div>
+</div>
+<p>Note that this explicitly calls the <code class="docutils literal notranslate"><span class="pre">pygmentize</span></code> script in the current directory
+by preceding it with <code class="docutils literal notranslate"><span class="pre">./</span></code>. This ensures your modifications are used.
+Otherwise, a possibly already installed, unmodified version without your new
+lexer would be called from the system search path (<code class="docutils literal notranslate"><span class="pre">$PATH</span></code>).</p>
+<p>To view the result, open <code class="docutils literal notranslate"><span class="pre">/tmp/example.html</span></code> in your browser.</p>
+<p>Once the example renders as expected, you should run the complete test suite:</p>
+<div class="highlight-console notranslate"><div class="highlight"><pre><span></span><span class="gp">$</span> make <span class="nb">test</span>
+</pre></div>
+</div>
+<p>It also tests that your lexer fulfills the lexer API and certain invariants,
+such as that the concatenation of all token text is the same as the input text.</p>
+</div>
+<div class="section" id="regex-flags">
+<h2>Regex Flags<a class="headerlink" href="#regex-flags" title="Permalink to this headline">¶</a></h2>
+<p>You can either define regex flags locally in the regex (<code class="docutils literal notranslate"><span class="pre">r'(?x)foo</span> <span class="pre">bar'</span></code>) or
+globally by adding a <cite>flags</cite> attribute to your lexer class.  If no attribute is
+defined, it defaults to <cite>re.MULTILINE</cite>.  For more information about regular
+expression flags see the page about <a class="reference external" href="http://docs.python.org/library/re.html#regular-expression-syntax">regular expressions</a> in the Python
+documentation.</p>
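+<p>A sketch of a lexer that sets the attribute globally; the class and its rules are
+made up purely for illustration:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span># Sketch: a made-up lexer that overrides the default flags attribute.
+import re
+
+from pygments.lexer import RegexLexer
+from pygments.token import Comment, Text
+
+class HypotheticalLexer(RegexLexer):
+    name = 'Hypothetical example'
+
+    # verbose patterns in addition to the re.MULTILINE default
+    flags = re.MULTILINE | re.VERBOSE
+
+    tokens = {
+        'root': [
+            # whitespace inside the patterns is ignored under re.VERBOSE
+            (r'\# [^\n]* \n', Comment),
+            (r'[^\n]* \n', Text),
+        ]
+    }
+</pre></div>
+</div>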
+</div>
+<div class="section" id="scanning-multiple-tokens-at-once">
+<h2>Scanning multiple tokens at once<a class="headerlink" href="#scanning-multiple-tokens-at-once" title="Permalink to this headline">¶</a></h2>
+<p>So far, the <cite>action</cite> element in the rule tuple of regex, action and state has
+been a single token type.  Now we look at the first of several other possible
+values.</p>
+<p>Here is a more complex lexer that highlights INI files.  INI files consist of
+sections, comments and <code class="docutils literal notranslate"><span class="pre">key</span> <span class="pre">=</span> <span class="pre">value</span></code> pairs:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.lexer</span> <span class="kn">import</span> <span class="n">RegexLexer</span><span class="p">,</span> <span class="n">bygroups</span>
+<span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="o">*</span>
+
+<span class="k">class</span> <span class="nc">IniLexer</span><span class="p">(</span><span class="n">RegexLexer</span><span class="p">):</span>
+    <span class="n">name</span> <span class="o">=</span> <span class="s1">&#39;INI&#39;</span>
+    <span class="n">aliases</span> <span class="o">=</span> <span class="p">[</span><span class="s1">&#39;ini&#39;</span><span class="p">,</span> <span class="s1">&#39;cfg&#39;</span><span class="p">]</span>
+    <span class="n">filenames</span> <span class="o">=</span> <span class="p">[</span><span class="s1">&#39;*.ini&#39;</span><span class="p">,</span> <span class="s1">&#39;*.cfg&#39;</span><span class="p">]</span>
+
+    <span class="n">tokens</span> <span class="o">=</span> <span class="p">{</span>
+        <span class="s1">&#39;root&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;\s+&#39;</span><span class="p">,</span> <span class="n">Text</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;;.*?$&#39;</span><span class="p">,</span> <span class="n">Comment</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;\[.*?\]$&#39;</span><span class="p">,</span> <span class="n">Keyword</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;(.*?)(\s*)(=)(\s*)(.*?)$&#39;</span><span class="p">,</span>
+             <span class="n">bygroups</span><span class="p">(</span><span class="n">Name</span><span class="o">.</span><span class="n">Attribute</span><span class="p">,</span> <span class="n">Text</span><span class="p">,</span> <span class="n">Operator</span><span class="p">,</span> <span class="n">Text</span><span class="p">,</span> <span class="n">String</span><span class="p">))</span>
+        <span class="p">]</span>
+    <span class="p">}</span>
+</pre></div>
+</div>
+<p>The lexer first looks for whitespace, comments and section names.  Later it
+looks for a line that looks like a key, value pair, separated by an <code class="docutils literal notranslate"><span class="pre">'='</span></code>
+sign, and optional whitespace.</p>
+<p>The <cite>bygroups</cite> helper yields each capturing group in the regex with a different
+token type.  First the <cite>Name.Attribute</cite> token, then a <cite>Text</cite> token for the
+optional whitespace, after that an <cite>Operator</cite> token for the equals sign. Then a
+<cite>Text</cite> token for the whitespace again.  The rest of the line is returned as
+<cite>String</cite>.</p>
+<p>Note that for this to work, every part of the match must be inside a capturing
+group (a <code class="docutils literal notranslate"><span class="pre">(...)</span></code>), and there must not be any nested capturing groups.  If you
+nevertheless need a group, use a non-capturing group defined using this syntax:
+<code class="docutils literal notranslate"><span class="pre">(?:some|words|here)</span></code> (note the <code class="docutils literal notranslate"><span class="pre">?:</span></code> after the beginning parenthesis).</p>
+<p>If you find yourself needing a capturing group inside the regex which shouldn’t
+be part of the output but is used in the regular expression for backreferencing
+(e.g. <code class="docutils literal notranslate"><span class="pre">r'(&lt;(foo|bar)&gt;)(.*?)(&lt;/\2&gt;)'</span></code>), you can pass <cite>None</cite> to the bygroups
+function and that group will be skipped in the output.</p>
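+<p>A sketch of such a rule; the token types chosen here are arbitrary, and the rule
+would sit inside a state list of a lexer’s <cite>tokens</cite> dictionary:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span># Sketch: the backreferenced group gets None and produces no output of its own.
+from pygments.lexer import bygroups
+from pygments.token import Name, Text
+
+rule = (r'(&lt;(foo|bar)&gt;)(.*?)(&lt;/\2&gt;)',
+        bygroups(Name.Tag, None, Text, Name.Tag))
+</pre></div>
+</div>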
+</div>
+<div class="section" id="changing-states">
+<h2>Changing states<a class="headerlink" href="#changing-states" title="Permalink to this headline">¶</a></h2>
+<p>Many lexers need multiple states to work as expected.  For example, some
+languages allow multiline comments to be nested.  Since this is a recursive
+pattern it’s impossible to lex just using regular expressions.</p>
+<p>Here is a lexer that recognizes C++ style comments (multi-line with <code class="docutils literal notranslate"><span class="pre">/*</span> <span class="pre">*/</span></code>
+and single-line with <code class="docutils literal notranslate"><span class="pre">//</span></code> until end of line):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.lexer</span> <span class="kn">import</span> <span class="n">RegexLexer</span>
+<span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="o">*</span>
+
+<span class="k">class</span> <span class="nc">CppCommentLexer</span><span class="p">(</span><span class="n">RegexLexer</span><span class="p">):</span>
+    <span class="n">name</span> <span class="o">=</span> <span class="s1">&#39;Example Lexer with states&#39;</span>
+
+    <span class="n">tokens</span> <span class="o">=</span> <span class="p">{</span>
+        <span class="s1">&#39;root&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;[^/]+&#39;</span><span class="p">,</span> <span class="n">Text</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;/\*&#39;</span><span class="p">,</span> <span class="n">Comment</span><span class="o">.</span><span class="n">Multiline</span><span class="p">,</span> <span class="s1">&#39;comment&#39;</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;//.*?$&#39;</span><span class="p">,</span> <span class="n">Comment</span><span class="o">.</span><span class="n">Singleline</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;/&#39;</span><span class="p">,</span> <span class="n">Text</span><span class="p">)</span>
+        <span class="p">],</span>
+        <span class="s1">&#39;comment&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;[^*/]&#39;</span><span class="p">,</span> <span class="n">Comment</span><span class="o">.</span><span class="n">Multiline</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;/\*&#39;</span><span class="p">,</span> <span class="n">Comment</span><span class="o">.</span><span class="n">Multiline</span><span class="p">,</span> <span class="s1">&#39;#push&#39;</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;\*/&#39;</span><span class="p">,</span> <span class="n">Comment</span><span class="o">.</span><span class="n">Multiline</span><span class="p">,</span> <span class="s1">&#39;#pop&#39;</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;[*/]&#39;</span><span class="p">,</span> <span class="n">Comment</span><span class="o">.</span><span class="n">Multiline</span><span class="p">)</span>
+        <span class="p">]</span>
+    <span class="p">}</span>
+</pre></div>
+</div>
+<p>This lexer starts lexing in the <code class="docutils literal notranslate"><span class="pre">'root'</span></code> state. It tries to match as much as
+possible until it finds a slash (<code class="docutils literal notranslate"><span class="pre">'/'</span></code>).  If the next character after the slash
+is an asterisk (<code class="docutils literal notranslate"><span class="pre">'*'</span></code>) the <cite>RegexLexer</cite> sends those two characters to the
+output stream marked as <cite>Comment.Multiline</cite> and continues lexing with the rules
+defined in the <code class="docutils literal notranslate"><span class="pre">'comment'</span></code> state.</p>
+<p>If there wasn’t an asterisk after the slash, the <cite>RegexLexer</cite> checks if it’s a
+Singleline comment (i.e. followed by a second slash).  If this also wasn’t the
+case it must be a single slash, which is not a comment starter (the separate
+regex for a single slash must also be given, else the slash would be marked as
+an error token).</p>
+<p>Inside the <code class="docutils literal notranslate"><span class="pre">'comment'</span></code> state, we do the same thing again.  Scan until the
+lexer finds a star or slash.  If it’s the opening of a multiline comment, push
+the <code class="docutils literal notranslate"><span class="pre">'comment'</span></code> state on the stack and continue scanning, again in the
+<code class="docutils literal notranslate"><span class="pre">'comment'</span></code> state.  Else, check if it’s the end of the multiline comment.  If
+yes, pop one state from the stack.</p>
+<p>Note: If you pop from an empty stack you’ll get an <cite>IndexError</cite>.  (There is an
+easy way to prevent this from happening: don’t <code class="docutils literal notranslate"><span class="pre">'#pop'</span></code> in the root state).</p>
+<p>If the <cite>RegexLexer</cite> encounters a newline that is flagged as an error token, the
+stack is emptied and the lexer continues scanning in the <code class="docutils literal notranslate"><span class="pre">'root'</span></code> state.  This
+can help produce error-tolerant highlighting for erroneous input, e.g. when a
+single-line string is not closed.</p>
+</div>
+<div class="section" id="advanced-state-tricks">
+<h2>Advanced state tricks<a class="headerlink" href="#advanced-state-tricks" title="Permalink to this headline">¶</a></h2>
+<p>There are a few more things you can do with states:</p>
+<ul>
+<li><p>You can push multiple states onto the stack if you give a tuple instead of a
+simple string as the third item in a rule tuple.  For example, if you want to
+match a comment containing a directive, something like:</p>
+<div class="highlight-text notranslate"><div class="highlight"><pre><span></span>/* &lt;processing directive&gt;    rest of comment */
+</pre></div>
+</div>
+<p>you can use this rule:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="n">tokens</span> <span class="o">=</span> <span class="p">{</span>
+    <span class="s1">&#39;root&#39;</span><span class="p">:</span> <span class="p">[</span>
+        <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;/\* &lt;&#39;</span><span class="p">,</span> <span class="n">Comment</span><span class="p">,</span> <span class="p">(</span><span class="s1">&#39;comment&#39;</span><span class="p">,</span> <span class="s1">&#39;directive&#39;</span><span class="p">)),</span>
+        <span class="o">...</span>
+    <span class="p">],</span>
+    <span class="s1">&#39;directive&#39;</span><span class="p">:</span> <span class="p">[</span>
+        <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;[^&gt;]*&#39;</span><span class="p">,</span> <span class="n">Comment</span><span class="o">.</span><span class="n">Directive</span><span class="p">),</span>
+        <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;&gt;&#39;</span><span class="p">,</span> <span class="n">Comment</span><span class="p">,</span> <span class="s1">&#39;#pop&#39;</span><span class="p">),</span>
+    <span class="p">],</span>
+    <span class="s1">&#39;comment&#39;</span><span class="p">:</span> <span class="p">[</span>
+        <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;[^*]+&#39;</span><span class="p">,</span> <span class="n">Comment</span><span class="p">),</span>
+        <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;\*/&#39;</span><span class="p">,</span> <span class="n">Comment</span><span class="p">,</span> <span class="s1">&#39;#pop&#39;</span><span class="p">),</span>
+        <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;\*&#39;</span><span class="p">,</span> <span class="n">Comment</span><span class="p">),</span>
+    <span class="p">]</span>
+<span class="p">}</span>
+</pre></div>
+</div>
+<p>When this encounters the above sample, first <code class="docutils literal notranslate"><span class="pre">'comment'</span></code> and <code class="docutils literal notranslate"><span class="pre">'directive'</span></code>
+are pushed onto the stack, then the lexer continues in the directive state
+until it finds the closing <code class="docutils literal notranslate"><span class="pre">&gt;</span></code>, then it continues in the comment state until
+the closing <code class="docutils literal notranslate"><span class="pre">*/</span></code>.  Then, both states are popped from the stack again and
+lexing continues in the root state.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.9: </span>The tuple can contain the special <code class="docutils literal notranslate"><span class="pre">'#push'</span></code> and <code class="docutils literal notranslate"><span class="pre">'#pop'</span></code> (but not
+<code class="docutils literal notranslate"><span class="pre">'#pop:n'</span></code>) directives.</p>
+</div>
+</li>
+<li><p>You can include the rules of a state in the definition of another.  This is
+done by using <cite>include</cite> from <cite>pygments.lexer</cite>:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.lexer</span> <span class="kn">import</span> <span class="n">RegexLexer</span><span class="p">,</span> <span class="n">bygroups</span><span class="p">,</span> <span class="n">include</span>
+<span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="o">*</span>
+
+<span class="k">class</span> <span class="nc">ExampleLexer</span><span class="p">(</span><span class="n">RegexLexer</span><span class="p">):</span>
+    <span class="n">tokens</span> <span class="o">=</span> <span class="p">{</span>
+        <span class="s1">&#39;comments&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;/\*.*?\*/&#39;</span><span class="p">,</span> <span class="n">Comment</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;//.*?\n&#39;</span><span class="p">,</span> <span class="n">Comment</span><span class="p">),</span>
+        <span class="p">],</span>
+        <span class="s1">&#39;root&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="n">include</span><span class="p">(</span><span class="s1">&#39;comments&#39;</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;(function )(\w+)( {)&#39;</span><span class="p">,</span>
+             <span class="n">bygroups</span><span class="p">(</span><span class="n">Keyword</span><span class="p">,</span> <span class="n">Name</span><span class="p">,</span> <span class="n">Keyword</span><span class="p">),</span> <span class="s1">&#39;function&#39;</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;.&#39;</span><span class="p">,</span> <span class="n">Text</span><span class="p">),</span>
+        <span class="p">],</span>
+        <span class="s1">&#39;function&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;[^}/]+&#39;</span><span class="p">,</span> <span class="n">Text</span><span class="p">),</span>
+            <span class="n">include</span><span class="p">(</span><span class="s1">&#39;comments&#39;</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;/&#39;</span><span class="p">,</span> <span class="n">Text</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;\}&#39;</span><span class="p">,</span> <span class="n">Keyword</span><span class="p">,</span> <span class="s1">&#39;#pop&#39;</span><span class="p">),</span>
+        <span class="p">]</span>
+    <span class="p">}</span>
+</pre></div>
+</div>
+<p>This is a hypothetical lexer for a language that consists of functions and
+comments.  Because comments can occur at top level and in functions, we need
+rules for comments in both states.  As you can see, the <cite>include</cite> helper saves
+repeating rules that occur more than once (in this example, the state
+<code class="docutils literal notranslate"><span class="pre">'comments'</span></code> will never be entered by the lexer, as it’s only there to be
+included in <code class="docutils literal notranslate"><span class="pre">'root'</span></code> and <code class="docutils literal notranslate"><span class="pre">'function'</span></code>).</p>
+</li>
+<li><p>Sometimes, you may want to “combine” a state from existing ones.  This is
+possible with the <cite>combined</cite> helper from <cite>pygments.lexer</cite>.</p>
+<p>If, instead of a new state name, you write <code class="docutils literal notranslate"><span class="pre">combined('state1',</span> <span class="pre">'state2')</span></code> as the
+third item of a rule tuple, a new anonymous state is formed from state1
+and state2, and the lexer enters it whenever the rule matches.</p>
+<p>This is not used very often, but can be helpful in some cases, such as the
+<cite>PythonLexer</cite>’s string literal processing.</p>
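+<p>A minimal sketch of how this might look (the state names and rules below are
+invented purely for illustration, not taken from an actual lexer):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.lexer import RegexLexer, combined
+from pygments.token import String, Text
+
+class CombinedExampleLexer(RegexLexer):
+    tokens = {
+        'root': [
+            # entering a double-quoted string activates both helper states at once
+            (r'"', String, combined('escapes', 'dqstring')),
+            (r'[^"]+', Text),
+        ],
+        'escapes': [
+            (r'\\.', String.Escape),
+        ],
+        'dqstring': [
+            (r'"', String, '#pop'),
+            (r'[^"\\]+', String),
+        ],
+    }
+</pre></div>
+</div>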
+</li>
+<li><p>If you want your lexer to start lexing in a different state, you can modify the
+stack by overriding the <cite>get_tokens_unprocessed()</cite> method:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.lexer</span> <span class="kn">import</span> <span class="n">RegexLexer</span>
+
+<span class="k">class</span> <span class="nc">ExampleLexer</span><span class="p">(</span><span class="n">RegexLexer</span><span class="p">):</span>
+    <span class="n">tokens</span> <span class="o">=</span> <span class="p">{</span><span class="o">...</span><span class="p">}</span>
+
+    <span class="k">def</span> <span class="nf">get_tokens_unprocessed</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">text</span><span class="p">,</span> <span class="n">stack</span><span class="o">=</span><span class="p">(</span><span class="s1">&#39;root&#39;</span><span class="p">,</span> <span class="s1">&#39;otherstate&#39;</span><span class="p">)):</span>
+        <span class="k">for</span> <span class="n">item</span> <span class="ow">in</span> <span class="n">RegexLexer</span><span class="o">.</span><span class="n">get_tokens_unprocessed</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">text</span><span class="p">,</span> <span class="n">stack</span><span class="p">):</span>
+            <span class="k">yield</span> <span class="n">item</span>
+</pre></div>
+</div>
+<p>Some lexers, like the <cite>PhpLexer</cite>, use this to make the leading <code class="docutils literal notranslate"><span class="pre">&lt;?php</span></code>
+preprocessor tag optional.  Note that you can easily crash the lexer by
+putting values onto the stack that don’t exist in the token map.  Also,
+removing <code class="docutils literal notranslate"><span class="pre">'root'</span></code> from the stack can result in strange errors!</p>
+</li>
+<li><p>In some lexers, a state should be popped if anything is encountered that isn’t
+matched by a rule in the state.  You could use an empty regex at the end of
+the state list, but Pygments provides a more obvious way of spelling that:
+<code class="docutils literal notranslate"><span class="pre">default('#pop')</span></code> is equivalent to <code class="docutils literal notranslate"><span class="pre">('',</span> <span class="pre">Text,</span> <span class="pre">'#pop')</span></code>.</p>
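+<p>A small sketch of how this is typically used (the states and rules here are
+hypothetical):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.lexer import RegexLexer, default
+from pygments.token import Name, Punctuation, Text
+
+class DefaultExampleLexer(RegexLexer):
+    tokens = {
+        'root': [
+            (r'\w+', Name, 'after-name'),
+            (r'\s+', Text),
+        ],
+        'after-name': [
+            # only an opening parenthesis keeps us here ...
+            (r'\(', Punctuation, 'arguments'),
+            # ... anything else silently pops back to 'root'
+            default('#pop'),
+        ],
+        'arguments': [
+            (r'[^)]*\)', Punctuation, '#pop'),
+        ],
+    }
+</pre></div>
+</div>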
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</li>
+</ul>
+</div>
+<div class="section" id="subclassing-lexers-derived-from-regexlexer">
+<h2>Subclassing lexers derived from RegexLexer<a class="headerlink" href="#subclassing-lexers-derived-from-regexlexer" title="Permalink to this headline">¶</a></h2>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+<p>Sometimes multiple languages are very similar, but should still be lexed by
+different lexer classes.</p>
+<p>When subclassing a lexer derived from RegexLexer, the <code class="docutils literal notranslate"><span class="pre">tokens</span></code> dictionaries
+defined in the parent and child class are merged.  For example:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.lexer</span> <span class="kn">import</span> <span class="n">RegexLexer</span><span class="p">,</span> <span class="n">inherit</span>
+<span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="o">*</span>
+
+<span class="k">class</span> <span class="nc">BaseLexer</span><span class="p">(</span><span class="n">RegexLexer</span><span class="p">):</span>
+    <span class="n">tokens</span> <span class="o">=</span> <span class="p">{</span>
+        <span class="s1">&#39;root&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="p">(</span><span class="s1">&#39;[a-z]+&#39;</span><span class="p">,</span> <span class="n">Name</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;/\*&#39;</span><span class="p">,</span> <span class="n">Comment</span><span class="p">,</span> <span class="s1">&#39;comment&#39;</span><span class="p">),</span>
+            <span class="p">(</span><span class="s1">&#39;&quot;&#39;</span><span class="p">,</span> <span class="n">String</span><span class="p">,</span> <span class="s1">&#39;string&#39;</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;\s+&#39;</span><span class="p">,</span> <span class="n">Text</span><span class="p">),</span>
+        <span class="p">],</span>
+        <span class="s1">&#39;string&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="p">(</span><span class="s1">&#39;[^&quot;]+&#39;</span><span class="p">,</span> <span class="n">String</span><span class="p">),</span>
+            <span class="p">(</span><span class="s1">&#39;&quot;&#39;</span><span class="p">,</span> <span class="n">String</span><span class="p">,</span> <span class="s1">&#39;#pop&#39;</span><span class="p">),</span>
+        <span class="p">],</span>
+        <span class="s1">&#39;comment&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="o">...</span>
+        <span class="p">],</span>
+    <span class="p">}</span>
+
+<span class="k">class</span> <span class="nc">DerivedLexer</span><span class="p">(</span><span class="n">BaseLexer</span><span class="p">):</span>
+    <span class="n">tokens</span> <span class="o">=</span> <span class="p">{</span>
+        <span class="s1">&#39;root&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="p">(</span><span class="s1">&#39;[0-9]+&#39;</span><span class="p">,</span> <span class="n">Number</span><span class="p">),</span>
+            <span class="n">inherit</span><span class="p">,</span>
+        <span class="p">],</span>
+        <span class="s1">&#39;string&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;[^&quot;</span><span class="se">\\</span><span class="s1">]+&#39;</span><span class="p">,</span> <span class="n">String</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;</span><span class="se">\\</span><span class="s1">.&#39;</span><span class="p">,</span> <span class="n">String</span><span class="o">.</span><span class="n">Escape</span><span class="p">),</span>
+            <span class="p">(</span><span class="s1">&#39;&quot;&#39;</span><span class="p">,</span> <span class="n">String</span><span class="p">,</span> <span class="s1">&#39;#pop&#39;</span><span class="p">),</span>
+        <span class="p">],</span>
+    <span class="p">}</span>
+</pre></div>
+</div>
+<p>The <cite>BaseLexer</cite> defines states for lexing names, strings and comments.  The
+<cite>DerivedLexer</cite> defines its own tokens dictionary, which extends the definitions
+of the base lexer:</p>
+<ul class="simple">
+<li><p>The “root” state has an additional rule and then the special object <cite>inherit</cite>,
+which tells Pygments to insert the token definitions of the parent class at
+that point.</p></li>
+<li><p>The “string” state is replaced entirely, since there is no <cite>inherit</cite> rule.</p></li>
+<li><p>The “comment” state is inherited entirely.</p></li>
+</ul>
+</div>
+<div class="section" id="using-multiple-lexers">
+<h2>Using multiple lexers<a class="headerlink" href="#using-multiple-lexers" title="Permalink to this headline">¶</a></h2>
+<p>Using multiple lexers for the same input can be tricky.  One of the easiest
+combination techniques is shown here: You can replace the action entry in a rule
+tuple with a lexer class.  The matched text will then be lexed with that lexer,
+and the resulting tokens will be yielded.</p>
+<p>For example, look at this stripped-down HTML lexer:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">import</span> <span class="nn">re</span>
+
+<span class="kn">from</span> <span class="nn">pygments.lexer</span> <span class="kn">import</span> <span class="n">RegexLexer</span><span class="p">,</span> <span class="n">bygroups</span><span class="p">,</span> <span class="n">using</span>
+<span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="o">*</span>
+<span class="kn">from</span> <span class="nn">pygments.lexers.javascript</span> <span class="kn">import</span> <span class="n">JavascriptLexer</span>
+
+<span class="k">class</span> <span class="nc">HtmlLexer</span><span class="p">(</span><span class="n">RegexLexer</span><span class="p">):</span>
+    <span class="n">name</span> <span class="o">=</span> <span class="s1">&#39;HTML&#39;</span>
+    <span class="n">aliases</span> <span class="o">=</span> <span class="p">[</span><span class="s1">&#39;html&#39;</span><span class="p">]</span>
+    <span class="n">filenames</span> <span class="o">=</span> <span class="p">[</span><span class="s1">&#39;*.html&#39;</span><span class="p">,</span> <span class="s1">&#39;*.htm&#39;</span><span class="p">]</span>
+
+    <span class="n">flags</span> <span class="o">=</span> <span class="n">re</span><span class="o">.</span><span class="n">IGNORECASE</span> <span class="o">|</span> <span class="n">re</span><span class="o">.</span><span class="n">DOTALL</span>
+    <span class="n">tokens</span> <span class="o">=</span> <span class="p">{</span>
+        <span class="s1">&#39;root&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="p">(</span><span class="s1">&#39;[^&lt;&amp;]+&#39;</span><span class="p">,</span> <span class="n">Text</span><span class="p">),</span>
+            <span class="p">(</span><span class="s1">&#39;&amp;.*?;&#39;</span><span class="p">,</span> <span class="n">Name</span><span class="o">.</span><span class="n">Entity</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;&lt;\s*script\s*&#39;</span><span class="p">,</span> <span class="n">Name</span><span class="o">.</span><span class="n">Tag</span><span class="p">,</span> <span class="p">(</span><span class="s1">&#39;script-content&#39;</span><span class="p">,</span> <span class="s1">&#39;tag&#39;</span><span class="p">)),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;&lt;\s*[a-zA-Z0-9:]+&#39;</span><span class="p">,</span> <span class="n">Name</span><span class="o">.</span><span class="n">Tag</span><span class="p">,</span> <span class="s1">&#39;tag&#39;</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;&lt;\s*/\s*[a-zA-Z0-9:]+\s*&gt;&#39;</span><span class="p">,</span> <span class="n">Name</span><span class="o">.</span><span class="n">Tag</span><span class="p">),</span>
+        <span class="p">],</span>
+        <span class="s1">&#39;script-content&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;(.+?)(&lt;\s*/\s*script\s*&gt;)&#39;</span><span class="p">,</span>
+             <span class="n">bygroups</span><span class="p">(</span><span class="n">using</span><span class="p">(</span><span class="n">JavascriptLexer</span><span class="p">),</span> <span class="n">Name</span><span class="o">.</span><span class="n">Tag</span><span class="p">),</span>
+             <span class="s1">&#39;#pop&#39;</span><span class="p">),</span>
+        <span class="p">]</span>
+    <span class="p">}</span>
+</pre></div>
+</div>
+<p>Here the content of a <code class="docutils literal notranslate"><span class="pre">&lt;script&gt;</span></code> tag is passed to a newly created instance of
+a <cite>JavascriptLexer</cite> and not processed by the <cite>HtmlLexer</cite>.  This is done using
+the <cite>using</cite> helper that takes the other lexer class as its parameter.</p>
+<p>Note the combination of <cite>bygroups</cite> and <cite>using</cite>.  This makes sure that the
+content up to the <code class="docutils literal notranslate"><span class="pre">&lt;/script&gt;</span></code> end tag is processed by the <cite>JavascriptLexer</cite>,
+while the end tag is yielded as a normal token with the <cite>Name.Tag</cite> type.</p>
+<p>Also note the <code class="docutils literal notranslate"><span class="pre">(r'&lt;\s*script\s*',</span> <span class="pre">Name.Tag,</span> <span class="pre">('script-content',</span> <span class="pre">'tag'))</span></code> rule.
+Here, two states are pushed onto the state stack, <code class="docutils literal notranslate"><span class="pre">'script-content'</span></code> and
+<code class="docutils literal notranslate"><span class="pre">'tag'</span></code>.  That means that first <code class="docutils literal notranslate"><span class="pre">'tag'</span></code> is processed, which will lex
+attributes and the closing <code class="docutils literal notranslate"><span class="pre">&gt;</span></code>, then the <code class="docutils literal notranslate"><span class="pre">'tag'</span></code> state is popped and the
+next state on top of the stack will be <code class="docutils literal notranslate"><span class="pre">'script-content'</span></code>.</p>
+<p>Since you cannot refer to the class currently being defined, use <cite>this</cite>
+(imported from <cite>pygments.lexer</cite>) to refer to the current lexer class, i.e.
+<code class="docutils literal notranslate"><span class="pre">using(this)</span></code>.  This construct may seem unnecessary, but this is often the
+most obvious way of lexing arbitrary syntax between fixed delimiters without
+introducing deeply nested states.</p>
+<p>The <cite>using()</cite> helper has a special keyword argument, <cite>state</cite>, which works as
+follows: if given, the lexer being used starts lexing not in the <code class="docutils literal notranslate"><span class="pre">&quot;root&quot;</span></code> state,
+but in the state named by this argument.  This does not work with advanced
+<cite>RegexLexer</cite> subclasses such as <cite>ExtendedRegexLexer</cite> (see below).</p>
+<p>Any other keyword arguments passed to <cite>using()</cite> are added to the keyword
+arguments used to create the lexer.</p>
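+<p>As a rough sketch of the <cite>state</cite> keyword (the language, state names and rules
+below are made up for illustration), one could delegate the contents of an
+interpolation back to the current lexer, starting in a different state:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.lexer import RegexLexer, bygroups, this, using
+from pygments.token import Name, Punctuation, String
+
+class InterpolationExampleLexer(RegexLexer):
+    tokens = {
+        'root': [
+            # hand the text between ${ and } back to this lexer, but let it
+            # start in the hypothetical 'interp' state instead of plain 'root'
+            (r'(\$\{)(.*?)(\})',
+             bygroups(Punctuation, using(this, state='interp'), Punctuation)),
+            (r'[^$]+', String),
+            (r'\$', String),
+        ],
+        'interp': [
+            (r'\w+', Name),
+            (r'\s+', String),
+            (r'.', String),
+        ],
+    }
+</pre></div>
+</div>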
+</div>
+<div class="section" id="delegating-lexer">
+<h2>Delegating Lexer<a class="headerlink" href="#delegating-lexer" title="Permalink to this headline">¶</a></h2>
+<p>Another approach for nested lexers is the <cite>DelegatingLexer</cite>, which is used,
+for example, by the template engine lexers.  It takes two lexers as arguments on
+initialisation: a <cite>root_lexer</cite> and a <cite>language_lexer</cite>.</p>
+<p>The input is processed as follows: First, the whole text is lexed with the
+<cite>language_lexer</cite>.  All tokens yielded with the special token type <code class="docutils literal notranslate"><span class="pre">Other</span></code> are
+then concatenated and given to the <cite>root_lexer</cite>.  The language tokens of the
+<cite>language_lexer</cite> are then inserted into the <cite>root_lexer</cite>’s token stream at the
+appropriate positions.</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.lexer</span> <span class="kn">import</span> <span class="n">DelegatingLexer</span>
+<span class="kn">from</span> <span class="nn">pygments.lexers.web</span> <span class="kn">import</span> <span class="n">HtmlLexer</span><span class="p">,</span> <span class="n">PhpLexer</span>
+
+<span class="k">class</span> <span class="nc">HtmlPhpLexer</span><span class="p">(</span><span class="n">DelegatingLexer</span><span class="p">):</span>
+    <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">**</span><span class="n">options</span><span class="p">):</span>
+        <span class="nb">super</span><span class="p">(</span><span class="n">HtmlPhpLexer</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="n">HtmlLexer</span><span class="p">,</span> <span class="n">PhpLexer</span><span class="p">,</span> <span class="o">**</span><span class="n">options</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>This procedure ensures that e.g. HTML with template tags in it is highlighted
+correctly even if the template tags are put into HTML tags or attributes.</p>
+<p>If you want to change the needle token <code class="docutils literal notranslate"><span class="pre">Other</span></code> to something else, you can give
+the lexer another token type as the third parameter:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="n">DelegatingLexer</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="n">MyLexer</span><span class="p">,</span> <span class="n">OtherLexer</span><span class="p">,</span> <span class="n">Text</span><span class="p">,</span> <span class="o">**</span><span class="n">options</span><span class="p">)</span>
+</pre></div>
+</div>
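+<p>A quick usage sketch (building on the <cite>HtmlPhpLexer</cite> defined above; the input
+string is just an example):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.formatters import HtmlFormatter
+
+code = '&lt;a href="&lt;?php echo $link; ?&gt;"&gt;click&lt;/a&gt;'
+# the PHP snippet inside the HTML attribute is still highlighted as PHP
+print(highlight(code, HtmlPhpLexer(), HtmlFormatter()))
+</pre></div>
+</div>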
+</div>
+<div class="section" id="callbacks">
+<h2>Callbacks<a class="headerlink" href="#callbacks" title="Permalink to this headline">¶</a></h2>
+<p>Sometimes the grammar of a language is so complex that a lexer would be unable
+to process it just by using regular expressions and stacks.</p>
+<p>For this, the <cite>RegexLexer</cite> allows callbacks to be given in rule tuples, instead
+of token types (<cite>bygroups</cite> and <cite>using</cite> are nothing else but preimplemented
+callbacks).  The callback must be a function taking two arguments:</p>
+<ul class="simple">
+<li><p>the lexer itself</p></li>
+<li><p>the match object for the last matched rule</p></li>
+</ul>
+<p>The callback must then return an iterable of (or simply yield) <code class="docutils literal notranslate"><span class="pre">(index,</span>
+<span class="pre">tokentype,</span> <span class="pre">value)</span></code> tuples, which are then just passed through by
+<cite>get_tokens_unprocessed()</cite>.  The <code class="docutils literal notranslate"><span class="pre">index</span></code> here is the position of the token in
+the input string, <code class="docutils literal notranslate"><span class="pre">tokentype</span></code> is the normal token type (like <cite>Name.Builtin</cite>),
+and <code class="docutils literal notranslate"><span class="pre">value</span></code> the associated part of the input string.</p>
+<p>You can see an example here:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.lexer</span> <span class="kn">import</span> <span class="n">RegexLexer</span>
+<span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="n">Generic</span>
+
+<span class="k">class</span> <span class="nc">HypotheticLexer</span><span class="p">(</span><span class="n">RegexLexer</span><span class="p">):</span>
+
+    <span class="k">def</span> <span class="nf">headline_callback</span><span class="p">(</span><span class="n">lexer</span><span class="p">,</span> <span class="n">match</span><span class="p">):</span>
+        <span class="n">equal_signs</span> <span class="o">=</span> <span class="n">match</span><span class="o">.</span><span class="n">group</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
+        <span class="n">text</span> <span class="o">=</span> <span class="n">match</span><span class="o">.</span><span class="n">group</span><span class="p">(</span><span class="mi">2</span><span class="p">)</span>
+        <span class="k">yield</span> <span class="n">match</span><span class="o">.</span><span class="n">start</span><span class="p">(),</span> <span class="n">Generic</span><span class="o">.</span><span class="n">Headline</span><span class="p">,</span> <span class="n">equal_signs</span> <span class="o">+</span> <span class="n">text</span> <span class="o">+</span> <span class="n">equal_signs</span>
+
+    <span class="n">tokens</span> <span class="o">=</span> <span class="p">{</span>
+        <span class="s1">&#39;root&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;(=+)(.*?)(\1)&#39;</span><span class="p">,</span> <span class="n">headline_callback</span><span class="p">)</span>
+        <span class="p">]</span>
+    <span class="p">}</span>
+</pre></div>
+</div>
+<p>If the regex for the <cite>headline_callback</cite> matches, the function is called with
+the match object.  Note that after the callback is done, processing continues
+normally, that is, after the end of the previous match.  The callback has no
+way to influence the position.</p>
+<p>There are not really any simple examples for lexer callbacks, but you can see
+them in action e.g. in the <cite>SMLLexer</cite> class in <a class="reference external" href="https://github.com/pygments/pygments/blob/master/pygments/lexers/ml.py">ml.py</a>.</p>
+</div>
+<div class="section" id="the-extendedregexlexer-class">
+<h2>The ExtendedRegexLexer class<a class="headerlink" href="#the-extendedregexlexer-class" title="Permalink to this headline">¶</a></h2>
+<p>The <cite>RegexLexer</cite>, even with callbacks, unfortunately isn’t powerful enough for
+the funky syntax rules of languages such as Ruby.</p>
+<p>But fear not; even then you don’t have to abandon the regular expression
+approach: Pygments has a subclass of <cite>RegexLexer</cite>, the <cite>ExtendedRegexLexer</cite>.
+All features known from RegexLexers are available here too, and the tokens are
+specified in exactly the same way, <em>except</em> for one detail:</p>
+<p>The <cite>get_tokens_unprocessed()</cite> method holds its internal state data not as local
+variables, but in an instance of the <cite>pygments.lexer.LexerContext</cite> class, and
+that instance is passed to callbacks as a third argument. This means that you
+can modify the lexer state in callbacks.</p>
+<p>The <cite>LexerContext</cite> class has the following members:</p>
+<ul class="simple">
+<li><p><cite>text</cite> – the input text</p></li>
+<li><p><cite>pos</cite> – the current starting position that is used for matching regexes</p></li>
+<li><p><cite>stack</cite> – a list containing the state stack</p></li>
+<li><p><cite>end</cite> – the maximum position to which regexes are matched; this defaults to
+the length of <cite>text</cite></p></li>
+</ul>
+<p>Additionally, the <cite>get_tokens_unprocessed()</cite> method can be given a
+<cite>LexerContext</cite> instead of a string and will then process this context instead of
+creating a new one for the string argument.</p>
+<p>Note that because you can set the current position to anything in the callback,
+it won’t automatically be set by the caller after the callback is finished.
+For example, this is how the hypothetical lexer above would be written with the
+<cite>ExtendedRegexLexer</cite>:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.lexer</span> <span class="kn">import</span> <span class="n">ExtendedRegexLexer</span>
+<span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="n">Generic</span>
+
+<span class="k">class</span> <span class="nc">ExHypotheticLexer</span><span class="p">(</span><span class="n">ExtendedRegexLexer</span><span class="p">):</span>
+
+    <span class="k">def</span> <span class="nf">headline_callback</span><span class="p">(</span><span class="n">lexer</span><span class="p">,</span> <span class="n">match</span><span class="p">,</span> <span class="n">ctx</span><span class="p">):</span>
+        <span class="n">equal_signs</span> <span class="o">=</span> <span class="n">match</span><span class="o">.</span><span class="n">group</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
+        <span class="n">text</span> <span class="o">=</span> <span class="n">match</span><span class="o">.</span><span class="n">group</span><span class="p">(</span><span class="mi">2</span><span class="p">)</span>
+        <span class="k">yield</span> <span class="n">match</span><span class="o">.</span><span class="n">start</span><span class="p">(),</span> <span class="n">Generic</span><span class="o">.</span><span class="n">Headline</span><span class="p">,</span> <span class="n">equal_signs</span> <span class="o">+</span> <span class="n">text</span> <span class="o">+</span> <span class="n">equal_signs</span>
+        <span class="n">ctx</span><span class="o">.</span><span class="n">pos</span> <span class="o">=</span> <span class="n">match</span><span class="o">.</span><span class="n">end</span><span class="p">()</span>
+
+    <span class="n">tokens</span> <span class="o">=</span> <span class="p">{</span>
+        <span class="s1">&#39;root&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;(=+)(.*?)(\1)&#39;</span><span class="p">,</span> <span class="n">headline_callback</span><span class="p">)</span>
+        <span class="p">]</span>
+    <span class="p">}</span>
+</pre></div>
+</div>
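+<p>For instance, the lexer above could be driven with an explicit context (a
+hypothetical usage sketch; normal use through <cite>highlight()</cite> does not need this):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.lexer import LexerContext
+
+lexer = ExHypotheticLexer()
+text = '== a headline ==\n'
+# start at position 0 with the default ['root'] state stack
+ctx = LexerContext(text, 0)
+for index, tokentype, value in lexer.get_tokens_unprocessed(context=ctx):
+    print(index, tokentype, value)
+</pre></div>
+</div>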
+<p>This might sound confusing (and it can be), but it is needed; for an
+example, look at the Ruby lexer in <a class="reference external" href="https://github.com/pygments/pygments/blob/master/pygments/lexers/ruby.py">ruby.py</a>.</p>
+</div>
+<div class="section" id="handling-lists-of-keywords">
+<h2>Handling Lists of Keywords<a class="headerlink" href="#handling-lists-of-keywords" title="Permalink to this headline">¶</a></h2>
+<p>For a relatively short list (hundreds of entries) you can construct an optimized regular
+expression directly using <code class="docutils literal notranslate"><span class="pre">words()</span></code> (for longer lists, see the next section).  This
+function handles a few things for you automatically, including escaping
+metacharacters and working around Python’s first-match (rather than
+longest-match) behaviour in alternations.  Feel free to put the lists themselves in
+<code class="docutils literal notranslate"><span class="pre">pygments/lexers/_$lang_builtins.py</span></code> (see examples there), and generate
+them by code if possible.</p>
+<p>An example of using <code class="docutils literal notranslate"><span class="pre">words()</span></code> is something like:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.lexer</span> <span class="kn">import</span> <span class="n">RegexLexer</span><span class="p">,</span> <span class="n">words</span><span class="p">,</span> <span class="n">Name</span>
+
+<span class="k">class</span> <span class="nc">MyLexer</span><span class="p">(</span><span class="n">RegexLexer</span><span class="p">):</span>
+
+    <span class="n">tokens</span> <span class="o">=</span> <span class="p">{</span>
+        <span class="s1">&#39;root&#39;</span><span class="p">:</span> <span class="p">[</span>
+            <span class="p">(</span><span class="n">words</span><span class="p">((</span><span class="s1">&#39;else&#39;</span><span class="p">,</span> <span class="s1">&#39;elseif&#39;</span><span class="p">),</span> <span class="n">suffix</span><span class="o">=</span><span class="sa">r</span><span class="s1">&#39;\b&#39;</span><span class="p">),</span> <span class="n">Name</span><span class="o">.</span><span class="n">Builtin</span><span class="p">),</span>
+            <span class="p">(</span><span class="sa">r</span><span class="s1">&#39;\w+&#39;</span><span class="p">,</span> <span class="n">Name</span><span class="p">),</span>
+        <span class="p">],</span>
+    <span class="p">}</span>
+</pre></div>
+</div>
+<p>As you can see, you can add <code class="docutils literal notranslate"><span class="pre">prefix</span></code> and <code class="docutils literal notranslate"><span class="pre">suffix</span></code> parts to the constructed
+regex.</p>
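+<p>For instance (a contrived sketch), a lookbehind prefix can keep a builtin name
+from being highlighted when it appears directly after a dot:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.lexer import RegexLexer, words
+from pygments.token import Name, Punctuation, Text
+
+class BuiltinExampleLexer(RegexLexer):
+    tokens = {
+        'root': [
+            # not directly after a dot, and followed by a word boundary, so
+            # that e.g. the 'print' in 'obj.print' stays a plain Name
+            (words(('print', 'len', 'map'), prefix=r'(?&lt;!\.)', suffix=r'\b'),
+             Name.Builtin),
+            (r'\w+', Name),
+            (r'\s+', Text),
+            (r'[^\w\s]', Punctuation),
+        ],
+    }
+</pre></div>
+</div>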
+</div>
+<div class="section" id="modifying-token-streams">
+<h2>Modifying Token Streams<a class="headerlink" href="#modifying-token-streams" title="Permalink to this headline">¶</a></h2>
+<p>Some languages ship a lot of builtin functions (for example PHP).  The total
+number of those functions differs from system to system because not everybody
+has every extension installed.  In the case of PHP there are over 3000 builtin
+functions.  That is far more than you want to put into a regular expression.</p>
+<p>But because only <cite>Name</cite> tokens can be function names, this is solvable by
+overriding the <code class="docutils literal notranslate"><span class="pre">get_tokens_unprocessed()</span></code> method.  The following lexer
+subclasses the <cite>PythonLexer</cite> so that it highlights some additional names as
+pseudo keywords:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.lexers.python</span> <span class="kn">import</span> <span class="n">PythonLexer</span>
+<span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="n">Name</span><span class="p">,</span> <span class="n">Keyword</span>
+
+<span class="k">class</span> <span class="nc">MyPythonLexer</span><span class="p">(</span><span class="n">PythonLexer</span><span class="p">):</span>
+    <span class="n">EXTRA_KEYWORDS</span> <span class="o">=</span> <span class="nb">set</span><span class="p">((</span><span class="s1">&#39;foo&#39;</span><span class="p">,</span> <span class="s1">&#39;bar&#39;</span><span class="p">,</span> <span class="s1">&#39;foobar&#39;</span><span class="p">,</span> <span class="s1">&#39;barfoo&#39;</span><span class="p">,</span> <span class="s1">&#39;spam&#39;</span><span class="p">,</span> <span class="s1">&#39;eggs&#39;</span><span class="p">))</span>
+
+    <span class="k">def</span> <span class="nf">get_tokens_unprocessed</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">text</span><span class="p">):</span>
+        <span class="k">for</span> <span class="n">index</span><span class="p">,</span> <span class="n">token</span><span class="p">,</span> <span class="n">value</span> <span class="ow">in</span> <span class="n">PythonLexer</span><span class="o">.</span><span class="n">get_tokens_unprocessed</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">text</span><span class="p">):</span>
+            <span class="k">if</span> <span class="n">token</span> <span class="ow">is</span> <span class="n">Name</span> <span class="ow">and</span> <span class="n">value</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">EXTRA_KEYWORDS</span><span class="p">:</span>
+                <span class="k">yield</span> <span class="n">index</span><span class="p">,</span> <span class="n">Keyword</span><span class="o">.</span><span class="n">Pseudo</span><span class="p">,</span> <span class="n">value</span>
+            <span class="k">else</span><span class="p">:</span>
+                <span class="k">yield</span> <span class="n">index</span><span class="p">,</span> <span class="n">token</span><span class="p">,</span> <span class="n">value</span>
+</pre></div>
+</div>
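+<p>Used like any other lexer, the subclass then renders those extra names
+differently; a quick usage sketch:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.formatters import TerminalFormatter
+
+# 'spam' and 'eggs' are now emitted as Keyword.Pseudo instead of Name
+print(highlight('spam(eggs, 42)\n', MyPythonLexer(), TerminalFormatter()))
+</pre></div>
+</div>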
+<p>The <cite>PhpLexer</cite> and <cite>LuaLexer</cite> use this method to resolve builtin functions.</p>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/lexers.html b/doc/_build/html/docs/lexers.html
new file mode 100644 (file)
index 0000000..9669146
--- /dev/null
@@ -0,0 +1,10359 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Available lexers &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Filters" href="filters.html" />
+    <link rel="prev" title="Command Line Interface" href="cmdline.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="../index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">Available lexers</a><ul>
+<li><a class="reference internal" href="#lexers-for-actionscript-and-mxml">Lexers for ActionScript and MXML</a></li>
+<li><a class="reference internal" href="#lexers-for-computer-algebra-systems">Lexers for computer algebra systems</a></li>
+<li><a class="reference internal" href="#lexers-for-ambienttalk-language">Lexers for AmbientTalk language</a></li>
+<li><a class="reference internal" href="#lexers-for-the-ampl-language">Lexers for the AMPL language</a></li>
+<li><a class="reference internal" href="#lexers-for-apl">Lexers for APL</a></li>
+<li><a class="reference internal" href="#lexer-for-archetype-related-syntaxes-including">Lexer for Archetype-related syntaxes, including:</a></li>
+<li><a class="reference internal" href="#lexers-for-assembly-languages">Lexers for assembly languages</a></li>
+<li><a class="reference internal" href="#lexers-for-automation-scripting-languages">Lexers for automation scripting languages</a></li>
+<li><a class="reference internal" href="#lexers-for-basic-like-languages-other-than-vb-net">Lexers for BASIC like languages (other than VB.net)</a></li>
+<li><a class="reference internal" href="#lexers-for-bibtex-bibliography-data-and-styles">Lexers for BibTeX bibliography data and styles</a></li>
+<li><a class="reference internal" href="#lexers-for-the-boa-language">Lexers for the Boa language</a></li>
+<li><a class="reference internal" href="#lexers-for-business-oriented-languages">Lexers for “business-oriented” languages</a></li>
+<li><a class="reference internal" href="#lexers-for-c-c-languages">Lexers for C/C++ languages</a></li>
+<li><a class="reference internal" href="#lexers-for-other-c-like-languages">Lexers for other C-like languages</a></li>
+<li><a class="reference internal" href="#lexers-for-the-cap-n-proto-schema-language">Lexers for the Cap’n Proto schema language</a></li>
+<li><a class="reference internal" href="#lexer-for-the-chapel-language">Lexer for the Chapel language</a></li>
+<li><a class="reference internal" href="#lexer-for-the-clean-language">Lexer for the Clean language</a></li>
+<li><a class="reference internal" href="#lexers-for-configuration-file-formats">Lexers for configuration file formats</a></li>
+<li><a class="reference internal" href="#lexers-for-misc-console-output">Lexers for misc console output</a></li>
+<li><a class="reference internal" href="#lexer-for-crystal">Lexer for Crystal</a></li>
+<li><a class="reference internal" href="#lexers-for-csound-languages">Lexers for Csound languages</a></li>
+<li><a class="reference internal" href="#lexers-for-css-and-related-stylesheet-formats">Lexers for CSS and related stylesheet formats</a></li>
+<li><a class="reference internal" href="#lexers-for-d-languages">Lexers for D languages</a></li>
+<li><a class="reference internal" href="#pygments-lexers-for-dalvik-vm-related-languages">Pygments lexers for Dalvik VM-related languages</a></li>
+<li><a class="reference internal" href="#lexers-for-data-file-format">Lexers for data file format</a></li>
+<li><a class="reference internal" href="#lexers-for-diff-patch-formats">Lexers for diff/patch formats</a></li>
+<li><a class="reference internal" href="#lexers-for-net-languages">Lexers for .net languages</a></li>
+<li><a class="reference internal" href="#lexers-for-various-domain-specific-languages">Lexers for various domain-specific languages</a></li>
+<li><a class="reference internal" href="#lexers-for-the-dylan-language">Lexers for the Dylan language</a></li>
+<li><a class="reference internal" href="#lexers-for-the-ecl-language">Lexers for the ECL language</a></li>
+<li><a class="reference internal" href="#lexer-for-the-eiffel-language">Lexer for the Eiffel language</a></li>
+<li><a class="reference internal" href="#lexer-for-the-elm-programming-language">Lexer for the Elm programming language</a></li>
+<li><a class="reference internal" href="#lexer-for-the-raw-e-mail">Lexer for the raw E-mail</a></li>
+<li><a class="reference internal" href="#lexers-for-erlang">Lexers for Erlang</a></li>
+<li><a class="reference internal" href="#lexers-for-esoteric-languages">Lexers for esoteric languages</a></li>
+<li><a class="reference internal" href="#pygments-lexers-for-ezhil-language">Pygments lexers for Ezhil language</a></li>
+<li><a class="reference internal" href="#lexers-for-the-factor-language">Lexers for the Factor language</a></li>
+<li><a class="reference internal" href="#lexer-for-the-fantom-language">Lexer for the Fantom language</a></li>
+<li><a class="reference internal" href="#lexer-for-the-felix-language">Lexer for the Felix language</a></li>
+<li><a class="reference internal" href="#lexer-for-floscript">Lexer for FloScript</a></li>
+<li><a class="reference internal" href="#lexer-for-the-forth-language">Lexer for the Forth language</a></li>
+<li><a class="reference internal" href="#lexers-for-fortran-languages">Lexers for Fortran languages</a></li>
+<li><a class="reference internal" href="#simple-lexer-for-microsoft-visual-foxpro-source-code">Simple lexer for Microsoft Visual FoxPro source code</a></li>
+<li><a class="reference internal" href="#lexer-for-freefem-language">Lexer for FreeFem++ language</a></li>
+<li><a class="reference internal" href="#lexers-for-the-google-go-language">Lexers for the Google Go language</a></li>
+<li><a class="reference internal" href="#lexers-for-grammer-notations-like-bnf">Lexers for grammar notations like BNF</a></li>
+<li><a class="reference internal" href="#lexers-for-graph-query-languages">Lexers for graph query languages</a></li>
+<li><a class="reference internal" href="#lexers-for-computer-graphics-and-plotting-related-languages">Lexers for computer graphics and plotting related languages</a></li>
+<li><a class="reference internal" href="#lexers-for-haskell-and-related-languages">Lexers for Haskell and related languages</a></li>
+<li><a class="reference internal" href="#lexers-for-haxe-and-related-stuff">Lexers for Haxe and related stuff</a></li>
+<li><a class="reference internal" href="#lexers-for-hardware-descriptor-languages">Lexers for hardware descriptor languages</a></li>
+<li><a class="reference internal" href="#lexers-for-hexadecimal-dumps">Lexers for hexadecimal dumps</a></li>
+<li><a class="reference internal" href="#lexers-for-html-xml-and-related-markup">Lexers for HTML, XML and related markup</a></li>
+<li><a class="reference internal" href="#lexers-for-idl">Lexers for IDL</a></li>
+<li><a class="reference internal" href="#lexers-for-igor-pro">Lexers for Igor Pro</a></li>
+<li><a class="reference internal" href="#lexers-for-inferno-os-and-all-the-related-stuff">Lexers for Inferno OS and all the related stuff</a></li>
+<li><a class="reference internal" href="#lexers-for-installer-packager-dsls-and-formats">Lexers for installer/packager DSLs and formats</a></li>
+<li><a class="reference internal" href="#lexers-for-interactive-fiction-languages">Lexers for interactive fiction languages</a></li>
+<li><a class="reference internal" href="#lexers-for-the-io-language">Lexers for the Io language</a></li>
+<li><a class="reference internal" href="#lexer-for-the-j-programming-language">Lexer for the J programming language</a></li>
+<li><a class="reference internal" href="#lexers-for-javascript-and-related-languages">Lexers for JavaScript and related languages</a></li>
+<li><a class="reference internal" href="#lexers-for-the-julia-language">Lexers for the Julia language</a></li>
+<li><a class="reference internal" href="#pygments-lexers-for-jvm-languages">Pygments lexers for JVM languages</a></li>
+<li><a class="reference internal" href="#lexers-for-lispy-languages">Lexers for Lispy languages</a></li>
+<li><a class="reference internal" href="#lexers-for-makefiles-and-similar">Lexers for Makefiles and similar</a></li>
+<li><a class="reference internal" href="#lexers-for-non-html-markup-languages">Lexers for non-HTML markup languages</a></li>
+<li><a class="reference internal" href="#lexers-for-matlab-and-related-languages">Lexers for Matlab and related languages</a></li>
+<li><a class="reference internal" href="#lexer-for-multipurpose-internet-mail-extensions-mime-data">Lexer for Multipurpose Internet Mail Extensions (MIME) data</a></li>
+<li><a class="reference internal" href="#lexers-for-ml-family-languages">Lexers for ML family languages</a></li>
+<li><a class="reference internal" href="#lexers-for-modeling-languages">Lexers for modeling languages</a></li>
+<li><a class="reference internal" href="#multi-dialect-lexer-for-modula-2">Multi-Dialect Lexer for Modula-2</a></li>
+<li><a class="reference internal" href="#lexer-for-the-monte-programming-language">Lexer for the Monte programming language</a></li>
+<li><a class="reference internal" href="#lexers-for-ncar-command-language">Lexers for NCAR Command Language</a></li>
+<li><a class="reference internal" href="#lexer-for-the-nim-language-formerly-known-as-nimrod">Lexer for the Nim language (formerly known as Nimrod)</a></li>
+<li><a class="reference internal" href="#lexer-for-the-nit-language">Lexer for the Nit language</a></li>
+<li><a class="reference internal" href="#lexers-for-the-nixos-nix-language">Lexers for the NixOS Nix language</a></li>
+<li><a class="reference internal" href="#lexers-for-oberon-family-languages">Lexers for Oberon family languages</a></li>
+<li><a class="reference internal" href="#lexers-for-objective-c-family-languages">Lexers for Objective-C family languages</a></li>
+<li><a class="reference internal" href="#lexers-for-the-ooc-language">Lexers for the Ooc language</a></li>
+<li><a class="reference internal" href="#lexer-for-parasail">Lexer for ParaSail</a></li>
+<li><a class="reference internal" href="#lexers-for-parser-generators">Lexers for parser generators</a></li>
+<li><a class="reference internal" href="#lexers-for-pascal-family-languages">Lexers for Pascal family languages</a></li>
+<li><a class="reference internal" href="#lexers-for-the-pawn-languages">Lexers for the Pawn languages</a></li>
+<li><a class="reference internal" href="#lexers-for-perl-and-related-languages">Lexers for Perl and related languages</a></li>
+<li><a class="reference internal" href="#lexers-for-php-and-related-languages">Lexers for PHP and related languages</a></li>
+<li><a class="reference internal" href="#lexers-for-pony-and-related-languages">Lexers for Pony and related languages</a></li>
+<li><a class="reference internal" href="#lexer-for-praat">Lexer for Praat</a></li>
+<li><a class="reference internal" href="#lexers-for-prolog-and-prolog-like-languages">Lexers for Prolog and Prolog-like languages</a></li>
+<li><a class="reference internal" href="#lexers-for-python-and-related-languages">Lexers for Python and related languages</a></li>
+<li><a class="reference internal" href="#lexer-for-qvt-operational-language">Lexer for QVT Operational language</a></li>
+<li><a class="reference internal" href="#lexers-for-the-r-s-languages">Lexers for the R/S languages</a></li>
+<li><a class="reference internal" href="#lexers-for-semantic-web-and-rdf-query-languages-and-markup">Lexers for semantic web and RDF query languages and markup</a></li>
+<li><a class="reference internal" href="#lexers-for-the-rebol-and-related-languages">Lexers for the REBOL and related languages</a></li>
+<li><a class="reference internal" href="#lexer-for-resource-definition-files">Lexer for resource definition files</a></li>
+<li><a class="reference internal" href="#lexer-for-relax-ng-compact-syntax">Lexer for Relax-NG Compact syntax</a></li>
+<li><a class="reference internal" href="#lexers-for-roboconf-dsl">Lexers for Roboconf DSL</a></li>
+<li><a class="reference internal" href="#lexer-for-robot-framework">Lexer for Robot Framework</a></li>
+<li><a class="reference internal" href="#lexers-for-ruby-and-related-languages">Lexers for Ruby and related languages</a></li>
+<li><a class="reference internal" href="#lexers-for-the-rust-language">Lexers for the Rust language</a></li>
+<li><a class="reference internal" href="#lexer-for-sas">Lexer for SAS</a></li>
+<li><a class="reference internal" href="#lexer-for-scdoc-a-simple-man-page-generator">Lexer for scdoc, a simple man page generator</a></li>
+<li><a class="reference internal" href="#lexer-for-scripting-and-embedded-languages">Lexer for scripting and embedded languages</a></li>
+<li><a class="reference internal" href="#lexer-for-smart-game-format-sgf-file-format">Lexer for Smart Game Format (sgf) file format</a></li>
+<li><a class="reference internal" href="#lexers-for-various-shells">Lexers for various shells</a></li>
+<li><a class="reference internal" href="#lexer-for-the-slash-programming">Lexer for the Slash programming language</a></li>
+<li><a class="reference internal" href="#lexers-for-smalltalk-and-related-languages">Lexers for Smalltalk and related languages</a></li>
+<li><a class="reference internal" href="#lexers-for-the-smv-languages">Lexers for the SMV languages</a></li>
+<li><a class="reference internal" href="#lexers-for-the-snobol-language">Lexers for the SNOBOL language</a></li>
+<li><a class="reference internal" href="#lexers-for-solidity">Lexers for Solidity</a></li>
+<li><a class="reference internal" href="#special-lexers">Special lexers</a></li>
+<li><a class="reference internal" href="#lexers-for-various-sql-dialects-and-related-interactive-sessions">Lexers for various SQL dialects and related interactive sessions</a></li>
+<li><a class="reference internal" href="#lexer-for-stata">Lexer for Stata</a></li>
+<li><a class="reference internal" href="#lexer-for-supercollider">Lexer for SuperCollider</a></li>
+<li><a class="reference internal" href="#lexers-for-tcl-and-related-languages">Lexers for Tcl and related languages</a></li>
+<li><a class="reference internal" href="#lexers-for-various-template-engines-markup">Lexers for various template engines’ markup</a></li>
+<li><a class="reference internal" href="#lexer-for-tera-term-macro-files">Lexer for Tera Term macro files</a></li>
+<li><a class="reference internal" href="#lexers-for-testing-languages">Lexers for testing languages</a></li>
+<li><a class="reference internal" href="#lexers-for-languages-related-to-text-processing">Lexers for languages related to text processing</a></li>
+<li><a class="reference internal" href="#lexers-for-various-text-formats">Lexers for various text formats</a></li>
+<li><a class="reference internal" href="#lexers-for-theorem-proving-languages">Lexers for theorem-proving languages</a></li>
+<li><a class="reference internal" href="#lexer-for-riverbed-s-trafficscript-rts-language">Lexer for RiverBed’s TrafficScript (RTS) language</a></li>
+<li><a class="reference internal" href="#lexers-for-typoscript">Lexers for TypoScript</a></li>
+<li><a class="reference internal" href="#lexers-for-the-icon-and-unicon-languages-including-ucode-vm">Lexers for the Icon and Unicon languages, including ucode VM</a></li>
+<li><a class="reference internal" href="#lexers-for-urbiscript-language">Lexers for UrbiScript language</a></li>
+<li><a class="reference internal" href="#lexers-for-varnish-configuration">Lexers for Varnish configuration</a></li>
+<li><a class="reference internal" href="#lexer-for-intermediate-verification-languages-ivls">Lexer for Intermediate Verification Languages (IVLs)</a></li>
+<li><a class="reference internal" href="#lexers-for-misc-web-stuff">Lexers for misc. web stuff</a></li>
+<li><a class="reference internal" href="#lexers-for-the-whiley-language">Lexers for the Whiley language</a></li>
+<li><a class="reference internal" href="#lexers-for-the-x10-programming-language">Lexers for the X10 programming language</a></li>
+<li><a class="reference internal" href="#lexers-for-xorg-configs">Lexers for Xorg configs</a></li>
+<li><a class="reference internal" href="#lexers-for-zig">Lexers for Zig</a></li>
+<li><a class="reference internal" href="#iterating-over-all-lexers">Iterating over all lexers</a></li>
+</ul>
+</li>
+</ul>
+
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="cmdline.html"
+                        title="previous chapter">Command Line Interface</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="filters.html"
+                        title="next chapter">Filters</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/lexers.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="available-lexers">
+<h1>Available lexers<a class="headerlink" href="#available-lexers" title="Permalink to this headline">¶</a></h1>
+<p>This page lists all available builtin lexers and the options they take.</p>
+<p>Currently, <strong>all lexers</strong> support these options:</p>
+<dl>
+<dt><cite>stripnl</cite></dt><dd><p>Strip leading and trailing newlines from the input (default: <code class="docutils literal notranslate"><span class="pre">True</span></code>).</p>
+</dd>
+<dt><cite>stripall</cite></dt><dd><p>Strip all leading and trailing whitespace from the input (default:
+<code class="docutils literal notranslate"><span class="pre">False</span></code>).</p>
+</dd>
+<dt><cite>ensurenl</cite></dt><dd><p>Make sure that the input ends with a newline (default: <code class="docutils literal notranslate"><span class="pre">True</span></code>).  This
+is required for some lexers that consume input linewise.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.3.</span></p>
+</div>
+</dd>
+<dt><cite>tabsize</cite></dt><dd><p>If given and greater than 0, expand tabs in the input (default: <code class="docutils literal notranslate"><span class="pre">0</span></code>).</p>
+</dd>
+<dt><cite>encoding</cite></dt><dd><p>If given, must be an encoding name (such as <code class="docutils literal notranslate"><span class="pre">&quot;utf-8&quot;</span></code>). This encoding
+will be used to convert the input string to Unicode (if it is not already
+a Unicode string). The default is <code class="docutils literal notranslate"><span class="pre">&quot;guess&quot;</span></code>.</p>
+<p>If this option is set to <code class="docutils literal notranslate"><span class="pre">&quot;guess&quot;</span></code>, a simple UTF-8 vs. Latin-1
+detection is used; if it is set to <code class="docutils literal notranslate"><span class="pre">&quot;chardet&quot;</span></code>, the
+<a class="reference external" href="https://chardet.github.io/">chardet library</a> is used to
+guess the encoding of the input.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+</dd>
+</dl>
+<p>The “Short Names” field lists the identifiers that can be used with the
+<cite>get_lexer_by_name()</cite> function.</p>
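+<p>For example, a lexer can be looked up by one of its short names and given
+any of the options listed above as keyword arguments (a minimal sketch; the
+option values and the sample input are arbitrary):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre>from pygments import highlight
+from pygments.formatters import HtmlFormatter
+from pygments.lexers import get_lexer_by_name
+
+# Look the lexer up by a short name; stripall, tabsize and encoding are
+# the lexer options documented above.
+lexer = get_lexer_by_name('c', stripall=True, tabsize=4, encoding='utf-8')
+
+code = 'int main(void) { return 0; }\n'
+print(highlight(code, lexer, HtmlFormatter()))
+</pre></div></div>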
+<p>These lexers are builtin and can be imported from <cite>pygments.lexers</cite>:</p>
+<span class="target" id="module-pygments.lexers.actionscript"></span><div class="section" id="lexers-for-actionscript-and-mxml">
+<h2>Lexers for ActionScript and MXML<a class="headerlink" href="#lexers-for-actionscript-and-mxml" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.actionscript.ActionScript3Lexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.actionscript.</code><code class="sig-name descname">ActionScript3Lexer</code><a class="headerlink" href="#pygments.lexers.actionscript.ActionScript3Lexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>as3, actionscript3</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.as</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-actionscript3, text/x-actionscript3, text/actionscript3</p>
+</dd>
+</dl>
+<p>For ActionScript 3 source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.11.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.actionscript.ActionScriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.actionscript.</code><code class="sig-name descname">ActionScriptLexer</code><a class="headerlink" href="#pygments.lexers.actionscript.ActionScriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>as, actionscript</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.as</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-actionscript, text/x-actionscript, text/actionscript</p>
+</dd>
+</dl>
+<p>For ActionScript source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.9.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.actionscript.MxmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.actionscript.</code><code class="sig-name descname">MxmlLexer</code><a class="headerlink" href="#pygments.lexers.actionscript.MxmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>mxml</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.mxml</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For MXML markup.
+Nested AS3 in &lt;script&gt; tags is highlighted by the appropriate lexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.algebra"></span></div>
+<div class="section" id="lexers-for-computer-algebra-systems">
+<h2>Lexers for computer algebra systems<a class="headerlink" href="#lexers-for-computer-algebra-systems" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.algebra.BCLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.algebra.</code><code class="sig-name descname">BCLexer</code><a class="headerlink" href="#pygments.lexers.algebra.BCLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>bc</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.bc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A <a class="reference external" href="https://www.gnu.org/software/bc/">BC</a> lexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.algebra.GAPLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.algebra.</code><code class="sig-name descname">GAPLexer</code><a class="headerlink" href="#pygments.lexers.algebra.GAPLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>gap</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.g, *.gd, *.gi, *.gap</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.gap-system.org">GAP</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.algebra.MathematicaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.algebra.</code><code class="sig-name descname">MathematicaLexer</code><a class="headerlink" href="#pygments.lexers.algebra.MathematicaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>mathematica, mma, nb</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.nb, *.cdf, *.nbp, *.ma</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/mathematica, application/vnd.wolfram.mathematica, application/vnd.wolfram.mathematica.package, application/vnd.wolfram.cdf</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://www.wolfram.com/mathematica/">Mathematica</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.algebra.MuPADLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.algebra.</code><code class="sig-name descname">MuPADLexer</code><a class="headerlink" href="#pygments.lexers.algebra.MuPADLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>mupad</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.mu</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A <a class="reference external" href="http://www.mupad.com">MuPAD</a> lexer.
+Contributed by Christopher Creutzig &lt;<a class="reference external" href="mailto:christopher&#37;&#52;&#48;creutzig&#46;de">christopher<span>&#64;</span>creutzig<span>&#46;</span>de</a>&gt;.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.8.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.ambient"></span></div>
+<div class="section" id="lexers-for-ambienttalk-language">
+<h2>Lexers for AmbientTalk language<a class="headerlink" href="#lexers-for-ambienttalk-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.ambient.AmbientTalkLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.ambient.</code><code class="sig-name descname">AmbientTalkLexer</code><a class="headerlink" href="#pygments.lexers.ambient.AmbientTalkLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>at, ambienttalk, ambienttalk/2</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.at</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-ambienttalk</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="https://code.google.com/p/ambienttalk">AmbientTalk</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.ampl"></span></div>
+<div class="section" id="lexers-for-the-ampl-language">
+<h2>Lexers for the AMPL language<a class="headerlink" href="#lexers-for-the-ampl-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.ampl.AmplLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.ampl.</code><code class="sig-name descname">AmplLexer</code><a class="headerlink" href="#pygments.lexers.ampl.AmplLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ampl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.run</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://ampl.com/">AMPL</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.apl"></span></div>
+<div class="section" id="lexers-for-apl">
+<h2>Lexers for APL<a class="headerlink" href="#lexers-for-apl" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.apl.APLLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.apl.</code><code class="sig-name descname">APLLexer</code><a class="headerlink" href="#pygments.lexers.apl.APLLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>apl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.apl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A simple APL lexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.archetype"></span></div>
+<div class="section" id="lexer-for-archetype-related-syntaxes-including">
+<h2>Lexer for Archetype-related syntaxes, including ODIN, ADL and cADL<a class="headerlink" href="#lexer-for-archetype-related-syntaxes-including" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.archetype.AdlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.archetype.</code><code class="sig-name descname">AdlLexer</code><a class="headerlink" href="#pygments.lexers.archetype.AdlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>adl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.adl, *.adls, *.adlf, *.adlx</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for ADL syntax.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.archetype.CadlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.archetype.</code><code class="sig-name descname">CadlLexer</code><a class="headerlink" href="#pygments.lexers.archetype.CadlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cadl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cadl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for cADL syntax.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.archetype.OdinLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.archetype.</code><code class="sig-name descname">OdinLexer</code><a class="headerlink" href="#pygments.lexers.archetype.OdinLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>odin</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.odin</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/odin</p>
+</dd>
+</dl>
+<p>Lexer for ODIN syntax.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.asm"></span></div>
+<div class="section" id="lexers-for-assembly-languages">
+<h2>Lexers for assembly languages<a class="headerlink" href="#lexers-for-assembly-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.asm.CObjdumpLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.asm.</code><code class="sig-name descname">CObjdumpLexer</code><a class="headerlink" href="#pygments.lexers.asm.CObjdumpLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>c-objdump</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.c-objdump</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-c-objdump</p>
+</dd>
+</dl>
+<p>For the output of ‘objdump -Sr’ on compiled C files.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.asm.Ca65Lexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.asm.</code><code class="sig-name descname">Ca65Lexer</code><a class="headerlink" href="#pygments.lexers.asm.Ca65Lexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ca65</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.s</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For ca65 assembler sources.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.asm.CppObjdumpLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.asm.</code><code class="sig-name descname">CppObjdumpLexer</code><a class="headerlink" href="#pygments.lexers.asm.CppObjdumpLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cpp-objdump, c++-objdumb, cxx-objdump</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cpp-objdump, *.c++-objdump, *.cxx-objdump</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-cpp-objdump</p>
+</dd>
+</dl>
+<p>For the output of ‘objdump -Sr’ on compiled C++ files.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.asm.DObjdumpLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.asm.</code><code class="sig-name descname">DObjdumpLexer</code><a class="headerlink" href="#pygments.lexers.asm.DObjdumpLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>d-objdump</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.d-objdump</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-d-objdump</p>
+</dd>
+</dl>
+<p>For the output of ‘objdump -Sr’ on compiled D files.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.asm.Dasm16Lexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.asm.</code><code class="sig-name descname">Dasm16Lexer</code><a class="headerlink" href="#pygments.lexers.asm.Dasm16Lexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>dasm16</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.dasm16, *.dasm</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-dasm16</p>
+</dd>
+</dl>
+<p>Simple lexer for DCPU-16 Assembly.</p>
+<p>Check <a class="reference external" href="http://0x10c.com/doc/dcpu-16.txt">http://0x10c.com/doc/dcpu-16.txt</a>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.asm.GasLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.asm.</code><code class="sig-name descname">GasLexer</code><a class="headerlink" href="#pygments.lexers.asm.GasLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>gas, asm</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.s, *.S</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-gas</p>
+</dd>
+</dl>
+<p>For Gas (AT&amp;T) assembly code.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.asm.HsailLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.asm.</code><code class="sig-name descname">HsailLexer</code><a class="headerlink" href="#pygments.lexers.asm.HsailLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>hsail, hsa</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.hsail</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-hsail</p>
+</dd>
+</dl>
+<p>For HSAIL assembly code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.asm.LlvmLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.asm.</code><code class="sig-name descname">LlvmLexer</code><a class="headerlink" href="#pygments.lexers.asm.LlvmLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>llvm</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ll</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-llvm</p>
+</dd>
+</dl>
+<p>For LLVM assembly code.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.asm.NasmLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.asm.</code><code class="sig-name descname">NasmLexer</code><a class="headerlink" href="#pygments.lexers.asm.NasmLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>nasm</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.asm, *.ASM</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-nasm</p>
+</dd>
+</dl>
+<p>For Nasm (Intel) assembly code.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.asm.NasmObjdumpLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.asm.</code><code class="sig-name descname">NasmObjdumpLexer</code><a class="headerlink" href="#pygments.lexers.asm.NasmObjdumpLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>objdump-nasm</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.objdump-intel</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-nasm-objdump</p>
+</dd>
+</dl>
+<p>For the output of ‘objdump -d -M intel’.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.asm.ObjdumpLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.asm.</code><code class="sig-name descname">ObjdumpLexer</code><a class="headerlink" href="#pygments.lexers.asm.ObjdumpLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>objdump</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.objdump</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-objdump</p>
+</dd>
+</dl>
+<p>For the output of ‘objdump -dr’.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.asm.TasmLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.asm.</code><code class="sig-name descname">TasmLexer</code><a class="headerlink" href="#pygments.lexers.asm.TasmLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>tasm</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.asm, *.ASM, *.tasm</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-tasm</p>
+</dd>
+</dl>
+<p>For Tasm (Turbo Assembler) assembly code.</p>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.automation"></span></div>
+<div class="section" id="lexers-for-automation-scripting-languages">
+<h2>Lexers for automation scripting languages<a class="headerlink" href="#lexers-for-automation-scripting-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.automation.AutoItLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.automation.</code><code class="sig-name descname">AutoItLexer</code><a class="headerlink" href="#pygments.lexers.automation.AutoItLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>autoit</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.au3</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-autoit</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.autoitscript.com/site/autoit/">AutoIt</a> files.</p>
+<p>AutoIt is a freeware BASIC-like scripting language
+designed for automating the Windows GUI and general scripting.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.automation.AutohotkeyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.automation.</code><code class="sig-name descname">AutohotkeyLexer</code><a class="headerlink" href="#pygments.lexers.automation.AutohotkeyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ahk, autohotkey</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ahk, *.ahkl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-autohotkey</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.autohotkey.com/">autohotkey</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.basic"></span></div>
+<div class="section" id="lexers-for-basic-like-languages-other-than-vb-net">
+<h2>Lexers for BASIC-like languages (other than VB.net)<a class="headerlink" href="#lexers-for-basic-like-languages-other-than-vb-net" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.basic.BBCBasicLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.basic.</code><code class="sig-name descname">BBCBasicLexer</code><a class="headerlink" href="#pygments.lexers.basic.BBCBasicLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>bbcbasic</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.bbc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>BBC Basic was supplied on the BBC Micro, and later Acorn RISC OS.
+It is also used by BBC Basic For Windows.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.basic.BlitzBasicLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.basic.</code><code class="sig-name descname">BlitzBasicLexer</code><a class="headerlink" href="#pygments.lexers.basic.BlitzBasicLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>blitzbasic, b3d, bplus</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.bb, *.decls</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-bb</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://blitzbasic.com">BlitzBasic</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.basic.BlitzMaxLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.basic.</code><code class="sig-name descname">BlitzMaxLexer</code><a class="headerlink" href="#pygments.lexers.basic.BlitzMaxLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>blitzmax, bmax</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.bmx</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-bmx</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://blitzbasic.com">BlitzMax</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.basic.CbmBasicV2Lexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.basic.</code><code class="sig-name descname">CbmBasicV2Lexer</code><a class="headerlink" href="#pygments.lexers.basic.CbmBasicV2Lexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cbmbas</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.bas</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For CBM BASIC V2 sources.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.basic.MonkeyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.basic.</code><code class="sig-name descname">MonkeyLexer</code><a class="headerlink" href="#pygments.lexers.basic.MonkeyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>monkey</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.monkey</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-monkey</p>
+</dd>
+</dl>
+<p>For
+<a class="reference external" href="https://en.wikipedia.org/wiki/Monkey_(programming_language)">Monkey</a>
+source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.basic.QBasicLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.basic.</code><code class="sig-name descname">QBasicLexer</code><a class="headerlink" href="#pygments.lexers.basic.QBasicLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>qbasic, basic</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.BAS, *.bas</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/basic</p>
+</dd>
+</dl>
+<p>For
+<a class="reference external" href="http://en.wikipedia.org/wiki/QBasic">QBasic</a>
+source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.basic.VBScriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.basic.</code><code class="sig-name descname">VBScriptLexer</code><a class="headerlink" href="#pygments.lexers.basic.VBScriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>vbscript</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.vbs, *.VBS</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>VBScript is a scripting language that is modeled on Visual Basic.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.bibtex"></span></div>
+<div class="section" id="lexers-for-bibtex-bibliography-data-and-styles">
+<h2>Lexers for BibTeX bibliography data and styles<a class="headerlink" href="#lexers-for-bibtex-bibliography-data-and-styles" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.bibtex.BSTLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.bibtex.</code><code class="sig-name descname">BSTLexer</code><a class="headerlink" href="#pygments.lexers.bibtex.BSTLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>bst, bst-pybtex</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.bst</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A lexer for BibTeX bibliography styles.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.bibtex.BibTeXLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.bibtex.</code><code class="sig-name descname">BibTeXLexer</code><a class="headerlink" href="#pygments.lexers.bibtex.BibTeXLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>bib, bibtex</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.bib</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-bibtex</p>
+</dd>
+</dl>
+<p>A lexer for the BibTeX bibliography data format.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.boa"></span></div>
+<div class="section" id="lexers-for-the-boa-language">
+<h2>Lexers for the Boa language<a class="headerlink" href="#lexers-for-the-boa-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.boa.BoaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.boa.</code><code class="sig-name descname">BoaLexer</code><a class="headerlink" href="#pygments.lexers.boa.BoaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>boa</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.boa</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for the <a class="reference external" href="http://boa.cs.iastate.edu/docs/">Boa</a> language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.business"></span></div>
+<div class="section" id="lexers-for-business-oriented-languages">
+<h2>Lexers for “business-oriented” languages<a class="headerlink" href="#lexers-for-business-oriented-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.business.ABAPLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.business.</code><code class="sig-name descname">ABAPLexer</code><a class="headerlink" href="#pygments.lexers.business.ABAPLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>abap</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.abap, *.ABAP</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-abap</p>
+</dd>
+</dl>
+<p>Lexer for ABAP, SAP’s integrated language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.business.CobolFreeformatLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.business.</code><code class="sig-name descname">CobolFreeformatLexer</code><a class="headerlink" href="#pygments.lexers.business.CobolFreeformatLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cobolfree</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cbl, *.CBL</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for Free format OpenCOBOL code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.business.CobolLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.business.</code><code class="sig-name descname">CobolLexer</code><a class="headerlink" href="#pygments.lexers.business.CobolLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cobol</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cob, *.COB, *.cpy, *.CPY</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-cobol</p>
+</dd>
+</dl>
+<p>Lexer for OpenCOBOL code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.business.GoodDataCLLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.business.</code><code class="sig-name descname">GoodDataCLLexer</code><a class="headerlink" href="#pygments.lexers.business.GoodDataCLLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>gooddata-cl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.gdc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-gooddata-cl</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/com/gooddata/processor/COMMANDS.txt">GoodData-CL</a>
+script files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.business.MaqlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.business.</code><code class="sig-name descname">MaqlLexer</code><a class="headerlink" href="#pygments.lexers.business.MaqlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>maql</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.maql</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-gooddata-maql, application/x-gooddata-maql</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="https://secure.gooddata.com/docs/html/advanced.metric.tutorial.html">GoodData MAQL</a>
+scripts.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.business.OpenEdgeLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.business.</code><code class="sig-name descname">OpenEdgeLexer</code><a class="headerlink" href="#pygments.lexers.business.OpenEdgeLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>openedge, abl, progress</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.p, *.cls</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-openedge, application/x-openedge</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://web.progress.com/en/openedge/abl.html">OpenEdge ABL (formerly Progress)</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.c_cpp"></span></div>
+<div class="section" id="lexers-for-c-c-languages">
+<h2>Lexers for C/C++ languages<a class="headerlink" href="#lexers-for-c-c-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.c_cpp.CLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.c_cpp.</code><code class="sig-name descname">CLexer</code><a class="headerlink" href="#pygments.lexers.c_cpp.CLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>c</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.c, *.h, *.idc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-chdr, text/x-csrc</p>
+</dd>
+</dl>
+<p>For C source code with preprocessor directives.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.c_cpp.CppLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.c_cpp.</code><code class="sig-name descname">CppLexer</code><a class="headerlink" href="#pygments.lexers.c_cpp.CppLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cpp, c++</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cpp, *.hpp, *.c++, *.h++, *.cc, *.hh, *.cxx, *.hxx, *.C, *.H, *.cp, *.CPP</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-c++hdr, text/x-c++src</p>
+</dd>
+</dl>
+<p>For C++ source code with preprocessor directives.</p>
+</dd></dl>
+
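+<p>The “Filenames” and “MIME types” fields listed for each lexer can also be
+used to pick a lexer automatically; a short sketch using the two lexers above
+(the file name is made up):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre>from pygments.lexers import get_lexer_for_filename, get_lexer_for_mimetype
+from pygments.lexers.c_cpp import CLexer, CppLexer
+
+# "*.cpp" appears in CppLexer's Filenames field, so this resolves to CppLexer.
+assert isinstance(get_lexer_for_filename('example.cpp'), CppLexer)
+
+# "text/x-csrc" appears in CLexer's MIME types field.
+assert isinstance(get_lexer_for_mimetype('text/x-csrc'), CLexer)
+</pre></div></div>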
+<span class="target" id="module-pygments.lexers.c_like"></span></div>
+<div class="section" id="lexers-for-other-c-like-languages">
+<h2>Lexers for other C-like languages<a class="headerlink" href="#lexers-for-other-c-like-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.c_like.ArduinoLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.c_like.</code><code class="sig-name descname">ArduinoLexer</code><a class="headerlink" href="#pygments.lexers.c_like.ArduinoLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>arduino</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ino</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-arduino</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://arduino.cc/">Arduino(tm)</a> source.</p>
+<p>This is an extension of the CppLexer, as the Arduino® Language is a superset
+of C++.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.c_like.CharmciLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.c_like.</code><code class="sig-name descname">CharmciLexer</code><a class="headerlink" href="#pygments.lexers.c_like.CharmciLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>charmci</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ci</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://charm.cs.illinois.edu">Charm++</a> interface files (.ci).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.c_like.ClayLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.c_like.</code><code class="sig-name descname">ClayLexer</code><a class="headerlink" href="#pygments.lexers.c_like.ClayLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>clay</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.clay</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-clay</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://claylabs.com/clay/">Clay</a> source.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.c_like.CudaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.c_like.</code><code class="sig-name descname">CudaLexer</code><a class="headerlink" href="#pygments.lexers.c_like.CudaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cuda, cu</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cu, *.cuh</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-cuda</p>
+</dd>
+</dl>
+<p>For NVIDIA <a class="reference external" href="http://developer.nvidia.com/category/zone/cuda-zone">CUDA™</a>
+source.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.c_like.ECLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.c_like.</code><code class="sig-name descname">ECLexer</code><a class="headerlink" href="#pygments.lexers.c_like.ECLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ec</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ec, *.eh</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-echdr, text/x-ecsrc</p>
+</dd>
+</dl>
+<p>For eC source code with preprocessor directives.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.c_like.MqlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.c_like.</code><code class="sig-name descname">MqlLexer</code><a class="headerlink" href="#pygments.lexers.c_like.MqlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>mql, mq4, mq5, mql4, mql5</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.mq4, *.mq5, *.mqh</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-mql</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://docs.mql4.com/">MQL4</a> and
+<a class="reference external" href="http://www.mql5.com/en/docs">MQL5</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.c_like.NesCLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.c_like.</code><code class="sig-name descname">NesCLexer</code><a class="headerlink" href="#pygments.lexers.c_like.NesCLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>nesc</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.nc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-nescsrc</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://github.com/tinyos/nesc">nesC</a> source code with preprocessor
+directives.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.c_like.PikeLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.c_like.</code><code class="sig-name descname">PikeLexer</code><a class="headerlink" href="#pygments.lexers.c_like.PikeLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>pike</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.pike, *.pmod</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-pike</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://pike.lysator.liu.se/">Pike</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.c_like.SwigLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.c_like.</code><code class="sig-name descname">SwigLexer</code><a class="headerlink" href="#pygments.lexers.c_like.SwigLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>swig</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.swg, *.i</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/swig</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.swig.org/">SWIG</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.c_like.ValaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.c_like.</code><code class="sig-name descname">ValaLexer</code><a class="headerlink" href="#pygments.lexers.c_like.ValaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>vala, vapi</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.vala, *.vapi</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-vala</p>
+</dd>
+</dl>
+<p>For Vala source code with preprocessor directives.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.capnproto"></span></div>
+<div class="section" id="lexers-for-the-cap-n-proto-schema-language">
+<h2>Lexers for the Cap’n Proto schema language<a class="headerlink" href="#lexers-for-the-cap-n-proto-schema-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.capnproto.CapnProtoLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.capnproto.</code><code class="sig-name descname">CapnProtoLexer</code><a class="headerlink" href="#pygments.lexers.capnproto.CapnProtoLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>capnp</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.capnp</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://capnproto.org">Cap’n Proto</a> source.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.chapel"></span></div>
+<div class="section" id="lexer-for-the-chapel-language">
+<h2>Lexer for the Chapel language<a class="headerlink" href="#lexer-for-the-chapel-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.chapel.ChapelLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.chapel.</code><code class="sig-name descname">ChapelLexer</code><a class="headerlink" href="#pygments.lexers.chapel.ChapelLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>chapel, chpl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.chpl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://chapel.cray.com/">Chapel</a> source.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.clean"></span></div>
+<div class="section" id="lexer-for-the-clean-language">
+<h2>Lexer for the Clean language<a class="headerlink" href="#lexer-for-the-clean-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.clean.CleanLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.clean.</code><code class="sig-name descname">CleanLexer</code><a class="headerlink" href="#pygments.lexers.clean.CleanLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>clean</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.icl, *.dcl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for the general purpose, state-of-the-art, pure and lazy functional
+programming language Clean (<a class="reference external" href="http://clean.cs.ru.nl/Clean">http://clean.cs.ru.nl/Clean</a>).</p>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.configs"></span></div>
+<div class="section" id="lexers-for-configuration-file-formats">
+<h2>Lexers for configuration file formats<a class="headerlink" href="#lexers-for-configuration-file-formats" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.configs.ApacheConfLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">ApacheConfLexer</code><a class="headerlink" href="#pygments.lexers.configs.ApacheConfLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>apacheconf, aconf, apache</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>.htaccess, apache.conf, apache2.conf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-apacheconf</p>
+</dd>
+</dl>
+<p>Lexer for configuration files following the Apache config file
+format.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.configs.AugeasLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">AugeasLexer</code><a class="headerlink" href="#pygments.lexers.configs.AugeasLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>augeas</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.aug</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://augeas.net">Augeas</a>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.configs.Cfengine3Lexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">Cfengine3Lexer</code><a class="headerlink" href="#pygments.lexers.configs.Cfengine3Lexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cfengine3, cf3</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://cfengine.org">CFEngine3</a> policy files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.configs.DockerLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">DockerLexer</code><a class="headerlink" href="#pygments.lexers.configs.DockerLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>docker, dockerfile</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>Dockerfile, *.docker</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-dockerfile-config</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://docker.io">Docker</a> configuration files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.configs.IniLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">IniLexer</code><a class="headerlink" href="#pygments.lexers.configs.IniLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ini, cfg, dosini</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ini, *.cfg, *.inf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-ini, text/inf</p>
+</dd>
+</dl>
+<p>Lexer for configuration files in INI style.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.configs.KconfigLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">KconfigLexer</code><a class="headerlink" href="#pygments.lexers.configs.KconfigLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>kconfig, menuconfig, linux-config, kernel-config</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>Kconfig, *Config.in*, external.in*, standard-modules.in</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-kconfig</p>
+</dd>
+</dl>
+<p>For Linux-style Kconfig files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.configs.LighttpdConfLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">LighttpdConfLexer</code><a class="headerlink" href="#pygments.lexers.configs.LighttpdConfLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>lighty, lighttpd</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-lighttpd-conf</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://lighttpd.net/">Lighttpd</a> configuration files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.11.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.configs.NginxConfLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">NginxConfLexer</code><a class="headerlink" href="#pygments.lexers.configs.NginxConfLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>nginx</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>nginx.conf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-nginx-conf</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://nginx.net/">Nginx</a> configuration files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.11.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.configs.PacmanConfLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">PacmanConfLexer</code><a class="headerlink" href="#pygments.lexers.configs.PacmanConfLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>pacmanconf</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>pacman.conf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="https://www.archlinux.org/pacman/pacman.conf.5.html">pacman.conf</a>.</p>
+<p>IniLexer works almost fine for this format, but it yields error
+tokens, because pacman.conf can contain bare flags without an
+assignment, such as:</p>
+<blockquote>
+<div><p>UseSyslog
+Color
+TotalDownload
+CheckSpace
+VerbosePkgLists</p>
+</div></blockquote>
+<p>These are flags to switch on.</p>
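+<p>As a minimal sketch (the configuration fragment below is illustrative only),
+the following picks this lexer by its short name and highlights a section
+containing such bare flags:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre>from pygments import highlight
+from pygments.formatters import TerminalFormatter
+from pygments.lexers import get_lexer_by_name
+
+# Illustrative pacman.conf fragment with bare flags (no assignment).
+conf = """[options]
+HoldPkg = pacman glibc
+Color
+CheckSpace
+"""
+
+# 'pacmanconf' is the short name listed above; IniLexer would emit
+# error tokens for the bare Color and CheckSpace lines.
+print(highlight(conf, get_lexer_by_name('pacmanconf'), TerminalFormatter()))
+</pre></div>
+</div>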
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.configs.PkgConfigLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">PkgConfigLexer</code><a class="headerlink" href="#pygments.lexers.configs.PkgConfigLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>pkgconfig</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.pc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://www.freedesktop.org/wiki/Software/pkg-config/">pkg-config</a>
+(see also <a class="reference external" href="http://linux.die.net/man/1/pkg-config">manual page</a>).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.configs.PropertiesLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">PropertiesLexer</code><a class="headerlink" href="#pygments.lexers.configs.PropertiesLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>properties, jproperties</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.properties</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-java-properties</p>
+</dd>
+</dl>
+<p>Lexer for configuration files in Java’s properties format.</p>
+<p>Note: trailing whitespace counts as part of the value, as per the spec.</p>
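+<p>One way to observe this (a sketch using the generic lexer API, with an
+invented key/value pair) is to print the token stream for a line that ends
+in spaces:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre>from pygments.lexers.configs import PropertiesLexer
+
+# Per the note above, the trailing spaces belong to the value;
+# inspecting the token stream shows where they end up.
+for token, text in PropertiesLexer().get_tokens('greeting=hello world   \n'):
+    print(token, repr(text))
+</pre></div>
+</div>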
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.configs.RegeditLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">RegeditLexer</code><a class="headerlink" href="#pygments.lexers.configs.RegeditLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>registry</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.reg</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-windows-registry</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://en.wikipedia.org/wiki/Windows_Registry#.REG_files">Windows Registry</a> files produced
+by regedit.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.configs.SquidConfLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">SquidConfLexer</code><a class="headerlink" href="#pygments.lexers.configs.SquidConfLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>squidconf, squid.conf, squid</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>squid.conf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-squidconf</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://www.squid-cache.org/">squid</a> configuration files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.9.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.configs.TOMLLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">TOMLLexer</code><a class="headerlink" href="#pygments.lexers.configs.TOMLLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>toml</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.toml</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="https://github.com/toml-lang/toml">TOML</a>, a simple language
+for config files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.configs.TermcapLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">TermcapLexer</code><a class="headerlink" href="#pygments.lexers.configs.TermcapLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>termcap</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>termcap, termcap.src</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for termcap database source.</p>
+<p>This is very simple and minimal.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.configs.TerminfoLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">TerminfoLexer</code><a class="headerlink" href="#pygments.lexers.configs.TerminfoLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>terminfo</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>terminfo, terminfo.src</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for terminfo database source.</p>
+<p>This is very simple and minimal.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.configs.TerraformLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.configs.</code><code class="sig-name descname">TerraformLexer</code><a class="headerlink" href="#pygments.lexers.configs.TerraformLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>terraform, tf</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.tf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-tf, application/x-terraform</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="https://www.terraform.io/">terraformi .tf files</a>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.console"></span></div>
+<div class="section" id="lexers-for-misc-console-output">
+<h2>Lexers for misc console output<a class="headerlink" href="#lexers-for-misc-console-output" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.console.PyPyLogLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.console.</code><code class="sig-name descname">PyPyLogLexer</code><a class="headerlink" href="#pygments.lexers.console.PyPyLogLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>pypylog, pypy</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.pypylog</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-pypylog</p>
+</dd>
+</dl>
+<p>Lexer for PyPy log files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.console.VCTreeStatusLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.console.</code><code class="sig-name descname">VCTreeStatusLexer</code><a class="headerlink" href="#pygments.lexers.console.VCTreeStatusLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>vctreestatus</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For colorizing output of version control status commands, like “hg
+status” or “svn status”.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.crystal"></span></div>
+<div class="section" id="lexer-for-crystal">
+<h2>Lexer for Crystal<a class="headerlink" href="#lexer-for-crystal" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.crystal.CrystalLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.crystal.</code><code class="sig-name descname">CrystalLexer</code><a class="headerlink" href="#pygments.lexers.crystal.CrystalLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cr, crystal</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cr</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-crystal</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://crystal-lang.org">Crystal</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.csound"></span></div>
+<div class="section" id="lexers-for-csound-languages">
+<h2>Lexers for Csound languages<a class="headerlink" href="#lexers-for-csound-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.csound.CsoundDocumentLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.csound.</code><code class="sig-name descname">CsoundDocumentLexer</code><a class="headerlink" href="#pygments.lexers.csound.CsoundDocumentLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>csound-document, csound-csd</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.csd</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://csound.com">Csound</a> documents.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.csound.CsoundOrchestraLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.csound.</code><code class="sig-name descname">CsoundOrchestraLexer</code><a class="headerlink" href="#pygments.lexers.csound.CsoundOrchestraLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>csound, csound-orc</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.orc, *.udo</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://csound.com">Csound</a> orchestras.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.csound.CsoundScoreLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.csound.</code><code class="sig-name descname">CsoundScoreLexer</code><a class="headerlink" href="#pygments.lexers.csound.CsoundScoreLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>csound-score, csound-sco</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sco</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://csound.com">Csound</a> scores.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.css"></span></div>
+<div class="section" id="lexers-for-css-and-related-stylesheet-formats">
+<h2>Lexers for CSS and related stylesheet formats<a class="headerlink" href="#lexers-for-css-and-related-stylesheet-formats" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.css.CssLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.css.</code><code class="sig-name descname">CssLexer</code><a class="headerlink" href="#pygments.lexers.css.CssLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>css</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.css</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/css</p>
+</dd>
+</dl>
+<p>For CSS (Cascading Style Sheets).</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.css.LessCssLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.css.</code><code class="sig-name descname">LessCssLexer</code><a class="headerlink" href="#pygments.lexers.css.LessCssLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>less</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.less</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-less-css</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://lesscss.org/">LESS</a> styleshets.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.css.SassLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.css.</code><code class="sig-name descname">SassLexer</code><a class="headerlink" href="#pygments.lexers.css.SassLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>sass</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sass</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-sass</p>
+</dd>
+</dl>
+<p>For Sass stylesheets.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.3.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.css.ScssLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.css.</code><code class="sig-name descname">ScssLexer</code><a class="headerlink" href="#pygments.lexers.css.ScssLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>scss</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.scss</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-scss</p>
+</dd>
+</dl>
+<p>For SCSS stylesheets.</p>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.d"></span></div>
+<div class="section" id="lexers-for-d-languages">
+<h2>Lexers for D languages<a class="headerlink" href="#lexers-for-d-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.d.CrocLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.d.</code><code class="sig-name descname">CrocLexer</code><a class="headerlink" href="#pygments.lexers.d.CrocLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>croc</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.croc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-crocsrc</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://jfbillingsley.com/croc">Croc</a> source.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.d.DLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.d.</code><code class="sig-name descname">DLexer</code><a class="headerlink" href="#pygments.lexers.d.DLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>d</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.d, *.di</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-dsrc</p>
+</dd>
+</dl>
+<p>For D source.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.d.MiniDLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.d.</code><code class="sig-name descname">MiniDLexer</code><a class="headerlink" href="#pygments.lexers.d.MiniDLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>minid</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-minidsrc</p>
+</dd>
+</dl>
+<p>For MiniD source. MiniD is now known as Croc.</p>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.dalvik"></span></div>
+<div class="section" id="pygments-lexers-for-dalvik-vm-related-languages">
+<h2>Pygments lexers for Dalvik VM-related languages<a class="headerlink" href="#pygments-lexers-for-dalvik-vm-related-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.dalvik.SmaliLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dalvik.</code><code class="sig-name descname">SmaliLexer</code><a class="headerlink" href="#pygments.lexers.dalvik.SmaliLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>smali</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.smali</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/smali</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://code.google.com/p/smali/">Smali</a> (Android/Dalvik) assembly
+code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.data"></span></div>
+<div class="section" id="lexers-for-data-file-format">
+<h2>Lexers for data file formats<a class="headerlink" href="#lexers-for-data-file-format" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.data.JsonBareObjectLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.data.</code><code class="sig-name descname">JsonBareObjectLexer</code><a class="headerlink" href="#pygments.lexers.data.JsonBareObjectLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>json-object</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/json-object</p>
+</dd>
+</dl>
+<p>For JSON data structures (with missing object curly braces).</p>
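+<p>A small sketch of what such a “bare” object body looks like and how to pick
+this lexer by its short name (the fragment is illustrative only):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre>from pygments import highlight
+from pygments.formatters import HtmlFormatter
+from pygments.lexers import get_lexer_by_name
+
+# Key/value pairs without the surrounding curly braces.
+fragment = '"name": "pygments", "release": "2.5.1"'
+
+print(highlight(fragment, get_lexer_by_name('json-object'), HtmlFormatter()))
+</pre></div>
+</div>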
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.data.JsonLdLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.data.</code><code class="sig-name descname">JsonLdLexer</code><a class="headerlink" href="#pygments.lexers.data.JsonLdLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>jsonld, json-ld</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.jsonld</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/ld+json</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://json-ld.org/">JSON-LD</a> linked data.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.data.JsonLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.data.</code><code class="sig-name descname">JsonLexer</code><a class="headerlink" href="#pygments.lexers.data.JsonLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>json</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.json</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/json</p>
+</dd>
+</dl>
+<p>For JSON data structures.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.data.YamlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.data.</code><code class="sig-name descname">YamlLexer</code><a class="headerlink" href="#pygments.lexers.data.YamlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>yaml</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.yaml, *.yml</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-yaml</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://yaml.org/">YAML</a>, a human-friendly data serialization
+language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.11.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.diff"></span></div>
+<div class="section" id="lexers-for-diff-patch-formats">
+<h2>Lexers for diff/patch formats<a class="headerlink" href="#lexers-for-diff-patch-formats" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.diff.DarcsPatchLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.diff.</code><code class="sig-name descname">DarcsPatchLexer</code><a class="headerlink" href="#pygments.lexers.diff.DarcsPatchLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>dpatch</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.dpatch, *.darcspatch</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>DarcsPatchLexer is a lexer for the various versions of the darcs patch
+format.  Examples of this format are produced by commands such as
+<code class="docutils literal notranslate"><span class="pre">darcs</span> <span class="pre">annotate</span> <span class="pre">--patch</span></code> and <code class="docutils literal notranslate"><span class="pre">darcs</span> <span class="pre">send</span></code>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.10.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.diff.DiffLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.diff.</code><code class="sig-name descname">DiffLexer</code><a class="headerlink" href="#pygments.lexers.diff.DiffLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>diff, udiff</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.diff, *.patch</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-diff, text/x-patch</p>
+</dd>
+</dl>
+<p>Lexer for unified or context-style diffs or patches.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.diff.WDiffLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.diff.</code><code class="sig-name descname">WDiffLexer</code><a class="headerlink" href="#pygments.lexers.diff.WDiffLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>wdiff</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.wdiff</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A <a class="reference external" href="https://www.gnu.org/software/wdiff/">wdiff</a> lexer.</p>
+<p>Note that:</p>
+<ul class="simple">
+<li><p>it only handles normal wdiff output (without options such as -l),
+as in the sketch below.</p></li>
+<li><p>if the compared files themselves contain “[-”, “-]”, “{+” or “+}”,
+especially when they are unbalanced, this lexer can get confused.</p></li>
+</ul>
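+<p>A minimal sketch of the intended input, using the plain-text output that
+wdiff prints by default (the sentence is made up for illustration):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre>from pygments import highlight
+from pygments.formatters import HtmlFormatter
+from pygments.lexers import get_lexer_by_name
+
+# Normal wdiff output: deletions in [-...-], insertions in {+...+}.
+sample = 'The quick [-brown-] {+red+} fox jumps over the lazy dog.\n'
+
+print(highlight(sample, get_lexer_by_name('wdiff'), HtmlFormatter()))
+</pre></div>
+</div>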
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.dotnet"></span></div>
+<div class="section" id="lexers-for-net-languages">
+<h2>Lexers for .NET languages<a class="headerlink" href="#lexers-for-net-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.dotnet.BooLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dotnet.</code><code class="sig-name descname">BooLexer</code><a class="headerlink" href="#pygments.lexers.dotnet.BooLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>boo</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.boo</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-boo</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://boo.codehaus.org/">Boo</a> source code.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dotnet.CSharpAspxLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dotnet.</code><code class="sig-name descname">CSharpAspxLexer</code><a class="headerlink" href="#pygments.lexers.dotnet.CSharpAspxLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>aspx-cs</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.aspx, *.asax, *.ascx, *.ashx, *.asmx, *.axd</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for highlighting C# within ASP.NET pages.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dotnet.CSharpLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dotnet.</code><code class="sig-name descname">CSharpLexer</code><a class="headerlink" href="#pygments.lexers.dotnet.CSharpLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>csharp, c#</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cs</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-csharp</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://msdn2.microsoft.com/en-us/vcsharp/default.aspx">C#</a>
+source code.</p>
+<p>Additional options accepted:</p>
+<dl>
+<dt><cite>unicodelevel</cite></dt><dd><p>Determines which Unicode characters this lexer allows for identifiers.
+The possible values are:</p>
+<ul class="simple">
+<li><p><code class="docutils literal notranslate"><span class="pre">none</span></code> – only the ASCII letters and numbers are allowed. This
+is the fastest selection.</p></li>
+<li><p><code class="docutils literal notranslate"><span class="pre">basic</span></code> – all Unicode characters from the specification except
+category <code class="docutils literal notranslate"><span class="pre">Lo</span></code> are allowed.</p></li>
+<li><p><code class="docutils literal notranslate"><span class="pre">full</span></code> – all Unicode characters as specified in the C# specs
+are allowed.  Note that this means a considerable slowdown since the
+<code class="docutils literal notranslate"><span class="pre">Lo</span></code> category has more than 40,000 characters in it!</p></li>
+</ul>
+<p>The default value is <code class="docutils literal notranslate"><span class="pre">basic</span></code>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.8.</span></p>
+</div>
+</dd>
+</dl>
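+<p>Lexer options such as <cite>unicodelevel</cite> are passed as keyword arguments when
+constructing the lexer; a minimal sketch (with a made-up C# snippet):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre>from pygments import highlight
+from pygments.formatters import TerminalFormatter
+from pygments.lexers.dotnet import CSharpLexer
+
+code = 'class Program { static void Main() { } }'
+
+# unicodelevel='none' restricts identifiers to ASCII (the fastest setting);
+# the default described above is 'basic'.
+lexer = CSharpLexer(unicodelevel='none')
+print(highlight(code, lexer, TerminalFormatter()))
+</pre></div>
+</div>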
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dotnet.FSharpLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dotnet.</code><code class="sig-name descname">FSharpLexer</code><a class="headerlink" href="#pygments.lexers.dotnet.FSharpLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>fsharp, f#</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.fs, *.fsi</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-fsharp</p>
+</dd>
+</dl>
+<p>For the <a class="reference external" href="https://fsharp.org/">F# language</a> (version 3.0).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dotnet.NemerleLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dotnet.</code><code class="sig-name descname">NemerleLexer</code><a class="headerlink" href="#pygments.lexers.dotnet.NemerleLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>nemerle</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.n</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-nemerle</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://nemerle.org">Nemerle</a> source code.</p>
+<p>Additional options accepted:</p>
+<dl>
+<dt><cite>unicodelevel</cite></dt><dd><p>Determines which Unicode characters this lexer allows for identifiers.
+The possible values are:</p>
+<ul class="simple">
+<li><p><code class="docutils literal notranslate"><span class="pre">none</span></code> – only the ASCII letters and numbers are allowed. This
+is the fastest selection.</p></li>
+<li><p><code class="docutils literal notranslate"><span class="pre">basic</span></code> – all Unicode characters from the specification except
+category <code class="docutils literal notranslate"><span class="pre">Lo</span></code> are allowed.</p></li>
+<li><p><code class="docutils literal notranslate"><span class="pre">full</span></code> – all Unicode characters as specified in the C# specs
+are allowed.  Note that this means a considerable slowdown since the
+<code class="docutils literal notranslate"><span class="pre">Lo</span></code> category has more than 40,000 characters in it!</p></li>
+</ul>
+<p>The default value is <code class="docutils literal notranslate"><span class="pre">basic</span></code>.</p>
+</dd>
+</dl>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dotnet.VbNetAspxLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dotnet.</code><code class="sig-name descname">VbNetAspxLexer</code><a class="headerlink" href="#pygments.lexers.dotnet.VbNetAspxLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>aspx-vb</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.aspx, *.asax, *.ascx, *.ashx, *.asmx, *.axd</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for highlighting Visual Basic.NET within ASP.NET pages.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dotnet.VbNetLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dotnet.</code><code class="sig-name descname">VbNetLexer</code><a class="headerlink" href="#pygments.lexers.dotnet.VbNetLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>vb.net, vbnet</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.vb, *.bas</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-vbnet, text/x-vba</p>
+</dd>
+</dl>
+<p>For
+<a class="reference external" href="http://msdn2.microsoft.com/en-us/vbasic/default.aspx">Visual Basic.NET</a>
+source code.</p>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.dsls"></span></div>
+<div class="section" id="lexers-for-various-domain-specific-languages">
+<h2>Lexers for various domain-specific languages<a class="headerlink" href="#lexers-for-various-domain-specific-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.dsls.AlloyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dsls.</code><code class="sig-name descname">AlloyLexer</code><a class="headerlink" href="#pygments.lexers.dsls.AlloyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>alloy</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.als</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-alloy</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://alloy.mit.edu">Alloy</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dsls.CrmshLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dsls.</code><code class="sig-name descname">CrmshLexer</code><a class="headerlink" href="#pygments.lexers.dsls.CrmshLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>crmsh, pcmk</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.crmsh, *.pcmk</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://crmsh.github.io/">crmsh</a> configuration files
+for Pacemaker clusters.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dsls.FlatlineLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dsls.</code><code class="sig-name descname">FlatlineLexer</code><a class="headerlink" href="#pygments.lexers.dsls.FlatlineLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>flatline</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-flatline</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="https://github.com/bigmlcom/flatline">Flatline</a> expressions.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dsls.MscgenLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dsls.</code><code class="sig-name descname">MscgenLexer</code><a class="headerlink" href="#pygments.lexers.dsls.MscgenLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>mscgen, msc</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.msc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.mcternan.me.uk/mscgen/">Mscgen</a> files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dsls.PanLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dsls.</code><code class="sig-name descname">PanLexer</code><a class="headerlink" href="#pygments.lexers.dsls.PanLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>pan</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.pan</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://github.com/quattor/pan/">pan</a> source files.</p>
+<p>Based on the tcsh lexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dsls.ProtoBufLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dsls.</code><code class="sig-name descname">ProtoBufLexer</code><a class="headerlink" href="#pygments.lexers.dsls.ProtoBufLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>protobuf, proto</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.proto</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://code.google.com/p/protobuf/">Protocol Buffer</a>
+definition files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dsls.PuppetLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dsls.</code><code class="sig-name descname">PuppetLexer</code><a class="headerlink" href="#pygments.lexers.dsls.PuppetLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>puppet</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.pp</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://puppetlabs.com/">Puppet</a> configuration DSL.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dsls.RslLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dsls.</code><code class="sig-name descname">RslLexer</code><a class="headerlink" href="#pygments.lexers.dsls.RslLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>rsl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rsl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/rsl</p>
+</dd>
+</dl>
+<p><a class="reference external" href="http://en.wikipedia.org/wiki/RAISE">RSL</a> is the formal specification
+language used in RAISE (Rigorous Approach to Industrial Software Engineering)
+method.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dsls.SnowballLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dsls.</code><code class="sig-name descname">SnowballLexer</code><a class="headerlink" href="#pygments.lexers.dsls.SnowballLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>snowball</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sbl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://snowballstem.org/">Snowball</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dsls.ThriftLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dsls.</code><code class="sig-name descname">ThriftLexer</code><a class="headerlink" href="#pygments.lexers.dsls.ThriftLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>thrift</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.thrift</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-thrift</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://thrift.apache.org/">Thrift</a> interface definitions.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dsls.VGLLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dsls.</code><code class="sig-name descname">VGLLexer</code><a class="headerlink" href="#pygments.lexers.dsls.VGLLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>vgl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rpf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.thermoscientific.com/samplemanager">SampleManager VGL</a>
+source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dsls.ZeekLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dsls.</code><code class="sig-name descname">ZeekLexer</code><a class="headerlink" href="#pygments.lexers.dsls.ZeekLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>zeek, bro</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.zeek, *.bro</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://www.zeek.org/">Zeek</a> scripts.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.5.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.dylan"></span></div>
+<div class="section" id="lexers-for-the-dylan-language">
+<h2>Lexers for the Dylan language<a class="headerlink" href="#lexers-for-the-dylan-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.dylan.DylanConsoleLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dylan.</code><code class="sig-name descname">DylanConsoleLexer</code><a class="headerlink" href="#pygments.lexers.dylan.DylanConsoleLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>dylan-console, dylan-repl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.dylan-console</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-dylan-console</p>
+</dd>
+</dl>
+<p>For Dylan interactive console output like:</p>
+<div class="highlight-dylan-console notranslate"><div class="highlight"><pre><span></span><span class="gp">?</span> <span class="k">let</span> <span class="n">a</span> <span class="o">=</span> <span class="mi">1</span><span class="p">;</span>
+<span class="go">=&gt; 1</span>
+<span class="gp">?</span> <span class="n">a</span>
+<span class="go">=&gt; 1</span>
+</pre></div>
+</div>
+<p>This is based on a copy of the RubyConsoleLexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dylan.DylanLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dylan.</code><code class="sig-name descname">DylanLexer</code><a class="headerlink" href="#pygments.lexers.dylan.DylanLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>dylan</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.dylan, *.dyl, *.intr</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-dylan</p>
+</dd>
+</dl>
+<p>For the <a class="reference external" href="http://www.opendylan.org/">Dylan</a> language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.dylan.DylanLidLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.dylan.</code><code class="sig-name descname">DylanLidLexer</code><a class="headerlink" href="#pygments.lexers.dylan.DylanLidLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>dylan-lid, lid</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.lid, *.hdp</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-dylan-lid</p>
+</dd>
+</dl>
+<p>For Dylan LID (Library Interchange Definition) files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.ecl"></span></div>
+<div class="section" id="lexers-for-the-ecl-language">
+<h2>Lexers for the ECL language<a class="headerlink" href="#lexers-for-the-ecl-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.ecl.ECLLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.ecl.</code><code class="sig-name descname">ECLLexer</code><a class="headerlink" href="#pygments.lexers.ecl.ECLLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ecl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ecl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-ecl</p>
+</dd>
+</dl>
+<p>Lexer for the declarative big-data <a class="reference external" href="http://hpccsystems.com/community/docs/ecl-language-reference/html">ECL</a>
+language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.eiffel"></span></div>
+<div class="section" id="lexer-for-the-eiffel-language">
+<h2>Lexer for the Eiffel language<a class="headerlink" href="#lexer-for-the-eiffel-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.eiffel.EiffelLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.eiffel.</code><code class="sig-name descname">EiffelLexer</code><a class="headerlink" href="#pygments.lexers.eiffel.EiffelLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>eiffel</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.e</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-eiffel</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.eiffel.com">Eiffel</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.elm"></span></div>
+<div class="section" id="lexer-for-the-elm-programming-language">
+<h2>Lexer for the Elm programming language<a class="headerlink" href="#lexer-for-the-elm-programming-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.elm.ElmLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.elm.</code><code class="sig-name descname">ElmLexer</code><a class="headerlink" href="#pygments.lexers.elm.ElmLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>elm</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.elm</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-elm</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://elm-lang.org/">Elm</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.email"></span></div>
+<div class="section" id="lexer-for-the-raw-e-mail">
+<h2>Lexer for raw E-mail<a class="headerlink" href="#lexer-for-the-raw-e-mail" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.email.EmailLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.email.</code><code class="sig-name descname">EmailLexer</code><a class="headerlink" href="#pygments.lexers.email.EmailLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>email, eml</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.eml</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>message/rfc822</p>
+</dd>
+</dl>
+<p>Lexer for raw E-mail.</p>
+<p>Additional options accepted:</p>
+<dl class="simple">
+<dt><cite>highlight-X-header</cite></dt><dd><p>Highlight the fields of user-defined <code class="docutils literal notranslate"><span class="pre">X-</span></code> email headers
+(default: <code class="docutils literal notranslate"><span class="pre">False</span></code>).</p>
+</dd>
+</dl>
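+<p>A minimal usage sketch (not part of the upstream docstring; it assumes only that Pygments is installed). Because the option name contains hyphens, it cannot be written as a plain keyword argument and is passed via dictionary unpacking:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.formatters import TerminalFormatter
+from pygments.lexers import EmailLexer
+
+# Placeholder message text; any raw RFC 822 e-mail works here.
+message = "Subject: hello\nX-Mailer: example\n\nbody text\n"
+
+# The option key contains hyphens, so it is passed through dict unpacking.
+lexer = EmailLexer(**{"highlight-X-header": True})
+print(highlight(message, lexer, TerminalFormatter()))
+</pre></div></div>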
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.5.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.erlang"></span></div>
+<div class="section" id="lexers-for-erlang">
+<h2>Lexers for Erlang<a class="headerlink" href="#lexers-for-erlang" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.erlang.ElixirConsoleLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.erlang.</code><code class="sig-name descname">ElixirConsoleLexer</code><a class="headerlink" href="#pygments.lexers.erlang.ElixirConsoleLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>iex</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-elixir-shellsession</p>
+</dd>
+</dl>
+<p>For Elixir interactive console (iex) output like:</p>
+<div class="highlight-iex notranslate"><div class="highlight"><pre><span></span><span class="gp">iex&gt; </span><span class="p">[</span><span class="n">head</span> <span class="o">|</span> <span class="n">tail</span><span class="p">]</span> <span class="o">=</span> <span class="p">[</span><span class="mi">1</span><span class="p">,</span><span class="mi">2</span><span class="p">,</span><span class="mi">3</span><span class="p">]</span>
+<span class="go">[1,2,3]</span>
+<span class="gp">iex&gt; </span><span class="n">head</span>
+<span class="go">1</span>
+<span class="gp">iex&gt; </span><span class="n">tail</span>
+<span class="go">[2,3]</span>
+<span class="gp">iex&gt; </span><span class="p">[</span><span class="n">head</span> <span class="o">|</span> <span class="n">tail</span><span class="p">]</span>
+<span class="go">[1,2,3]</span>
+<span class="gp">iex&gt; </span><span class="n">length</span> <span class="p">[</span><span class="n">head</span> <span class="o">|</span> <span class="n">tail</span><span class="p">]</span>
+<span class="go">3</span>
+</pre></div>
+</div>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.erlang.ElixirLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.erlang.</code><code class="sig-name descname">ElixirLexer</code><a class="headerlink" href="#pygments.lexers.erlang.ElixirLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>elixir, ex, exs</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ex, *.exs</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-elixir</p>
+</dd>
+</dl>
+<p>For the <a class="reference external" href="http://elixir-lang.org">Elixir language</a>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.erlang.ErlangLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.erlang.</code><code class="sig-name descname">ErlangLexer</code><a class="headerlink" href="#pygments.lexers.erlang.ErlangLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>erlang</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.erl, *.hrl, *.es, *.escript</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-erlang</p>
+</dd>
+</dl>
+<p>For the Erlang functional programming language.</p>
+<p>Blame Jeremy Thurgood (<a class="reference external" href="http://jerith.za.net/">http://jerith.za.net/</a>).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.9.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.erlang.ErlangShellLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.erlang.</code><code class="sig-name descname">ErlangShellLexer</code><a class="headerlink" href="#pygments.lexers.erlang.ErlangShellLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>erl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.erl-sh</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-erl-shellsession</p>
+</dd>
+</dl>
+<p>Shell sessions in erl (for Erlang code).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.esoteric"></span></div>
+<div class="section" id="lexers-for-esoteric-languages">
+<h2>Lexers for esoteric languages<a class="headerlink" href="#lexers-for-esoteric-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.esoteric.AheuiLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.esoteric.</code><code class="sig-name descname">AheuiLexer</code><a class="headerlink" href="#pygments.lexers.esoteric.AheuiLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>aheui</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.aheui</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p><a class="reference external" href="http://aheui.github.io/">Aheui</a> Lexer.</p>
+<p><a class="reference external" href="http://aheui.github.io/">Aheui</a> is esoteric language based on Korean alphabets.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.esoteric.BefungeLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.esoteric.</code><code class="sig-name descname">BefungeLexer</code><a class="headerlink" href="#pygments.lexers.esoteric.BefungeLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>befunge</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.befunge</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-befunge</p>
+</dd>
+</dl>
+<p>Lexer for the esoteric <a class="reference external" href="http://en.wikipedia.org/wiki/Befunge">Befunge</a>
+language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.esoteric.BrainfuckLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.esoteric.</code><code class="sig-name descname">BrainfuckLexer</code><a class="headerlink" href="#pygments.lexers.esoteric.BrainfuckLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>brainfuck, bf</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.bf, *.b</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-brainfuck</p>
+</dd>
+</dl>
+<p>Lexer for the esoteric <a class="reference external" href="http://www.muppetlabs.com/~breadbox/bf/">BrainFuck</a>
+language.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.esoteric.CAmkESLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.esoteric.</code><code class="sig-name descname">CAmkESLexer</code><a class="headerlink" href="#pygments.lexers.esoteric.CAmkESLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>camkes, idl4</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.camkes, *.idl4</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Basic lexer for the input language for the
+<a class="reference external" href="https://sel4.systems/CAmkES/">CAmkES</a> component platform.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.esoteric.CapDLLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.esoteric.</code><code class="sig-name descname">CapDLLexer</code><a class="headerlink" href="#pygments.lexers.esoteric.CapDLLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>capdl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cdl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Basic lexer for
+<a class="reference external" href="https://ssrg.nicta.com.au/publications/nictaabstracts/Kuz_KLW_10.abstract.pml">CapDL</a>.</p>
+<p>The source of the primary tool that reads such specifications is available
+at <a class="reference external" href="https://github.com/seL4/capdl/tree/master/capDL-tool">https://github.com/seL4/capdl/tree/master/capDL-tool</a>. Note that this
+lexer only supports a subset of the grammar. For example, identifiers can
+shadow type names, but such instances are currently highlighted
+incorrectly as types. Handling this would require a stateful lexer, which is
+considered unnecessarily complex for now.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.esoteric.RedcodeLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.esoteric.</code><code class="sig-name descname">RedcodeLexer</code><a class="headerlink" href="#pygments.lexers.esoteric.RedcodeLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>redcode</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cw</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A simple Redcode lexer based on ICWS’94.
+Contributed by Adam Blinkinsop &lt;<a class="reference external" href="mailto:blinks&#37;&#52;&#48;acm&#46;org">blinks<span>&#64;</span>acm<span>&#46;</span>org</a>&gt;.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.8.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.ezhil"></span></div>
+<div class="section" id="pygments-lexers-for-ezhil-language">
+<h2>Pygments lexers for the Ezhil language<a class="headerlink" href="#pygments-lexers-for-ezhil-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.ezhil.EzhilLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.ezhil.</code><code class="sig-name descname">EzhilLexer</code><a class="headerlink" href="#pygments.lexers.ezhil.EzhilLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ezhil</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.n</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-ezhil</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://ezhillang.org">Ezhil, a Tamil script-based programming language</a></p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.factor"></span></div>
+<div class="section" id="lexers-for-the-factor-language">
+<h2>Lexers for the Factor language<a class="headerlink" href="#lexers-for-the-factor-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.factor.FactorLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.factor.</code><code class="sig-name descname">FactorLexer</code><a class="headerlink" href="#pygments.lexers.factor.FactorLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>factor</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.factor</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-factor</p>
+</dd>
+</dl>
+<p>Lexer for the <a class="reference external" href="http://factorcode.org">Factor</a> language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.fantom"></span></div>
+<div class="section" id="lexer-for-the-fantom-language">
+<h2>Lexer for the Fantom language<a class="headerlink" href="#lexer-for-the-fantom-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.fantom.FantomLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.fantom.</code><code class="sig-name descname">FantomLexer</code><a class="headerlink" href="#pygments.lexers.fantom.FantomLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>fan</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.fan</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-fantom</p>
+</dd>
+</dl>
+<p>For Fantom source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.felix"></span></div>
+<div class="section" id="lexer-for-the-felix-language">
+<h2>Lexer for the Felix language<a class="headerlink" href="#lexer-for-the-felix-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.felix.FelixLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.felix.</code><code class="sig-name descname">FelixLexer</code><a class="headerlink" href="#pygments.lexers.felix.FelixLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>felix, flx</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.flx, *.flxh</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-felix</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.felix-lang.org">Felix</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.floscript"></span></div>
+<div class="section" id="lexer-for-floscript">
+<h2>Lexer for FloScript<a class="headerlink" href="#lexer-for-floscript" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.floscript.FloScriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.floscript.</code><code class="sig-name descname">FloScriptLexer</code><a class="headerlink" href="#pygments.lexers.floscript.FloScriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>floscript, flo</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.flo</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://github.com/ioflo/ioflo">FloScript</a> configuration language source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.forth"></span></div>
+<div class="section" id="lexer-for-the-forth-language">
+<h2>Lexer for the Forth language<a class="headerlink" href="#lexer-for-the-forth-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.forth.ForthLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.forth.</code><code class="sig-name descname">ForthLexer</code><a class="headerlink" href="#pygments.lexers.forth.ForthLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>forth</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.frt, *.fs</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-forth</p>
+</dd>
+</dl>
+<p>Lexer for Forth files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.fortran"></span></div>
+<div class="section" id="lexers-for-fortran-languages">
+<h2>Lexers for Fortran languages<a class="headerlink" href="#lexers-for-fortran-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.fortran.FortranFixedLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.fortran.</code><code class="sig-name descname">FortranFixedLexer</code><a class="headerlink" href="#pygments.lexers.fortran.FortranFixedLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>fortranfixed</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.f, *.F</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for fixed-format Fortran.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.fortran.FortranLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.fortran.</code><code class="sig-name descname">FortranLexer</code><a class="headerlink" href="#pygments.lexers.fortran.FortranLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>fortran</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.f03, *.f90, *.F03, *.F90</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-fortran</p>
+</dd>
+</dl>
+<p>Lexer for FORTRAN 90 code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.10.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.foxpro"></span></div>
+<div class="section" id="simple-lexer-for-microsoft-visual-foxpro-source-code">
+<h2>Simple lexer for Microsoft Visual FoxPro source code<a class="headerlink" href="#simple-lexer-for-microsoft-visual-foxpro-source-code" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.foxpro.FoxProLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.foxpro.</code><code class="sig-name descname">FoxProLexer</code><a class="headerlink" href="#pygments.lexers.foxpro.FoxProLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>foxpro, vfp, clipper, xbase</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.PRG, *.prg</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for the Microsoft Visual FoxPro language.</p>
+<p>FoxPro syntax allows all keywords and function names to be shortened
+to 4 characters.  Shortened forms are not recognized by this lexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.freefem"></span></div>
+<div class="section" id="lexer-for-freefem-language">
+<h2>Lexer for FreeFem++ language<a class="headerlink" href="#lexer-for-freefem-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.freefem.FreeFemLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.freefem.</code><code class="sig-name descname">FreeFemLexer</code><a class="headerlink" href="#pygments.lexers.freefem.FreeFemLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>freefem</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.edp</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-freefem</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://freefem.org/">FreeFem++</a> source.</p>
+<p>This is an extension of the CppLexer, as the FreeFem language is a superset
+of C++.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.go"></span></div>
+<div class="section" id="lexers-for-the-google-go-language">
+<h2>Lexers for the Google Go language<a class="headerlink" href="#lexers-for-the-google-go-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.go.GoLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.go.</code><code class="sig-name descname">GoLexer</code><a class="headerlink" href="#pygments.lexers.go.GoLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>go</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.go</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-gosrc</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://golang.org">Go</a> source.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.grammar_notation"></span></div>
+<div class="section" id="lexers-for-grammer-notations-like-bnf">
+<h2>Lexers for grammar notations like BNF<a class="headerlink" href="#lexers-for-grammer-notations-like-bnf" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.grammar_notation.AbnfLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.grammar_notation.</code><code class="sig-name descname">AbnfLexer</code><a class="headerlink" href="#pygments.lexers.grammar_notation.AbnfLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>abnf</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.abnf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-abnf</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://www.ietf.org/rfc/rfc7405.txt">IETF 7405 ABNF</a>
+(Updates <a class="reference external" href="http://www.ietf.org/rfc/rfc5234.txt">5234</a>)
+grammars.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.grammar_notation.BnfLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.grammar_notation.</code><code class="sig-name descname">BnfLexer</code><a class="headerlink" href="#pygments.lexers.grammar_notation.BnfLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>bnf</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.bnf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-bnf</p>
+</dd>
+</dl>
+<p>This lexer is for grammar notations which are similar to the
+original BNF.</p>
+<p>To cover as many BNF-like notations as possible, the lexer makes the
+following design decisions (a usage sketch follows below):</p>
+<ul class="simple">
+<li><p>We don’t give <cite>Terminal Symbols</cite> any special treatment.</p></li>
+<li><p>We do assume that <cite>NonTerminal Symbols</cite> are always enclosed
+in angle brackets.</p></li>
+<li><p>We do assume that a <cite>NonTerminal Symbol</cite> may include
+any printable character except angle brackets and ASCII 0x20.
+This assumption is made for <a class="reference external" href="http://www.rfc-base.org/txt/rfc-5511.txt">RBNF</a>.</p></li>
+<li><p>We do assume that the target notation doesn’t support comments.</p></li>
+<li><p>We don’t distinguish any operators or punctuation except
+<cite>::=</cite>.</p></li>
+</ul>
+<p>These decisions result in fairly minimal highlighting, but they keep
+the lexer applicable to a wide range of BNF-like notations.</p>
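+<p>The following sketch (the grammar rule is made up purely for illustration) prints the token stream the lexer produces, which makes the minimal-highlighting trade-off concrete:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.lexers.grammar_notation import BnfLexer
+
+# A made-up RBNF-style rule used only for this illustration.
+rule = "&lt;greeting&gt; ::= &lt;word&gt; | &lt;word&gt; &lt;greeting&gt;"
+
+# get_tokens() yields (token_type, value) pairs for the input text.
+for token_type, value in BnfLexer().get_tokens(rule):
+    print(token_type, repr(value))
+</pre></div></div>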
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.grammar_notation.JsgfLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.grammar_notation.</code><code class="sig-name descname">JsgfLexer</code><a class="headerlink" href="#pygments.lexers.grammar_notation.JsgfLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>jsgf</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.jsgf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/jsgf, application/x-jsgf, text/jsgf</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://www.w3.org/TR/jsgf/">JSpeech Grammar Format</a>
+grammars.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.graph"></span></div>
+<div class="section" id="lexers-for-graph-query-languages">
+<h2>Lexers for graph query languages<a class="headerlink" href="#lexers-for-graph-query-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.graph.CypherLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.graph.</code><code class="sig-name descname">CypherLexer</code><a class="headerlink" href="#pygments.lexers.graph.CypherLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cypher</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cyp, *.cypher</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://neo4j.com/docs/developer-manual/3.3/cypher/">Cypher Query Language</a></p>
+<p>For the Cypher version in Neo4j 3.3</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.graphics"></span></div>
+<div class="section" id="lexers-for-computer-graphics-and-plotting-related-languages">
+<h2>Lexers for computer graphics and plotting related languages<a class="headerlink" href="#lexers-for-computer-graphics-and-plotting-related-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.graphics.AsymptoteLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.graphics.</code><code class="sig-name descname">AsymptoteLexer</code><a class="headerlink" href="#pygments.lexers.graphics.AsymptoteLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>asy, asymptote</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.asy</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-asymptote</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://asymptote.sf.net/">Asymptote</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.graphics.GLShaderLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.graphics.</code><code class="sig-name descname">GLShaderLexer</code><a class="headerlink" href="#pygments.lexers.graphics.GLShaderLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>glsl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.vert, *.frag, *.geo</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-glslsrc</p>
+</dd>
+</dl>
+<p>GLSL (OpenGL Shader) lexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.graphics.GnuplotLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.graphics.</code><code class="sig-name descname">GnuplotLexer</code><a class="headerlink" href="#pygments.lexers.graphics.GnuplotLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>gnuplot</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.plot, *.plt</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-gnuplot</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://gnuplot.info/">Gnuplot</a> plotting scripts.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.11.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.graphics.HLSLShaderLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.graphics.</code><code class="sig-name descname">HLSLShaderLexer</code><a class="headerlink" href="#pygments.lexers.graphics.HLSLShaderLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>hlsl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.hlsl, *.hlsli</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-hlsl</p>
+</dd>
+</dl>
+<p>HLSL (Microsoft Direct3D Shader) lexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.3.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.graphics.PostScriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.graphics.</code><code class="sig-name descname">PostScriptLexer</code><a class="headerlink" href="#pygments.lexers.graphics.PostScriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>postscript, postscr</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ps, *.eps</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/postscript</p>
+</dd>
+</dl>
+<p>Lexer for PostScript files.</p>
+<p>The PostScript Language Reference published by Adobe at
+&lt;<a class="reference external" href="http://partners.adobe.com/public/developer/en/ps/PLRM.pdf">http://partners.adobe.com/public/developer/en/ps/PLRM.pdf</a>&gt;
+is the authority for this.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.graphics.PovrayLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.graphics.</code><code class="sig-name descname">PovrayLexer</code><a class="headerlink" href="#pygments.lexers.graphics.PovrayLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>pov</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.pov, *.inc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-povray</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.povray.org/">Persistence of Vision Raytracer</a> files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.11.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.haskell"></span></div>
+<div class="section" id="lexers-for-haskell-and-related-languages">
+<h2>Lexers for Haskell and related languages<a class="headerlink" href="#lexers-for-haskell-and-related-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.haskell.AgdaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.haskell.</code><code class="sig-name descname">AgdaLexer</code><a class="headerlink" href="#pygments.lexers.haskell.AgdaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>agda</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.agda</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-agda</p>
+</dd>
+</dl>
+<p>For the <a class="reference external" href="http://wiki.portal.chalmers.se/agda/pmwiki.php">Agda</a>
+dependently typed functional programming language and proof assistant.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.haskell.CryptolLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.haskell.</code><code class="sig-name descname">CryptolLexer</code><a class="headerlink" href="#pygments.lexers.haskell.CryptolLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cryptol, cry</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cry</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-cryptol</p>
+</dd>
+</dl>
+<p>FIXME: A Cryptol2 lexer based on the lexemes defined in the Haskell 98 Report.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.haskell.HaskellLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.haskell.</code><code class="sig-name descname">HaskellLexer</code><a class="headerlink" href="#pygments.lexers.haskell.HaskellLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>haskell, hs</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.hs</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-haskell</p>
+</dd>
+</dl>
+<p>A Haskell lexer based on the lexemes defined in the Haskell 98 Report.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.8.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.haskell.HspecLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.haskell.</code><code class="sig-name descname">HspecLexer</code><a class="headerlink" href="#pygments.lexers.haskell.HspecLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>hspec</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A Haskell lexer with support for Hspec constructs.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.haskell.IdrisLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.haskell.</code><code class="sig-name descname">IdrisLexer</code><a class="headerlink" href="#pygments.lexers.haskell.IdrisLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>idris, idr</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.idr</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-idris</p>
+</dd>
+</dl>
+<p>A lexer for the dependently typed programming language Idris.</p>
+<p>Based on the Haskell and Agda lexers.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.haskell.KokaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.haskell.</code><code class="sig-name descname">KokaLexer</code><a class="headerlink" href="#pygments.lexers.haskell.KokaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>koka</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.kk, *.kki</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-koka</p>
+</dd>
+</dl>
+<p>Lexer for the <a class="reference external" href="http://koka.codeplex.com">Koka</a>
+language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.haskell.LiterateAgdaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.haskell.</code><code class="sig-name descname">LiterateAgdaLexer</code><a class="headerlink" href="#pygments.lexers.haskell.LiterateAgdaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>lagda, literate-agda</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.lagda</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-literate-agda</p>
+</dd>
+</dl>
+<p>For Literate Agda source.</p>
+<p>Additional options accepted:</p>
+<dl class="simple">
+<dt><cite>litstyle</cite></dt><dd><p>If given, must be <code class="docutils literal notranslate"><span class="pre">&quot;bird&quot;</span></code> or <code class="docutils literal notranslate"><span class="pre">&quot;latex&quot;</span></code>.  If not given, the style
+is autodetected: if the first non-whitespace character in the source
+is a backslash or percent character, LaTeX is assumed, else Bird.</p>
+</dd>
+</dl>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.haskell.LiterateCryptolLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.haskell.</code><code class="sig-name descname">LiterateCryptolLexer</code><a class="headerlink" href="#pygments.lexers.haskell.LiterateCryptolLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>lcry, literate-cryptol, lcryptol</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.lcry</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-literate-cryptol</p>
+</dd>
+</dl>
+<p>For Literate Cryptol (Bird-style or LaTeX) source.</p>
+<p>Additional options accepted:</p>
+<dl class="simple">
+<dt><cite>litstyle</cite></dt><dd><p>If given, must be <code class="docutils literal notranslate"><span class="pre">&quot;bird&quot;</span></code> or <code class="docutils literal notranslate"><span class="pre">&quot;latex&quot;</span></code>.  If not given, the style
+is autodetected: if the first non-whitespace character in the source
+is a backslash or percent character, LaTeX is assumed, else Bird.</p>
+</dd>
+</dl>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.haskell.LiterateHaskellLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.haskell.</code><code class="sig-name descname">LiterateHaskellLexer</code><a class="headerlink" href="#pygments.lexers.haskell.LiterateHaskellLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>lhs, literate-haskell, lhaskell</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.lhs</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-literate-haskell</p>
+</dd>
+</dl>
+<p>For Literate Haskell (Bird-style or LaTeX) source.</p>
+<p>Additional options accepted:</p>
+<dl class="simple">
+<dt><cite>litstyle</cite></dt><dd><p>If given, must be <code class="docutils literal notranslate"><span class="pre">&quot;bird&quot;</span></code> or <code class="docutils literal notranslate"><span class="pre">&quot;latex&quot;</span></code>.  If not given, the style
+is autodetected: if the first non-whitespace character in the source
+is a backslash or percent character, LaTeX is assumed, else Bird.</p>
+</dd>
+</dl>
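+<p>As an illustrative sketch (assuming only that Pygments is installed), <cite>litstyle</cite> is passed as a regular keyword argument, here forcing Bird-style interpretation of a small snippet:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.formatters import TerminalFormatter
+from pygments.lexers import LiterateHaskellLexer
+
+# Bird-style literate source: code lines are prefixed with "&gt; ".
+src = "This line is prose.\n&gt; main = print 42\n"
+
+lexer = LiterateHaskellLexer(litstyle="bird")  # or litstyle="latex"
+print(highlight(src, lexer, TerminalFormatter()))
+</pre></div></div>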
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.9.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.haskell.LiterateIdrisLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.haskell.</code><code class="sig-name descname">LiterateIdrisLexer</code><a class="headerlink" href="#pygments.lexers.haskell.LiterateIdrisLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>lidr, literate-idris, lidris</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.lidr</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-literate-idris</p>
+</dd>
+</dl>
+<p>For Literate Idris (Bird-style or LaTeX) source.</p>
+<p>Additional options accepted:</p>
+<dl class="simple">
+<dt><cite>litstyle</cite></dt><dd><p>If given, must be <code class="docutils literal notranslate"><span class="pre">&quot;bird&quot;</span></code> or <code class="docutils literal notranslate"><span class="pre">&quot;latex&quot;</span></code>.  If not given, the style
+is autodetected: if the first non-whitespace character in the source
+is a backslash or percent character, LaTeX is assumed, else Bird.</p>
+</dd>
+</dl>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.haxe"></span></div>
+<div class="section" id="lexers-for-haxe-and-related-stuff">
+<h2>Lexers for Haxe and related file formats<a class="headerlink" href="#lexers-for-haxe-and-related-stuff" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.haxe.HaxeLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.haxe.</code><code class="sig-name descname">HaxeLexer</code><a class="headerlink" href="#pygments.lexers.haxe.HaxeLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>hx, haxe, hxsl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.hx, *.hxsl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/haxe, text/x-haxe, text/x-hx</p>
+</dd>
+</dl>
+<p>For Haxe source code (<a class="reference external" href="http://haxe.org/">http://haxe.org/</a>).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.3.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.haxe.HxmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.haxe.</code><code class="sig-name descname">HxmlLexer</code><a class="headerlink" href="#pygments.lexers.haxe.HxmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>haxeml, hxml</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.hxml</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://haxe.org/doc/compiler">haXe build</a> files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.hdl"></span></div>
+<div class="section" id="lexers-for-hardware-descriptor-languages">
+<h2>Lexers for hardware description languages<a class="headerlink" href="#lexers-for-hardware-descriptor-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.hdl.SystemVerilogLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.hdl.</code><code class="sig-name descname">SystemVerilogLexer</code><a class="headerlink" href="#pygments.lexers.hdl.SystemVerilogLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>systemverilog, sv</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sv, *.svh</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-systemverilog</p>
+</dd>
+</dl>
+<p>Extends the Verilog lexer to recognise all SystemVerilog keywords from the
+IEEE 1800-2009 standard.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.hdl.VerilogLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.hdl.</code><code class="sig-name descname">VerilogLexer</code><a class="headerlink" href="#pygments.lexers.hdl.VerilogLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>verilog, v</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.v</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-verilog</p>
+</dd>
+</dl>
+<p>For Verilog source code with preprocessor directives.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.hdl.VhdlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.hdl.</code><code class="sig-name descname">VhdlLexer</code><a class="headerlink" href="#pygments.lexers.hdl.VhdlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>vhdl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.vhdl, *.vhd</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-vhdl</p>
+</dd>
+</dl>
+<p>For VHDL source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.hexdump"></span></div>
+<div class="section" id="lexers-for-hexadecimal-dumps">
+<h2>Lexers for hexadecimal dumps<a class="headerlink" href="#lexers-for-hexadecimal-dumps" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.hexdump.HexdumpLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.hexdump.</code><code class="sig-name descname">HexdumpLexer</code><a class="headerlink" href="#pygments.lexers.hexdump.HexdumpLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>hexdump</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For typical hex dump output formats produced by the UNIX and GNU/Linux tools <code class="docutils literal notranslate"><span class="pre">hexdump</span></code>,
+<code class="docutils literal notranslate"><span class="pre">hd</span></code>, <code class="docutils literal notranslate"><span class="pre">hexcat</span></code>, <code class="docutils literal notranslate"><span class="pre">od</span></code> and <code class="docutils literal notranslate"><span class="pre">xxd</span></code>, and the DOS tool <code class="docutils literal notranslate"><span class="pre">DEBUG</span></code>. For example:</p>
+<div class="highlight-hexdump notranslate"><div class="highlight"><pre><span></span><span class="nl">00000000</span>  <span class="mh">7f</span> <span class="mh">45</span> <span class="mh">4c</span> <span class="mh">46</span> <span class="mh">02</span> <span class="mh">01</span> <span class="mh">01</span> <span class="mh">00</span>  <span class="mh">00</span> <span class="mh">00</span> <span class="mh">00</span> <span class="mh">00</span> <span class="mh">00</span> <span class="mh">00</span> <span class="mh">00</span> <span class="mh">00</span>  <span class="p">|</span><span class="s">.ELF............</span><span class="p">|</span>
+<span class="nl">00000010</span>  <span class="mh">02</span> <span class="mh">00</span> <span class="mh">3e</span> <span class="mh">00</span> <span class="mh">01</span> <span class="mh">00</span> <span class="mh">00</span> <span class="mh">00</span>  <span class="mh">c5</span> <span class="mh">48</span> <span class="mh">40</span> <span class="mh">00</span> <span class="mh">00</span> <span class="mh">00</span> <span class="mh">00</span> <span class="mh">00</span>  <span class="p">|</span><span class="s">..&gt;......H@.....</span><span class="p">|</span>
+</pre></div>
+</div>
+<p>The specific supported formats are the outputs of:</p>
+<ul class="simple">
+<li><p><code class="docutils literal notranslate"><span class="pre">hexdump</span> <span class="pre">FILE</span></code></p></li>
+<li><p><code class="docutils literal notranslate"><span class="pre">hexdump</span> <span class="pre">-C</span> <span class="pre">FILE</span></code> – the <cite>canonical</cite> format used in the example.</p></li>
+<li><p><code class="docutils literal notranslate"><span class="pre">hd</span> <span class="pre">FILE</span></code> – same as <code class="docutils literal notranslate"><span class="pre">hexdump</span> <span class="pre">-C</span> <span class="pre">FILE</span></code>.</p></li>
+<li><p><code class="docutils literal notranslate"><span class="pre">hexcat</span> <span class="pre">FILE</span></code></p></li>
+<li><p><code class="docutils literal notranslate"><span class="pre">od</span> <span class="pre">-t</span> <span class="pre">x1z</span> <span class="pre">FILE</span></code></p></li>
+<li><p><code class="docutils literal notranslate"><span class="pre">xxd</span> <span class="pre">FILE</span></code></p></li>
+<li><p><code class="docutils literal notranslate"><span class="pre">DEBUG.EXE</span> <span class="pre">FILE.COM</span></code> and entering <code class="docutils literal notranslate"><span class="pre">d</span></code> to the prompt.</p></li>
+</ul>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
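+
+<p>A minimal sketch of highlighting one of the formats listed above programmatically; the sample bytes simply repeat the canonical <code class="docutils literal notranslate"><span class="pre">hexdump</span> <span class="pre">-C</span></code> example shown earlier:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.formatters import TerminalFormatter
+from pygments.lexers.hexdump import HexdumpLexer
+
+# Two lines of "canonical" hexdump -C output, as in the example above.
+dump = (
+    '00000000  7f 45 4c 46 02 01 01 00  00 00 00 00 00 00 00 00  |.ELF............|\n'
+    '00000010  02 00 3e 00 01 00 00 00  c5 48 40 00 00 00 00 00  |..&gt;......H@.....|\n'
+)
+print(highlight(dump, HexdumpLexer(), TerminalFormatter()))
+</pre></div>
+</div>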
+
+<span class="target" id="module-pygments.lexers.html"></span></div>
+<div class="section" id="lexers-for-html-xml-and-related-markup">
+<h2>Lexers for HTML, XML and related markup<a class="headerlink" href="#lexers-for-html-xml-and-related-markup" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.html.DtdLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.html.</code><code class="sig-name descname">DtdLexer</code><a class="headerlink" href="#pygments.lexers.html.DtdLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>dtd</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.dtd</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/xml-dtd</p>
+</dd>
+</dl>
+<p>A lexer for DTDs (Document Type Definitions).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.html.HamlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.html.</code><code class="sig-name descname">HamlLexer</code><a class="headerlink" href="#pygments.lexers.html.HamlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>haml</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.haml</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-haml</p>
+</dd>
+</dl>
+<p>For Haml markup.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.3.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.html.HtmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.html.</code><code class="sig-name descname">HtmlLexer</code><a class="headerlink" href="#pygments.lexers.html.HtmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>html</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.html, *.htm, *.xhtml, *.xslt</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/html, application/xhtml+xml</p>
+</dd>
+</dl>
+<p>For HTML 4 and XHTML 1 markup. Nested JavaScript and CSS is highlighted
+by the appropriate lexer.</p>
+</dd></dl>
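+
+<p>A minimal sketch of the delegation described above: the body of the <code class="docutils literal notranslate"><span class="pre">&lt;script&gt;</span></code> element is handed to the JavaScript lexer, so it is tokenized as code rather than as plain markup. The snippet being highlighted is illustrative:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.formatters import HtmlFormatter
+from pygments.lexers.html import HtmlLexer
+
+doc = '&lt;p&gt;Hello&lt;/p&gt;\n&lt;script&gt;var answer = 42;&lt;/script&gt;\n'
+# "var" inside the &lt;script&gt; body is emitted as a JavaScript keyword token.
+print(highlight(doc, HtmlLexer(), HtmlFormatter()))
+</pre></div>
+</div>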
+
+<dl class="class">
+<dt id="pygments.lexers.html.PugLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.html.</code><code class="sig-name descname">PugLexer</code><a class="headerlink" href="#pygments.lexers.html.PugLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>pug, jade</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.pug, *.jade</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-pug, text/x-jade</p>
+</dd>
+</dl>
+<p>For Pug markup.
+Pug is a variant of Scaml, see:
+<a class="reference external" href="http://scalate.fusesource.org/documentation/scaml-reference.html">http://scalate.fusesource.org/documentation/scaml-reference.html</a></p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.html.ScamlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.html.</code><code class="sig-name descname">ScamlLexer</code><a class="headerlink" href="#pygments.lexers.html.ScamlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>scaml</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.scaml</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-scaml</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://scalate.fusesource.org/">Scaml markup</a>.  Scaml is Haml for Scala.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.html.XmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.html.</code><code class="sig-name descname">XmlLexer</code><a class="headerlink" href="#pygments.lexers.html.XmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>xml</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.xml, *.xsl, *.rss, *.xslt, *.xsd, *.wsdl, *.wsf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/xml, application/xml, image/svg+xml, application/rss+xml, application/atom+xml</p>
+</dd>
+</dl>
+<p>Generic lexer for XML (eXtensible Markup Language).</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.html.XsltLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.html.</code><code class="sig-name descname">XsltLexer</code><a class="headerlink" href="#pygments.lexers.html.XsltLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>xslt</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.xsl, *.xslt, *.xpl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/xsl+xml, application/xslt+xml</p>
+</dd>
+</dl>
+<p>A lexer for XSLT.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.10.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.idl"></span></div>
+<div class="section" id="lexers-for-idl">
+<h2>Lexers for IDL<a class="headerlink" href="#lexers-for-idl" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.idl.IDLLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.idl.</code><code class="sig-name descname">IDLLexer</code><a class="headerlink" href="#pygments.lexers.idl.IDLLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>idl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.pro</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/idl</p>
+</dd>
+</dl>
+<p>Pygments Lexer for IDL (Interactive Data Language).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.igor"></span></div>
+<div class="section" id="lexers-for-igor-pro">
+<h2>Lexers for Igor Pro<a class="headerlink" href="#lexers-for-igor-pro" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.igor.IgorLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.igor.</code><code class="sig-name descname">IgorLexer</code><a class="headerlink" href="#pygments.lexers.igor.IgorLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>igor, igorpro</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ipf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/ipf</p>
+</dd>
+</dl>
+<p>Pygments Lexer for Igor Pro procedure files (.ipf).
+See <a class="reference external" href="http://www.wavemetrics.com/">http://www.wavemetrics.com/</a> and <a class="reference external" href="http://www.igorexchange.com/">http://www.igorexchange.com/</a>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.inferno"></span></div>
+<div class="section" id="lexers-for-inferno-os-and-all-the-related-stuff">
+<h2>Lexers for the Inferno OS and related languages<a class="headerlink" href="#lexers-for-inferno-os-and-all-the-related-stuff" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.inferno.LimboLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.inferno.</code><code class="sig-name descname">LimboLexer</code><a class="headerlink" href="#pygments.lexers.inferno.LimboLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>limbo</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.b</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/limbo</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://www.vitanuova.com/inferno/limbo.html">Limbo programming language</a></p>
+<dl class="simple">
+<dt>TODO:</dt><dd><ul class="simple">
+<li><p>maybe implement better var declaration highlighting</p></li>
+<li><p>some simple syntax error highlighting</p></li>
+</ul>
+</dd>
+</dl>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.installers"></span></div>
+<div class="section" id="lexers-for-installer-packager-dsls-and-formats">
+<h2>Lexers for installer/packager DSLs and formats<a class="headerlink" href="#lexers-for-installer-packager-dsls-and-formats" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.installers.DebianControlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.installers.</code><code class="sig-name descname">DebianControlLexer</code><a class="headerlink" href="#pygments.lexers.installers.DebianControlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>control, debcontrol</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>control</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for Debian <code class="docutils literal notranslate"><span class="pre">control</span></code> files and <code class="docutils literal notranslate"><span class="pre">apt-cache</span> <span class="pre">show</span> <span class="pre">&lt;pkg&gt;</span></code> outputs.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.9.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.installers.NSISLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.installers.</code><code class="sig-name descname">NSISLexer</code><a class="headerlink" href="#pygments.lexers.installers.NSISLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>nsis, nsi, nsh</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.nsi, *.nsh</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-nsis</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://nsis.sourceforge.net/">NSIS</a> scripts.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.installers.RPMSpecLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.installers.</code><code class="sig-name descname">RPMSpecLexer</code><a class="headerlink" href="#pygments.lexers.installers.RPMSpecLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>spec</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.spec</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-rpm-spec</p>
+</dd>
+</dl>
+<p>For RPM <code class="docutils literal notranslate"><span class="pre">.spec</span></code> files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.installers.SourcesListLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.installers.</code><code class="sig-name descname">SourcesListLexer</code><a class="headerlink" href="#pygments.lexers.installers.SourcesListLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>sourceslist, sources.list, debsources</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>sources.list</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer that highlights Debian sources.list files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.int_fiction"></span></div>
+<div class="section" id="lexers-for-interactive-fiction-languages">
+<h2>Lexers for interactive fiction languages<a class="headerlink" href="#lexers-for-interactive-fiction-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.int_fiction.Inform6Lexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.int_fiction.</code><code class="sig-name descname">Inform6Lexer</code><a class="headerlink" href="#pygments.lexers.int_fiction.Inform6Lexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>inform6, i6</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.inf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://inform-fiction.org/">Inform 6</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.int_fiction.Inform6TemplateLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.int_fiction.</code><code class="sig-name descname">Inform6TemplateLexer</code><a class="headerlink" href="#pygments.lexers.int_fiction.Inform6TemplateLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>i6t</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.i6t</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://inform7.com/sources/src/i6template/Woven/index.html">Inform 6 template</a> code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.int_fiction.Inform7Lexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.int_fiction.</code><code class="sig-name descname">Inform7Lexer</code><a class="headerlink" href="#pygments.lexers.int_fiction.Inform7Lexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>inform7, i7</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ni, *.i7x</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://inform7.com/">Inform 7</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.int_fiction.Tads3Lexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.int_fiction.</code><code class="sig-name descname">Tads3Lexer</code><a class="headerlink" href="#pygments.lexers.int_fiction.Tads3Lexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>tads3</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.t</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.tads.org/">TADS 3</a> source code.</p>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.iolang"></span></div>
+<div class="section" id="lexers-for-the-io-language">
+<h2>Lexers for the Io language<a class="headerlink" href="#lexers-for-the-io-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.iolang.IoLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.iolang.</code><code class="sig-name descname">IoLexer</code><a class="headerlink" href="#pygments.lexers.iolang.IoLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>io</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.io</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-iosrc</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://iolanguage.com/">Io</a> (a small, prototype-based
+programming language) source.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.10.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.j"></span></div>
+<div class="section" id="lexer-for-the-j-programming-language">
+<h2>Lexer for the J programming language<a class="headerlink" href="#lexer-for-the-j-programming-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.j.JLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.j.</code><code class="sig-name descname">JLexer</code><a class="headerlink" href="#pygments.lexers.j.JLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>j</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ijs</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-j</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://jsoftware.com/">J</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.javascript"></span></div>
+<div class="section" id="lexers-for-javascript-and-related-languages">
+<h2>Lexers for JavaScript and related languages<a class="headerlink" href="#lexers-for-javascript-and-related-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.javascript.CoffeeScriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.javascript.</code><code class="sig-name descname">CoffeeScriptLexer</code><a class="headerlink" href="#pygments.lexers.javascript.CoffeeScriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>coffee-script, coffeescript, coffee</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.coffee</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/coffeescript</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://coffeescript.org">CoffeeScript</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.3.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.javascript.DartLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.javascript.</code><code class="sig-name descname">DartLexer</code><a class="headerlink" href="#pygments.lexers.javascript.DartLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>dart</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.dart</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-dart</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://dartlang.org/">Dart</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.javascript.EarlGreyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.javascript.</code><code class="sig-name descname">EarlGreyLexer</code><a class="headerlink" href="#pygments.lexers.javascript.EarlGreyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>earl-grey, earlgrey, eg</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.eg</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-earl-grey</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://breuleux.github.io/earl-grey/">Earl-Grey</a> source code.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.javascript.JavascriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.javascript.</code><code class="sig-name descname">JavascriptLexer</code><a class="headerlink" href="#pygments.lexers.javascript.JavascriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>js, javascript</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.js, *.jsm</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/javascript, application/x-javascript, text/x-javascript, text/javascript</p>
+</dd>
+</dl>
+<p>For JavaScript source code.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.javascript.JuttleLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.javascript.</code><code class="sig-name descname">JuttleLexer</code><a class="headerlink" href="#pygments.lexers.javascript.JuttleLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>juttle, juttle</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.juttle</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/juttle, application/x-juttle, text/x-juttle, text/juttle</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://github.com/juttle/juttle">Juttle</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.javascript.KalLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.javascript.</code><code class="sig-name descname">KalLexer</code><a class="headerlink" href="#pygments.lexers.javascript.KalLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>kal</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.kal</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/kal, application/kal</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://rzimmerman.github.io/kal">Kal</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.javascript.LassoLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.javascript.</code><code class="sig-name descname">LassoLexer</code><a class="headerlink" href="#pygments.lexers.javascript.LassoLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>lasso, lassoscript</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.lasso, *.lasso[89]</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-lasso</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.lassosoft.com/">Lasso</a> source code, covering both Lasso 9
+syntax and LassoScript for Lasso 8.6 and earlier. For Lasso embedded in
+HTML, use the <cite>LassoHtmlLexer</cite>.</p>
+<p>Additional options accepted:</p>
+<dl class="simple">
+<dt><cite>builtinshighlighting</cite></dt><dd><p>If given and <code class="docutils literal notranslate"><span class="pre">True</span></code>, highlight builtin types, traits, methods, and
+members (default: <code class="docutils literal notranslate"><span class="pre">True</span></code>).</p>
+</dd>
+<dt><cite>requiredelimiters</cite></dt><dd><p>If given and <code class="docutils literal notranslate"><span class="pre">True</span></code>, only highlight code between delimiters as Lasso
+(default: <code class="docutils literal notranslate"><span class="pre">False</span></code>).</p>
+</dd>
+</dl>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
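+
+<p>The options above are ordinary Pygments lexer options, passed as keyword arguments when the lexer is constructed. A minimal sketch:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.lexers.javascript import LassoLexer
+
+# Highlight only code between Lasso delimiters and skip builtin
+# highlighting; both options are documented above.
+lexer = LassoLexer(requiredelimiters=True, builtinshighlighting=False)
+</pre></div>
+</div>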
+
+<dl class="class">
+<dt id="pygments.lexers.javascript.LiveScriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.javascript.</code><code class="sig-name descname">LiveScriptLexer</code><a class="headerlink" href="#pygments.lexers.javascript.LiveScriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>live-script, livescript</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ls</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/livescript</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://gkz.github.com/LiveScript/">LiveScript</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.javascript.MaskLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.javascript.</code><code class="sig-name descname">MaskLexer</code><a class="headerlink" href="#pygments.lexers.javascript.MaskLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>mask</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.mask</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-mask</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://github.com/atmajs/MaskJS">Mask</a> markup.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.javascript.ObjectiveJLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.javascript.</code><code class="sig-name descname">ObjectiveJLexer</code><a class="headerlink" href="#pygments.lexers.javascript.ObjectiveJLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>objective-j, objectivej, obj-j, objj</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.j</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-objective-j</p>
+</dd>
+</dl>
+<p>For Objective-J source code with preprocessor directives.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.3.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.javascript.TypeScriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.javascript.</code><code class="sig-name descname">TypeScriptLexer</code><a class="headerlink" href="#pygments.lexers.javascript.TypeScriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ts, typescript</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ts, *.tsx</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-typescript</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://typescriptlang.org/">TypeScript</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.julia"></span></div>
+<div class="section" id="lexers-for-the-julia-language">
+<h2>Lexers for the Julia language<a class="headerlink" href="#lexers-for-the-julia-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.julia.JuliaConsoleLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.julia.</code><code class="sig-name descname">JuliaConsoleLexer</code><a class="headerlink" href="#pygments.lexers.julia.JuliaConsoleLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>jlcon</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For Julia console sessions. Modeled after MatlabSessionLexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.julia.JuliaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.julia.</code><code class="sig-name descname">JuliaLexer</code><a class="headerlink" href="#pygments.lexers.julia.JuliaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>julia, jl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.jl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-julia, application/x-julia</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://julialang.org/">Julia</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.jvm"></span></div>
+<div class="section" id="pygments-lexers-for-jvm-languages">
+<h2>Pygments lexers for JVM languages<a class="headerlink" href="#pygments-lexers-for-jvm-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.jvm.AspectJLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.jvm.</code><code class="sig-name descname">AspectJLexer</code><a class="headerlink" href="#pygments.lexers.jvm.AspectJLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>aspectj</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.aj</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-aspectj</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.eclipse.org/aspectj/">AspectJ</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.jvm.CeylonLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.jvm.</code><code class="sig-name descname">CeylonLexer</code><a class="headerlink" href="#pygments.lexers.jvm.CeylonLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ceylon</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ceylon</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-ceylon</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://ceylon-lang.org/">Ceylon</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.jvm.ClojureLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.jvm.</code><code class="sig-name descname">ClojureLexer</code><a class="headerlink" href="#pygments.lexers.jvm.ClojureLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>clojure, clj</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.clj</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-clojure, application/x-clojure</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://clojure.org/">Clojure</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.11.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.jvm.ClojureScriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.jvm.</code><code class="sig-name descname">ClojureScriptLexer</code><a class="headerlink" href="#pygments.lexers.jvm.ClojureScriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>clojurescript, cljs</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cljs</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-clojurescript, application/x-clojurescript</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://clojure.org/clojurescript">ClojureScript</a>
+source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.jvm.GoloLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.jvm.</code><code class="sig-name descname">GoloLexer</code><a class="headerlink" href="#pygments.lexers.jvm.GoloLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>golo</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.golo</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://golo-lang.org/">Golo</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.jvm.GosuLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.jvm.</code><code class="sig-name descname">GosuLexer</code><a class="headerlink" href="#pygments.lexers.jvm.GosuLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>gosu</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.gs, *.gsx, *.gsp, *.vark</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-gosu</p>
+</dd>
+</dl>
+<p>For Gosu source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.jvm.GosuTemplateLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.jvm.</code><code class="sig-name descname">GosuTemplateLexer</code><a class="headerlink" href="#pygments.lexers.jvm.GosuTemplateLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>gst</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.gst</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-gosu-template</p>
+</dd>
+</dl>
+<p>For Gosu templates.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.jvm.GroovyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.jvm.</code><code class="sig-name descname">GroovyLexer</code><a class="headerlink" href="#pygments.lexers.jvm.GroovyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>groovy</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.groovy, *.gradle</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-groovy</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://groovy.codehaus.org/">Groovy</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.jvm.IokeLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.jvm.</code><code class="sig-name descname">IokeLexer</code><a class="headerlink" href="#pygments.lexers.jvm.IokeLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ioke, ik</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ik</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-iokesrc</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://ioke.org/">Ioke</a> (a strongly typed, dynamic,
+prototype based programming language) source.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.jvm.JasminLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.jvm.</code><code class="sig-name descname">JasminLexer</code><a class="headerlink" href="#pygments.lexers.jvm.JasminLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>jasmin, jasminxt</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.j</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://jasmin.sourceforge.net/">Jasmin</a> assembly code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.jvm.JavaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.jvm.</code><code class="sig-name descname">JavaLexer</code><a class="headerlink" href="#pygments.lexers.jvm.JavaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>java</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.java</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-java</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://www.oracle.com/technetwork/java/">Java</a> source code.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.jvm.KotlinLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.jvm.</code><code class="sig-name descname">KotlinLexer</code><a class="headerlink" href="#pygments.lexers.jvm.KotlinLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>kotlin</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.kt</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-kotlin</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://kotlinlang.org/">Kotlin</a>
+source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.jvm.PigLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.jvm.</code><code class="sig-name descname">PigLexer</code><a class="headerlink" href="#pygments.lexers.jvm.PigLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>pig</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.pig</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-pig</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://pig.apache.org/">Pig Latin</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.jvm.SarlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.jvm.</code><code class="sig-name descname">SarlLexer</code><a class="headerlink" href="#pygments.lexers.jvm.SarlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>sarl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sarl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-sarl</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.sarl.io">SARL</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.jvm.ScalaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.jvm.</code><code class="sig-name descname">ScalaLexer</code><a class="headerlink" href="#pygments.lexers.jvm.ScalaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>scala</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.scala</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-scala</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.scala-lang.org">Scala</a> source code.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.jvm.XtendLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.jvm.</code><code class="sig-name descname">XtendLexer</code><a class="headerlink" href="#pygments.lexers.jvm.XtendLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>xtend</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.xtend</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-xtend</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://xtend-lang.org/">Xtend</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.lisp"></span></div>
+<div class="section" id="lexers-for-lispy-languages">
+<h2>Lexers for Lispy languages<a class="headerlink" href="#lexers-for-lispy-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.lisp.CPSALexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.lisp.</code><code class="sig-name descname">CPSALexer</code><a class="headerlink" href="#pygments.lexers.lisp.CPSALexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cpsa</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cpsa</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A CPSA lexer based on the CPSA language as of version 2.2.12.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.lisp.CommonLispLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.lisp.</code><code class="sig-name descname">CommonLispLexer</code><a class="headerlink" href="#pygments.lexers.lisp.CommonLispLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>common-lisp, cl, lisp</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cl, *.lisp</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-common-lisp</p>
+</dd>
+</dl>
+<p>A Common Lisp lexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.9.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.lisp.EmacsLispLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.lisp.</code><code class="sig-name descname">EmacsLispLexer</code><a class="headerlink" href="#pygments.lexers.lisp.EmacsLispLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>emacs, elisp, emacs-lisp</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.el</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-elisp, application/x-elisp</p>
+</dd>
+</dl>
+<p>An Emacs Lisp (ELisp) lexer, parsing a stream and outputting the tokens
+needed to highlight Emacs Lisp code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
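+
+<p>The token stream mentioned above can be inspected directly through the standard <code class="docutils literal notranslate"><span class="pre">get_tokens()</span></code> method that all Pygments lexers provide; the Emacs Lisp snippet is illustrative:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.lexers.lisp import EmacsLispLexer
+
+code = '(defun greet () (message "hi"))\n'
+# Each item is a (token type, matched text) pair.
+for token_type, value in EmacsLispLexer().get_tokens(code):
+    print(token_type, repr(value))
+</pre></div>
+</div>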
+
+<dl class="class">
+<dt id="pygments.lexers.lisp.FennelLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.lisp.</code><code class="sig-name descname">FennelLexer</code><a class="headerlink" href="#pygments.lexers.lisp.FennelLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>fennel, fnl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.fnl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A lexer for the <a class="reference external" href="https://fennel-lang.org">Fennel programming language</a>.</p>
+<p>Fennel compiles to Lua, so all the Lua builtins are recognized as well
+as the special forms that are particular to the Fennel compiler.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.3.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.lisp.HyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.lisp.</code><code class="sig-name descname">HyLexer</code><a class="headerlink" href="#pygments.lexers.lisp.HyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>hylang</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.hy</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-hy, application/x-hy</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://hylang.org/">Hy</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.lisp.NewLispLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.lisp.</code><code class="sig-name descname">NewLispLexer</code><a class="headerlink" href="#pygments.lexers.lisp.NewLispLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>newlisp</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.lsp, *.nl, *.kif</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-newlisp, application/x-newlisp</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.newlisp.org/">newLISP.</a> source code (version 10.3.0).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.lisp.RacketLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.lisp.</code><code class="sig-name descname">RacketLexer</code><a class="headerlink" href="#pygments.lexers.lisp.RacketLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>racket, rkt</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rkt, *.rktd, *.rktl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-racket, application/x-racket</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://racket-lang.org/">Racket</a> source code (formerly
+known as PLT Scheme).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.lisp.SchemeLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.lisp.</code><code class="sig-name descname">SchemeLexer</code><a class="headerlink" href="#pygments.lexers.lisp.SchemeLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>scheme, scm</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.scm, *.ss</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-scheme, application/x-scheme</p>
+</dd>
+</dl>
+<p>A Scheme lexer, parsing a stream and outputting the tokens
+needed to highlight Scheme code.
+This lexer can most likely be subclassed with little effort to parse
+other Lisp dialects such as Common Lisp, Emacs Lisp or AutoLisp.</p>
+<p>This parser is checked against pastes from the Lisp pastebin
+at <a class="reference external" href="http://paste.lisp.org/">http://paste.lisp.org/</a> to cover as much syntax as possible.</p>
+<p>It supports the full Scheme syntax as defined in R5RS.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+</dd></dl>
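+
+<p>A hypothetical sketch of the subclassing idea mentioned above; the dialect name, alias and filename pattern are illustrative and not part of Pygments. The subclass simply inherits the Scheme token rules under a new registration:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.lexers.lisp import SchemeLexer
+
+class MyDialectLexer(SchemeLexer):
+    """Hypothetical Lisp-dialect lexer reusing the Scheme token rules."""
+    name = 'MyDialect'
+    aliases = ['mydialect']
+    filenames = ['*.myd']
+</pre></div>
+</div>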
+
+<dl class="class">
+<dt id="pygments.lexers.lisp.ShenLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.lisp.</code><code class="sig-name descname">ShenLexer</code><a class="headerlink" href="#pygments.lexers.lisp.ShenLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>shen</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.shen</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-shen, application/x-shen</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://shenlanguage.org/">Shen</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.lisp.XtlangLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.lisp.</code><code class="sig-name descname">XtlangLexer</code><a class="headerlink" href="#pygments.lexers.lisp.XtlangLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>extempore</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.xtm</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>An xtlang lexer for the <a class="reference external" href="http://extempore.moso.com.au">Extempore programming environment</a>.</p>
+<p>This is a mixture of Scheme and xtlang. Keyword lists are
+taken from the Extempore Emacs mode
+(<a class="reference external" href="https://github.com/extemporelang/extempore-emacs-mode">https://github.com/extemporelang/extempore-emacs-mode</a>).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.make"></span></div>
+<div class="section" id="lexers-for-makefiles-and-similar">
+<h2>Lexers for Makefiles and similar<a class="headerlink" href="#lexers-for-makefiles-and-similar" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.make.BaseMakefileLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.make.</code><code class="sig-name descname">BaseMakefileLexer</code><a class="headerlink" href="#pygments.lexers.make.BaseMakefileLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>basemake</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for simple Makefiles (no preprocessing).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.10.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.make.CMakeLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.make.</code><code class="sig-name descname">CMakeLexer</code><a class="headerlink" href="#pygments.lexers.make.CMakeLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cmake</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cmake, CMakeLists.txt</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-cmake</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://cmake.org/Wiki/CMake">CMake</a> files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.make.MakefileLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.make.</code><code class="sig-name descname">MakefileLexer</code><a class="headerlink" href="#pygments.lexers.make.MakefileLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>make, makefile, mf, bsdmake</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.mak, *.mk, Makefile, makefile, Makefile.*, GNUmakefile</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-makefile</p>
+</dd>
+</dl>
+<p>Lexer for BSD and GNU make extensions (lenient enough to handle both,
+even in the same file).</p>
+<p><em>Rewritten in Pygments 0.10.</em></p>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.markup"></span></div>
+<div class="section" id="lexers-for-non-html-markup-languages">
+<h2>Lexers for non-HTML markup languages<a class="headerlink" href="#lexers-for-non-html-markup-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.markup.BBCodeLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.markup.</code><code class="sig-name descname">BBCodeLexer</code><a class="headerlink" href="#pygments.lexers.markup.BBCodeLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>bbcode</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-bbcode</p>
+</dd>
+</dl>
+<p>A lexer that highlights BBCode(-like) syntax.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.markup.GroffLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.markup.</code><code class="sig-name descname">GroffLexer</code><a class="headerlink" href="#pygments.lexers.markup.GroffLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>groff, nroff, man</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.[1234567], *.man</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-troff, text/troff</p>
+</dd>
+</dl>
+<p>Lexer for the (g)roff typesetting language, supporting groff
+extensions. Mainly useful for highlighting manpage sources.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.markup.MarkdownLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.markup.</code><code class="sig-name descname">MarkdownLexer</code><a class="headerlink" href="#pygments.lexers.markup.MarkdownLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>md</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.md</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-markdown</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://help.github.com/categories/writing-on-github/">Markdown</a> markup.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.markup.MoinWikiLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.markup.</code><code class="sig-name descname">MoinWikiLexer</code><a class="headerlink" href="#pygments.lexers.markup.MoinWikiLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>trac-wiki, moin</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-trac-wiki</p>
+</dd>
+</dl>
+<p>For MoinMoin (and Trac) Wiki markup.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.markup.MozPreprocCssLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.markup.</code><code class="sig-name descname">MozPreprocCssLexer</code><a class="headerlink" href="#pygments.lexers.markup.MozPreprocCssLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>css+mozpreproc</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.css.in</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>MozPreprocHashLexer</cite> that highlights unlexed data with the
+<cite>CssLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.markup.MozPreprocHashLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.markup.</code><code class="sig-name descname">MozPreprocHashLexer</code><a class="headerlink" href="#pygments.lexers.markup.MozPreprocHashLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>mozhashpreproc</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for Mozilla Preprocessor files (with ‘#’ as the marker).</p>
+<p>Other data is left untouched.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.markup.MozPreprocJavascriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.markup.</code><code class="sig-name descname">MozPreprocJavascriptLexer</code><a class="headerlink" href="#pygments.lexers.markup.MozPreprocJavascriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>javascript+mozpreproc</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.js.in</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>MozPreprocHashLexer</cite> that highlights unlexed data with the
+<cite>JavascriptLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.markup.MozPreprocPercentLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.markup.</code><code class="sig-name descname">MozPreprocPercentLexer</code><a class="headerlink" href="#pygments.lexers.markup.MozPreprocPercentLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>mozpercentpreproc</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for Mozilla Preprocessor files (with ‘%’ as the marker).</p>
+<p>Other data is left untouched.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.markup.MozPreprocXulLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.markup.</code><code class="sig-name descname">MozPreprocXulLexer</code><a class="headerlink" href="#pygments.lexers.markup.MozPreprocXulLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>xul+mozpreproc</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.xul.in</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>MozPreprocHashLexer</cite> that highlights unlexed data with the
+<cite>XmlLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.markup.RstLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.markup.</code><code class="sig-name descname">RstLexer</code><a class="headerlink" href="#pygments.lexers.markup.RstLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>rst, rest, restructuredtext</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rst, *.rest</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-rst, text/prs.fallenstein.rst</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://docutils.sf.net/rst.html">reStructuredText</a> markup.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+<p>Additional options accepted:</p>
+<dl>
+<dt><cite>handlecodeblocks</cite></dt><dd><p>Highlight the contents of <code class="docutils literal notranslate"><span class="pre">..</span> <span class="pre">sourcecode::</span> <span class="pre">language</span></code>,
+<code class="docutils literal notranslate"><span class="pre">..</span> <span class="pre">code::</span> <span class="pre">language</span></code> and <code class="docutils literal notranslate"><span class="pre">..</span> <span class="pre">code-block::</span> <span class="pre">language</span></code>
+directives with a lexer for the given language (default:
+<code class="docutils literal notranslate"><span class="pre">True</span></code>).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.8.</span></p>
+</div>
+</dd>
+</dl>
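+<p>For example, the option can be passed directly to the lexer constructor; a
+minimal sketch (the input file name is purely illustrative):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.formatters import HtmlFormatter
+from pygments.lexers import RstLexer
+
+# Do not highlight the contents of code-block directives.
+lexer = RstLexer(handlecodeblocks=False)
+
+with open('example.rst') as f:  # hypothetical input file
+    print(highlight(f.read(), lexer, HtmlFormatter()))
+</pre></div>
+</div>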
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.markup.TexLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.markup.</code><code class="sig-name descname">TexLexer</code><a class="headerlink" href="#pygments.lexers.markup.TexLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>tex, latex</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.tex, *.aux, *.toc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-tex, text/x-latex</p>
+</dd>
+</dl>
+<p>Lexer for the TeX and LaTeX typesetting languages.</p>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.matlab"></span></div>
+<div class="section" id="lexers-for-matlab-and-related-languages">
+<h2>Lexers for Matlab and related languages<a class="headerlink" href="#lexers-for-matlab-and-related-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.matlab.MatlabLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.matlab.</code><code class="sig-name descname">MatlabLexer</code><a class="headerlink" href="#pygments.lexers.matlab.MatlabLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>matlab</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.m</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/matlab</p>
+</dd>
+</dl>
+<p>For Matlab source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.10.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.matlab.MatlabSessionLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.matlab.</code><code class="sig-name descname">MatlabSessionLexer</code><a class="headerlink" href="#pygments.lexers.matlab.MatlabSessionLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>matlabsession</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For Matlab sessions.  Modeled after PythonConsoleLexer.
+Contributed by Ken Schutte &lt;<a class="reference external" href="mailto:kschutte&#37;&#52;&#48;csail&#46;mit&#46;edu">kschutte<span>&#64;</span>csail<span>&#46;</span>mit<span>&#46;</span>edu</a>&gt;.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.10.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.matlab.OctaveLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.matlab.</code><code class="sig-name descname">OctaveLexer</code><a class="headerlink" href="#pygments.lexers.matlab.OctaveLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>octave</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.m</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/octave</p>
+</dd>
+</dl>
+<p>For GNU Octave source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.matlab.ScilabLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.matlab.</code><code class="sig-name descname">ScilabLexer</code><a class="headerlink" href="#pygments.lexers.matlab.ScilabLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>scilab</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sci, *.sce, *.tst</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/scilab</p>
+</dd>
+</dl>
+<p>For Scilab source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.mime"></span></div>
+<div class="section" id="lexer-for-multipurpose-internet-mail-extensions-mime-data">
+<h2>Lexer for Multipurpose Internet Mail Extensions (MIME) data<a class="headerlink" href="#lexer-for-multipurpose-internet-mail-extensions-mime-data" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.mime.MIMELexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.mime.</code><code class="sig-name descname">MIMELexer</code><a class="headerlink" href="#pygments.lexers.mime.MIMELexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>mime</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>multipart/mixed, multipart/related, multipart/alternative</p>
+</dd>
+</dl>
+<p>Lexer for Multipurpose Internet Mail Extensions (MIME) data. This lexer is
+designed to process nested multipart data.</p>
+<p>It assumes that the given data contains both a header and a body (separated
+by an empty line). If no valid header is found, the entire data is treated
+as the body.</p>
+<p>Additional options accepted:</p>
+<dl class="simple">
+<dt><cite>MIME-max-level</cite></dt><dd><p>Maximum recursion level for nested MIME structures. Any negative
+number is treated as unlimited. (default: -1)</p>
+</dd>
+<dt><cite>Content-Type</cite></dt><dd><p>Treat the data as the given content type. Useful when the header is
+missing; otherwise the lexer tries to determine the content type from
+the header. (default: <cite>text/plain</cite>)</p>
+</dd>
+<dt><cite>Multipart-Boundary</cite></dt><dd><p>Set the default multipart boundary delimiter. This option is only used
+when <cite>Content-Type</cite> is <cite>multipart</cite> and the header is missing; by default
+the lexer tries to read the boundary from the header. (default: None)</p>
+</dd>
+<dt><cite>Content-Transfer-Encoding</cite></dt><dd><p>Treat the data as the given transfer encoding; by default the lexer
+tries to determine it from the header. (default: None)</p>
+</dd>
+</dl>
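+<p>A minimal usage sketch, assuming the hyphenated options are accepted under
+the names documented above (the input file name is purely illustrative):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.formatters import TerminalFormatter
+from pygments.lexers import get_lexer_by_name
+
+# Option names follow the list above; the values are only examples.
+opts = {'Content-Type': 'multipart/mixed', 'MIME-max-level': 2}
+lexer = get_lexer_by_name('mime', **opts)
+
+with open('message.eml') as f:  # hypothetical input file
+    print(highlight(f.read(), lexer, TerminalFormatter()))
+</pre></div>
+</div>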
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.5.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.ml"></span></div>
+<div class="section" id="lexers-for-ml-family-languages">
+<h2>Lexers for ML family languages<a class="headerlink" href="#lexers-for-ml-family-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.ml.OcamlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.ml.</code><code class="sig-name descname">OcamlLexer</code><a class="headerlink" href="#pygments.lexers.ml.OcamlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ocaml</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ml, *.mli, *.mll, *.mly</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-ocaml</p>
+</dd>
+</dl>
+<p>For the OCaml language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.ml.OpaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.ml.</code><code class="sig-name descname">OpaLexer</code><a class="headerlink" href="#pygments.lexers.ml.OpaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>opa</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.opa</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-opa</p>
+</dd>
+</dl>
+<p>Lexer for the Opa language (<a class="reference external" href="http://opalang.org">http://opalang.org</a>).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.ml.SMLLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.ml.</code><code class="sig-name descname">SMLLexer</code><a class="headerlink" href="#pygments.lexers.ml.SMLLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>sml</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sml, *.sig, *.fun</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-standardml, application/x-standardml</p>
+</dd>
+</dl>
+<p>For the Standard ML language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.modeling"></span></div>
+<div class="section" id="lexers-for-modeling-languages">
+<h2>Lexers for modeling languages<a class="headerlink" href="#lexers-for-modeling-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.modeling.BugsLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.modeling.</code><code class="sig-name descname">BugsLexer</code><a class="headerlink" href="#pygments.lexers.modeling.BugsLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>bugs, winbugs, openbugs</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.bug</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Pygments Lexer for <a class="reference external" href="http://www.openbugs.net/">OpenBugs</a> and WinBugs
+models.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.modeling.JagsLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.modeling.</code><code class="sig-name descname">JagsLexer</code><a class="headerlink" href="#pygments.lexers.modeling.JagsLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>jags</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.jag, *.bug</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Pygments Lexer for JAGS.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.modeling.ModelicaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.modeling.</code><code class="sig-name descname">ModelicaLexer</code><a class="headerlink" href="#pygments.lexers.modeling.ModelicaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>modelica</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.mo</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-modelica</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.modelica.org/">Modelica</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.modeling.StanLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.modeling.</code><code class="sig-name descname">StanLexer</code><a class="headerlink" href="#pygments.lexers.modeling.StanLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>stan</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.stan</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Pygments Lexer for Stan models.</p>
+<p>The Stan modeling language is specified in the <em>Stan Modeling Language
+User’s Guide and Reference Manual, v2.17.0</em>,
+<a class="reference external" href="https://github.com/stan-dev/stan/releases/download/v2.17.0/stan-reference-2.17.0.pdf">pdf</a>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.modula2"></span></div>
+<div class="section" id="multi-dialect-lexer-for-modula-2">
+<h2>Multi-Dialect Lexer for Modula-2<a class="headerlink" href="#multi-dialect-lexer-for-modula-2" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.modula2.Modula2Lexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.modula2.</code><code class="sig-name descname">Modula2Lexer</code><a class="headerlink" href="#pygments.lexers.modula2.Modula2Lexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>modula2, m2</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.def, *.mod</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-modula2</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.modula2.org/">Modula-2</a> source code.</p>
+<p>The Modula-2 lexer supports several dialects.  By default, it operates in
+fallback mode, recognising the <em>combined</em> literals, punctuation symbols
+and operators of all supported dialects, and the <em>combined</em> reserved words
+and builtins of PIM Modula-2, ISO Modula-2 and Modula-2 R10, while not
+differentiating between library defined identifiers.</p>
+<p>To select a specific dialect, a dialect option may be passed
+or a dialect tag may be embedded into a source file.</p>
+<p>Dialect Options:</p>
+<dl class="simple">
+<dt><cite>m2pim</cite></dt><dd><p>Select PIM Modula-2 dialect.</p>
+</dd>
+<dt><cite>m2iso</cite></dt><dd><p>Select ISO Modula-2 dialect.</p>
+</dd>
+<dt><cite>m2r10</cite></dt><dd><p>Select Modula-2 R10 dialect.</p>
+</dd>
+<dt><cite>objm2</cite></dt><dd><p>Select Objective Modula-2 dialect.</p>
+</dd>
+</dl>
+<p>The PIM and ISO dialect options may be qualified with a language extension.</p>
+<p>Language Extensions:</p>
+<dl class="simple">
+<dt><cite>+aglet</cite></dt><dd><p>Select Aglet Modula-2 extensions, available with m2iso.</p>
+</dd>
+<dt><cite>+gm2</cite></dt><dd><p>Select GNU Modula-2 extensions, available with m2pim.</p>
+</dd>
+<dt><cite>+p1</cite></dt><dd><p>Select p1 Modula-2 extensions, available with m2iso.</p>
+</dd>
+<dt><cite>+xds</cite></dt><dd><p>Select XDS Modula-2 extensions, available with m2iso.</p>
+</dd>
+</dl>
+<p>Passing a Dialect Option via the Unix Command-Line Interface</p>
+<p>Dialect options may be passed to the lexer using the <cite>dialect</cite> key.
+Only one such option should be passed. If multiple dialect options are
+passed, the first valid option is used and any subsequent options are ignored.</p>
+<p>Examples:</p>
+<dl class="simple">
+<dt><cite>$ pygmentize -O full,dialect=m2iso -f html -o /path/to/output /path/to/input</cite></dt><dd><p>Use ISO dialect to render input to HTML output</p>
+</dd>
+<dt><cite>$ pygmentize -O full,dialect=m2iso+p1 -f rtf -o /path/to/output /path/to/input</cite></dt><dd><p>Use ISO dialect with p1 extensions to render input to RTF output</p>
+</dd>
+</dl>
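+<p>The same <cite>dialect</cite> key can be passed from the Python API; a minimal
+sketch (the input file name is purely illustrative):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.formatters import HtmlFormatter
+from pygments.lexers import get_lexer_by_name
+
+# Same dialect option as in the command-line examples above.
+lexer = get_lexer_by_name('modula2', dialect='m2iso+p1')
+
+with open('Foobar.def') as f:  # hypothetical input file
+    print(highlight(f.read(), lexer, HtmlFormatter(full=True)))
+</pre></div>
+</div>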
+<p>Embedding a Dialect Option within a source file</p>
+<p>A dialect option may be embedded in a source file in form of a dialect
+tag, a specially formatted comment that specifies a dialect option.</p>
+<p>Dialect Tag EBNF:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="n">dialectTag</span> <span class="p">:</span>
+    <span class="n">OpeningCommentDelim</span> <span class="n">Prefix</span> <span class="n">dialectOption</span> <span class="n">ClosingCommentDelim</span> <span class="p">;</span>
+
+<span class="n">dialectOption</span> <span class="p">:</span>
+    <span class="s1">&#39;m2pim&#39;</span> <span class="o">|</span> <span class="s1">&#39;m2iso&#39;</span> <span class="o">|</span> <span class="s1">&#39;m2r10&#39;</span> <span class="o">|</span> <span class="s1">&#39;objm2&#39;</span> <span class="o">|</span>
+    <span class="s1">&#39;m2iso+aglet&#39;</span> <span class="o">|</span> <span class="s1">&#39;m2pim+gm2&#39;</span> <span class="o">|</span> <span class="s1">&#39;m2iso+p1&#39;</span> <span class="o">|</span> <span class="s1">&#39;m2iso+xds&#39;</span> <span class="p">;</span>
+
+<span class="n">Prefix</span> <span class="p">:</span> <span class="s1">&#39;!&#39;</span> <span class="p">;</span>
+
+<span class="n">OpeningCommentDelim</span> <span class="p">:</span> <span class="s1">&#39;(*&#39;</span> <span class="p">;</span>
+
+<span class="n">ClosingCommentDelim</span> <span class="p">:</span> <span class="s1">&#39;*)&#39;</span> <span class="p">;</span>
+</pre></div>
+</div>
+<p>No whitespace is permitted between the tokens of a dialect tag.</p>
+<p>In the event that a source file contains multiple dialect tags, the first
+tag that contains a valid dialect option will be used and any subsequent
+dialect tags will be ignored.  Ideally, a dialect tag should be placed
+at the beginning of a source file.</p>
+<p>An embedded dialect tag overrides a dialect option set via command line.</p>
+<p>Examples:</p>
+<dl class="simple">
+<dt><code class="docutils literal notranslate"><span class="pre">(*!m2r10*)</span> <span class="pre">DEFINITION</span> <span class="pre">MODULE</span> <span class="pre">Foobar;</span> <span class="pre">...</span></code></dt><dd><p>Use Modula2 R10 dialect to render this source file.</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">(*!m2pim+gm2*)</span> <span class="pre">DEFINITION</span> <span class="pre">MODULE</span> <span class="pre">Bazbam;</span> <span class="pre">...</span></code></dt><dd><p>Use PIM dialect with GNU extensions to render this source file.</p>
+</dd>
+</dl>
+<p>Algol Publication Mode:</p>
+<p>In Algol publication mode, source text is rendered for publication of
+algorithms in scientific papers and academic texts, following the format
+of the Revised Algol-60 Language Report.  It is activated by passing
+one of two corresponding styles as an option:</p>
+<dl class="simple">
+<dt><cite>algol</cite></dt><dd><p>render reserved words in lowercase underlined boldface
+and builtins in lowercase boldface italics</p>
+</dd>
+<dt><cite>algol_nu</cite></dt><dd><p>render reserved words in lowercase boldface (no underlining)
+and builtins in lowercase boldface italics</p>
+</dd>
+</dl>
+<p>The lexer automatically performs the required lowercase conversion when
+this mode is activated.</p>
+<p>Example:</p>
+<dl class="simple">
+<dt><code class="docutils literal notranslate"><span class="pre">$</span> <span class="pre">pygmentize</span> <span class="pre">-O</span> <span class="pre">full,style=algol</span> <span class="pre">-f</span> <span class="pre">latex</span> <span class="pre">-o</span> <span class="pre">/path/to/output</span> <span class="pre">/path/to/input</span></code></dt><dd><p>Render input file in Algol publication mode to LaTeX output.</p>
+</dd>
+</dl>
+<p>Rendering Mode of First Class ADT Identifiers:</p>
+<p>The rendering of standard library first-class ADT identifiers is controlled
+by the option flag “treat_stdlib_adts_as_builtins”.</p>
+<p>When this option is turned on, standard library ADT identifiers are rendered
+as builtins.  When it is turned off, they are rendered as ordinary library
+identifiers.</p>
+<p><cite>treat_stdlib_adts_as_builtins</cite> (default: On)</p>
+<p>The option is useful for dialects that support ADTs as first class objects
+and provide ADTs in the standard library that would otherwise be built-in.</p>
+<p>At present, only Modula-2 R10 supports library ADTs as first class objects
+and therefore, no ADT identifiers are defined for any other dialects.</p>
+<p>Example:</p>
+<dl class="simple">
+<dt><code class="docutils literal notranslate"><span class="pre">$</span> <span class="pre">pygmentize</span> <span class="pre">-O</span> <span class="pre">full,dialect=m2r10,treat_stdlib_adts_as_builtins=Off</span> <span class="pre">...</span></code></dt><dd><p>Render standard library ADTs as ordinary library types.</p>
+</dd>
+</dl>
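+<p>The same flag can be set from the Python API; a minimal sketch, assuming the
+boolean option is accepted as a Python bool (as with other Pygments boolean
+options):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from pygments.lexers.modula2 import Modula2Lexer
+
+# Python-API equivalent of the pygmentize invocation above:
+# render standard library ADTs as ordinary library types.
+lexer = Modula2Lexer(dialect='m2r10', treat_stdlib_adts_as_builtins=False)
+</pre></div>
+</div>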
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.3.</span></p>
+</div>
+<div class="versionchanged">
+<p><span class="versionmodified changed">Changed in version 2.1: </span>Added multi-dialect support.</p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.monte"></span></div>
+<div class="section" id="lexer-for-the-monte-programming-language">
+<h2>Lexer for the Monte programming language<a class="headerlink" href="#lexer-for-the-monte-programming-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.monte.MonteLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.monte.</code><code class="sig-name descname">MonteLexer</code><a class="headerlink" href="#pygments.lexers.monte.MonteLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>monte</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.mt</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for the <a class="reference external" href="https://monte.readthedocs.io/">Monte</a> programming language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.ncl"></span></div>
+<div class="section" id="lexers-for-ncar-command-language">
+<h2>Lexers for NCAR Command Language<a class="headerlink" href="#lexers-for-ncar-command-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.ncl.NCLLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.ncl.</code><code class="sig-name descname">NCLLexer</code><a class="headerlink" href="#pygments.lexers.ncl.NCLLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ncl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ncl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/ncl</p>
+</dd>
+</dl>
+<p>Lexer for NCL code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.nimrod"></span></div>
+<div class="section" id="lexer-for-the-nim-language-formerly-known-as-nimrod">
+<h2>Lexer for the Nim language (formerly known as Nimrod)<a class="headerlink" href="#lexer-for-the-nim-language-formerly-known-as-nimrod" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.nimrod.NimrodLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.nimrod.</code><code class="sig-name descname">NimrodLexer</code><a class="headerlink" href="#pygments.lexers.nimrod.NimrodLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>nim, nimrod</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.nim, *.nimrod</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-nim</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://nim-lang.org/">Nim</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.nit"></span></div>
+<div class="section" id="lexer-for-the-nit-language">
+<h2>Lexer for the Nit language<a class="headerlink" href="#lexer-for-the-nit-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.nit.NitLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.nit.</code><code class="sig-name descname">NitLexer</code><a class="headerlink" href="#pygments.lexers.nit.NitLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>nit</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.nit</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://nitlanguage.org">nit</a> source.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.nix"></span></div>
+<div class="section" id="lexers-for-the-nixos-nix-language">
+<h2>Lexers for the NixOS Nix language<a class="headerlink" href="#lexers-for-the-nixos-nix-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.nix.NixLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.nix.</code><code class="sig-name descname">NixLexer</code><a class="headerlink" href="#pygments.lexers.nix.NixLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>nixos, nix</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.nix</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-nix</p>
+</dd>
+</dl>
+<p>For the <a class="reference external" href="http://nixos.org/nix/">Nix language</a>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.oberon"></span></div>
+<div class="section" id="lexers-for-oberon-family-languages">
+<h2>Lexers for Oberon family languages<a class="headerlink" href="#lexers-for-oberon-family-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.oberon.ComponentPascalLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.oberon.</code><code class="sig-name descname">ComponentPascalLexer</code><a class="headerlink" href="#pygments.lexers.oberon.ComponentPascalLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>componentpascal, cp</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cp, *.cps</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-component-pascal</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.oberon.ch/pdf/CP-Lang.pdf">Component Pascal</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.objective"></span></div>
+<div class="section" id="lexers-for-objective-c-family-languages">
+<h2>Lexers for Objective-C family languages<a class="headerlink" href="#lexers-for-objective-c-family-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.objective.LogosLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.objective.</code><code class="sig-name descname">LogosLexer</code><a class="headerlink" href="#pygments.lexers.objective.LogosLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>logos</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.x, *.xi, *.xm, *.xmi</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-logos</p>
+</dd>
+</dl>
+<p>For Logos + Objective-C source code with preprocessor directives.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.objective.ObjectiveCLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.objective.</code><code class="sig-name descname">ObjectiveCLexer</code><a class="headerlink" href="#pygments.lexers.objective.ObjectiveCLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>objective-c, objectivec, obj-c, objc</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.m, *.h</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-objective-c</p>
+</dd>
+</dl>
+<p>For Objective-C source code with preprocessor directives.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.objective.ObjectiveCppLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.objective.</code><code class="sig-name descname">ObjectiveCppLexer</code><a class="headerlink" href="#pygments.lexers.objective.ObjectiveCppLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>objective-c++, objectivec++, obj-c++, objc++</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.mm, *.hh</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-objective-c++</p>
+</dd>
+</dl>
+<p>For Objective-C++ source code with preprocessor directives.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.objective.SwiftLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.objective.</code><code class="sig-name descname">SwiftLexer</code><a class="headerlink" href="#pygments.lexers.objective.SwiftLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>swift</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.swift</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-swift</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://developer.apple.com/swift/">Swift</a> source.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.ooc"></span></div>
+<div class="section" id="lexers-for-the-ooc-language">
+<h2>Lexers for the Ooc language<a class="headerlink" href="#lexers-for-the-ooc-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.ooc.OocLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.ooc.</code><code class="sig-name descname">OocLexer</code><a class="headerlink" href="#pygments.lexers.ooc.OocLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ooc</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ooc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-ooc</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://ooc-lang.org/">Ooc</a> source code</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.parasail"></span></div>
+<div class="section" id="lexer-for-parasail">
+<h2>Lexer for ParaSail<a class="headerlink" href="#lexer-for-parasail" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.parasail.ParaSailLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parasail.</code><code class="sig-name descname">ParaSailLexer</code><a class="headerlink" href="#pygments.lexers.parasail.ParaSailLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>parasail</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.psi, *.psl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-parasail</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.parasail-lang.org">ParaSail</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.parsers"></span></div>
+<div class="section" id="lexers-for-parser-generators">
+<h2>Lexers for parser generators<a class="headerlink" href="#lexers-for-parser-generators" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.parsers.AntlrActionScriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">AntlrActionScriptLexer</code><a class="headerlink" href="#pygments.lexers.parsers.AntlrActionScriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>antlr-as, antlr-actionscript</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.G, *.g</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p><a class="reference external" href="http://www.antlr.org/">ANTLR</a> with ActionScript Target</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.AntlrCSharpLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">AntlrCSharpLexer</code><a class="headerlink" href="#pygments.lexers.parsers.AntlrCSharpLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>antlr-csharp, antlr-c#</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.G, *.g</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p><a class="reference external" href="http://www.antlr.org/">ANTLR</a> with C# Target</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.AntlrCppLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">AntlrCppLexer</code><a class="headerlink" href="#pygments.lexers.parsers.AntlrCppLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>antlr-cpp</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.G, *.g</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p><a class="reference external" href="http://www.antlr.org/">ANTLR</a> with CPP Target</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.AntlrJavaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">AntlrJavaLexer</code><a class="headerlink" href="#pygments.lexers.parsers.AntlrJavaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>antlr-java</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.G, *.g</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p><a class="reference external" href="http://www.antlr.org/">ANTLR</a> with Java Target</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1..</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.AntlrLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">AntlrLexer</code><a class="headerlink" href="#pygments.lexers.parsers.AntlrLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>antlr</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Generic <a class="reference external" href="http://www.antlr.org/">ANTLR</a> Lexer.
+Should not be called directly, instead
+use DelegatingLexer for your target language.</p>
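+<p>From the user's side this usually means selecting one of the target-specific
+ANTLR lexers listed in this section; a minimal sketch (the grammar file name
+is purely illustrative):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.formatters import TerminalFormatter
+from pygments.lexers import get_lexer_by_name
+
+# Use a target-specific lexer (here: ANTLR with Python target),
+# not the generic AntlrLexer.
+lexer = get_lexer_by_name('antlr-python')
+
+with open('Expr.g') as f:  # hypothetical grammar file
+    print(highlight(f.read(), lexer, TerminalFormatter()))
+</pre></div>
+</div>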
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.AntlrObjectiveCLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">AntlrObjectiveCLexer</code><a class="headerlink" href="#pygments.lexers.parsers.AntlrObjectiveCLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>antlr-objc</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.G, *.g</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p><a class="reference external" href="http://www.antlr.org/">ANTLR</a> with Objective-C Target</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.AntlrPerlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">AntlrPerlLexer</code><a class="headerlink" href="#pygments.lexers.parsers.AntlrPerlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>antlr-perl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.G, *.g</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p><a class="reference external" href="http://www.antlr.org/">ANTLR</a> with Perl Target</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.AntlrPythonLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">AntlrPythonLexer</code><a class="headerlink" href="#pygments.lexers.parsers.AntlrPythonLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>antlr-python</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.G, *.g</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p><a class="reference external" href="http://www.antlr.org/">ANTLR</a> with Python Target</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.AntlrRubyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">AntlrRubyLexer</code><a class="headerlink" href="#pygments.lexers.parsers.AntlrRubyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>antlr-ruby, antlr-rb</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.G, *.g</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p><a class="reference external" href="http://www.antlr.org/">ANTLR</a> with Ruby Target</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.EbnfLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">EbnfLexer</code><a class="headerlink" href="#pygments.lexers.parsers.EbnfLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ebnf</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ebnf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-ebnf</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form">ISO/IEC 14977 EBNF</a>
+grammars.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.RagelCLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">RagelCLexer</code><a class="headerlink" href="#pygments.lexers.parsers.RagelCLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ragel-c</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A lexer for <a class="reference external" href="http://www.complang.org/ragel/">Ragel</a> in a C host file.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.RagelCppLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">RagelCppLexer</code><a class="headerlink" href="#pygments.lexers.parsers.RagelCppLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ragel-cpp</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A lexer for <a class="reference external" href="http://www.complang.org/ragel/">Ragel</a> in a C++ host file.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.RagelDLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">RagelDLexer</code><a class="headerlink" href="#pygments.lexers.parsers.RagelDLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ragel-d</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A lexer for <a class="reference external" href="http://www.complang.org/ragel/">Ragel</a> in a D host file.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.RagelEmbeddedLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">RagelEmbeddedLexer</code><a class="headerlink" href="#pygments.lexers.parsers.RagelEmbeddedLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ragel-em</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A lexer for <a class="reference external" href="http://www.complang.org/ragel/">Ragel</a> embedded in a host language file.</p>
+<p>This will only highlight Ragel statements. If you want host language
+highlighting then call the language-specific Ragel lexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.RagelJavaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">RagelJavaLexer</code><a class="headerlink" href="#pygments.lexers.parsers.RagelJavaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ragel-java</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A lexer for <a class="reference external" href="http://www.complang.org/ragel/">Ragel</a> in a Java host file.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.RagelLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">RagelLexer</code><a class="headerlink" href="#pygments.lexers.parsers.RagelLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ragel</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A pure <a class="reference external" href="http://www.complang.org/ragel/">Ragel</a> lexer.  Use this for
+fragments of Ragel.  For <code class="docutils literal notranslate"><span class="pre">.rl</span></code> files, use RagelEmbeddedLexer instead
+(or one of the language-specific subclasses).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
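+<p>For example, a minimal sketch of picking a Ragel lexer by alias with
+<code class="docutils literal notranslate"><span class="pre">get_lexer_by_name()</span></code> (the <code class="docutils literal notranslate"><span class="pre">example.rl</span></code> path and the fragment text are hypothetical):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.formatters import TerminalFormatter
+from pygments.lexers import get_lexer_by_name
+
+# A bare Ragel fragment: the pure 'ragel' lexer is enough.
+fragment = 'action dgt { printf("DGT: %c\\n", fc); }'
+print(highlight(fragment, get_lexer_by_name('ragel'), TerminalFormatter()))
+
+# A complete .rl file: prefer 'ragel-em', or a host-specific alias such
+# as 'ragel-c', so the host-language parts are lexed as well.
+with open('example.rl') as f:
+    print(highlight(f.read(), get_lexer_by_name('ragel-c'), TerminalFormatter()))
+</pre></div>
+</div>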
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.RagelObjectiveCLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">RagelObjectiveCLexer</code><a class="headerlink" href="#pygments.lexers.parsers.RagelObjectiveCLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ragel-objc</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A lexer for <a class="reference external" href="http://www.complang.org/ragel/">Ragel</a> in an Objective-C host file.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.RagelRubyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">RagelRubyLexer</code><a class="headerlink" href="#pygments.lexers.parsers.RagelRubyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ragel-ruby, ragel-rb</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A lexer for <a class="reference external" href="http://www.complang.org/ragel/">Ragel</a> in a Ruby host file.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.parsers.TreetopLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.parsers.</code><code class="sig-name descname">TreetopLexer</code><a class="headerlink" href="#pygments.lexers.parsers.TreetopLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>treetop</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.treetop, *.tt</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A lexer for <a class="reference external" href="http://treetop.rubyforge.org/">Treetop</a> grammars.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.pascal"></span></div>
+<div class="section" id="lexers-for-pascal-family-languages">
+<h2>Lexers for Pascal family languages<a class="headerlink" href="#lexers-for-pascal-family-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.pascal.AdaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.pascal.</code><code class="sig-name descname">AdaLexer</code><a class="headerlink" href="#pygments.lexers.pascal.AdaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ada, ada95, ada2005</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.adb, *.ads, *.ada</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-ada</p>
+</dd>
+</dl>
+<p>For Ada source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.3.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.pascal.DelphiLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.pascal.</code><code class="sig-name descname">DelphiLexer</code><a class="headerlink" href="#pygments.lexers.pascal.DelphiLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>delphi, pas, pascal, objectpascal</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.pas, *.dpr</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-pascal</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.borland.com/delphi/">Delphi</a> (Borland Object Pascal),
+Turbo Pascal and Free Pascal source code.</p>
+<p>Additional options accepted:</p>
+<dl class="simple">
+<dt><cite>turbopascal</cite></dt><dd><p>Highlight Turbo Pascal specific keywords (default: <code class="docutils literal notranslate"><span class="pre">True</span></code>).</p>
+</dd>
+<dt><cite>delphi</cite></dt><dd><p>Highlight Borland Delphi specific keywords (default: <code class="docutils literal notranslate"><span class="pre">True</span></code>).</p>
+</dd>
+<dt><cite>freepascal</cite></dt><dd><p>Highlight Free Pascal specific keywords (default: <code class="docutils literal notranslate"><span class="pre">True</span></code>).</p>
+</dd>
+<dt><cite>units</cite></dt><dd><p>A list of units that should be considered builtin; supported are
+<code class="docutils literal notranslate"><span class="pre">System</span></code>, <code class="docutils literal notranslate"><span class="pre">SysUtils</span></code>, <code class="docutils literal notranslate"><span class="pre">Classes</span></code> and <code class="docutils literal notranslate"><span class="pre">Math</span></code>.
+The default is to consider all of them builtin.</p>
+</dd>
+</dl>
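+<p>A minimal sketch of passing these options when constructing the lexer
+(the sample program text is made up for illustration):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.formatters import HtmlFormatter
+from pygments.lexers.pascal import DelphiLexer
+
+# Turn off Turbo Pascal keywords and restrict builtin-unit highlighting
+# to System and SysUtils only.
+lexer = DelphiLexer(turbopascal=False, units=['System', 'SysUtils'])
+print(highlight('program Demo; begin WriteLn(1); end.', lexer, HtmlFormatter()))
+</pre></div>
+</div>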
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.pawn"></span></div>
+<div class="section" id="lexers-for-the-pawn-languages">
+<h2>Lexers for the Pawn languages<a class="headerlink" href="#lexers-for-the-pawn-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.pawn.PawnLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.pawn.</code><code class="sig-name descname">PawnLexer</code><a class="headerlink" href="#pygments.lexers.pawn.PawnLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>pawn</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.p, *.pwn, *.inc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-pawn</p>
+</dd>
+</dl>
+<p>For Pawn source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.pawn.SourcePawnLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.pawn.</code><code class="sig-name descname">SourcePawnLexer</code><a class="headerlink" href="#pygments.lexers.pawn.SourcePawnLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>sp</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sp</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-sourcepawn</p>
+</dd>
+</dl>
+<p>For SourcePawn source code with preprocessor directives.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.perl"></span></div>
+<div class="section" id="lexers-for-perl-and-related-languages">
+<h2>Lexers for Perl and related languages<a class="headerlink" href="#lexers-for-perl-and-related-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.perl.Perl6Lexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.perl.</code><code class="sig-name descname">Perl6Lexer</code><a class="headerlink" href="#pygments.lexers.perl.Perl6Lexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>perl6, pl6</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.pl, *.pm, *.nqp, *.p6, *.6pl, *.p6l, *.pl6, *.6pm, *.p6m, *.pm6, *.t</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-perl6, application/x-perl6</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.perl6.org">Perl 6</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.perl.PerlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.perl.</code><code class="sig-name descname">PerlLexer</code><a class="headerlink" href="#pygments.lexers.perl.PerlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>perl, pl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.pl, *.pm, *.t</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-perl, application/x-perl</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.perl.org">Perl</a> source code.</p>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.php"></span></div>
+<div class="section" id="lexers-for-php-and-related-languages">
+<h2>Lexers for PHP and related languages<a class="headerlink" href="#lexers-for-php-and-related-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.php.PhpLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.php.</code><code class="sig-name descname">PhpLexer</code><a class="headerlink" href="#pygments.lexers.php.PhpLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>php, php3, php4, php5</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.php, *.php[345], *.inc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-php</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.php.net/">PHP</a> source code.
+For PHP embedded in HTML, use the <cite>HtmlPhpLexer</cite>.</p>
+<p>Additional options accepted:</p>
+<dl>
+<dt><cite>startinline</cite></dt><dd><p>If given and <code class="docutils literal notranslate"><span class="pre">True</span></code>, the lexer starts highlighting with
+PHP code (i.e. no starting <code class="docutils literal notranslate"><span class="pre">&lt;?php</span></code> required).  The default
+is <code class="docutils literal notranslate"><span class="pre">False</span></code>.</p>
+</dd>
+<dt><cite>funcnamehighlighting</cite></dt><dd><p>If given and <code class="docutils literal notranslate"><span class="pre">True</span></code>, highlight builtin function names
+(default: <code class="docutils literal notranslate"><span class="pre">True</span></code>).</p>
+</dd>
+<dt><cite>disabledmodules</cite></dt><dd><p>If given, must be a list of module names whose function names
+should not be highlighted. By default, all modules are highlighted
+except the special <code class="docutils literal notranslate"><span class="pre">'unknown'</span></code> module that includes functions
+that are known to PHP but are undocumented.</p>
+<p>To get a list of allowed modules have a look into the
+<cite>_php_builtins</cite> module:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.lexers._php_builtins</span> <span class="kn">import</span> <span class="n">MODULES</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MODULES</span><span class="o">.</span><span class="n">keys</span><span class="p">()</span>
+<span class="go">[&#39;PHP Options/Info&#39;, &#39;Zip&#39;, &#39;dba&#39;, ...]</span>
+</pre></div>
+</div>
+<p>In fact, the names of those modules match the module names from
+the PHP documentation.</p>
+</dd>
+</dl>
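+<p>For instance, a minimal sketch of highlighting a bare snippet with
+<cite>startinline</cite> enabled (the snippet itself is made up):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.formatters import HtmlFormatter
+from pygments.lexers.php import PhpLexer
+
+# The snippet has no opening &lt;?php tag, so start the lexer inline.
+snippet = "echo strtoupper('hello');"
+print(highlight(snippet, PhpLexer(startinline=True), HtmlFormatter()))
+</pre></div>
+</div>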
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.php.ZephirLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.php.</code><code class="sig-name descname">ZephirLexer</code><a class="headerlink" href="#pygments.lexers.php.ZephirLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>zephir</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.zep</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://zephir-lang.com/">Zephir language</a> source code.</p>
+<p>Zephir is a compiled high-level language aimed
+at the creation of C extensions for PHP.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.pony"></span></div>
+<div class="section" id="lexers-for-pony-and-related-languages">
+<h2>Lexers for Pony and related languages<a class="headerlink" href="#lexers-for-pony-and-related-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.pony.PonyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.pony.</code><code class="sig-name descname">PonyLexer</code><a class="headerlink" href="#pygments.lexers.pony.PonyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>pony</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.pony</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For Pony source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.praat"></span></div>
+<div class="section" id="lexer-for-praat">
+<h2>Lexer for Praat<a class="headerlink" href="#lexer-for-praat" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.praat.PraatLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.praat.</code><code class="sig-name descname">PraatLexer</code><a class="headerlink" href="#pygments.lexers.praat.PraatLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>praat</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.praat, *.proc, *.psc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.praat.org">Praat</a> scripts.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.prolog"></span></div>
+<div class="section" id="lexers-for-prolog-and-prolog-like-languages">
+<h2>Lexers for Prolog and Prolog-like languages<a class="headerlink" href="#lexers-for-prolog-and-prolog-like-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.prolog.LogtalkLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.prolog.</code><code class="sig-name descname">LogtalkLexer</code><a class="headerlink" href="#pygments.lexers.prolog.LogtalkLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>logtalk</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.lgt, *.logtalk</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-logtalk</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://logtalk.org/">Logtalk</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.10.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.prolog.PrologLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.prolog.</code><code class="sig-name descname">PrologLexer</code><a class="headerlink" href="#pygments.lexers.prolog.PrologLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>prolog</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ecl, *.prolog, *.pro, *.pl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-prolog</p>
+</dd>
+</dl>
+<p>Lexer for Prolog files.</p>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.python"></span></div>
+<div class="section" id="lexers-for-python-and-related-languages">
+<h2>Lexers for Python and related languages<a class="headerlink" href="#lexers-for-python-and-related-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.python.CythonLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.python.</code><code class="sig-name descname">CythonLexer</code><a class="headerlink" href="#pygments.lexers.python.CythonLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cython, pyx, pyrex</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.pyx, *.pxd, *.pxi</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-cython, application/x-cython</p>
+</dd>
+</dl>
+<p>For Pyrex and <a class="reference external" href="http://cython.org">Cython</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.python.DgLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.python.</code><code class="sig-name descname">DgLexer</code><a class="headerlink" href="#pygments.lexers.python.DgLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>dg</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.dg</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-dg</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://pyos.github.com/dg">dg</a>,
+a functional and object-oriented programming language
+running on the CPython 3 VM.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.python.NumPyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.python.</code><code class="sig-name descname">NumPyLexer</code><a class="headerlink" href="#pygments.lexers.python.NumPyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>numpy</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>A Python lexer recognizing Numerical Python builtins.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.10.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.python.Python2Lexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.python.</code><code class="sig-name descname">Python2Lexer</code><a class="headerlink" href="#pygments.lexers.python.Python2Lexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>python2, py2</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-python2, application/x-python2</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.python.org">Python 2.x</a> source code.</p>
+<div class="versionchanged">
+<p><span class="versionmodified changed">Changed in version 2.5: </span>This class has been renamed from <code class="docutils literal notranslate"><span class="pre">PythonLexer</span></code>.  <code class="docutils literal notranslate"><span class="pre">PythonLexer</span></code> now
+refers to the Python 3 variant.  File name patterns like <code class="docutils literal notranslate"><span class="pre">*.py</span></code> have
+been moved to Python 3 as well.</p>
+</div>
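+<p>A minimal sketch of requesting the two variants explicitly by alias:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.lexers import get_lexer_by_name
+
+py3 = get_lexer_by_name('python')    # PythonLexer: Python 3, the default since 2.5
+py2 = get_lexer_by_name('python2')   # Python2Lexer: legacy Python 2.x sources
+print(py3.name, '/', py2.name)
+</pre></div>
+</div>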
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.python.Python2TracebackLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.python.</code><code class="sig-name descname">Python2TracebackLexer</code><a class="headerlink" href="#pygments.lexers.python.Python2TracebackLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>py2tb</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.py2tb</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-python2-traceback</p>
+</dd>
+</dl>
+<p>For Python 2.x tracebacks.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+<div class="versionchanged">
+<p><span class="versionmodified changed">Changed in version 2.5: </span>This class has been renamed from <code class="docutils literal notranslate"><span class="pre">PythonTracebackLexer</span></code>.
+<code class="docutils literal notranslate"><span class="pre">PythonTracebackLexer</span></code> now refers to the Python 3 variant.</p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.python.PythonConsoleLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.python.</code><code class="sig-name descname">PythonConsoleLexer</code><a class="headerlink" href="#pygments.lexers.python.PythonConsoleLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>pycon</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-python-doctest</p>
+</dd>
+</dl>
+<p>For Python console output or doctests, such as:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">a</span> <span class="o">=</span> <span class="s1">&#39;foo&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">print</span> <span class="n">a</span>
+<span class="go">foo</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="mi">1</span> <span class="o">/</span> <span class="mi">0</span>
+<span class="gt">Traceback (most recent call last):</span>
+  File <span class="nb">&quot;&lt;stdin&gt;&quot;</span>, line <span class="m">1</span>, in <span class="n">&lt;module&gt;</span>
+<span class="gr">ZeroDivisionError</span>: <span class="n">integer division or modulo by zero</span>
+</pre></div>
+</div>
+<p>Additional options:</p>
+<dl>
+<dt><cite>python3</cite></dt><dd><p>Use the Python 3 lexer for the code.  The default is <code class="docutils literal notranslate"><span class="pre">True</span></code>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.0.</span></p>
+</div>
+<div class="versionchanged">
+<p><span class="versionmodified changed">Changed in version 2.5: </span>Now defaults to <code class="docutils literal notranslate"><span class="pre">True</span></code>.</p>
+</div>
+</dd>
+</dl>
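+<p>For example, a minimal sketch of highlighting a Python 2 style session by
+overriding the <cite>python3</cite> option (the session text is made up):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.formatters import TerminalFormatter
+from pygments.lexers.python import PythonConsoleLexer
+
+# The transcript uses Python 2 syntax, so do not use the Python 3 code lexer.
+session = "&gt;&gt;&gt; print 'foo'\nfoo\n"
+print(highlight(session, PythonConsoleLexer(python3=False), TerminalFormatter()))
+</pre></div>
+</div>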
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.python.PythonLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.python.</code><code class="sig-name descname">PythonLexer</code><a class="headerlink" href="#pygments.lexers.python.PythonLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>python, py, sage, python3, py3</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.py, *.pyw, *.jy, *.sage, *.sc, SConstruct, SConscript, *.bzl, BUCK, BUILD, BUILD.bazel, WORKSPACE, *.tac</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-python, application/x-python, text/x-python3, application/x-python3</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.python.org">Python</a> source code (version 3.x).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.10.</span></p>
+</div>
+<div class="versionchanged">
+<p><span class="versionmodified changed">Changed in version 2.5: </span>This is now the default <code class="docutils literal notranslate"><span class="pre">PythonLexer</span></code>.  It is still available as the
+alias <code class="docutils literal notranslate"><span class="pre">Python3Lexer</span></code>.</p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.python.PythonTracebackLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.python.</code><code class="sig-name descname">PythonTracebackLexer</code><a class="headerlink" href="#pygments.lexers.python.PythonTracebackLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>pytb, py3tb</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.pytb, *.py3tb</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-python-traceback, text/x-python3-traceback</p>
+</dd>
+</dl>
+<p>For Python 3.x tracebacks, with support for chained exceptions.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.0.</span></p>
+</div>
+<div class="versionchanged">
+<p><span class="versionmodified changed">Changed in version 2.5: </span>This is now the default <code class="docutils literal notranslate"><span class="pre">PythonTracebackLexer</span></code>.  It is still available
+as the alias <code class="docutils literal notranslate"><span class="pre">Python3TracebackLexer</span></code>.</p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.qvt"></span></div>
+<div class="section" id="lexer-for-qvt-operational-language">
+<h2>Lexer for QVT Operational language<a class="headerlink" href="#lexer-for-qvt-operational-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.qvt.QVToLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.qvt.</code><code class="sig-name descname">QVToLexer</code><a class="headerlink" href="#pygments.lexers.qvt.QVToLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>qvto, qvt</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.qvto</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For the <a class="reference external" href="http://www.omg.org/spec/QVT/1.1/">QVT Operational Mapping language</a>.</p>
+<p>Reference for implementing this: «Meta Object Facility (MOF) 2.0
+Query/View/Transformation Specification», Version 1.1 - January 2011
+(<a class="reference external" href="http://www.omg.org/spec/QVT/1.1/">http://www.omg.org/spec/QVT/1.1/</a>), see §8.4, «Concrete Syntax» in
+particular.</p>
+<p>Notable token assignments (see the sketch below):</p>
+<ul class="simple">
+<li><p>Name.Class is assigned to the identifier following any of the following
+keywords: metamodel, class, exception, primitive, enum, transformation
+or library</p></li>
+<li><p>Name.Function is assigned to the names of mappings and queries</p></li>
+<li><p>Name.Builtin.Pseudo is assigned to the pre-defined variables ‘this’,
+‘self’ and ‘result’.</p></li>
+</ul>
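+<p>A minimal sketch of inspecting those assignments (the one-line
+transformation header is made up for illustration):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>from pygments.lexers.qvt import QVToLexer
+from pygments.token import Name
+
+# Per the notes above, the identifier after 'transformation' should be
+# emitted as Name.Class.
+for tok, val in QVToLexer().get_tokens('transformation Uml2Rdb;'):
+    if tok in Name:
+        print(tok, repr(val))
+</pre></div>
+</div>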
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.r"></span></div>
+<div class="section" id="lexers-for-the-r-s-languages">
+<h2>Lexers for the R/S languages<a class="headerlink" href="#lexers-for-the-r-s-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.r.RConsoleLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.r.</code><code class="sig-name descname">RConsoleLexer</code><a class="headerlink" href="#pygments.lexers.r.RConsoleLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>rconsole, rout</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.Rout</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For R console transcripts or R CMD BATCH output files.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.r.RdLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.r.</code><code class="sig-name descname">RdLexer</code><a class="headerlink" href="#pygments.lexers.r.RdLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>rd</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.Rd</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-r-doc</p>
+</dd>
+</dl>
+<p>Pygments lexer for R documentation (Rd) files.</p>
+<p>This is a very minimal implementation, highlighting little more
+than the macros. A description of Rd syntax is found in <a class="reference external" href="http://cran.r-project.org/doc/manuals/R-exts.html">Writing R
+Extensions</a>
+and <a class="reference external" href="http://developer.r-project.org/parseRd.pdf">Parsing Rd files</a>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.r.SLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.r.</code><code class="sig-name descname">SLexer</code><a class="headerlink" href="#pygments.lexers.r.SLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>splus, s, r</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.S, *.R, .Rhistory, .Rprofile, .Renviron</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/S-plus, text/S, text/x-r-source, text/x-r, text/x-R, text/x-r-history, text/x-r-profile</p>
+</dd>
+</dl>
+<p>For S, S-plus, and R source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.10.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.rdf"></span></div>
+<div class="section" id="lexers-for-semantic-web-and-rdf-query-languages-and-markup">
+<h2>Lexers for semantic web and RDF query languages and markup<a class="headerlink" href="#lexers-for-semantic-web-and-rdf-query-languages-and-markup" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.rdf.ShExCLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.rdf.</code><code class="sig-name descname">ShExCLexer</code><a class="headerlink" href="#pygments.lexers.rdf.ShExCLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>shexc, shex</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.shex</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/shex</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="https://shex.io/shex-semantics/#shexc">ShExC</a>, the compact syntax of the Shape Expressions language.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.rdf.SparqlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.rdf.</code><code class="sig-name descname">SparqlLexer</code><a class="headerlink" href="#pygments.lexers.rdf.SparqlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>sparql</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rq, *.sparql</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/sparql-query</p>
+</dd>
+</dl>
+<p>Lexer for the <a class="reference external" href="http://www.w3.org/TR/rdf-sparql-query/">SPARQL</a> query language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.rdf.TurtleLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.rdf.</code><code class="sig-name descname">TurtleLexer</code><a class="headerlink" href="#pygments.lexers.rdf.TurtleLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>turtle</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ttl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/turtle, application/x-turtle</p>
+</dd>
+</dl>
+<p>Lexer for the <a class="reference external" href="http://www.w3.org/TR/turtle/">Turtle</a> data language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.rebol"></span></div>
+<div class="section" id="lexers-for-the-rebol-and-related-languages">
+<h2>Lexers for the REBOL and related languages<a class="headerlink" href="#lexers-for-the-rebol-and-related-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.rebol.RebolLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.rebol.</code><code class="sig-name descname">RebolLexer</code><a class="headerlink" href="#pygments.lexers.rebol.RebolLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>rebol</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.r, *.r3, *.reb</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-rebol</p>
+</dd>
+</dl>
+<p>A <a class="reference external" href="http://www.rebol.com/">REBOL</a> lexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.rebol.RedLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.rebol.</code><code class="sig-name descname">RedLexer</code><a class="headerlink" href="#pygments.lexers.rebol.RedLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>red, red/system</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.red, *.reds</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-red, text/x-red-system</p>
+</dd>
+</dl>
+<p>A <a class="reference external" href="http://www.red-lang.org/">Red-language</a> lexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.resource"></span></div>
+<div class="section" id="lexer-for-resource-definition-files">
+<h2>Lexer for resource definition files<a class="headerlink" href="#lexer-for-resource-definition-files" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.resource.ResourceLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.resource.</code><code class="sig-name descname">ResourceLexer</code><a class="headerlink" href="#pygments.lexers.resource.ResourceLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>resource, resourcebundle</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://userguide.icu-project.org/locale/resources">ICU Resource bundles</a>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.rnc"></span></div>
+<div class="section" id="lexer-for-relax-ng-compact-syntax">
+<h2>Lexer for Relax-NG Compact syntax<a class="headerlink" href="#lexer-for-relax-ng-compact-syntax" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.rnc.RNCCompactLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.rnc.</code><code class="sig-name descname">RNCCompactLexer</code><a class="headerlink" href="#pygments.lexers.rnc.RNCCompactLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>rnc, rng-compact</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rnc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://relaxng.org">RelaxNG-compact</a> syntax.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.roboconf"></span></div>
+<div class="section" id="lexers-for-roboconf-dsl">
+<h2>Lexers for Roboconf DSL<a class="headerlink" href="#lexers-for-roboconf-dsl" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.roboconf.RoboconfGraphLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.roboconf.</code><code class="sig-name descname">RoboconfGraphLexer</code><a class="headerlink" href="#pygments.lexers.roboconf.RoboconfGraphLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>roboconf-graph</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.graph</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://roboconf.net/en/roboconf.html">Roboconf</a> graph files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.roboconf.RoboconfInstancesLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.roboconf.</code><code class="sig-name descname">RoboconfInstancesLexer</code><a class="headerlink" href="#pygments.lexers.roboconf.RoboconfInstancesLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>roboconf-instances</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.instances</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://roboconf.net/en/roboconf.html">Roboconf</a> instances files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.robotframework"></span></div>
+<div class="section" id="lexer-for-robot-framework">
+<h2>Lexer for Robot Framework<a class="headerlink" href="#lexer-for-robot-framework" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.robotframework.RobotFrameworkLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.robotframework.</code><code class="sig-name descname">RobotFrameworkLexer</code><a class="headerlink" href="#pygments.lexers.robotframework.RobotFrameworkLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>robotframework</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.robot</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-robotframework</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://robotframework.org">Robot Framework</a> test data.</p>
+<p>Supports both space and pipe separated plain text formats.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.ruby"></span></div>
+<div class="section" id="lexers-for-ruby-and-related-languages">
+<h2>Lexers for Ruby and related languages<a class="headerlink" href="#lexers-for-ruby-and-related-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.ruby.FancyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.ruby.</code><code class="sig-name descname">FancyLexer</code><a class="headerlink" href="#pygments.lexers.ruby.FancyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>fancy, fy</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.fy, *.fancypack</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-fancysrc</p>
+</dd>
+</dl>
+<p>Pygments lexer for <a class="reference external" href="http://www.fancy-lang.org/">Fancy</a>.</p>
+<p>Fancy is a self-hosted, pure object-oriented, dynamic,
+class-based, concurrent general-purpose programming language
+running on Rubinius, the Ruby VM.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.ruby.RubyConsoleLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.ruby.</code><code class="sig-name descname">RubyConsoleLexer</code><a class="headerlink" href="#pygments.lexers.ruby.RubyConsoleLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>rbcon, irb</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-ruby-shellsession</p>
+</dd>
+</dl>
+<p>For Ruby interactive console (<strong>irb</strong>) output like:</p>
+<div class="highlight-rbcon notranslate"><div class="highlight"><pre><span></span><span class="gp">irb(main):001:0&gt; </span><span class="n">a</span> <span class="o">=</span> <span class="mi">1</span>
+<span class="go">=&gt; 1</span>
+<span class="gp">irb(main):002:0&gt; </span><span class="nb">puts</span> <span class="n">a</span>
+<span class="go">1</span>
+<span class="go">=&gt; nil</span>
+</pre></div>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.ruby.RubyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.ruby.</code><code class="sig-name descname">RubyLexer</code><a class="headerlink" href="#pygments.lexers.ruby.RubyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>rb, ruby, duby</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rb, *.rbw, Rakefile, *.rake, *.gemspec, *.rbx, *.duby, Gemfile</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-ruby, application/x-ruby</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.ruby-lang.org">Ruby</a> source code.</p>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.rust"></span></div>
+<div class="section" id="lexers-for-the-rust-language">
+<h2>Lexers for the Rust language<a class="headerlink" href="#lexers-for-the-rust-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.rust.RustLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.rust.</code><code class="sig-name descname">RustLexer</code><a class="headerlink" href="#pygments.lexers.rust.RustLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>rust, rs</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rs, *.rs.in</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/rust</p>
+</dd>
+</dl>
+<p>Lexer for the Rust programming language (version 1.10).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.sas"></span></div>
+<div class="section" id="lexer-for-sas">
+<h2>Lexer for SAS<a class="headerlink" href="#lexer-for-sas" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.sas.SASLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.sas.</code><code class="sig-name descname">SASLexer</code><a class="headerlink" href="#pygments.lexers.sas.SASLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>sas</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.SAS, *.sas</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-sas, text/sas, application/x-sas</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.sas.com/">SAS</a> files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.scdoc"></span></div>
+<div class="section" id="lexer-for-scdoc-a-simple-man-page-generator">
+<h2>Lexer for scdoc, a simple man page generator<a class="headerlink" href="#lexer-for-scdoc-a-simple-man-page-generator" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.scdoc.ScdocLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.scdoc.</code><code class="sig-name descname">ScdocLexer</code><a class="headerlink" href="#pygments.lexers.scdoc.ScdocLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>scdoc, scd</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.scd, *.scdoc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p><cite>scdoc</cite> is a simple man page generator for POSIX systems written in C99.
+<a class="reference external" href="https://git.sr.ht/~sircmpwn/scdoc">https://git.sr.ht/~sircmpwn/scdoc</a></p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.5.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.scripting"></span></div>
+<div class="section" id="lexer-for-scripting-and-embedded-languages">
+<h2>Lexer for scripting and embedded languages<a class="headerlink" href="#lexer-for-scripting-and-embedded-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.scripting.AppleScriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.scripting.</code><code class="sig-name descname">AppleScriptLexer</code><a class="headerlink" href="#pygments.lexers.scripting.AppleScriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>applescript</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.applescript</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://developer.apple.com/documentation/AppleScript/Conceptual/AppleScriptLangGuide">AppleScript source code</a>,
+including <a class="reference external" href="http://developer.apple.com/documentation/AppleScript/Reference/StudioReference">AppleScript Studio</a>.
+Contributed by Andreas Amann &lt;<a class="reference external" href="mailto:aamann&#37;&#52;&#48;mac&#46;com">aamann<span>&#64;</span>mac<span>&#46;</span>com</a>&gt;.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.scripting.ChaiscriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.scripting.</code><code class="sig-name descname">ChaiscriptLexer</code><a class="headerlink" href="#pygments.lexers.scripting.ChaiscriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>chai, chaiscript</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.chai</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-chaiscript, application/x-chaiscript</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://chaiscript.com/">ChaiScript</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.scripting.EasytrieveLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.scripting.</code><code class="sig-name descname">EasytrieveLexer</code><a class="headerlink" href="#pygments.lexers.scripting.EasytrieveLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>easytrieve</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ezt, *.mac</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-easytrieve</p>
+</dd>
+</dl>
+<p>Easytrieve Plus is a programming language for extracting, filtering and
+converting sequential data. Furthermore, it can lay out data for reports.
+It is mainly used on mainframe platforms and can access several of the
+mainframe’s native file formats. It is somewhat comparable to awk.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.scripting.HybrisLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.scripting.</code><code class="sig-name descname">HybrisLexer</code><a class="headerlink" href="#pygments.lexers.scripting.HybrisLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>hybris, hy</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.hy, *.hyb</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-hybris, application/x-hybris</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.hybris-lang.org">Hybris</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.scripting.JclLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.scripting.</code><code class="sig-name descname">JclLexer</code><a class="headerlink" href="#pygments.lexers.scripting.JclLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>jcl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.jcl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-jcl</p>
+</dd>
+</dl>
+<p><a class="reference external" href="http://publibz.boulder.ibm.com/cgi-bin/bookmgr_OS390/BOOKS/IEA2B570/CCONTENTS">Job Control Language (JCL)</a>
+is a scripting language used on mainframe platforms to instruct the system
+on how to run a batch job or start a subsystem. It is somewhat
+comparable to MS DOS batch and Unix shell scripts.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.scripting.LSLLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.scripting.</code><code class="sig-name descname">LSLLexer</code><a class="headerlink" href="#pygments.lexers.scripting.LSLLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>lsl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.lsl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-lsl</p>
+</dd>
+</dl>
+<p>For Second Life’s Linden Scripting Language source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.scripting.LuaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.scripting.</code><code class="sig-name descname">LuaLexer</code><a class="headerlink" href="#pygments.lexers.scripting.LuaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>lua</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.lua, *.wlua</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-lua, application/x-lua</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.lua.org">Lua</a> source code.</p>
+<p>Additional options accepted:</p>
+<dl>
+<dt><cite>func_name_highlighting</cite></dt><dd><p>If given and <code class="docutils literal notranslate"><span class="pre">True</span></code>, highlight builtin function names
+(default: <code class="docutils literal notranslate"><span class="pre">True</span></code>).</p>
+</dd>
+<dt><cite>disabled_modules</cite></dt><dd><p>If given, must be a list of module names whose function names
+should not be highlighted. By default all modules are highlighted.</p>
+<p>To get a list of allowed modules have a look into the
+<cite>_lua_builtins</cite> module:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.lexers._lua_builtins</span> <span class="kn">import</span> <span class="n">MODULES</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">MODULES</span><span class="o">.</span><span class="n">keys</span><span class="p">()</span>
+<span class="go">[&#39;string&#39;, &#39;coroutine&#39;, &#39;modules&#39;, &#39;io&#39;, &#39;basic&#39;, ...]</span>
+</pre></div>
+</div>
+</dd>
+</dl>
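+<p>A minimal usage sketch (an illustration only, not part of the class documentation)
+showing how the options above can be passed to the lexer; the module names used
+here are examples:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span>&gt;&gt;&gt; from pygments.lexers.scripting import LuaLexer
+&gt;&gt;&gt; # keep builtin highlighting on, but skip the &#39;io&#39; and &#39;os&#39; modules
+&gt;&gt;&gt; lexer = LuaLexer(func_name_highlighting=True, disabled_modules=[&#39;io&#39;, &#39;os&#39;])
+&gt;&gt;&gt; list(lexer.get_tokens(&#39;print(&quot;hello&quot;)&#39;))[:2]
+</pre></div>
+</div>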
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.scripting.MOOCodeLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.scripting.</code><code class="sig-name descname">MOOCodeLexer</code><a class="headerlink" href="#pygments.lexers.scripting.MOOCodeLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>moocode, moo</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.moo</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-moocode</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.moo.mud.org/">MOOCode</a> (the MOO scripting
+language).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.9.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.scripting.MoonScriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.scripting.</code><code class="sig-name descname">MoonScriptLexer</code><a class="headerlink" href="#pygments.lexers.scripting.MoonScriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>moon, moonscript</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.moon</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-moonscript, application/x-moonscript</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://moonscript.org">MoonScript</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.scripting.RexxLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.scripting.</code><code class="sig-name descname">RexxLexer</code><a class="headerlink" href="#pygments.lexers.scripting.RexxLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>rexx, arexx</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rexx, *.rex, *.rx, *.arexx</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-rexx</p>
+</dd>
+</dl>
+<p><a class="reference external" href="http://www.rexxinfo.org/">Rexx</a> is a scripting language available for
+a wide range of different platforms with its roots found on mainframe
+systems. It is popular for I/O- and data-based tasks and can act as a glue
+language to bind different applications together.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.sgf"></span></div>
+<div class="section" id="lexer-for-smart-game-format-sgf-file-format">
+<h2>Lexer for the Smart Game Format (SGF)<a class="headerlink" href="#lexer-for-smart-game-format-sgf-file-format" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.sgf.SmartGameFormatLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.sgf.</code><code class="sig-name descname">SmartGameFormatLexer</code><a class="headerlink" href="#pygments.lexers.sgf.SmartGameFormatLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>sgf</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sgf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for the Smart Game Format (SGF).</p>
+<p>The format is used to store game records of board games for two players
+(mainly Go).
+For more information about the definition of the format, see:
+<a class="reference external" href="https://www.red-bean.com/sgf/">https://www.red-bean.com/sgf/</a></p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.shell"></span></div>
+<div class="section" id="lexers-for-various-shells">
+<h2>Lexers for various shells<a class="headerlink" href="#lexers-for-various-shells" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.shell.BashLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.shell.</code><code class="sig-name descname">BashLexer</code><a class="headerlink" href="#pygments.lexers.shell.BashLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>bash, sh, ksh, zsh, shell</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sh, *.ksh, *.bash, *.ebuild, *.eclass, *.exheres-0, *.exlib, *.zsh, .bashrc, bashrc, .bash\*, bash\*, zshrc, .zshrc, PKGBUILD</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-sh, application/x-shellscript, text/x-shellscript</p>
+</dd>
+</dl>
+<p>Lexer for (ba|k|z|)sh shell scripts.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+</dd></dl>
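+<p>The short names and filename patterns listed for each lexer are what the lookup
+helpers in <cite>pygments.lexers</cite> match against. A minimal sketch (the file name is
+just an example):</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span>&gt;&gt;&gt; from pygments.lexers import get_lexer_by_name, get_lexer_for_filename
+&gt;&gt;&gt; get_lexer_by_name(&#39;bash&#39;)           # any of the short names above works
+&lt;pygments.lexers.BashLexer&gt;
+&gt;&gt;&gt; get_lexer_for_filename(&#39;build.sh&#39;)  # matched via the filename patterns
+&lt;pygments.lexers.BashLexer&gt;
+</pre></div>
+</div>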
+
+<dl class="class">
+<dt id="pygments.lexers.shell.BashSessionLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.shell.</code><code class="sig-name descname">BashSessionLexer</code><a class="headerlink" href="#pygments.lexers.shell.BashSessionLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>console, shell-session</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sh-session, *.shell-session</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-shell-session, application/x-sh-session</p>
+</dd>
+</dl>
+<p>Lexer for simplistic shell sessions.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.shell.BatchLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.shell.</code><code class="sig-name descname">BatchLexer</code><a class="headerlink" href="#pygments.lexers.shell.BatchLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>bat, batch, dosbatch, winbatch</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.bat, *.cmd</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-dos-batch</p>
+</dd>
+</dl>
+<p>Lexer for the DOS/Windows Batch file format.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.shell.FishShellLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.shell.</code><code class="sig-name descname">FishShellLexer</code><a class="headerlink" href="#pygments.lexers.shell.FishShellLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>fish, fishshell</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.fish, *.load</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-fish</p>
+</dd>
+</dl>
+<p>Lexer for Fish shell scripts.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.shell.MSDOSSessionLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.shell.</code><code class="sig-name descname">MSDOSSessionLexer</code><a class="headerlink" href="#pygments.lexers.shell.MSDOSSessionLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>doscon</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for simplistic MSDOS sessions.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.shell.PowerShellLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.shell.</code><code class="sig-name descname">PowerShellLexer</code><a class="headerlink" href="#pygments.lexers.shell.PowerShellLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>powershell, posh, ps1, psm1</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ps1, *.psm1</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-powershell</p>
+</dd>
+</dl>
+<p>For Windows PowerShell code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.shell.PowerShellSessionLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.shell.</code><code class="sig-name descname">PowerShellSessionLexer</code><a class="headerlink" href="#pygments.lexers.shell.PowerShellSessionLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ps1con</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for simplistic Windows PowerShell sessions.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.shell.SlurmBashLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.shell.</code><code class="sig-name descname">SlurmBashLexer</code><a class="headerlink" href="#pygments.lexers.shell.SlurmBashLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>slurm, sbatch</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for (ba|k|z|)sh Slurm scripts.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.shell.TcshLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.shell.</code><code class="sig-name descname">TcshLexer</code><a class="headerlink" href="#pygments.lexers.shell.TcshLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>tcsh, csh</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.tcsh, *.csh</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-csh</p>
+</dd>
+</dl>
+<p>Lexer for tcsh scripts.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.10.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.shell.TcshSessionLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.shell.</code><code class="sig-name descname">TcshSessionLexer</code><a class="headerlink" href="#pygments.lexers.shell.TcshSessionLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>tcshcon</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for Tcsh sessions.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.slash"></span></div>
+<div class="section" id="lexer-for-the-slash-programming">
+<h2>Lexer for the <a class="reference external" href="https://github.com/arturadib/Slash-A">Slash</a> programming language<a class="headerlink" href="#lexer-for-the-slash-programming" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.slash.SlashLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.slash.</code><code class="sig-name descname">SlashLexer</code><a class="headerlink" href="#pygments.lexers.slash.SlashLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>slash</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for the Slash programming language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.smalltalk"></span></div>
+<div class="section" id="lexers-for-smalltalk-and-related-languages">
+<h2>Lexers for Smalltalk and related languages<a class="headerlink" href="#lexers-for-smalltalk-and-related-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.smalltalk.NewspeakLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.smalltalk.</code><code class="sig-name descname">NewspeakLexer</code><a class="headerlink" href="#pygments.lexers.smalltalk.NewspeakLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>newspeak</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ns2</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-newspeak</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://newspeaklanguage.org/">Newspeak</a> syntax.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.smalltalk.SmalltalkLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.smalltalk.</code><code class="sig-name descname">SmalltalkLexer</code><a class="headerlink" href="#pygments.lexers.smalltalk.SmalltalkLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>smalltalk, squeak, st</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.st</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-smalltalk</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.smalltalk.org/">Smalltalk</a> syntax.
+Contributed by Stefan Matthias Aust.
+Rewritten by Nils Winter.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.10.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.smv"></span></div>
+<div class="section" id="lexers-for-the-smv-languages">
+<h2>Lexers for the SMV languages<a class="headerlink" href="#lexers-for-the-smv-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.smv.NuSMVLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.smv.</code><code class="sig-name descname">NuSMVLexer</code><a class="headerlink" href="#pygments.lexers.smv.NuSMVLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>nusmv</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.smv</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for the NuSMV language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.snobol"></span></div>
+<div class="section" id="lexers-for-the-snobol-language">
+<h2>Lexers for the SNOBOL language<a class="headerlink" href="#lexers-for-the-snobol-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.snobol.SnobolLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.snobol.</code><code class="sig-name descname">SnobolLexer</code><a class="headerlink" href="#pygments.lexers.snobol.SnobolLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>snobol</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.snobol</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-snobol</p>
+</dd>
+</dl>
+<p>Lexer for the SNOBOL4 programming language.</p>
+<p>Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
+Does not require spaces around binary operators.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.solidity"></span></div>
+<div class="section" id="lexers-for-solidity">
+<h2>Lexers for Solidity<a class="headerlink" href="#lexers-for-solidity" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.solidity.SolidityLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.solidity.</code><code class="sig-name descname">SolidityLexer</code><a class="headerlink" href="#pygments.lexers.solidity.SolidityLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>solidity</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sol</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For Solidity source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.5.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.special"></span></div>
+<div class="section" id="special-lexers">
+<h2>Special lexers<a class="headerlink" href="#special-lexers" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.special.RawTokenLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.special.</code><code class="sig-name descname">RawTokenLexer</code><a class="headerlink" href="#pygments.lexers.special.RawTokenLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>raw</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-pygments-tokens</p>
+</dd>
+</dl>
+<p>Recreate a token stream formatted with the <cite>RawTokenFormatter</cite>.  This
+lexer raises exceptions during parsing if the token stream in the
+file is malformed.</p>
+<p>Additional options accepted:</p>
+<dl class="simple">
+<dt><cite>compress</cite></dt><dd><p>If set to <code class="docutils literal notranslate"><span class="pre">&quot;gz&quot;</span></code> or <code class="docutils literal notranslate"><span class="pre">&quot;bz2&quot;</span></code>, decompress the token stream with
+the given compression algorithm before lexing (default: <code class="docutils literal notranslate"><span class="pre">&quot;&quot;</span></code>).</p>
+</dd>
+</dl>
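+<p>A minimal round-trip sketch (an illustration only): tokens are first dumped
+with the <cite>RawTokenFormatter</cite> and then recreated with this lexer:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span>&gt;&gt;&gt; from pygments import highlight
+&gt;&gt;&gt; from pygments.lexers import PythonLexer
+&gt;&gt;&gt; from pygments.lexers.special import RawTokenLexer
+&gt;&gt;&gt; from pygments.formatters import RawTokenFormatter
+&gt;&gt;&gt; raw = highlight(&#39;print(1)&#39;, PythonLexer(), RawTokenFormatter())
+&gt;&gt;&gt; for token, value in RawTokenLexer().get_tokens(raw):
+...     print(token, repr(value))
+</pre></div>
+</div>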
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.special.TextLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.special.</code><code class="sig-name descname">TextLexer</code><a class="headerlink" href="#pygments.lexers.special.TextLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>text</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.txt</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/plain</p>
+</dd>
+</dl>
+<p>“Null” lexer that doesn’t highlight anything.</p>
+</dd></dl>
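+<p>One common pattern (an assumption about typical use, not something the class
+itself mandates) is to fall back to this lexer when no specific lexer is found:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span>&gt;&gt;&gt; from pygments.lexers import get_lexer_by_name
+&gt;&gt;&gt; from pygments.lexers.special import TextLexer
+&gt;&gt;&gt; from pygments.util import ClassNotFound
+&gt;&gt;&gt; try:
+...     lexer = get_lexer_by_name(&#39;no-such-language&#39;)
+... except ClassNotFound:
+...     lexer = TextLexer()
+</pre></div>
+</div>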
+
+<span class="target" id="module-pygments.lexers.sql"></span></div>
+<div class="section" id="lexers-for-various-sql-dialects-and-related-interactive-sessions">
+<h2>Lexers for various SQL dialects and related interactive sessions<a class="headerlink" href="#lexers-for-various-sql-dialects-and-related-interactive-sessions" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.sql.MySqlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.sql.</code><code class="sig-name descname">MySqlLexer</code><a class="headerlink" href="#pygments.lexers.sql.MySqlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>mysql</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-mysql</p>
+</dd>
+</dl>
+<p>Special lexer for MySQL.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.sql.PlPgsqlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.sql.</code><code class="sig-name descname">PlPgsqlLexer</code><a class="headerlink" href="#pygments.lexers.sql.PlPgsqlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>plpgsql</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-plpgsql</p>
+</dd>
+</dl>
+<p>Handles the extra syntax of the PL/pgSQL language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.sql.PostgresConsoleLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.sql.</code><code class="sig-name descname">PostgresConsoleLexer</code><a class="headerlink" href="#pygments.lexers.sql.PostgresConsoleLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>psql, postgresql-console, postgres-console</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-postgresql-psql</p>
+</dd>
+</dl>
+<p>Lexer for psql sessions.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.sql.PostgresLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.sql.</code><code class="sig-name descname">PostgresLexer</code><a class="headerlink" href="#pygments.lexers.sql.PostgresLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>postgresql, postgres</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-postgresql</p>
+</dd>
+</dl>
+<p>Lexer for the PostgreSQL dialect of SQL.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.sql.RqlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.sql.</code><code class="sig-name descname">RqlLexer</code><a class="headerlink" href="#pygments.lexers.sql.RqlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>rql</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rql</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-rql</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://www.logilab.org/project/rql">Relation Query Language (RQL)</a>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.sql.SqlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.sql.</code><code class="sig-name descname">SqlLexer</code><a class="headerlink" href="#pygments.lexers.sql.SqlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>sql</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sql</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-sql</p>
+</dd>
+</dl>
+<p>Lexer for Structured Query Language. Currently, this lexer does
+not recognize any special syntax except ANSI SQL.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.sql.SqliteConsoleLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.sql.</code><code class="sig-name descname">SqliteConsoleLexer</code><a class="headerlink" href="#pygments.lexers.sql.SqliteConsoleLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>sqlite3</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sqlite3-console</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-sqlite3-console</p>
+</dd>
+</dl>
+<p>Lexer for example sessions using sqlite3.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.11.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.sql.TransactSqlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.sql.</code><code class="sig-name descname">TransactSqlLexer</code><a class="headerlink" href="#pygments.lexers.sql.TransactSqlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>tsql, t-sql</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sql</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-tsql</p>
+</dd>
+</dl>
+<p>Transact-SQL (T-SQL) is Microsoft’s and Sybase’s proprietary extension to
+SQL.</p>
+<p>The list of keywords includes ODBC keywords and keywords reserved for future use.</p>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.stata"></span></div>
+<div class="section" id="lexer-for-stata">
+<h2>Lexer for Stata<a class="headerlink" href="#lexer-for-stata" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.stata.StataLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.stata.</code><code class="sig-name descname">StataLexer</code><a class="headerlink" href="#pygments.lexers.stata.StataLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>stata, do</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.do, *.ado</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-stata, text/stata, application/x-stata</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.stata.com/">Stata</a> do files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.supercollider"></span></div>
+<div class="section" id="lexer-for-supercollider">
+<h2>Lexer for SuperCollider<a class="headerlink" href="#lexer-for-supercollider" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.supercollider.SuperColliderLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.supercollider.</code><code class="sig-name descname">SuperColliderLexer</code><a class="headerlink" href="#pygments.lexers.supercollider.SuperColliderLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>sc, supercollider</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sc, *.scd</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/supercollider, text/supercollider</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://supercollider.github.io/">SuperCollider</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.tcl"></span></div>
+<div class="section" id="lexers-for-tcl-and-related-languages">
+<h2>Lexers for Tcl and related languages<a class="headerlink" href="#lexers-for-tcl-and-related-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.tcl.TclLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.tcl.</code><code class="sig-name descname">TclLexer</code><a class="headerlink" href="#pygments.lexers.tcl.TclLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>tcl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.tcl, *.rvt</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-tcl, text/x-script.tcl, application/x-tcl</p>
+</dd>
+</dl>
+<p>For Tcl source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.10.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.templates"></span></div>
+<div class="section" id="lexers-for-various-template-engines-markup">
+<h2>Lexers for various template engines’ markup<a class="headerlink" href="#lexers-for-various-template-engines-markup" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.templates.Angular2HtmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">Angular2HtmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.Angular2HtmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>html+ng2</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ng2</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>Angular2Lexer</cite> that highlights unlexed data with the
+<cite>HtmlLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.Angular2Lexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">Angular2Lexer</code><a class="headerlink" href="#pygments.lexers.templates.Angular2Lexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ng2</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Generic
+<a class="reference external" href="http://victorsavkin.com/post/119943127151/angular-2-template-syntax">angular2</a>
+template lexer.</p>
+<p>Highlights only the Angular template tags (everything between <cite>{{</cite> and <cite>}}</cite>
+and the special attributes ‘(event)=’, ‘[property]=’, ‘[(twoWayBinding)]=’).
+Everything else is left for a delegating lexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
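+<p>A minimal sketch (the template snippet is just an example) using the delegating
+<cite>Angular2HtmlLexer</cite> documented above, which hands the non-template parts to the
+<cite>HtmlLexer</cite>:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span>&gt;&gt;&gt; from pygments import highlight
+&gt;&gt;&gt; from pygments.lexers.templates import Angular2HtmlLexer
+&gt;&gt;&gt; from pygments.formatters import TerminalFormatter
+&gt;&gt;&gt; code = &#39;&lt;li (click)=&quot;select(item)&quot;&gt;{{ item.name }}&lt;/li&gt;&#39;
+&gt;&gt;&gt; print(highlight(code, Angular2HtmlLexer(), TerminalFormatter()))
+</pre></div>
+</div>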
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.CheetahHtmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">CheetahHtmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.CheetahHtmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>html+cheetah, html+spitfire, htmlcheetah</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/html+cheetah, text/html+spitfire</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>CheetahLexer</cite> that highlights unlexed data
+with the <cite>HtmlLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.CheetahJavascriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">CheetahJavascriptLexer</code><a class="headerlink" href="#pygments.lexers.templates.CheetahJavascriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>js+cheetah, javascript+cheetah, js+spitfire, javascript+spitfire</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-javascript+cheetah, text/x-javascript+cheetah, text/javascript+cheetah, application/x-javascript+spitfire, text/x-javascript+spitfire, text/javascript+spitfire</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>CheetahLexer</cite> that highlights unlexed data
+with the <cite>JavascriptLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.CheetahLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">CheetahLexer</code><a class="headerlink" href="#pygments.lexers.templates.CheetahLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cheetah, spitfire</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.tmpl, *.spt</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-cheetah, application/x-spitfire</p>
+</dd>
+</dl>
+<p>Generic <a class="reference external" href="http://www.cheetahtemplate.org/">cheetah templates</a> lexer. Code that isn’t Cheetah
+markup is yielded as <cite>Token.Other</cite>.  This also works for
+<a class="reference external" href="http://code.google.com/p/spitfire/">spitfire templates</a> which use the same syntax.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.CheetahXmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">CheetahXmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.CheetahXmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>xml+cheetah, xml+spitfire</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/xml+cheetah, application/xml+spitfire</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>CheetahLexer</cite> that highlights unlexed data
+with the <cite>XmlLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.ColdfusionCFCLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">ColdfusionCFCLexer</code><a class="headerlink" href="#pygments.lexers.templates.ColdfusionCFCLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cfc</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cfc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>ColdFusion markup/script components.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.ColdfusionHtmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">ColdfusionHtmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.ColdfusionHtmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cfm</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cfm, *.cfml</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-coldfusion</p>
+</dd>
+</dl>
+<p>ColdFusion markup in HTML.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.ColdfusionLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">ColdfusionLexer</code><a class="headerlink" href="#pygments.lexers.templates.ColdfusionLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cfs</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>ColdFusion statements.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.CssDjangoLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">CssDjangoLexer</code><a class="headerlink" href="#pygments.lexers.templates.CssDjangoLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>css+django, css+jinja</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/css+django, text/css+jinja</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>DjangoLexer</cite> that highlights unlexed data with the
+<cite>CssLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.CssErbLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">CssErbLexer</code><a class="headerlink" href="#pygments.lexers.templates.CssErbLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>css+erb, css+ruby</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/css+ruby</p>
+</dd>
+</dl>
+<p>Subclass of <cite>ErbLexer</cite> which highlights unlexed data with the <cite>CssLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.CssGenshiLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">CssGenshiLexer</code><a class="headerlink" href="#pygments.lexers.templates.CssGenshiLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>css+genshitext, css+genshi</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/css+genshi</p>
+</dd>
+</dl>
+<p>A lexer that highlights CSS definitions in genshi text templates.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.CssPhpLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">CssPhpLexer</code><a class="headerlink" href="#pygments.lexers.templates.CssPhpLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>css+php</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/css+php</p>
+</dd>
+</dl>
+<p>Subclass of <cite>PhpLexer</cite> which highlights unmatched data with the <cite>CssLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.CssSmartyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">CssSmartyLexer</code><a class="headerlink" href="#pygments.lexers.templates.CssSmartyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>css+smarty</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/css+smarty</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>SmartyLexer</cite> that highlights unlexed data with the
+<cite>CssLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.DjangoLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">DjangoLexer</code><a class="headerlink" href="#pygments.lexers.templates.DjangoLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>django, jinja</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-django-templating, application/x-jinja</p>
+</dd>
+</dl>
+<p>Generic <a class="reference external" href="http://www.djangoproject.com/documentation/templates/">django</a>
+and <a class="reference external" href="http://wsgiarea.pocoo.org/jinja/">jinja</a> template lexer.</p>
+<p>It just highlights django/jinja code between the preprocessor directives;
+other data is left untouched by the lexer.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.ErbLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">ErbLexer</code><a class="headerlink" href="#pygments.lexers.templates.ErbLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>erb</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-ruby-templating</p>
+</dd>
+</dl>
+<p>Generic <a class="reference external" href="http://ruby-doc.org/core/classes/ERB.html">ERB</a> (Ruby Templating)
+lexer.</p>
+<p>Just highlights Ruby code between the preprocessor directives; other data
+is left untouched by the lexer.</p>
+<p>All options are also forwarded to the <cite>RubyLexer</cite>.</p>
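+<p>A minimal sketch (the template string is just an example): the text around the
+ERB tags is passed through untouched, while the Ruby code inside them is
+tokenized by the forwarded <cite>RubyLexer</cite>:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span>&gt;&gt;&gt; from pygments.lexers.templates import ErbLexer
+&gt;&gt;&gt; lexer = ErbLexer()  # keyword options given here are passed on to the RubyLexer
+&gt;&gt;&gt; for token, value in lexer.get_tokens(&#39;Hello &lt;%= user.name %&gt;!&#39;):
+...     print(token, repr(value))
+</pre></div>
+</div>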
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.EvoqueHtmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">EvoqueHtmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.EvoqueHtmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>html+evoque</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.html</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/html+evoque</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>EvoqueLexer</cite> that highlights unlexed data with the
+<cite>HtmlLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.EvoqueLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">EvoqueLexer</code><a class="headerlink" href="#pygments.lexers.templates.EvoqueLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>evoque</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.evoque</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-evoque</p>
+</dd>
+</dl>
+<p>For files using the Evoque templating system.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.EvoqueXmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">EvoqueXmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.EvoqueXmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>xml+evoque</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.xml</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/xml+evoque</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>EvoqueLexer</cite> that highlights unlexed data with the
+<cite>XmlLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.GenshiLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">GenshiLexer</code><a class="headerlink" href="#pygments.lexers.templates.GenshiLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>genshi, kid, xml+genshi, xml+kid</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.kid</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-genshi, application/x-kid</p>
+</dd>
+</dl>
+<p>A lexer that highlights <a class="reference external" href="http://genshi.edgewall.org/">genshi</a> and
+<a class="reference external" href="http://kid-templating.org/">kid</a> XML templates.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.GenshiTextLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">GenshiTextLexer</code><a class="headerlink" href="#pygments.lexers.templates.GenshiTextLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>genshitext</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-genshi-text, text/x-genshi</p>
+</dd>
+</dl>
+<p>A lexer that highlights <a class="reference external" href="http://genshi.edgewall.org/">genshi</a> text
+templates.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.HandlebarsHtmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">HandlebarsHtmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.HandlebarsHtmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>html+handlebars</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.handlebars, *.hbs</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/html+handlebars, text/x-handlebars-template</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>HandlebarsLexer</cite> that highlights unlexed data with the
+<cite>HtmlLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.HandlebarsLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">HandlebarsLexer</code><a class="headerlink" href="#pygments.lexers.templates.HandlebarsLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>handlebars</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Generic <a class="reference external" href="http://handlebarsjs.com/">Handlebars</a> template lexer.</p>
+<p>Highlights only the Handlebars template tags (the content between <cite>{{</cite> and <cite>}}</cite>).
+Everything else is left for a delegating lexer, as sketched below.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
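+<p>As a minimal usage sketch of that delegation (the template string below is
+invented for illustration, and only standard Pygments imports are assumed), the
+plain <cite>HandlebarsLexer</cite> leaves non-tag text as <cite>Token.Other</cite>, while
+<cite>HandlebarsHtmlLexer</cite> re-lexes that text with the <cite>HtmlLexer</cite>:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre>from pygments import highlight
+from pygments.formatters import HtmlFormatter
+from pygments.lexers.templates import HandlebarsHtmlLexer, HandlebarsLexer
+from pygments.token import Other
+
+# an invented Handlebars template, just for illustration
+template = '{{#each people}} {{firstName}} {{lastName}} {{/each}}'
+
+# the plain lexer emits everything outside {{ ... }} as Token.Other
+print(any(ttype in Other for ttype, _ in HandlebarsLexer().get_tokens(template)))
+
+# the delegating subclass hands that text to the HtmlLexer instead
+print(highlight(template, HandlebarsHtmlLexer(), HtmlFormatter()))</pre></div></div>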
+
+<dl class="class">
+<dt id="pygments.lexers.templates.HtmlDjangoLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">HtmlDjangoLexer</code><a class="headerlink" href="#pygments.lexers.templates.HtmlDjangoLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>html+django, html+jinja, htmldjango</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/html+django, text/html+jinja</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>DjangoLexer</cite> that highlights unlexed data with the
+<cite>HtmlLexer</cite>.</p>
+<p>Nested JavaScript and CSS are highlighted too.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.HtmlGenshiLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">HtmlGenshiLexer</code><a class="headerlink" href="#pygments.lexers.templates.HtmlGenshiLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>html+genshi, html+kid</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/html+genshi</p>
+</dd>
+</dl>
+<p>A lexer that highlights <a class="reference external" href="http://genshi.edgewall.org/">genshi</a> and
+<a class="reference external" href="http://kid-templating.org/">kid</a> HTML templates.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.HtmlPhpLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">HtmlPhpLexer</code><a class="headerlink" href="#pygments.lexers.templates.HtmlPhpLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>html+php</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.phtml</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-php, application/x-httpd-php, application/x-httpd-php3, application/x-httpd-php4, application/x-httpd-php5</p>
+</dd>
+</dl>
+<p>Subclass of <cite>PhpLexer</cite> that highlights unhandled data with the <cite>HtmlLexer</cite>.</p>
+<p>Nested JavaScript and CSS are highlighted too.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.HtmlSmartyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">HtmlSmartyLexer</code><a class="headerlink" href="#pygments.lexers.templates.HtmlSmartyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>html+smarty</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/html+smarty</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>SmartyLexer</cite> that highlights unlexed data with the
+<cite>HtmlLexer</cite>.</p>
+<p>Nested JavaScript and CSS are highlighted too.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.JavascriptDjangoLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">JavascriptDjangoLexer</code><a class="headerlink" href="#pygments.lexers.templates.JavascriptDjangoLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>js+django, javascript+django, js+jinja, javascript+jinja</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-javascript+django, application/x-javascript+jinja, text/x-javascript+django, text/x-javascript+jinja, text/javascript+django, text/javascript+jinja</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>DjangoLexer</cite> that highlights unlexed data with the
+<cite>JavascriptLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.JavascriptErbLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">JavascriptErbLexer</code><a class="headerlink" href="#pygments.lexers.templates.JavascriptErbLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>js+erb, javascript+erb, js+ruby, javascript+ruby</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-javascript+ruby, text/x-javascript+ruby, text/javascript+ruby</p>
+</dd>
+</dl>
+<p>Subclass of <cite>ErbLexer</cite> which highlights unlexed data with the
+<cite>JavascriptLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.JavascriptGenshiLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">JavascriptGenshiLexer</code><a class="headerlink" href="#pygments.lexers.templates.JavascriptGenshiLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>js+genshitext, js+genshi, javascript+genshitext, javascript+genshi</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-javascript+genshi, text/x-javascript+genshi, text/javascript+genshi</p>
+</dd>
+</dl>
+<p>A lexer that highlights JavaScript code in Genshi text templates.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.JavascriptPhpLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">JavascriptPhpLexer</code><a class="headerlink" href="#pygments.lexers.templates.JavascriptPhpLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>js+php, javascript+php</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-javascript+php, text/x-javascript+php, text/javascript+php</p>
+</dd>
+</dl>
+<p>Subclass of <cite>PhpLexer</cite> which highlights unmatched data with the
+<cite>JavascriptLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.JavascriptSmartyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">JavascriptSmartyLexer</code><a class="headerlink" href="#pygments.lexers.templates.JavascriptSmartyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>js+smarty, javascript+smarty</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-javascript+smarty, text/x-javascript+smarty, text/javascript+smarty</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>SmartyLexer</cite> that highlights unlexed data with the
+<cite>JavascriptLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.JspLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">JspLexer</code><a class="headerlink" href="#pygments.lexers.templates.JspLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>jsp</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.jsp</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-jsp</p>
+</dd>
+</dl>
+<p>Lexer for Java Server Pages.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.LassoCssLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">LassoCssLexer</code><a class="headerlink" href="#pygments.lexers.templates.LassoCssLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>css+lasso</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/css+lasso</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>LassoLexer</cite> which highlights unhandled data with the
+<cite>CssLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.LassoHtmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">LassoHtmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.LassoHtmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>html+lasso</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/html+lasso, application/x-httpd-lasso, application/x-httpd-lasso[89]</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>LassoLexer</cite> which highlights unhandled data with the
+<cite>HtmlLexer</cite>.</p>
+<p>Nested JavaScript and CSS are also highlighted.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.LassoJavascriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">LassoJavascriptLexer</code><a class="headerlink" href="#pygments.lexers.templates.LassoJavascriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>js+lasso, javascript+lasso</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-javascript+lasso, text/x-javascript+lasso, text/javascript+lasso</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>LassoLexer</cite> which highlights unhandled data with the
+<cite>JavascriptLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.LassoXmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">LassoXmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.LassoXmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>xml+lasso</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/xml+lasso</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>LassoLexer</cite> which highlights unhandled data with the
+<cite>XmlLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.LiquidLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">LiquidLexer</code><a class="headerlink" href="#pygments.lexers.templates.LiquidLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>liquid</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.liquid</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://www.rubydoc.info/github/Shopify/liquid">Liquid templates</a>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.MakoCssLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">MakoCssLexer</code><a class="headerlink" href="#pygments.lexers.templates.MakoCssLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>css+mako</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/css+mako</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>MakoLexer</cite> that highlights unlexed data
+with the <cite>CssLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.MakoHtmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">MakoHtmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.MakoHtmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>html+mako</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/html+mako</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>MakoLexer</cite> that highlights unlexed data
+with the <cite>HtmlLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.MakoJavascriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">MakoJavascriptLexer</code><a class="headerlink" href="#pygments.lexers.templates.MakoJavascriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>js+mako, javascript+mako</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-javascript+mako, text/x-javascript+mako, text/javascript+mako</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>MakoLexer</cite> that highlights unlexed data
+with the <cite>JavascriptLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.MakoLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">MakoLexer</code><a class="headerlink" href="#pygments.lexers.templates.MakoLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>mako</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.mao</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-mako</p>
+</dd>
+</dl>
+<p>Generic <a class="reference external" href="http://www.makotemplates.org/">mako templates</a> lexer. Code that isn’t Mako
+markup is yielded as <cite>Token.Other</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+</dd></dl>
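+<p>A short, hypothetical illustration of that token stream (the Mako snippet is
+made up for the example): the plain-text parts come out as <cite>Token.Other</cite>,
+which is what delegating subclasses such as <cite>MakoHtmlLexer</cite> re-lex:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre>from pygments.lexers.templates import MakoLexer
+from pygments.token import Other
+
+# an invented Mako snippet; the indented line is not Mako markup
+source = '% for item in items:\n    item: ${item}\n% endfor\n'
+
+for ttype, value in MakoLexer().get_tokens(source):
+    if ttype in Other:
+        # this is the text a delegating lexer (e.g. MakoHtmlLexer) would re-lex
+        print(repr(value))</pre></div></div>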
+
+<dl class="class">
+<dt id="pygments.lexers.templates.MakoXmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">MakoXmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.MakoXmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>xml+mako</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/xml+mako</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>MakoLexer</cite> that highlights unlexed data
+with the <cite>XmlLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.MasonLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">MasonLexer</code><a class="headerlink" href="#pygments.lexers.templates.MasonLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>mason</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.m, *.mhtml, *.mc, *.mi, autohandler, dhandler</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-mason</p>
+</dd>
+</dl>
+<p>Generic <a class="reference external" href="http://www.masonhq.com/">mason templates</a> lexer. Stolen from Myghty lexer. Code that isn’t
+Mason markup is HTML.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.MyghtyCssLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">MyghtyCssLexer</code><a class="headerlink" href="#pygments.lexers.templates.MyghtyCssLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>css+myghty</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/css+myghty</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>MyghtyLexer</cite> that highlights unlexed data
+with the <cite>CssLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.MyghtyHtmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">MyghtyHtmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.MyghtyHtmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>html+myghty</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/html+myghty</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>MyghtyLexer</cite> that highlights unlexed data
+with the <cite>HtmlLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.MyghtyJavascriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">MyghtyJavascriptLexer</code><a class="headerlink" href="#pygments.lexers.templates.MyghtyJavascriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>js+myghty, javascript+myghty</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-javascript+myghty, text/x-javascript+myghty, text/javascript+mygthy</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>MyghtyLexer</cite> that highlights unlexed data
+with the <cite>JavascriptLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.MyghtyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">MyghtyLexer</code><a class="headerlink" href="#pygments.lexers.templates.MyghtyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>myghty</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.myt, autodelegate</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-myghty</p>
+</dd>
+</dl>
+<p>Generic <a class="reference external" href="http://www.myghty.org/">myghty templates</a> lexer. Code that isn’t Myghty
+markup is yielded as <cite>Token.Other</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.MyghtyXmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">MyghtyXmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.MyghtyXmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>xml+myghty</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/xml+myghty</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>MyghtyLexer</cite> that highlights unlexed data
+with the <cite>XmlLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.RhtmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">RhtmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.RhtmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>rhtml, html+erb, html+ruby</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rhtml</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/html+ruby</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>ErbLexer</cite> that highlights unlexed data with the
+<cite>HtmlLexer</cite>.</p>
+<p>Nested JavaScript and CSS are highlighted too.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.SmartyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">SmartyLexer</code><a class="headerlink" href="#pygments.lexers.templates.SmartyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>smarty</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.tpl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-smarty</p>
+</dd>
+</dl>
+<p>Generic <a class="reference external" href="http://smarty.php.net/">Smarty</a> template lexer.</p>
+<p>Highlights only the Smarty code between the preprocessor directives; other
+data is left untouched by the lexer.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.SspLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">SspLexer</code><a class="headerlink" href="#pygments.lexers.templates.SspLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ssp</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ssp</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-ssp</p>
+</dd>
+</dl>
+<p>Lexer for Scalate Server Pages.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.TeaTemplateLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">TeaTemplateLexer</code><a class="headerlink" href="#pygments.lexers.templates.TeaTemplateLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>tea</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.tea</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-tea</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://teatrove.org/">Tea Templates</a>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.TwigHtmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">TwigHtmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.TwigHtmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>html+twig</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.twig</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/html+twig</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>TwigLexer</cite> that highlights unlexed data with the
+<cite>HtmlLexer</cite>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.TwigLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">TwigLexer</code><a class="headerlink" href="#pygments.lexers.templates.TwigLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>twig</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-twig</p>
+</dd>
+</dl>
+<p><a class="reference external" href="http://twig.sensiolabs.org/">Twig</a> template lexer.</p>
+<p>It highlights only the Twig code between the preprocessor directives;
+other data is left untouched by the lexer.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.VelocityHtmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">VelocityHtmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.VelocityHtmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>html+velocity</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/html+velocity</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>VelocityLexer</cite> that highlights unlexed data
+with the <cite>HtmlLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.VelocityLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">VelocityLexer</code><a class="headerlink" href="#pygments.lexers.templates.VelocityLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>velocity</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.vm, *.fhtml</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Generic <a class="reference external" href="http://velocity.apache.org/">Velocity</a> template lexer.</p>
+<p>Highlights only the Velocity directives and variable references; other
+data is left untouched by the lexer.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.VelocityXmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">VelocityXmlLexer</code><a class="headerlink" href="#pygments.lexers.templates.VelocityXmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>xml+velocity</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/xml+velocity</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>VelocityLexer</cite> that highlights unlexed data
+with the <cite>XmlLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.XmlDjangoLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">XmlDjangoLexer</code><a class="headerlink" href="#pygments.lexers.templates.XmlDjangoLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>xml+django, xml+jinja</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/xml+django, application/xml+jinja</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>DjangoLexer</cite> that highlights unlexed data with the
+<cite>XmlLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.XmlErbLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">XmlErbLexer</code><a class="headerlink" href="#pygments.lexers.templates.XmlErbLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>xml+erb, xml+ruby</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/xml+ruby</p>
+</dd>
+</dl>
+<p>Subclass of <cite>ErbLexer</cite> which highlights data outside preprocessor
+directives with the <cite>XmlLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.XmlPhpLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">XmlPhpLexer</code><a class="headerlink" href="#pygments.lexers.templates.XmlPhpLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>xml+php</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/xml+php</p>
+</dd>
+</dl>
+<p>Subclass of <cite>PhpLexer</cite> that highlights unhandled data with the <cite>XmlLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.XmlSmartyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">XmlSmartyLexer</code><a class="headerlink" href="#pygments.lexers.templates.XmlSmartyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>xml+smarty</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/xml+smarty</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>SmartyLexer</cite> that highlights unlexed data with the
+<cite>XmlLexer</cite>.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.templates.YamlJinjaLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.templates.</code><code class="sig-name descname">YamlJinjaLexer</code><a class="headerlink" href="#pygments.lexers.templates.YamlJinjaLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>yaml+jinja, salt, sls</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sls</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-yaml+jinja, text/x-sls</p>
+</dd>
+</dl>
+<p>Subclass of the <cite>DjangoLexer</cite> that highlights unlexed data with the
+<cite>YamlLexer</cite>.</p>
+<p>Commonly used in SaltStack Salt states.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
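+<p>As a usage sketch for the <cite>YamlJinjaLexer</cite> above (the file name
+<cite>top.sls</cite> is only an example), the lexer can be looked up either by one of its
+short names or via the <cite>*.sls</cite> filename pattern:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre>from pygments.lexers import get_lexer_by_name, get_lexer_for_filename
+
+# look the lexer up by one of its short names ...
+print(get_lexer_by_name('yaml+jinja').name)
+
+# ... or let the *.sls filename pattern select it (file name is illustrative)
+print(get_lexer_for_filename('top.sls').name)</pre></div></div>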
+
+<span class="target" id="module-pygments.lexers.teraterm"></span></div>
+<div class="section" id="lexer-for-tera-term-macro-files">
+<h2>Lexer for Tera Term macro files<a class="headerlink" href="#lexer-for-tera-term-macro-files" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.teraterm.TeraTermLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.teraterm.</code><code class="sig-name descname">TeraTermLexer</code><a class="headerlink" href="#pygments.lexers.teraterm.TeraTermLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ttl, teraterm, teratermmacro</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.ttl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-teratermmacro</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://ttssh2.osdn.jp/">Tera Term</a> macro source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.testing"></span></div>
+<div class="section" id="lexers-for-testing-languages">
+<h2>Lexers for testing languages<a class="headerlink" href="#lexers-for-testing-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.testing.GherkinLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.testing.</code><code class="sig-name descname">GherkinLexer</code><a class="headerlink" href="#pygments.lexers.testing.GherkinLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cucumber, gherkin</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.feature</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-gherkin</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://github.com/aslakhellesoy/gherkin/">Gherkin</a> syntax.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.testing.TAPLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.testing.</code><code class="sig-name descname">TAPLexer</code><a class="headerlink" href="#pygments.lexers.testing.TAPLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>tap</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.tap</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For Test Anything Protocol (TAP) output.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.textedit"></span></div>
+<div class="section" id="lexers-for-languages-related-to-text-processing">
+<h2>Lexers for languages related to text processing<a class="headerlink" href="#lexers-for-languages-related-to-text-processing" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.textedit.AwkLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.textedit.</code><code class="sig-name descname">AwkLexer</code><a class="headerlink" href="#pygments.lexers.textedit.AwkLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>awk, gawk, mawk, nawk</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.awk</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-awk</p>
+</dd>
+</dl>
+<p>For Awk scripts.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.textedit.VimLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.textedit.</code><code class="sig-name descname">VimLexer</code><a class="headerlink" href="#pygments.lexers.textedit.VimLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>vim</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.vim, .vimrc, .exrc, .gvimrc, vimrc, exrc, gvimrc, vimrc, gvimrc</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-vim</p>
+</dd>
+</dl>
+<p>Lexer for VimL script files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.8.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.textfmts"></span></div>
+<div class="section" id="lexers-for-various-text-formats">
+<h2>Lexers for various text formats<a class="headerlink" href="#lexers-for-various-text-formats" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.textfmts.GettextLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.textfmts.</code><code class="sig-name descname">GettextLexer</code><a class="headerlink" href="#pygments.lexers.textfmts.GettextLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>pot, po</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.pot, *.po</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-gettext, text/x-gettext, text/gettext</p>
+</dd>
+</dl>
+<p>Lexer for Gettext catalog files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.9.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.textfmts.HttpLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.textfmts.</code><code class="sig-name descname">HttpLexer</code><a class="headerlink" href="#pygments.lexers.textfmts.HttpLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>http</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for HTTP sessions.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
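+<p>A small usage sketch for the <cite>HttpLexer</cite> above (the request text is made
+up for the example); a captured session is highlighted like any other source:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre>from pygments import highlight
+from pygments.formatters import TerminalFormatter
+from pygments.lexers.textfmts import HttpLexer
+
+# an invented HTTP request; real captured sessions work the same way
+session = (
+    'GET /api/items HTTP/1.1\r\n'
+    'Host: example.com\r\n'
+    'Accept: application/json\r\n'
+    '\r\n'
+)
+print(highlight(session, HttpLexer(), TerminalFormatter()))</pre></div></div>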
+
+<dl class="class">
+<dt id="pygments.lexers.textfmts.IrcLogsLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.textfmts.</code><code class="sig-name descname">IrcLogsLexer</code><a class="headerlink" href="#pygments.lexers.textfmts.IrcLogsLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>irc</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.weechatlog</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-irclog</p>
+</dd>
+</dl>
+<p>Lexer for IRC logs in <em>irssi</em>, <em>xchat</em> or <em>weechat</em> style.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.textfmts.NotmuchLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.textfmts.</code><code class="sig-name descname">NotmuchLexer</code><a class="headerlink" href="#pygments.lexers.textfmts.NotmuchLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>notmuch</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://notmuchmail.org/">Notmuch</a> email text format.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.5.</span></p>
+</div>
+<p>Additional options accepted:</p>
+<dl class="simple">
+<dt><cite>body_lexer</cite></dt><dd><p>If given, highlight the contents of the message body with the specified
+lexer; otherwise it is guessed from the body content (default: <code class="docutils literal notranslate"><span class="pre">None</span></code>). See the sketch below.</p>
+</dd>
+</dl>
+</dd></dl>
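+<p>A minimal sketch of passing the <cite>body_lexer</cite> option described above; the
+<cite>diff</cite> alias is only an example value, and it is assumed here that the option
+takes a lexer alias:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre>from pygments.lexers import get_lexer_by_name
+from pygments.lexers.textfmts import NotmuchLexer
+
+# lexer options are keyword arguments; 'diff' is an illustrative alias,
+# assuming body_lexer accepts a lexer alias
+lexer = get_lexer_by_name('notmuch', body_lexer='diff')
+
+# equivalently, instantiate the class directly with the same option
+lexer = NotmuchLexer(body_lexer='diff')</pre></div></div>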
+
+<dl class="class">
+<dt id="pygments.lexers.textfmts.TodotxtLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.textfmts.</code><code class="sig-name descname">TodotxtLexer</code><a class="headerlink" href="#pygments.lexers.textfmts.TodotxtLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>todotxt</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>todo.txt, *.todotxt</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-todo</p>
+</dd>
+</dl>
+<p>Lexer for <a class="reference external" href="http://todotxt.com/">Todo.txt</a> todo list format.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.theorem"></span></div>
+<div class="section" id="lexers-for-theorem-proving-languages">
+<h2>Lexers for theorem-proving languages<a class="headerlink" href="#lexers-for-theorem-proving-languages" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.theorem.CoqLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.theorem.</code><code class="sig-name descname">CoqLexer</code><a class="headerlink" href="#pygments.lexers.theorem.CoqLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>coq</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.v</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-coq</p>
+</dd>
+</dl>
+<p>For the <a class="reference external" href="http://coq.inria.fr/">Coq</a> theorem prover.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.theorem.IsabelleLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.theorem.</code><code class="sig-name descname">IsabelleLexer</code><a class="headerlink" href="#pygments.lexers.theorem.IsabelleLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>isabelle</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.thy</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-isabelle</p>
+</dd>
+</dl>
+<p>For the <a class="reference external" href="http://isabelle.in.tum.de/">Isabelle</a> proof assistant.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.theorem.LeanLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.theorem.</code><code class="sig-name descname">LeanLexer</code><a class="headerlink" href="#pygments.lexers.theorem.LeanLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>lean</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.lean</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-lean</p>
+</dd>
+</dl>
+<p>For the <a class="reference external" href="https://github.com/leanprover/lean">Lean</a>
+theorem prover.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.trafficscript"></span></div>
+<div class="section" id="lexer-for-riverbed-s-trafficscript-rts-language">
+<h2>Lexer for Riverbed’s TrafficScript (RTS) language<a class="headerlink" href="#lexer-for-riverbed-s-trafficscript-rts-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.trafficscript.RtsLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.trafficscript.</code><code class="sig-name descname">RtsLexer</code><a class="headerlink" href="#pygments.lexers.trafficscript.RtsLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>rts, trafficscript</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.rts</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.riverbed.com/stingray">Riverbed Stingray Traffic Manager</a></p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.typoscript"></span></div>
+<div class="section" id="lexers-for-typoscript">
+<h2>Lexers for TypoScript<a class="headerlink" href="#lexers-for-typoscript" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.typoscript.TypoScriptCssDataLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.typoscript.</code><code class="sig-name descname">TypoScriptCssDataLexer</code><a class="headerlink" href="#pygments.lexers.typoscript.TypoScriptCssDataLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>typoscriptcssdata</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer that highlights markers, constants and registers within CSS blocks.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.typoscript.TypoScriptHtmlDataLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.typoscript.</code><code class="sig-name descname">TypoScriptHtmlDataLexer</code><a class="headerlink" href="#pygments.lexers.typoscript.TypoScriptHtmlDataLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>typoscripthtmldata</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer that highlights markers, constants and registers within HTML tags.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.typoscript.TypoScriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.typoscript.</code><code class="sig-name descname">TypoScriptLexer</code><a class="headerlink" href="#pygments.lexers.typoscript.TypoScriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>typoscript</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.typoscript</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-typoscript</p>
+</dd>
+</dl>
+<p>Lexer for TypoScript code.</p>
+<p><a class="reference external" href="http://docs.typo3.org/typo3cms/TyposcriptReference/">http://docs.typo3.org/typo3cms/TyposcriptReference/</a></p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.unicon"></span></div>
+<div class="section" id="lexers-for-the-icon-and-unicon-languages-including-ucode-vm">
+<h2>Lexers for the Icon and Unicon languages, including ucode VM<a class="headerlink" href="#lexers-for-the-icon-and-unicon-languages-including-ucode-vm" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.unicon.IconLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.unicon.</code><code class="sig-name descname">IconLexer</code><a class="headerlink" href="#pygments.lexers.unicon.IconLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>icon</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.icon, *.ICON</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for Icon.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.unicon.UcodeLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.unicon.</code><code class="sig-name descname">UcodeLexer</code><a class="headerlink" href="#pygments.lexers.unicon.UcodeLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>ucode</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.u, *.u1, *.u2</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for Icon ucode files.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.unicon.UniconLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.unicon.</code><code class="sig-name descname">UniconLexer</code><a class="headerlink" href="#pygments.lexers.unicon.UniconLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>unicon</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.icn</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/unicon</p>
+</dd>
+</dl>
+<p>For Unicon source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.4.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.urbi"></span></div>
+<div class="section" id="lexers-for-urbiscript-language">
+<h2>Lexers for UrbiScript language<a class="headerlink" href="#lexers-for-urbiscript-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.urbi.UrbiscriptLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.urbi.</code><code class="sig-name descname">UrbiscriptLexer</code><a class="headerlink" href="#pygments.lexers.urbi.UrbiscriptLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>urbiscript</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.u</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-urbiscript</p>
+</dd>
+</dl>
+<p>For UrbiScript source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.5.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.varnish"></span></div>
+<div class="section" id="lexers-for-varnish-configuration">
+<h2>Lexers for Varnish configuration<a class="headerlink" href="#lexers-for-varnish-configuration" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.varnish.VCLLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.varnish.</code><code class="sig-name descname">VCLLexer</code><a class="headerlink" href="#pygments.lexers.varnish.VCLLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>vcl</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.vcl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-vclsrc</p>
+</dd>
+</dl>
+<p>For Varnish Configuration Language (VCL).</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.varnish.VCLSnippetLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.varnish.</code><code class="sig-name descname">VCLSnippetLexer</code><a class="headerlink" href="#pygments.lexers.varnish.VCLSnippetLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>vclsnippets, vclsnippet</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>None</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-vclsnippet</p>
+</dd>
+</dl>
+<p>For Varnish Configuration Language snippets.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.verification"></span></div>
+<div class="section" id="lexer-for-intermediate-verification-languages-ivls">
+<h2>Lexer for Intermediate Verification Languages (IVLs)<a class="headerlink" href="#lexer-for-intermediate-verification-languages-ivls" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.verification.BoogieLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.verification.</code><code class="sig-name descname">BoogieLexer</code><a class="headerlink" href="#pygments.lexers.verification.BoogieLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>boogie</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.bpl</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://boogie.codeplex.com/">Boogie</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.1.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.verification.SilverLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.verification.</code><code class="sig-name descname">SilverLexer</code><a class="headerlink" href="#pygments.lexers.verification.SilverLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>silver</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.sil, *.vpr</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="https://bitbucket.org/viperproject/silver">Silver</a> source code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.webmisc"></span></div>
+<div class="section" id="lexers-for-misc-web-stuff">
+<h2>Lexers for misc. web stuff<a class="headerlink" href="#lexers-for-misc-web-stuff" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.webmisc.CirruLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.webmisc.</code><code class="sig-name descname">CirruLexer</code><a class="headerlink" href="#pygments.lexers.webmisc.CirruLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>cirru</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.cirru</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-cirru</p>
+</dd>
+</dl>
+<p>Syntax rules of Cirru can be found at:
+<a class="reference external" href="http://cirru.org/">http://cirru.org/</a></p>
+<ul class="simple">
+<li><p>using <code class="docutils literal notranslate"><span class="pre">()</span></code> for expressions, but restricted to a single line</p></li>
+<li><p>using <code class="docutils literal notranslate"><span class="pre">&quot;&quot;</span></code> for strings, with <code class="docutils literal notranslate"><span class="pre">\</span></code> for escaping chars</p></li>
+<li><p>using <code class="docutils literal notranslate"><span class="pre">$</span></code> as folding operator</p></li>
+<li><p>using <code class="docutils literal notranslate"><span class="pre">,</span></code> as unfolding operator</p></li>
+<li><p>using indentations for nested blocks</p></li>
+</ul>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.webmisc.DuelLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.webmisc.</code><code class="sig-name descname">DuelLexer</code><a class="headerlink" href="#pygments.lexers.webmisc.DuelLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>duel, jbst, jsonml+bst</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.duel, *.jbst</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-duel, text/x-jbst</p>
+</dd>
+</dl>
+<p>Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
+See <a class="reference external" href="http://duelengine.org/">http://duelengine.org/</a>.
+See <a class="reference external" href="http://jsonml.org/jbst/">http://jsonml.org/jbst/</a>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.webmisc.QmlLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.webmisc.</code><code class="sig-name descname">QmlLexer</code><a class="headerlink" href="#pygments.lexers.webmisc.QmlLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>qml, qbs</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.qml, *.qbs</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>application/x-qml, application/x-qt.qbs+qml</p>
+</dd>
+</dl>
+<p>For QML files. See <a class="reference external" href="http://doc.qt.digia.com/4.7/qdeclarativeintroduction.html">http://doc.qt.digia.com/4.7/qdeclarativeintroduction.html</a>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.6.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.webmisc.SlimLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.webmisc.</code><code class="sig-name descname">SlimLexer</code><a class="headerlink" href="#pygments.lexers.webmisc.SlimLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>slim</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.slim</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-slim</p>
+</dd>
+</dl>
+<p>For Slim markup.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.0.</span></p>
+</div>
+</dd></dl>
+
+<dl class="class">
+<dt id="pygments.lexers.webmisc.XQueryLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.webmisc.</code><code class="sig-name descname">XQueryLexer</code><a class="headerlink" href="#pygments.lexers.webmisc.XQueryLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>xquery, xqy, xq, xql, xqm</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.xqy, *.xquery, *.xq, *.xql, *.xqm</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/xquery, application/xquery</p>
+</dd>
+</dl>
+<p>An XQuery lexer, parsing a stream and outputting the tokens needed to
+highlight XQuery code.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 1.4.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.whiley"></span></div>
+<div class="section" id="lexers-for-the-whiley-language">
+<h2>Lexers for the Whiley language<a class="headerlink" href="#lexers-for-the-whiley-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.whiley.WhileyLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.whiley.</code><code class="sig-name descname">WhileyLexer</code><a class="headerlink" href="#pygments.lexers.whiley.WhileyLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>whiley</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.whiley</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-whiley</p>
+</dd>
+</dl>
+<p>Lexer for the Whiley programming language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.x10"></span></div>
+<div class="section" id="lexers-for-the-x10-programming-language">
+<h2>Lexers for the X10 programming language<a class="headerlink" href="#lexers-for-the-x10-programming-language" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.x10.X10Lexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.x10.</code><code class="sig-name descname">X10Lexer</code><a class="headerlink" href="#pygments.lexers.x10.X10Lexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>x10, xten</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.x10</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/x-x10</p>
+</dd>
+</dl>
+<p>For the X10 language.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.1.</span></p>
+</div>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.xorg"></span></div>
+<div class="section" id="lexers-for-xorg-configs">
+<h2>Lexers for Xorg configs<a class="headerlink" href="#lexers-for-xorg-configs" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.xorg.XorgLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.xorg.</code><code class="sig-name descname">XorgLexer</code><a class="headerlink" href="#pygments.lexers.xorg.XorgLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>xorg.conf</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>xorg.conf</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>None</p>
+</dd>
+</dl>
+<p>Lexer for xorg.conf files.</p>
+</dd></dl>
+
+<span class="target" id="module-pygments.lexers.zig"></span></div>
+<div class="section" id="lexers-for-zig">
+<h2>Lexers for Zig<a class="headerlink" href="#lexers-for-zig" title="Permalink to this headline">¶</a></h2>
+<dl class="class">
+<dt id="pygments.lexers.zig.ZigLexer">
+<em class="property">class </em><code class="sig-prename descclassname">pygments.lexers.zig.</code><code class="sig-name descname">ZigLexer</code><a class="headerlink" href="#pygments.lexers.zig.ZigLexer" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Short names</dt>
+<dd class="field-odd"><p>zig</p>
+</dd>
+<dt class="field-even">Filenames</dt>
+<dd class="field-even"><p>*.zig</p>
+</dd>
+<dt class="field-odd">MIME types</dt>
+<dd class="field-odd"><p>text/zig</p>
+</dd>
+</dl>
+<p>For <a class="reference external" href="http://www.ziglang.org">Zig</a> source code.</p>
+<p>grammar: <a class="reference external" href="https://ziglang.org/documentation/master/#Grammar">https://ziglang.org/documentation/master/#Grammar</a></p>
+</dd></dl>
+
+</div>
+<div class="section" id="iterating-over-all-lexers">
+<h2>Iterating over all lexers<a class="headerlink" href="#iterating-over-all-lexers" title="Permalink to this headline">¶</a></h2>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+<p>To get all lexers (both the builtin and the plugin ones), you can
+use the <cite>get_all_lexers()</cite> function from the <cite>pygments.lexers</cite>
+module:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.lexers</span> <span class="kn">import</span> <span class="n">get_all_lexers</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">i</span> <span class="o">=</span> <span class="n">get_all_lexers</span><span class="p">()</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">i</span><span class="o">.</span><span class="n">next</span><span class="p">()</span>
+<span class="go">(&#39;Diff&#39;, (&#39;diff&#39;,), (&#39;*.diff&#39;, &#39;*.patch&#39;), (&#39;text/x-diff&#39;, &#39;text/x-patch&#39;))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">i</span><span class="o">.</span><span class="n">next</span><span class="p">()</span>
+<span class="go">(&#39;Delphi&#39;, (&#39;delphi&#39;, &#39;objectpascal&#39;, &#39;pas&#39;, &#39;pascal&#39;), (&#39;*.pas&#39;,), (&#39;text/x-pascal&#39;,))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">i</span><span class="o">.</span><span class="n">next</span><span class="p">()</span>
+<span class="go">(&#39;XML+Ruby&#39;, (&#39;xml+erb&#39;, &#39;xml+ruby&#39;), (), ())</span>
+</pre></div>
+</div>
+<p>As you can see, the return value is an iterator which yields tuples
+in the form <code class="docutils literal notranslate"><span class="pre">(name,</span> <span class="pre">aliases,</span> <span class="pre">filetypes,</span> <span class="pre">mimetypes)</span></code>.</p>
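+<p>In practice it is often easier to loop over the iterator directly; a minimal
+sketch that prints only the lexer names and aliases could look like this:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from pygments.lexers import get_all_lexers
+
+for name, aliases, filetypes, mimetypes in get_all_lexers():
+    print(name, aliases)
+</pre></div>
+</div>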
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/moinmoin.html b/doc/_build/html/docs/moinmoin.html
new file mode 100644 (file)
index 0000000..e3d72b9
--- /dev/null
@@ -0,0 +1,154 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Using Pygments with MoinMoin &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Use Pygments in Java" href="java.html" />
+    <link rel="prev" title="Using Pygments in ReST documents" href="rstdirective.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="rstdirective.html"
+                        title="previous chapter">Using Pygments in ReST documents</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="java.html"
+                        title="next chapter">Use Pygments in Java</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/moinmoin.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="using-pygments-with-moinmoin">
+<h1>Using Pygments with MoinMoin<a class="headerlink" href="#using-pygments-with-moinmoin" title="Permalink to this headline">¶</a></h1>
+<p>From Pygments 0.7, the source distribution ships a <a class="reference external" href="http://moinmoin.wikiwikiweb.de/">Moin</a> parser plugin that
+can be used to get Pygments highlighting in Moin wiki pages.</p>
+<p>To use it, copy the file <cite>external/moin-parser.py</cite> from the Pygments
+distribution to the <cite>data/plugin/parser</cite> subdirectory of your Moin instance.
+Edit the options at the top of the file (currently <code class="docutils literal notranslate"><span class="pre">ATTACHMENTS</span></code> and
+<code class="docutils literal notranslate"><span class="pre">INLINESTYLES</span></code>) and rename the file to the name that the parser directive
+should have. For example, if you name the file <code class="docutils literal notranslate"><span class="pre">code.py</span></code>, you can get a
+highlighted Python code sample with this Wiki markup:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="p">{{{</span>
+<span class="c1">#!code python</span>
+<span class="p">[</span><span class="o">...</span><span class="p">]</span>
+<span class="p">}}}</span>
+</pre></div>
+</div>
+<p>where <code class="docutils literal notranslate"><span class="pre">python</span></code> is the Pygments name of the lexer to use.</p>
+<p>Additionally, if you set the <code class="docutils literal notranslate"><span class="pre">ATTACHMENTS</span></code> option to True, Pygments will also
+be called for all attachments whose filenames have no other parser
+registered.</p>
+<p>You are responsible for including CSS rules that will map the Pygments CSS
+classes to colors. You can output a stylesheet file with <cite>pygmentize</cite>, put it
+into the <cite>htdocs</cite> directory of your Moin instance and then include it in the
+<cite>stylesheets</cite> configuration option in the Moin config, e.g.:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="n">stylesheets</span> <span class="o">=</span> <span class="p">[(</span><span class="s1">&#39;screen&#39;</span><span class="p">,</span> <span class="s1">&#39;/htdocs/pygments.css&#39;</span><span class="p">)]</span>
+</pre></div>
+</div>
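+<p>The stylesheet itself can be generated with <cite>pygmentize</cite>; for example (the
+style name and output path below are only placeholders for your setup):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -S default -f html &gt; htdocs/pygments.css
+</pre></div>
+</div>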
+<p>If you do not want to do that and are willing to accept larger HTML output, you
+can set the <code class="docutils literal notranslate"><span class="pre">INLINESTYLES</span></code> option to True.</p>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/plugins.html b/doc/_build/html/docs/plugins.html
new file mode 100644 (file)
index 0000000..2cb93bf
--- /dev/null
@@ -0,0 +1,206 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Register Plugins &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Using Pygments in ReST documents" href="rstdirective.html" />
+    <link rel="prev" title="Write your own filter" href="filterdevelopment.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="../index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">Register Plugins</a><ul>
+<li><a class="reference internal" href="#entrypoints">Entrypoints</a></li>
+<li><a class="reference internal" href="#how-to-use-entrypoints">How To Use Entrypoints</a></li>
+<li><a class="reference internal" href="#extending-the-core">Extending The Core</a></li>
+</ul>
+</li>
+</ul>
+
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="filterdevelopment.html"
+                        title="previous chapter">Write your own filter</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="rstdirective.html"
+                        title="next chapter">Using Pygments in ReST documents</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/plugins.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="register-plugins">
+<h1>Register Plugins<a class="headerlink" href="#register-plugins" title="Permalink to this headline">¶</a></h1>
+<p>If you want to extend Pygments without hacking the sources, but want to
+use the lexer/formatter/style/filter lookup functions (<cite>lexers.get_lexer_by_name</cite>
+et al.), you can use <a class="reference external" href="http://peak.telecommunity.com/DevCenter/setuptools">setuptools</a> entrypoints to add new lexers, formatters,
+styles or filters as if they were in the Pygments core.</p>
+<p>That means you can use your highlighter modules with the <cite>pygmentize</cite> script,
+which relies on the mentioned functions.</p>
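+<p>For example, once such a plugin is installed, its lexer can be selected on the
+command line just like a builtin one (the alias below is only a placeholder):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -l youralias somefile.ext
+</pre></div>
+</div>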
+<div class="section" id="entrypoints">
+<h2>Entrypoints<a class="headerlink" href="#entrypoints" title="Permalink to this headline">¶</a></h2>
+<p>Here is a list of setuptools entrypoints that Pygments understands:</p>
+<p><cite>pygments.lexers</cite></p>
+<blockquote>
+<div><p>This entrypoint is used for adding new lexers to the Pygments core.
+The names of the entrypoint values don’t really matter; Pygments extracts the
+required metadata from the class definition:</p>
+<div class="highlight-ini notranslate"><div class="highlight"><pre><span></span><span class="k">[pygments.lexers]</span>
+<span class="na">yourlexer</span> <span class="o">=</span> <span class="s">yourmodule:YourLexer</span>
+</pre></div>
+</div>
+<p>Note that you have to define <code class="docutils literal notranslate"><span class="pre">name</span></code>, <code class="docutils literal notranslate"><span class="pre">aliases</span></code> and <code class="docutils literal notranslate"><span class="pre">filenames</span></code>
+attributes so that you can use the highlighter from the command line:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="k">class</span> <span class="nc">YourLexer</span><span class="p">(</span><span class="o">...</span><span class="p">):</span>
+    <span class="n">name</span> <span class="o">=</span> <span class="s1">&#39;Name Of Your Lexer&#39;</span>
+    <span class="n">aliases</span> <span class="o">=</span> <span class="p">[</span><span class="s1">&#39;alias&#39;</span><span class="p">]</span>
+    <span class="n">filenames</span> <span class="o">=</span> <span class="p">[</span><span class="s1">&#39;*.ext&#39;</span><span class="p">]</span>
+</pre></div>
+</div>
+</div></blockquote>
+<p><cite>pygments.formatters</cite></p>
+<blockquote>
+<div><p>You can use this entrypoint to add new formatters to Pygments. The
+name of an entrypoint item is the name of the formatter. If you
+prefix the name with a slash it’s used as a filename pattern:</p>
+<div class="highlight-ini notranslate"><div class="highlight"><pre><span></span><span class="k">[pygments.formatters]</span>
+<span class="na">yourformatter</span> <span class="o">=</span> <span class="s">yourmodule:YourFormatter</span>
+<span class="na">/.ext</span> <span class="o">=</span> <span class="s">yourmodule:YourFormatter</span>
+</pre></div>
+</div>
+</div></blockquote>
+<p><cite>pygments.styles</cite></p>
+<blockquote>
+<div><p>To add a new style you can use this entrypoint. The name of the entrypoint
+is the name of the style:</p>
+<div class="highlight-ini notranslate"><div class="highlight"><pre><span></span><span class="k">[pygments.styles]</span>
+<span class="na">yourstyle</span> <span class="o">=</span> <span class="s">yourmodule:YourStyle</span>
+</pre></div>
+</div>
+</div></blockquote>
+<p><cite>pygments.filters</cite></p>
+<blockquote>
+<div><p>Use this entrypoint to register a new filter. The name of the
+entrypoint is the name of the filter:</p>
+<div class="highlight-ini notranslate"><div class="highlight"><pre><span></span><span class="k">[pygments.filters]</span>
+<span class="na">yourfilter</span> <span class="o">=</span> <span class="s">yourmodule:YourFilter</span>
+</pre></div>
+</div>
+</div></blockquote>
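+<p>All of these entrypoints are declared in a package’s setuptools metadata. A
+minimal <cite>setup.py</cite> sketch (module, class and entrypoint names are placeholders)
+could look like this:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from setuptools import setup
+
+setup(
+    name=&#39;yourmodule&#39;,
+    py_modules=[&#39;yourmodule&#39;],
+    entry_points={
+        &#39;pygments.lexers&#39;: [&#39;yourlexer = yourmodule:YourLexer&#39;],
+    },
+)
+</pre></div>
+</div>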
+</div>
+<div class="section" id="how-to-use-entrypoints">
+<h2>How To Use Entrypoints<a class="headerlink" href="#how-to-use-entrypoints" title="Permalink to this headline">¶</a></h2>
+<p>This documentation doesn’t explain how to use those entrypoints because this is
+covered in the <a class="reference external" href="http://peak.telecommunity.com/DevCenter/setuptools">setuptools documentation</a>. That page should cover everything
+you need to write a plugin.</p>
+</div>
+<div class="section" id="extending-the-core">
+<h2>Extending The Core<a class="headerlink" href="#extending-the-core" title="Permalink to this headline">¶</a></h2>
+<p>If you have written a Pygments plugin that is open source, please inform us
+about that. There is a high chance that we’ll add it to the Pygments
+distribution.</p>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/quickstart.html b/doc/_build/html/docs/quickstart.html
new file mode 100644 (file)
index 0000000..fa259ba
--- /dev/null
@@ -0,0 +1,302 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Introduction and Quickstart &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Command Line Interface" href="cmdline.html" />
+    <link rel="prev" title="Download and installation" href="../download.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="../index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">Introduction and Quickstart</a><ul>
+<li><a class="reference internal" href="#architecture">Architecture</a></li>
+<li><a class="reference internal" href="#example">Example</a></li>
+<li><a class="reference internal" href="#options">Options</a></li>
+<li><a class="reference internal" href="#lexer-and-formatter-lookup">Lexer and formatter lookup</a></li>
+<li><a class="reference internal" href="#guessing-lexers">Guessing lexers</a></li>
+<li><a class="reference internal" href="#command-line-usage">Command line usage</a></li>
+</ul>
+</li>
+</ul>
+
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="../download.html"
+                        title="previous chapter">Download and installation</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="cmdline.html"
+                        title="next chapter">Command Line Interface</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/quickstart.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="introduction-and-quickstart">
+<h1>Introduction and Quickstart<a class="headerlink" href="#introduction-and-quickstart" title="Permalink to this headline">¶</a></h1>
+<p>Welcome to Pygments! This document explains the basic concepts and terms and
+gives a few examples of how to use the library.</p>
+<div class="section" id="architecture">
+<h2>Architecture<a class="headerlink" href="#architecture" title="Permalink to this headline">¶</a></h2>
+<p>There are four types of components that work together to highlight a piece of
+code (a minimal sketch follows the list):</p>
+<ul class="simple">
+<li><p>A <strong>lexer</strong> splits the source into tokens, fragments of the source that
+have a token type that determines what the text represents semantically
+(e.g., keyword, string, or comment). There is a lexer for every language
+or markup format that Pygments supports.</p></li>
+<li><p>The token stream can be piped through <strong>filters</strong>, which usually modify
+the token types or text fragments, e.g. uppercasing all keywords.</p></li>
+<li><p>A <strong>formatter</strong> then takes the token stream and writes it to an output
+file, in a format such as HTML, LaTeX or RTF.</p></li>
+<li><p>While writing the output, a <strong>style</strong> determines how to highlight all the
+different token types. It maps them to attributes like “red and bold”.</p></li>
+</ul>
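+<p>These components can also be wired together by hand. The following sketch
+(assuming the builtin <cite>PythonLexer</cite> and <cite>HtmlFormatter</cite> are what you want)
+shows the token stream flowing from the lexer to the formatter:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from pygments.lexers import PythonLexer
+from pygments.formatters import HtmlFormatter
+
+code = &#39;print(&quot;Hello World&quot;)&#39;
+lexer = PythonLexer()          # splits the source into tokens
+formatter = HtmlFormatter()    # writes the tokens as HTML, using the default style
+
+with open(&#39;out.html&#39;, &#39;w&#39;) as outfile:
+    formatter.format(lexer.get_tokens(code), outfile)
+</pre></div>
+</div>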
+</div>
+<div class="section" id="example">
+<h2>Example<a class="headerlink" href="#example" title="Permalink to this headline">¶</a></h2>
+<p>Here is a small example for highlighting Python code:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments</span> <span class="kn">import</span> <span class="n">highlight</span>
+<span class="kn">from</span> <span class="nn">pygments.lexers</span> <span class="kn">import</span> <span class="n">PythonLexer</span>
+<span class="kn">from</span> <span class="nn">pygments.formatters</span> <span class="kn">import</span> <span class="n">HtmlFormatter</span>
+
+<span class="n">code</span> <span class="o">=</span> <span class="s1">&#39;print &quot;Hello World&quot;&#39;</span>
+<span class="nb">print</span><span class="p">(</span><span class="n">highlight</span><span class="p">(</span><span class="n">code</span><span class="p">,</span> <span class="n">PythonLexer</span><span class="p">(),</span> <span class="n">HtmlFormatter</span><span class="p">()))</span>
+</pre></div>
+</div>
+<p>which prints something like this:</p>
+<div class="highlight-html notranslate"><div class="highlight"><pre><span></span><span class="p">&lt;</span><span class="nt">div</span> <span class="na">class</span><span class="o">=</span><span class="s">&quot;highlight&quot;</span><span class="p">&gt;</span>
+<span class="p">&lt;</span><span class="nt">pre</span><span class="p">&gt;&lt;</span><span class="nt">span</span> <span class="na">class</span><span class="o">=</span><span class="s">&quot;k&quot;</span><span class="p">&gt;</span>print<span class="p">&lt;/</span><span class="nt">span</span><span class="p">&gt;</span> <span class="p">&lt;</span><span class="nt">span</span> <span class="na">class</span><span class="o">=</span><span class="s">&quot;s&quot;</span><span class="p">&gt;</span><span class="ni">&amp;quot;</span>Hello World<span class="ni">&amp;quot;</span><span class="p">&lt;/</span><span class="nt">span</span><span class="p">&gt;&lt;/</span><span class="nt">pre</span><span class="p">&gt;</span>
+<span class="p">&lt;/</span><span class="nt">div</span><span class="p">&gt;</span>
+</pre></div>
+</div>
+<p>As you can see, Pygments uses CSS classes (by default, but you can change that)
+instead of inline styles in order to avoid outputting redundant style information over
+and over. A CSS stylesheet that contains all CSS classes possibly used in the output
+can be produced by:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="nb">print</span><span class="p">(</span><span class="n">HtmlFormatter</span><span class="p">()</span><span class="o">.</span><span class="n">get_style_defs</span><span class="p">(</span><span class="s1">&#39;.highlight&#39;</span><span class="p">))</span>
+</pre></div>
+</div>
+<p>The argument to <code class="xref py py-func docutils literal notranslate"><span class="pre">get_style_defs()</span></code> is used as an additional CSS selector:
+the output may look like this:</p>
+<div class="highlight-css notranslate"><div class="highlight"><pre><span></span><span class="p">.</span><span class="nc">highlight</span> <span class="p">.</span><span class="nc">k</span> <span class="p">{</span> <span class="k">color</span><span class="p">:</span> <span class="mh">#AA22FF</span><span class="p">;</span> <span class="k">font-weight</span><span class="p">:</span> <span class="kc">bold</span> <span class="p">}</span>
+<span class="p">.</span><span class="nc">highlight</span> <span class="p">.</span><span class="nc">s</span> <span class="p">{</span> <span class="k">color</span><span class="p">:</span> <span class="mh">#BB4444</span> <span class="p">}</span>
+<span class="o">...</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="options">
+<h2>Options<a class="headerlink" href="#options" title="Permalink to this headline">¶</a></h2>
+<p>The <code class="xref py py-func docutils literal notranslate"><span class="pre">highlight()</span></code> function supports a fourth argument called <em>outfile</em>, it
+must be a file object if given. The formatted output will then be written to
+this file instead of being returned as a string.</p>
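+<p>For instance (the output file name is chosen only for illustration):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>from pygments import highlight
+from pygments.lexers import PythonLexer
+from pygments.formatters import HtmlFormatter
+
+with open(&#39;hello.html&#39;, &#39;w&#39;) as outfile:
+    highlight(&#39;print(&quot;Hello World&quot;)&#39;, PythonLexer(), HtmlFormatter(), outfile)
+</pre></div>
+</div>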
+<p>Lexers and formatters both support options. They are given to them as keyword
+arguments either to the class or to the lookup method:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments</span> <span class="kn">import</span> <span class="n">highlight</span>
+<span class="kn">from</span> <span class="nn">pygments.lexers</span> <span class="kn">import</span> <span class="n">get_lexer_by_name</span>
+<span class="kn">from</span> <span class="nn">pygments.formatters</span> <span class="kn">import</span> <span class="n">HtmlFormatter</span>
+
+<span class="n">lexer</span> <span class="o">=</span> <span class="n">get_lexer_by_name</span><span class="p">(</span><span class="s2">&quot;python&quot;</span><span class="p">,</span> <span class="n">stripall</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span>
+<span class="n">formatter</span> <span class="o">=</span> <span class="n">HtmlFormatter</span><span class="p">(</span><span class="n">linenos</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> <span class="n">cssclass</span><span class="o">=</span><span class="s2">&quot;source&quot;</span><span class="p">)</span>
+<span class="n">result</span> <span class="o">=</span> <span class="n">highlight</span><span class="p">(</span><span class="n">code</span><span class="p">,</span> <span class="n">lexer</span><span class="p">,</span> <span class="n">formatter</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>This makes the lexer strip all leading and trailing whitespace from the input
+(<cite>stripall</cite> option), lets the formatter output line numbers (<cite>linenos</cite> option),
+and sets the wrapping <code class="docutils literal notranslate"><span class="pre">&lt;div&gt;</span></code>’s class to <code class="docutils literal notranslate"><span class="pre">source</span></code> (instead of
+<code class="docutils literal notranslate"><span class="pre">highlight</span></code>).</p>
+<p>Important options include:</p>
+<dl class="simple">
+<dt><cite>encoding</cite><span class="classifier">for lexers and formatters</span></dt><dd><p>Since Pygments uses Unicode strings internally, this determines which
+encoding will be used to convert to or from byte strings.</p>
+</dd>
+<dt><cite>style</cite><span class="classifier">for formatters</span></dt><dd><p>The name of the style to use when writing the output.</p>
+</dd>
+</dl>
+<p>For an overview of builtin lexers and formatters and their options, visit the
+<a class="reference internal" href="lexers.html"><span class="doc">lexer</span></a> and <a class="reference internal" href="formatters.html"><span class="doc">formatters</span></a> lists.</p>
+<p>For documentation on filters, see <a class="reference internal" href="filters.html"><span class="doc">this page</span></a>.</p>
+</div>
+<div class="section" id="lexer-and-formatter-lookup">
+<h2>Lexer and formatter lookup<a class="headerlink" href="#lexer-and-formatter-lookup" title="Permalink to this headline">¶</a></h2>
+<p>If you want to look up a built-in lexer by its alias or a filename, you can use
+one of the following methods:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.lexers</span> <span class="kn">import</span> <span class="p">(</span><span class="n">get_lexer_by_name</span><span class="p">,</span>
+<span class="gp">... </span>    <span class="n">get_lexer_for_filename</span><span class="p">,</span> <span class="n">get_lexer_for_mimetype</span><span class="p">)</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="n">get_lexer_by_name</span><span class="p">(</span><span class="s1">&#39;python&#39;</span><span class="p">)</span>
+<span class="go">&lt;pygments.lexers.PythonLexer&gt;</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="n">get_lexer_for_filename</span><span class="p">(</span><span class="s1">&#39;spam.rb&#39;</span><span class="p">)</span>
+<span class="go">&lt;pygments.lexers.RubyLexer&gt;</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="n">get_lexer_for_mimetype</span><span class="p">(</span><span class="s1">&#39;text/x-perl&#39;</span><span class="p">)</span>
+<span class="go">&lt;pygments.lexers.PerlLexer&gt;</span>
+</pre></div>
+</div>
+<p>All these functions accept keyword arguments; they will be passed to the lexer
+as options.</p>
+<p>A similar API is available for formatters: use <a class="reference internal" href="api.html#pygments.formatters.get_formatter_by_name" title="pygments.formatters.get_formatter_by_name"><code class="xref py py-func docutils literal notranslate"><span class="pre">get_formatter_by_name()</span></code></a>
+and <a class="reference internal" href="api.html#pygments.formatters.get_formatter_for_filename" title="pygments.formatters.get_formatter_for_filename"><code class="xref py py-func docutils literal notranslate"><span class="pre">get_formatter_for_filename()</span></code></a> from the <a class="reference internal" href="api.html#module-pygments.formatters" title="pygments.formatters"><code class="xref py py-mod docutils literal notranslate"><span class="pre">pygments.formatters</span></code></a>
+module for this purpose.</p>
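+<p>For example (the exact reprs of the returned objects may differ):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>&gt;&gt;&gt; from pygments.formatters import get_formatter_by_name, get_formatter_for_filename
+
+&gt;&gt;&gt; get_formatter_by_name(&#39;html&#39;, linenos=True)
+&lt;pygments.formatters.html.HtmlFormatter object at ...&gt;
+
+&gt;&gt;&gt; get_formatter_for_filename(&#39;out.tex&#39;)
+&lt;pygments.formatters.latex.LatexFormatter object at ...&gt;
+</pre></div>
+</div>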
+</div>
+<div class="section" id="guessing-lexers">
+<h2>Guessing lexers<a class="headerlink" href="#guessing-lexers" title="Permalink to this headline">¶</a></h2>
+<p>If you don’t know the content of the file, or you want to highlight a file
+whose extension is ambiguous, such as <code class="docutils literal notranslate"><span class="pre">.html</span></code> (which could contain plain HTML
+or some template tags), use these functions:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.lexers</span> <span class="kn">import</span> <span class="n">guess_lexer</span><span class="p">,</span> <span class="n">guess_lexer_for_filename</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="n">guess_lexer</span><span class="p">(</span><span class="s1">&#39;#!/usr/bin/python</span><span class="se">\n</span><span class="s1">print &quot;Hello World!&quot;&#39;</span><span class="p">)</span>
+<span class="go">&lt;pygments.lexers.PythonLexer&gt;</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="n">guess_lexer_for_filename</span><span class="p">(</span><span class="s1">&#39;test.py&#39;</span><span class="p">,</span> <span class="s1">&#39;print &quot;Hello World!&quot;&#39;</span><span class="p">)</span>
+<span class="go">&lt;pygments.lexers.PythonLexer&gt;</span>
+</pre></div>
+</div>
+<p><a class="reference internal" href="api.html#pygments.lexers.guess_lexer" title="pygments.lexers.guess_lexer"><code class="xref py py-func docutils literal notranslate"><span class="pre">guess_lexer()</span></code></a> passes the given content to the lexer classes’
+<code class="xref py py-meth docutils literal notranslate"><span class="pre">analyse_text()</span></code> method and returns the one for which it returns the
+highest number.</p>
+<p>All lexers have two different filename pattern lists: the primary and the
+secondary one. The <a class="reference internal" href="api.html#pygments.lexers.get_lexer_for_filename" title="pygments.lexers.get_lexer_for_filename"><code class="xref py py-func docutils literal notranslate"><span class="pre">get_lexer_for_filename()</span></code></a> function only uses the
+primary list, whose entries are supposed to be unique among all lexers.
+<a class="reference internal" href="api.html#pygments.lexers.guess_lexer_for_filename" title="pygments.lexers.guess_lexer_for_filename"><code class="xref py py-func docutils literal notranslate"><span class="pre">guess_lexer_for_filename()</span></code></a>, however, will first loop through all lexers
+and look at the primary and secondary filename patterns if the filename matches.
+If only one lexer matches, it is returned, else the guessing mechanism of
+<a class="reference internal" href="api.html#pygments.lexers.guess_lexer" title="pygments.lexers.guess_lexer"><code class="xref py py-func docutils literal notranslate"><span class="pre">guess_lexer()</span></code></a> is used with the matching lexers.</p>
+<p>As usual, keyword arguments to these functions are given to the created lexer
+as options.</p>
+</div>
+<div class="section" id="command-line-usage">
+<h2>Command line usage<a class="headerlink" href="#command-line-usage" title="Permalink to this headline">¶</a></h2>
+<p>You can use Pygments from the command line, using the <strong class="program">pygmentize</strong>
+script:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize test.py
+</pre></div>
+</div>
+<p>will highlight the Python file test.py using ANSI escape sequences
+(a.k.a. terminal colors) and print the result to standard output.</p>
+<p>To output HTML, use the <code class="docutils literal notranslate"><span class="pre">-f</span></code> option:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -f html -o test.html test.py
+</pre></div>
+</div>
+<p>to write an HTML-highlighted version of test.py to the file test.html.
+Note that it will only be a snippet of HTML; if you want a full HTML document,
+use the “full” option:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -f html -O full -o test.html test.py
+</pre></div>
+</div>
+<p>This will produce a full HTML document with included stylesheet.</p>
+<p>A style can be selected with <code class="docutils literal notranslate"><span class="pre">-O</span> <span class="pre">style=&lt;name&gt;</span></code>.</p>
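+<p>Several <code class="docutils literal notranslate"><span class="pre">-O</span></code> options can be combined with commas, so a full document in a
+different style can be produced like this (the style name is only an example):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -f html -O full,style=emacs -o test.html test.py
+</pre></div>
+</div>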
+<p>If you need a stylesheet for an existing HTML file using Pygments CSS classes,
+it can be created with:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ pygmentize -S default -f html &gt; style.css
+</pre></div>
+</div>
+<p>where <code class="docutils literal notranslate"><span class="pre">default</span></code> is the style name.</p>
+<p>More options and tricks can be found in the <a class="reference internal" href="cmdline.html"><span class="doc">command line reference</span></a>.</p>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/rstdirective.html b/doc/_build/html/docs/rstdirective.html
new file mode 100644 (file)
index 0000000..662330d
--- /dev/null
@@ -0,0 +1,134 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Using Pygments in ReST documents &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Using Pygments with MoinMoin" href="moinmoin.html" />
+    <link rel="prev" title="Register Plugins" href="plugins.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="plugins.html"
+                        title="previous chapter">Register Plugins</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="moinmoin.html"
+                        title="next chapter">Using Pygments with MoinMoin</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/rstdirective.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="using-pygments-in-rest-documents">
+<h1>Using Pygments in ReST documents<a class="headerlink" href="#using-pygments-in-rest-documents" title="Permalink to this headline">¶</a></h1>
+<p>Many Python people use <a class="reference external" href="http://docutils.sf.net/rst.html">ReST</a> for documenting their source code, programs,
+scripts, et cetera. This also means that documentation often includes source code
+samples or snippets.</p>
+<p>You can easily enable Pygments support for your ReST texts using a custom
+directive – this is also how this documentation displays source code.</p>
+<p>From Pygments 0.9, the directive is shipped in the distribution as
+<cite>external/rst-directive.py</cite>.  You can copy and adapt this code to your liking.</p>
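+<p>For illustration only, a stripped-down version of such a directive might look like
+the following sketch (it assumes docutils is installed; the shipped
+<cite>external/rst-directive.py</cite> is the complete, maintained variant):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span># Minimal sketch of a Pygments-backed "sourcecode" directive.
+from docutils import nodes
+from docutils.parsers.rst import Directive, directives
+
+from pygments import highlight
+from pygments.formatters import HtmlFormatter
+from pygments.lexers import TextLexer, get_lexer_by_name
+
+
+class Pygments(Directive):
+    """Render the directive content as highlighted HTML."""
+    required_arguments = 1          # the language name
+    has_content = True
+
+    def run(self):
+        try:
+            lexer = get_lexer_by_name(self.arguments[0])
+        except ValueError:
+            lexer = TextLexer()     # unknown language: fall back to plain text
+        code = '\n'.join(self.content)
+        html = highlight(code, lexer, HtmlFormatter(noclasses=True))
+        return [nodes.raw('', html, format='html')]
+
+
+directives.register_directive('sourcecode', Pygments)
+</pre></div>
+</div>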
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/styles.html b/doc/_build/html/docs/styles.html
new file mode 100644 (file)
index 0000000..00f7d27
--- /dev/null
@@ -0,0 +1,373 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Styles &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Unicode and Encodings" href="unicode.html" />
+    <link rel="prev" title="Available formatters" href="formatters.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="../index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">Styles</a><ul>
+<li><a class="reference internal" href="#creating-own-styles">Creating Own Styles</a></li>
+<li><a class="reference internal" href="#style-rules">Style Rules</a></li>
+<li><a class="reference internal" href="#builtin-styles">Builtin Styles</a></li>
+<li><a class="reference internal" href="#getting-a-list-of-available-styles">Getting a list of available styles</a></li>
+<li><a class="reference internal" href="#terminal-styles">Terminal Styles</a></li>
+</ul>
+</li>
+</ul>
+
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="formatters.html"
+                        title="previous chapter">Available formatters</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="unicode.html"
+                        title="next chapter">Unicode and Encodings</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/styles.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="styles">
+<h1>Styles<a class="headerlink" href="#styles" title="Permalink to this headline">¶</a></h1>
+<p>Pygments comes with some builtin styles that work for both the HTML and
+LaTeX formatter.</p>
+<p>The builtin styles can be looked up with the <cite>get_style_by_name</cite> function:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.styles</span> <span class="kn">import</span> <span class="n">get_style_by_name</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">get_style_by_name</span><span class="p">(</span><span class="s1">&#39;colorful&#39;</span><span class="p">)</span>
+<span class="go">&lt;class &#39;pygments.styles.colorful.ColorfulStyle&#39;&gt;</span>
+</pre></div>
+</div>
+<p>You can pass a <cite>Style</cite> class to a formatter as the <cite>style</cite>
+option, in the form of a string naming the style:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.styles</span> <span class="kn">import</span> <span class="n">get_style_by_name</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.formatters</span> <span class="kn">import</span> <span class="n">HtmlFormatter</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">HtmlFormatter</span><span class="p">(</span><span class="n">style</span><span class="o">=</span><span class="s1">&#39;colorful&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">style</span>
+<span class="go">&lt;class &#39;pygments.styles.colorful.ColorfulStyle&#39;&gt;</span>
+</pre></div>
+</div>
+<p>You can also import your own style (which must be a subclass of
+<cite>pygments.style.Style</cite>) and pass it to the formatter directly:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">yourapp.yourmodule</span> <span class="kn">import</span> <span class="n">YourStyle</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.formatters</span> <span class="kn">import</span> <span class="n">HtmlFormatter</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">HtmlFormatter</span><span class="p">(</span><span class="n">style</span><span class="o">=</span><span class="n">YourStyle</span><span class="p">)</span><span class="o">.</span><span class="n">style</span>
+<span class="go">&lt;class &#39;yourapp.yourmodule.YourStyle&#39;&gt;</span>
+</pre></div>
+</div>
+<div class="section" id="creating-own-styles">
+<h2>Creating Own Styles<a class="headerlink" href="#creating-own-styles" title="Permalink to this headline">¶</a></h2>
+<p>So, how do you create a style? All you have to do is subclass <cite>Style</cite> and
+define some token styles:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.style</span> <span class="kn">import</span> <span class="n">Style</span>
+<span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="n">Keyword</span><span class="p">,</span> <span class="n">Name</span><span class="p">,</span> <span class="n">Comment</span><span class="p">,</span> <span class="n">String</span><span class="p">,</span> <span class="n">Error</span><span class="p">,</span> \
+     <span class="n">Number</span><span class="p">,</span> <span class="n">Operator</span><span class="p">,</span> <span class="n">Generic</span>
+
+<span class="k">class</span> <span class="nc">YourStyle</span><span class="p">(</span><span class="n">Style</span><span class="p">):</span>
+    <span class="n">default_style</span> <span class="o">=</span> <span class="s2">&quot;&quot;</span>
+    <span class="n">styles</span> <span class="o">=</span> <span class="p">{</span>
+        <span class="n">Comment</span><span class="p">:</span>                <span class="s1">&#39;italic #888&#39;</span><span class="p">,</span>
+        <span class="n">Keyword</span><span class="p">:</span>                <span class="s1">&#39;bold #005&#39;</span><span class="p">,</span>
+        <span class="n">Name</span><span class="p">:</span>                   <span class="s1">&#39;#f00&#39;</span><span class="p">,</span>
+        <span class="n">Name</span><span class="o">.</span><span class="n">Function</span><span class="p">:</span>          <span class="s1">&#39;#0f0&#39;</span><span class="p">,</span>
+        <span class="n">Name</span><span class="o">.</span><span class="n">Class</span><span class="p">:</span>             <span class="s1">&#39;bold #0f0&#39;</span><span class="p">,</span>
+        <span class="n">String</span><span class="p">:</span>                 <span class="s1">&#39;bg:#eee #111&#39;</span>
+    <span class="p">}</span>
+</pre></div>
+</div>
+<p>That’s it. There are just a few rules. When you define a style for <cite>Name</cite>,
+the style automatically also affects <cite>Name.Function</cite> and so on. If you
+defined <code class="docutils literal notranslate"><span class="pre">'bold'</span></code> and you don’t want boldface for a subtoken, use <code class="docutils literal notranslate"><span class="pre">'nobold'</span></code>.</p>
+<p>(Philosophy: the styles aren’t written in CSS syntax since this way
+they can be used for a variety of formatters.)</p>
+<p><cite>default_style</cite> is the style inherited by all token types.</p>
+<p>To make the style usable for Pygments, you must</p>
+<ul class="simple">
+<li><p>either register it as a plugin (see <a class="reference internal" href="plugins.html"><span class="doc">the plugin docs</span></a>; a minimal packaging sketch follows this list)</p></li>
+<li><p>or drop it into the <cite>styles</cite> subpackage of your Pygments distribution, one style
+class per style, where the file name is the style name and the class name is
+<cite>StylenameClass</cite>. For example, if your style should be called
+<code class="docutils literal notranslate"><span class="pre">&quot;mondrian&quot;</span></code>, name the class <cite>MondrianStyle</cite>, put it into the file
+<code class="docutils literal notranslate"><span class="pre">mondrian.py</span></code> and this file into the <code class="docutils literal notranslate"><span class="pre">pygments.styles</span></code> subpackage
+directory.</p></li>
+</ul>
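+<p>For the plugin route, a minimal <code class="docutils literal notranslate"><span class="pre">setup.py</span></code> sketch might look like the
+following (the project, package and style names are placeholders):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span># Hypothetical packaging sketch that exposes YourStyle to Pygments as "mondrian".
+from setuptools import setup, find_packages
+
+setup(
+    name='yourapp-pygments-style',
+    version='0.1',
+    packages=find_packages(),
+    entry_points={
+        'pygments.styles': [
+            'mondrian = yourapp.yourmodule:YourStyle',
+        ],
+    },
+)
+</pre></div>
+</div>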
+</div>
+<div class="section" id="style-rules">
+<h2>Style Rules<a class="headerlink" href="#style-rules" title="Permalink to this headline">¶</a></h2>
+<p>Here is a small overview of all allowed styles:</p>
+<dl class="simple">
+<dt><code class="docutils literal notranslate"><span class="pre">bold</span></code></dt><dd><p>render text as bold</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">nobold</span></code></dt><dd><p>don’t render text as bold (to prevent subtokens being highlighted bold)</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">italic</span></code></dt><dd><p>render text italic</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">noitalic</span></code></dt><dd><p>don’t render text as italic</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">underline</span></code></dt><dd><p>render text underlined</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">nounderline</span></code></dt><dd><p>don’t render text underlined</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">bg:</span></code></dt><dd><p>transparent background</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">bg:#000000</span></code></dt><dd><p>background color (black)</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">border:</span></code></dt><dd><p>no border</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">border:#ffffff</span></code></dt><dd><p>border color (white)</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">#ff0000</span></code></dt><dd><p>text color (red)</p>
+</dd>
+<dt><code class="docutils literal notranslate"><span class="pre">noinherit</span></code></dt><dd><p>don’t inherit styles from supertoken</p>
+</dd>
+</dl>
+<p>Note that there must be no space between <code class="docutils literal notranslate"><span class="pre">bg:</span></code> and the color value,
+since the style definition string is split at whitespace.
+Also, using named colors is not allowed, since the supported color names
+vary for different formatters.</p>
+<p>Furthermore, not every formatter might support every style rule.</p>
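+<p>Rules can be combined in a single definition string; a small sketch (the colors
+are arbitrary):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span>styles = {
+    Comment:         'italic bg:#f8f8f8 #888',
+    Comment.Preproc: 'noitalic bold #800',   # overrides the inherited italic
+}
+</pre></div>
+</div>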
+</div>
+<div class="section" id="builtin-styles">
+<h2>Builtin Styles<a class="headerlink" href="#builtin-styles" title="Permalink to this headline">¶</a></h2>
+<p>Pygments ships some builtin styles which are maintained by the Pygments team.</p>
+<p>To get a list of known styles you can use this snippet:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.styles</span> <span class="kn">import</span> <span class="n">STYLE_MAP</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">STYLE_MAP</span><span class="o">.</span><span class="n">keys</span><span class="p">()</span>
+<span class="go">[&#39;default&#39;, &#39;emacs&#39;, &#39;friendly&#39;, &#39;colorful&#39;]</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="getting-a-list-of-available-styles">
+<h2>Getting a list of available styles<a class="headerlink" href="#getting-a-list-of-available-styles" title="Permalink to this headline">¶</a></h2>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.6.</span></p>
+</div>
+<p>Because a plugin may have registered additional styles, there is
+a way to iterate over all available styles:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.styles</span> <span class="kn">import</span> <span class="n">get_all_styles</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">styles</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">get_all_styles</span><span class="p">())</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="terminal-styles">
+<span id="ansiterminalstyle"></span><h2>Terminal Styles<a class="headerlink" href="#terminal-styles" title="Permalink to this headline">¶</a></h2>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 2.2.</span></p>
+</div>
+<p>Custom styles used with the 256-color terminal formatter can also map colors to
+use the 8 default ANSI colors.  To do so, use <code class="docutils literal notranslate"><span class="pre">ansigreen</span></code>, <code class="docutils literal notranslate"><span class="pre">ansibrightred</span></code> or
+any other colors defined in <code class="xref py py-attr docutils literal notranslate"><span class="pre">pygments.style.ansicolors</span></code>.  Foreground ANSI
+colors will be mapped to the corresponding <a class="reference external" href="https://en.wikipedia.org/wiki/ANSI_escape_code#Colors">escape codes 30 to 37</a>, thus respecting any
+custom color mapping and themes provided by many terminal emulators.  Light
+variants are treated as the foreground color with an added bold flag.
+<code class="docutils literal notranslate"><span class="pre">bg:ansi&lt;color&gt;</span></code> will also be respected, except that the light variants will be
+rendered in the same shade as their dark counterparts.</p>
+<p>See the following example, where the color of the string <code class="docutils literal notranslate"><span class="pre">&quot;Hello</span> <span class="pre">World&quot;</span></code> is
+governed by the escape sequence <code class="docutils literal notranslate"><span class="pre">\x1b[34;41;01m</span></code> (ANSI bright blue rendered as 34 plus
+the bold flag 01, with 41 being the red background) instead of an extended foreground &amp; background color.</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments</span> <span class="kn">import</span> <span class="n">highlight</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.style</span> <span class="kn">import</span> <span class="n">Style</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="n">Token</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.lexers</span> <span class="kn">import</span> <span class="n">Python3Lexer</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.formatters</span> <span class="kn">import</span> <span class="n">Terminal256Formatter</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">MyStyle</span><span class="p">(</span><span class="n">Style</span><span class="p">):</span>
+<span class="go">        styles = {</span>
+<span class="go">            Token.String:     &#39;ansibrightblue bg:ansibrightred&#39;,</span>
+<span class="go">        }</span>
+
+<span class="gp">&gt;&gt;&gt; </span><span class="n">code</span> <span class="o">=</span> <span class="s1">&#39;print(&quot;Hello World&quot;)&#39;</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">result</span> <span class="o">=</span> <span class="n">highlight</span><span class="p">(</span><span class="n">code</span><span class="p">,</span> <span class="n">Python3Lexer</span><span class="p">(),</span> <span class="n">Terminal256Formatter</span><span class="p">(</span><span class="n">style</span><span class="o">=</span><span class="n">MyStyle</span><span class="p">))</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="nb">print</span><span class="p">(</span><span class="n">result</span><span class="o">.</span><span class="n">encode</span><span class="p">())</span>
+<span class="go">b&#39;\x1b[34;41;01m&quot;\x1b[39;49;00m\x1b[34;41;01mHello World\x1b[39;49;00m\x1b[34;41;01m&quot;\x1b[39;49;00m&#39;</span>
+</pre></div>
+</div>
+<p>Colors specified using <code class="docutils literal notranslate"><span class="pre">ansi*</span></code> are converted to a default set of RGB colors
+when used with formatters other than the terminal-256 formatter.</p>
+<p>By definition of ANSI, the following colors are considered “light” colors, and
+will be rendered by most terminals as bold:</p>
+<ul class="simple">
+<li><p>“brightblack” (darkgrey), “brightred”, “brightgreen”, “brightyellow”, “brightblue”,
+“brightmagenta”, “brightcyan”, “white”</p></li>
+</ul>
+<p>The following are considered “dark” colors and will be rendered as non-bold:</p>
+<ul class="simple">
+<li><p>“black”, “red”, “green”, “yellow”, “blue”, “magenta”, “cyan”,
+“gray”</p></li>
+</ul>
+<p>Exact behavior might depend on the terminal emulator you are using and its
+settings.</p>
+<div class="versionchanged" id="new-ansi-color-names">
+<p><span class="versionmodified changed">Changed in version 2.4.</span></p>
+</div>
+<p>The definition of the ANSI color names has changed.
+The new names are easier to understand and align with the colors used in other projects.</p>
+<table class="docutils align-default">
+<colgroup>
+<col style="width: 51%" />
+<col style="width: 49%" />
+</colgroup>
+<thead>
+<tr class="row-odd"><th class="head"><p>New names</p></th>
+<th class="head"><p>Pygments up to 2.3</p></th>
+</tr>
+</thead>
+<tbody>
+<tr class="row-even"><td><p><code class="docutils literal notranslate"><span class="pre">ansiblack</span></code></p></td>
+<td><p><code class="docutils literal notranslate"><span class="pre">#ansiblack</span></code></p></td>
+</tr>
+<tr class="row-odd"><td><p><code class="docutils literal notranslate"><span class="pre">ansired</span></code></p></td>
+<td><p><code class="docutils literal notranslate"><span class="pre">#ansidarkred</span></code></p></td>
+</tr>
+<tr class="row-even"><td><p><code class="docutils literal notranslate"><span class="pre">ansigreen</span></code></p></td>
+<td><p><code class="docutils literal notranslate"><span class="pre">#ansidarkgreen</span></code></p></td>
+</tr>
+<tr class="row-odd"><td><p><code class="docutils literal notranslate"><span class="pre">ansiyellow</span></code></p></td>
+<td><p><code class="docutils literal notranslate"><span class="pre">#ansibrown</span></code></p></td>
+</tr>
+<tr class="row-even"><td><p><code class="docutils literal notranslate"><span class="pre">ansiblue</span></code></p></td>
+<td><p><code class="docutils literal notranslate"><span class="pre">#ansidarkblue</span></code></p></td>
+</tr>
+<tr class="row-odd"><td><p><code class="docutils literal notranslate"><span class="pre">ansimagenta</span></code></p></td>
+<td><p><code class="docutils literal notranslate"><span class="pre">#ansipurple</span></code></p></td>
+</tr>
+<tr class="row-even"><td><p><code class="docutils literal notranslate"><span class="pre">ansicyan</span></code></p></td>
+<td><p><code class="docutils literal notranslate"><span class="pre">#ansiteal</span></code></p></td>
+</tr>
+<tr class="row-odd"><td><p><code class="docutils literal notranslate"><span class="pre">ansigray</span></code></p></td>
+<td><p><code class="docutils literal notranslate"><span class="pre">#ansilightgray</span></code></p></td>
+</tr>
+<tr class="row-even"><td><p><code class="docutils literal notranslate"><span class="pre">ansibrightblack</span></code></p></td>
+<td><p><code class="docutils literal notranslate"><span class="pre">#ansidarkgray</span></code></p></td>
+</tr>
+<tr class="row-odd"><td><p><code class="docutils literal notranslate"><span class="pre">ansibrightred</span></code></p></td>
+<td><p><code class="docutils literal notranslate"><span class="pre">#ansired</span></code></p></td>
+</tr>
+<tr class="row-even"><td><p><code class="docutils literal notranslate"><span class="pre">ansibrightgreen</span></code></p></td>
+<td><p><code class="docutils literal notranslate"><span class="pre">#ansigreen</span></code></p></td>
+</tr>
+<tr class="row-odd"><td><p><code class="docutils literal notranslate"><span class="pre">ansibrightyellow</span></code></p></td>
+<td><p><code class="docutils literal notranslate"><span class="pre">#ansiyellow</span></code></p></td>
+</tr>
+<tr class="row-even"><td><p><code class="docutils literal notranslate"><span class="pre">ansibrightblue</span></code></p></td>
+<td><p><code class="docutils literal notranslate"><span class="pre">#ansiblue</span></code></p></td>
+</tr>
+<tr class="row-odd"><td><p><code class="docutils literal notranslate"><span class="pre">ansibrightmagenta</span></code></p></td>
+<td><p><code class="docutils literal notranslate"><span class="pre">#ansifuchsia</span></code></p></td>
+</tr>
+<tr class="row-even"><td><p><code class="docutils literal notranslate"><span class="pre">ansibrightcyan</span></code></p></td>
+<td><p><code class="docutils literal notranslate"><span class="pre">#ansiturquoise</span></code></p></td>
+</tr>
+<tr class="row-odd"><td><p><code class="docutils literal notranslate"><span class="pre">ansiwhite</span></code></p></td>
+<td><p><code class="docutils literal notranslate"><span class="pre">#ansiwhite</span></code></p></td>
+</tr>
+</tbody>
+</table>
+<p>Old ANSI color names are deprecated but will still work.</p>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/tokens.html b/doc/_build/html/docs/tokens.html
new file mode 100644 (file)
index 0000000..787deb6
--- /dev/null
@@ -0,0 +1,469 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Builtin Tokens &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="The full Pygments API" href="api.html" />
+    <link rel="prev" title="Unicode and Encodings" href="unicode.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="../index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">Builtin Tokens</a><ul>
+<li><a class="reference internal" href="#keyword-tokens">Keyword Tokens</a></li>
+<li><a class="reference internal" href="#name-tokens">Name Tokens</a></li>
+<li><a class="reference internal" href="#literals">Literals</a></li>
+<li><a class="reference internal" href="#operators">Operators</a></li>
+<li><a class="reference internal" href="#punctuation">Punctuation</a></li>
+<li><a class="reference internal" href="#comments">Comments</a></li>
+<li><a class="reference internal" href="#generic-tokens">Generic Tokens</a></li>
+</ul>
+</li>
+</ul>
+
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="unicode.html"
+                        title="previous chapter">Unicode and Encodings</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="api.html"
+                        title="next chapter">The full Pygments API</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/tokens.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="module-pygments.token">
+<span id="builtin-tokens"></span><h1>Builtin Tokens<a class="headerlink" href="#module-pygments.token" title="Permalink to this headline">¶</a></h1>
+<p>In the <a class="reference internal" href="#module-pygments.token" title="pygments.token"><code class="xref py py-mod docutils literal notranslate"><span class="pre">pygments.token</span></code></a> module, there is a special object called <cite>Token</cite>
+that is used to create token types.</p>
+<p>You can create a new token type by accessing an attribute of <cite>Token</cite>:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="n">Token</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">Token</span><span class="o">.</span><span class="n">String</span>
+<span class="go">Token.String</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">Token</span><span class="o">.</span><span class="n">String</span> <span class="ow">is</span> <span class="n">Token</span><span class="o">.</span><span class="n">String</span>
+<span class="go">True</span>
+</pre></div>
+</div>
+<p>Note that token types are singletons, so you can use the <code class="docutils literal notranslate"><span class="pre">is</span></code> operator for comparing
+token types.</p>
+<p>As of Pygments 0.7 you can also use the <code class="docutils literal notranslate"><span class="pre">in</span></code> operator to perform set tests:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="n">Comment</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">Comment</span><span class="o">.</span><span class="n">Single</span> <span class="ow">in</span> <span class="n">Comment</span>
+<span class="go">True</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">Comment</span> <span class="ow">in</span> <span class="n">Comment</span><span class="o">.</span><span class="n">Multi</span>
+<span class="go">False</span>
+</pre></div>
+</div>
+<p>This can be useful in <a class="reference internal" href="filters.html"><span class="doc">filters</span></a> and if you write lexers on your
+own without using the base lexers.</p>
+<p>You can also split a token type into a hierarchy, and get the parent of it:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">String</span><span class="o">.</span><span class="n">split</span><span class="p">()</span>
+<span class="go">[Token, Token.Literal, Token.Literal.String]</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">String</span><span class="o">.</span><span class="n">parent</span>
+<span class="go">Token.Literal</span>
+</pre></div>
+</div>
+<p>In principle, you can create an unlimited number of token types, but nobody can
+guarantee that a style will define rules for every custom token type. Because of
+that, Pygments proposes a set of global token types defined in the
+<cite>pygments.token.STANDARD_TYPES</cite> dict.</p>
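+<p>For instance, a quick look at that mapping (the values are the short class names
+the HTML formatter uses for CSS classes):</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span>&gt;&gt;&gt; from pygments.token import Comment, Keyword, STANDARD_TYPES
+&gt;&gt;&gt; STANDARD_TYPES[Comment]
+'c'
+&gt;&gt;&gt; STANDARD_TYPES[Keyword]
+'k'
+</pre></div>
+</div>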
+<p>For some token types, aliases are already defined:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="n">String</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">String</span>
+<span class="go">Token.Literal.String</span>
+</pre></div>
+</div>
+<p>Inside the <a class="reference internal" href="#module-pygments.token" title="pygments.token"><code class="xref py py-mod docutils literal notranslate"><span class="pre">pygments.token</span></code></a> module the following aliases are defined:</p>
+<table class="docutils align-default">
+<colgroup>
+<col style="width: 17%" />
+<col style="width: 36%" />
+<col style="width: 47%" />
+</colgroup>
+<tbody>
+<tr class="row-odd"><td><p><cite>Text</cite></p></td>
+<td><p><cite>Token.Text</cite></p></td>
+<td><p>for any type of text data</p></td>
+</tr>
+<tr class="row-even"><td><p><cite>Whitespace</cite></p></td>
+<td><p><cite>Token.Text.Whitespace</cite></p></td>
+<td><p>for specially highlighted whitespace</p></td>
+</tr>
+<tr class="row-odd"><td><p><cite>Error</cite></p></td>
+<td><p><cite>Token.Error</cite></p></td>
+<td><p>represents lexer errors</p></td>
+</tr>
+<tr class="row-even"><td><p><cite>Other</cite></p></td>
+<td><p><cite>Token.Other</cite></p></td>
+<td><p>special token for data not
+matched by a parser (e.g. HTML
+markup in PHP code)</p></td>
+</tr>
+<tr class="row-odd"><td><p><cite>Keyword</cite></p></td>
+<td><p><cite>Token.Keyword</cite></p></td>
+<td><p>any kind of keywords</p></td>
+</tr>
+<tr class="row-even"><td><p><cite>Name</cite></p></td>
+<td><p><cite>Token.Name</cite></p></td>
+<td><p>variable/function names</p></td>
+</tr>
+<tr class="row-odd"><td><p><cite>Literal</cite></p></td>
+<td><p><cite>Token.Literal</cite></p></td>
+<td><p>Any literals</p></td>
+</tr>
+<tr class="row-even"><td><p><cite>String</cite></p></td>
+<td><p><cite>Token.Literal.String</cite></p></td>
+<td><p>string literals</p></td>
+</tr>
+<tr class="row-odd"><td><p><cite>Number</cite></p></td>
+<td><p><cite>Token.Literal.Number</cite></p></td>
+<td><p>number literals</p></td>
+</tr>
+<tr class="row-even"><td><p><cite>Operator</cite></p></td>
+<td><p><cite>Token.Operator</cite></p></td>
+<td><p>operators (<code class="docutils literal notranslate"><span class="pre">+</span></code>, <code class="docutils literal notranslate"><span class="pre">not</span></code>…)</p></td>
+</tr>
+<tr class="row-odd"><td><p><cite>Punctuation</cite></p></td>
+<td><p><cite>Token.Punctuation</cite></p></td>
+<td><p>punctuation (<code class="docutils literal notranslate"><span class="pre">[</span></code>, <code class="docutils literal notranslate"><span class="pre">(</span></code>…)</p></td>
+</tr>
+<tr class="row-even"><td><p><cite>Comment</cite></p></td>
+<td><p><cite>Token.Comment</cite></p></td>
+<td><p>any kind of comments</p></td>
+</tr>
+<tr class="row-odd"><td><p><cite>Generic</cite></p></td>
+<td><p><cite>Token.Generic</cite></p></td>
+<td><p>generic tokens (have a look at
+the explanation below)</p></td>
+</tr>
+</tbody>
+</table>
+<p>The <cite>Whitespace</cite> token type is new in Pygments 0.8. It is used only by the
+<cite>VisibleWhitespaceFilter</cite> currently.</p>
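+<p>A minimal sketch of attaching that filter to a lexer (see the filter documentation
+for the available options):</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span>&gt;&gt;&gt; from pygments.lexers import PythonLexer
+&gt;&gt;&gt; from pygments.filters import VisibleWhitespaceFilter
+&gt;&gt;&gt; lexer = PythonLexer()
+&gt;&gt;&gt; lexer.add_filter(VisibleWhitespaceFilter(spaces=True))
+</pre></div>
+</div>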
+<p>Normally you just create token types using the already defined aliases. For each
+of those token aliases, a number of subtypes exists (excluding the special tokens
+<cite>Token.Text</cite>, <cite>Token.Error</cite> and <cite>Token.Other</cite>).</p>
+<p>The <cite>is_token_subtype()</cite> function in the <cite>pygments.token</cite> module can be used to
+test if a token type is a subtype of another (such as <cite>Name.Tag</cite> and <cite>Name</cite>).
+(This is the same as <code class="docutils literal notranslate"><span class="pre">Name.Tag</span> <span class="pre">in</span> <span class="pre">Name</span></code>. The overloaded <cite>in</cite> operator was
+introduced in Pygments 0.7; the function still exists for backwards
+compatibility.)</p>
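+<p>A brief example:</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span>&gt;&gt;&gt; from pygments.token import Name, is_token_subtype
+&gt;&gt;&gt; is_token_subtype(Name.Tag, Name)
+True
+&gt;&gt;&gt; is_token_subtype(Name, Name.Tag)
+False
+</pre></div>
+</div>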
+<p>With Pygments 0.7, it’s also possible to convert strings to token types (for example
+if you want to supply a token from the command line):</p>
+<div class="highlight-pycon notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pygments.token</span> <span class="kn">import</span> <span class="n">String</span><span class="p">,</span> <span class="n">string_to_tokentype</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">string_to_tokentype</span><span class="p">(</span><span class="s2">&quot;String&quot;</span><span class="p">)</span>
+<span class="go">Token.Literal.String</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">string_to_tokentype</span><span class="p">(</span><span class="s2">&quot;Token.Literal.String&quot;</span><span class="p">)</span>
+<span class="go">Token.Literal.String</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">string_to_tokentype</span><span class="p">(</span><span class="n">String</span><span class="p">)</span>
+<span class="go">Token.Literal.String</span>
+</pre></div>
+</div>
+<div class="section" id="keyword-tokens">
+<h2>Keyword Tokens<a class="headerlink" href="#keyword-tokens" title="Permalink to this headline">¶</a></h2>
+<dl class="simple">
+<dt><cite>Keyword</cite></dt><dd><p>For any kind of keyword (especially if it doesn’t match any of the
+subtypes of course).</p>
+</dd>
+<dt><cite>Keyword.Constant</cite></dt><dd><p>For keywords that are constants (e.g. <code class="docutils literal notranslate"><span class="pre">None</span></code> in future Python versions).</p>
+</dd>
+<dt><cite>Keyword.Declaration</cite></dt><dd><p>For keywords used for variable declaration (e.g. <code class="docutils literal notranslate"><span class="pre">var</span></code> in some programming
+languages like JavaScript).</p>
+</dd>
+<dt><cite>Keyword.Namespace</cite></dt><dd><p>For keywords used for namespace declarations (e.g. <code class="docutils literal notranslate"><span class="pre">import</span></code> in Python and
+Java and <code class="docutils literal notranslate"><span class="pre">package</span></code> in Java).</p>
+</dd>
+<dt><cite>Keyword.Pseudo</cite></dt><dd><p>For keywords that aren’t really keywords (e.g. <code class="docutils literal notranslate"><span class="pre">None</span></code> in old Python
+versions).</p>
+</dd>
+<dt><cite>Keyword.Reserved</cite></dt><dd><p>For reserved keywords.</p>
+</dd>
+<dt><cite>Keyword.Type</cite></dt><dd><p>For builtin types that can’t be used as identifiers (e.g. <code class="docutils literal notranslate"><span class="pre">int</span></code>,
+<code class="docutils literal notranslate"><span class="pre">char</span></code> etc. in C).</p>
+</dd>
+</dl>
+</div>
+<div class="section" id="name-tokens">
+<h2>Name Tokens<a class="headerlink" href="#name-tokens" title="Permalink to this headline">¶</a></h2>
+<dl class="simple">
+<dt><cite>Name</cite></dt><dd><p>For any name (variable names, function names, classes).</p>
+</dd>
+<dt><cite>Name.Attribute</cite></dt><dd><p>For all attributes (e.g. in HTML tags).</p>
+</dd>
+<dt><cite>Name.Builtin</cite></dt><dd><p>Builtin names; names that are available in the global namespace.</p>
+</dd>
+<dt><cite>Name.Builtin.Pseudo</cite></dt><dd><p>Builtin names that are implicit (e.g. <code class="docutils literal notranslate"><span class="pre">self</span></code> in Ruby, <code class="docutils literal notranslate"><span class="pre">this</span></code> in Java).</p>
+</dd>
+<dt><cite>Name.Class</cite></dt><dd><p>Class names. Because no lexer can know if a name is a class or a function
+or something else, this token is meant for class declarations.</p>
+</dd>
+<dt><cite>Name.Constant</cite></dt><dd><p>Token type for constants. In some languages you can recognise a token by the
+way it’s defined (the value after a <code class="docutils literal notranslate"><span class="pre">const</span></code> keyword for example). In
+other languages constants are uppercase by definition (Ruby).</p>
+</dd>
+<dt><cite>Name.Decorator</cite></dt><dd><p>Token type for decorators. Decorators are syntactic elements in the Python
+language. Similar syntax elements exist in C# and Java.</p>
+</dd>
+<dt><cite>Name.Entity</cite></dt><dd><p>Token type for special entities. (e.g. <code class="docutils literal notranslate"><span class="pre">&amp;nbsp;</span></code> in HTML).</p>
+</dd>
+<dt><cite>Name.Exception</cite></dt><dd><p>Token type for exception names (e.g. <code class="docutils literal notranslate"><span class="pre">RuntimeError</span></code> in Python). Some languages
+define exceptions in the function signature (Java). You can then highlight
+the name of that exception using this token.</p>
+</dd>
+<dt><cite>Name.Function</cite></dt><dd><p>Token type for function names.</p>
+</dd>
+<dt><cite>Name.Function.Magic</cite></dt><dd><p>same as <cite>Name.Function</cite> but for special function names that have an implicit use
+in a language (e.g. <code class="docutils literal notranslate"><span class="pre">__init__</span></code> method in Python).</p>
+</dd>
+<dt><cite>Name.Label</cite></dt><dd><p>Token type for label names (e.g. in languages that support <code class="docutils literal notranslate"><span class="pre">goto</span></code>).</p>
+</dd>
+<dt><cite>Name.Namespace</cite></dt><dd><p>Token type for namespaces (e.g. import paths in Java/Python), or names following
+the <code class="docutils literal notranslate"><span class="pre">module</span></code>/<code class="docutils literal notranslate"><span class="pre">namespace</span></code> keyword in other languages.</p>
+</dd>
+<dt><cite>Name.Other</cite></dt><dd><p>Other names. Normally unused.</p>
+</dd>
+<dt><cite>Name.Tag</cite></dt><dd><p>Tag names (in HTML/XML markup or configuration files).</p>
+</dd>
+<dt><cite>Name.Variable</cite></dt><dd><p>Token type for variables. Some languages have prefixes for variable names
+(PHP, Ruby, Perl). You can highlight them using this token.</p>
+</dd>
+<dt><cite>Name.Variable.Class</cite></dt><dd><p>same as <cite>Name.Variable</cite> but for class variables (also static variables).</p>
+</dd>
+<dt><cite>Name.Variable.Global</cite></dt><dd><p>same as <cite>Name.Variable</cite> but for global variables (used in Ruby, for
+example).</p>
+</dd>
+<dt><cite>Name.Variable.Instance</cite></dt><dd><p>same as <cite>Name.Variable</cite> but for instance variables.</p>
+</dd>
+<dt><cite>Name.Variable.Magic</cite></dt><dd><p>same as <cite>Name.Variable</cite> but for special variable names that have an implicit use
+in a language (e.g. <code class="docutils literal notranslate"><span class="pre">__doc__</span></code> in Python).</p>
+</dd>
+</dl>
+</div>
+<div class="section" id="literals">
+<h2>Literals<a class="headerlink" href="#literals" title="Permalink to this headline">¶</a></h2>
+<dl class="simple">
+<dt><cite>Literal</cite></dt><dd><p>For any literal (if not further defined).</p>
+</dd>
+<dt><cite>Literal.Date</cite></dt><dd><p>for date literals (e.g. <code class="docutils literal notranslate"><span class="pre">42d</span></code> in Boo).</p>
+</dd>
+<dt><cite>String</cite></dt><dd><p>For any string literal.</p>
+</dd>
+<dt><cite>String.Affix</cite></dt><dd><p>Token type for affixes that further specify the type of the string they’re
+attached to (e.g. the prefixes <code class="docutils literal notranslate"><span class="pre">r</span></code> and <code class="docutils literal notranslate"><span class="pre">u8</span></code> in <code class="docutils literal notranslate"><span class="pre">r&quot;foo&quot;</span></code> and <code class="docutils literal notranslate"><span class="pre">u8&quot;foo&quot;</span></code>).</p>
+</dd>
+<dt><cite>String.Backtick</cite></dt><dd><p>Token type for strings enclosed in backticks.</p>
+</dd>
+<dt><cite>String.Char</cite></dt><dd><p>Token type for single characters (e.g. Java, C).</p>
+</dd>
+<dt><cite>String.Delimiter</cite></dt><dd><p>Token type for delimiting identifiers in “heredoc”, raw and other similar
+strings (e.g. the word <code class="docutils literal notranslate"><span class="pre">END</span></code> in Perl code <code class="docutils literal notranslate"><span class="pre">print</span> <span class="pre">&lt;&lt;'END';</span></code>).</p>
+</dd>
+<dt><cite>String.Doc</cite></dt><dd><p>Token type for documentation strings (for example Python).</p>
+</dd>
+<dt><cite>String.Double</cite></dt><dd><p>Double quoted strings.</p>
+</dd>
+<dt><cite>String.Escape</cite></dt><dd><p>Token type for escape sequences in strings.</p>
+</dd>
+<dt><cite>String.Heredoc</cite></dt><dd><p>Token type for “heredoc” strings (e.g. in Ruby or Perl).</p>
+</dd>
+<dt><cite>String.Interpol</cite></dt><dd><p>Token type for interpolated parts in strings (e.g. <code class="docutils literal notranslate"><span class="pre">#{foo}</span></code> in Ruby).</p>
+</dd>
+<dt><cite>String.Other</cite></dt><dd><p>Token type for any other strings (for example <code class="docutils literal notranslate"><span class="pre">%q{foo}</span></code> string constructs
+in Ruby).</p>
+</dd>
+<dt><cite>String.Regex</cite></dt><dd><p>Token type for regular expression literals (e.g. <code class="docutils literal notranslate"><span class="pre">/foo/</span></code> in JavaScript).</p>
+</dd>
+<dt><cite>String.Single</cite></dt><dd><p>Token type for single quoted strings.</p>
+</dd>
+<dt><cite>String.Symbol</cite></dt><dd><p>Token type for symbols (e.g. <code class="docutils literal notranslate"><span class="pre">:foo</span></code> in LISP or Ruby).</p>
+</dd>
+<dt><cite>Number</cite></dt><dd><p>Token type for any number literal.</p>
+</dd>
+<dt><cite>Number.Bin</cite></dt><dd><p>Token type for binary literals (e.g. <code class="docutils literal notranslate"><span class="pre">0b101010</span></code>).</p>
+</dd>
+<dt><cite>Number.Float</cite></dt><dd><p>Token type for float literals (e.g. <code class="docutils literal notranslate"><span class="pre">42.0</span></code>).</p>
+</dd>
+<dt><cite>Number.Hex</cite></dt><dd><p>Token type for hexadecimal number literals (e.g. <code class="docutils literal notranslate"><span class="pre">0xdeadbeef</span></code>).</p>
+</dd>
+<dt><cite>Number.Integer</cite></dt><dd><p>Token type for integer literals (e.g. <code class="docutils literal notranslate"><span class="pre">42</span></code>).</p>
+</dd>
+<dt><cite>Number.Integer.Long</cite></dt><dd><p>Token type for long integer literals (e.g. <code class="docutils literal notranslate"><span class="pre">42L</span></code> in Python).</p>
+</dd>
+<dt><cite>Number.Oct</cite></dt><dd><p>Token type for octal literals.</p>
+</dd>
+</dl>
+</div>
+<div class="section" id="operators">
+<h2>Operators<a class="headerlink" href="#operators" title="Permalink to this headline">¶</a></h2>
+<dl class="simple">
+<dt><cite>Operator</cite></dt><dd><p>For any punctuation operator (e.g. <code class="docutils literal notranslate"><span class="pre">+</span></code>, <code class="docutils literal notranslate"><span class="pre">-</span></code>).</p>
+</dd>
+<dt><cite>Operator.Word</cite></dt><dd><p>For any operator that is a word (e.g. <code class="docutils literal notranslate"><span class="pre">not</span></code>).</p>
+</dd>
+</dl>
+</div>
+<div class="section" id="punctuation">
+<h2>Punctuation<a class="headerlink" href="#punctuation" title="Permalink to this headline">¶</a></h2>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7.</span></p>
+</div>
+<dl class="simple">
+<dt><cite>Punctuation</cite></dt><dd><p>For any punctuation which is not an operator (e.g. <code class="docutils literal notranslate"><span class="pre">[</span></code>, <code class="docutils literal notranslate"><span class="pre">(</span></code>…)</p>
+</dd>
+</dl>
+</div>
+<div class="section" id="comments">
+<h2>Comments<a class="headerlink" href="#comments" title="Permalink to this headline">¶</a></h2>
+<dl class="simple">
+<dt><cite>Comment</cite></dt><dd><p>Token type for any comment.</p>
+</dd>
+<dt><cite>Comment.Hashbang</cite></dt><dd><p>Token type for hashbang comments (i.e. first lines of files that start with
+<code class="docutils literal notranslate"><span class="pre">#!</span></code>).</p>
+</dd>
+<dt><cite>Comment.Multiline</cite></dt><dd><p>Token type for multiline comments.</p>
+</dd>
+<dt><cite>Comment.Preproc</cite></dt><dd><p>Token type for preprocessor comments (also <code class="docutils literal notranslate"><span class="pre">&lt;?php</span></code>/<code class="docutils literal notranslate"><span class="pre">&lt;%</span></code> constructs).</p>
+</dd>
+<dt><cite>Comment.Single</cite></dt><dd><p>Token type for comments that end at the end of a line (e.g. <code class="docutils literal notranslate"><span class="pre">#</span> <span class="pre">foo</span></code>).</p>
+</dd>
+<dt><cite>Comment.Special</cite></dt><dd><p>Special data in comments. For example code tags, author and license
+information, etc.</p>
+</dd>
+</dl>
+</div>
+<div class="section" id="generic-tokens">
+<h2>Generic Tokens<a class="headerlink" href="#generic-tokens" title="Permalink to this headline">¶</a></h2>
+<p>Generic tokens are for special lexers like the <cite>DiffLexer</cite> that don’t really
+highlight a programming language but, for example, a patch file.</p>
+<dl class="simple">
+<dt><cite>Generic</cite></dt><dd><p>A generic, unstyled token. Normally you don’t use this token type.</p>
+</dd>
+<dt><cite>Generic.Deleted</cite></dt><dd><p>Marks the token value as deleted.</p>
+</dd>
+<dt><cite>Generic.Emph</cite></dt><dd><p>Marks the token value as emphasized.</p>
+</dd>
+<dt><cite>Generic.Error</cite></dt><dd><p>Marks the token value as an error message.</p>
+</dd>
+<dt><cite>Generic.Heading</cite></dt><dd><p>Marks the token value as a headline.</p>
+</dd>
+<dt><cite>Generic.Inserted</cite></dt><dd><p>Marks the token value as inserted.</p>
+</dd>
+<dt><cite>Generic.Output</cite></dt><dd><p>Marks the token value as program output (e.g. for the Python console lexer).</p>
+</dd>
+<dt><cite>Generic.Prompt</cite></dt><dd><p>Marks the token value as a command prompt (e.g. for the Bash session lexer).</p>
+</dd>
+<dt><cite>Generic.Strong</cite></dt><dd><p>Marks the token value as bold (e.g. for the reST lexer).</p>
+</dd>
+<dt><cite>Generic.Subheading</cite></dt><dd><p>Marks the token value as a subheadline.</p>
+</dd>
+<dt><cite>Generic.Traceback</cite></dt><dd><p>Marks the token value as a part of an error traceback.</p>
+</dd>
+</dl>
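+<p>All of these token types are importable from the <cite>pygments.token</cite> module; they form
+a hierarchy, so a subtype can be tested against its parent with the
+<code class="docutils literal notranslate"><span class="pre">in</span></code> operator, for example:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre>from pygments.token import Comment, Generic, Number
+
+# Subtypes are "contained in" their parent type.
+print(Number.Integer in Number)    # True
+print(Comment.Single in Comment)   # True
+print(Comment.Single in Number)    # False
+
+# A style defined for Generic therefore also matches Generic.Deleted etc.
+print(Generic.Deleted in Generic)  # True
+</pre></div>
+</div>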
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/docs/unicode.html b/doc/_build/html/docs/unicode.html
new file mode 100644 (file)
index 0000000..1b8aaf1
--- /dev/null
@@ -0,0 +1,170 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Unicode and Encodings &#8212; Pygments</title>
+    <link rel="stylesheet" href="../_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
+    <script type="text/javascript" src="../_static/jquery.js"></script>
+    <script type="text/javascript" src="../_static/underscore.js"></script>
+    <script type="text/javascript" src="../_static/doctools.js"></script>
+    <script type="text/javascript" src="../_static/language_data.js"></script>
+    <link rel="shortcut icon" href="../_static/favicon.ico"/>
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="Builtin Tokens" href="tokens.html" />
+    <link rel="prev" title="Styles" href="styles.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="../index.html">Home</a></li>
+    
+    <li><a href="../languages.html">Languages</a></li>
+    <li><a href="../faq.html">FAQ</a></li>
+    <li><a href="../download.html">Get it</a></li>
+    <li><a href="index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="../index.html">
+      <img src="../_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="styles.html"
+                        title="previous chapter">Styles</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="tokens.html"
+                        title="next chapter">Builtin Tokens</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="../_sources/docs/unicode.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="../search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="unicode-and-encodings">
+<h1>Unicode and Encodings<a class="headerlink" href="#unicode-and-encodings" title="Permalink to this headline">¶</a></h1>
+<p>Since Pygments 0.6, all lexers use unicode strings internally. Because of that
+you might encounter the occasional <code class="xref py py-exc docutils literal notranslate"><span class="pre">UnicodeDecodeError</span></code> if you pass strings
+with the wrong encoding.</p>
+<p>By default, all lexers have their input encoding set to <cite>guess</cite>.  This means
+that the following encodings are tried:</p>
+<ul class="simple">
+<li><p>UTF-8 (including BOM handling)</p></li>
+<li><p>The locale encoding (i.e. the result of <cite>locale.getpreferredencoding()</cite>)</p></li>
+<li><p>As a last resort, <cite>latin1</cite></p></li>
+</ul>
+<p>If you pass a lexer a byte string object (not unicode), it tries to decode the
+data using this encoding.</p>
+<p>You can override the encoding using the <cite>encoding</cite> or <cite>inencoding</cite> lexer
+options.  If you have the <a class="reference external" href="https://chardet.github.io/">chardet</a> library installed and set the encoding to
+<code class="docutils literal notranslate"><span class="pre">chardet</span></code>, Pygments will analyse the text and automatically use the encoding
+that chardet thinks is the right one:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.lexers</span> <span class="kn">import</span> <span class="n">PythonLexer</span>
+<span class="n">lexer</span> <span class="o">=</span> <span class="n">PythonLexer</span><span class="p">(</span><span class="n">encoding</span><span class="o">=</span><span class="s1">&#39;chardet&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+<p>The best way is to pass Pygments unicode objects. In that case you can’t get
+unexpected output.</p>
+<p>The formatters now send Unicode objects to the stream if you don’t set the
+output encoding. You can do so by passing the formatters an <cite>encoding</cite> option:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">pygments.formatters</span> <span class="kn">import</span> <span class="n">HtmlFormatter</span>
+<span class="n">f</span> <span class="o">=</span> <span class="n">HtmlFormatter</span><span class="p">(</span><span class="n">encoding</span><span class="o">=</span><span class="s1">&#39;utf-8&#39;</span><span class="p">)</span>
+</pre></div>
+</div>
+<p><strong>You will have to set this option if you have non-ASCII characters in the
+source and the output stream does not accept Unicode written to it!</strong>
+This is the case for all regular files and for terminals.</p>
+<p>Note: The Terminal formatter tries to be smart: if its output stream has an
+<cite>encoding</cite> attribute, and you haven’t set the option, it will encode any
+Unicode string with this encoding before writing it. This is the case for
+<cite>sys.stdout</cite>, for example. The other formatters don’t have that behavior.</p>
+<p>Another note: If you call Pygments via the command line (<cite>pygmentize</cite>),
+encoding is handled differently, see <a class="reference internal" href="cmdline.html"><span class="doc">the command line docs</span></a>.</p>
+<div class="versionadded">
+<p><span class="versionmodified added">New in version 0.7: </span>The formatters now also accept an <cite>outencoding</cite> option which will override
+the <cite>encoding</cite> option if given. This makes it possible to use a single
+options dict with lexers and formatters, and still have different input and
+output encodings.</p>
+</div>
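+<p>For example, a single options dict could be shared between a lexer and a formatter
+along these lines (a minimal sketch; the encodings shown are only placeholders):</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre>from pygments.lexers import PythonLexer
+from pygments.formatters import HtmlFormatter
+
+# One options dict for both sides: the lexer decodes its input as UTF-8,
+# while the formatter encodes its output as Latin-1, because `outencoding`
+# overrides `encoding` for formatters.
+options = {'encoding': 'utf-8', 'outencoding': 'latin1'}
+lexer = PythonLexer(**options)
+formatter = HtmlFormatter(**options)
+</pre></div>
+</div>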
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/download.html b/doc/_build/html/download.html
new file mode 100644 (file)
index 0000000..ab70e42
--- /dev/null
@@ -0,0 +1,156 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Download and installation &#8212; Pygments</title>
+    <link rel="stylesheet" href="_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="./" src="_static/documentation_options.js"></script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <script type="text/javascript" src="_static/language_data.js"></script>
+    <link rel="shortcut icon" href="_static/favicon.ico"/>
+    <link rel="index" title="Index" href="genindex.html" />
+    <link rel="search" title="Search" href="search.html" />
+    <link rel="next" title="Introduction and Quickstart" href="docs/quickstart.html" />
+    <link rel="prev" title="Pygments documentation" href="docs/index.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="index.html">Home</a></li>
+    
+    <li><a href="languages.html">Languages</a></li>
+    <li><a href="faq.html">FAQ</a></li>
+    <li><a href="#">Get it</a></li>
+    <li><a href="docs/index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="index.html">
+      <img src="_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">Download and installation</a><ul>
+<li><a class="reference internal" href="#packaged-versions">Packaged versions</a></li>
+<li><a class="reference internal" href="#development-sources">Development sources</a></li>
+</ul>
+</li>
+</ul>
+
+  <h4>Previous topic</h4>
+  <p class="topless"><a href="docs/index.html"
+                        title="previous chapter">Pygments documentation</a></p>
+  <h4>Next topic</h4>
+  <p class="topless"><a href="docs/quickstart.html"
+                        title="next chapter">Introduction and Quickstart</a></p>
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="_sources/download.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="download-and-installation">
+<h1>Download and installation<a class="headerlink" href="#download-and-installation" title="Permalink to this headline">¶</a></h1>
+<p>The current release is version 2.4.2.</p>
+<div class="section" id="packaged-versions">
+<h2>Packaged versions<a class="headerlink" href="#packaged-versions" title="Permalink to this headline">¶</a></h2>
+<p>You can download it <a class="reference external" href="http://pypi.python.org/pypi/Pygments">from the Python Package Index</a>.  For installation of packages from
+PyPI, we recommend <a class="reference external" href="http://www.pip-installer.org">Pip</a>, which works on all
+major platforms.</p>
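+<p>With pip, installation is usually as simple as:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre>pip install Pygments
+</pre></div>
+</div>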
+<p>Under Linux, most distributions include a package for Pygments, usually called
+<code class="docutils literal notranslate"><span class="pre">pygments</span></code> or <code class="docutils literal notranslate"><span class="pre">python-pygments</span></code>.  You can install it with the package
+manager as usual.</p>
+</div>
+<div class="section" id="development-sources">
+<h2>Development sources<a class="headerlink" href="#development-sources" title="Permalink to this headline">¶</a></h2>
+<p>We’re using the Git version control system.  You can get the development source
+using this command:</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="n">git</span> <span class="n">clone</span> <span class="n">https</span><span class="p">:</span><span class="o">//</span><span class="n">github</span><span class="o">.</span><span class="n">com</span><span class="o">/</span><span class="n">pygments</span><span class="o">/</span><span class="n">pygments</span>
+</pre></div>
+</div>
+<p>Development takes place at <a class="reference external" href="https://github.com/pygments/pygments">GitHub</a>.</p>
+<p>The latest changes in the development source code are listed in the <a class="reference external" href="https://github.com/pygments/pygments/blob/master/CHANGES">changelog</a>.</p>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/faq.html b/doc/_build/html/faq.html
new file mode 100644 (file)
index 0000000..7f164ab
--- /dev/null
@@ -0,0 +1,255 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Pygments FAQ &#8212; Pygments</title>
+    <link rel="stylesheet" href="_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="./" src="_static/documentation_options.js"></script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <script type="text/javascript" src="_static/language_data.js"></script>
+    <link rel="shortcut icon" href="_static/favicon.ico"/>
+    <link rel="index" title="Index" href="genindex.html" />
+    <link rel="search" title="Search" href="search.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="index.html">Home</a></li>
+    
+    <li><a href="languages.html">Languages</a></li>
+    <li><a href="#">FAQ</a></li>
+    <li><a href="download.html">Get it</a></li>
+    <li><a href="docs/index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="index.html">
+      <img src="_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">Pygments FAQ</a><ul>
+<li><a class="reference internal" href="#what-is-pygments">What is Pygments?</a></li>
+<li><a class="reference internal" href="#where-does-the-name-pygments-come-from">Where does the name Pygments come from?</a></li>
+<li><a class="reference internal" href="#what-are-the-system-requirements">What are the system requirements?</a></li>
+<li><a class="reference internal" href="#how-can-i-use-pygments">How can I use Pygments?</a></li>
+<li><a class="reference internal" href="#how-do-i-make-a-new-style">How do I make a new style?</a></li>
+<li><a class="reference internal" href="#how-can-i-report-a-bug-or-suggest-a-feature">How can I report a bug or suggest a feature?</a></li>
+<li><a class="reference internal" href="#i-want-this-support-for-this-language">I want this support for this language!</a></li>
+<li><a class="reference internal" href="#can-i-use-pygments-for-programming-language-processing">Can I use Pygments for programming language processing?</a></li>
+<li><a class="reference internal" href="#who-uses-pygments">Who uses Pygments?</a></li>
+</ul>
+</li>
+</ul>
+
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="_sources/faq.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="pygments-faq">
+<h1>Pygments FAQ<a class="headerlink" href="#pygments-faq" title="Permalink to this headline">¶</a></h1>
+<div class="section" id="what-is-pygments">
+<h2>What is Pygments?<a class="headerlink" href="#what-is-pygments" title="Permalink to this headline">¶</a></h2>
+<p>Pygments is a syntax highlighting engine written in Python. That means it will
+take source code (or other markup) in a supported language and output a
+processed version (in different formats) containing syntax highlighting markup.</p>
+<p>Its features include:</p>
+<ul class="simple">
+<li><p>a wide range of common <a class="reference internal" href="languages.html"><span class="doc">languages and markup formats</span></a> is supported</p></li>
+<li><p>new languages and formats are added easily</p></li>
+<li><p>a number of output formats are available, including:</p>
+<ul>
+<li><p>HTML</p></li>
+<li><p>ANSI sequences (console output)</p></li>
+<li><p>LaTeX</p></li>
+<li><p>RTF</p></li>
+</ul>
+</li>
+<li><p>it is usable as a command-line tool and as a library</p></li>
+<li><p>parsing and formatting is fast</p></li>
+</ul>
+<p>Pygments is licensed under the BSD license.</p>
+</div>
+<div class="section" id="where-does-the-name-pygments-come-from">
+<h2>Where does the name Pygments come from?<a class="headerlink" href="#where-does-the-name-pygments-come-from" title="Permalink to this headline">¶</a></h2>
+<p><em>Py</em> of course stands for Python, while <em>pigments</em> are used for coloring paint,
+and in this case, source code!</p>
+</div>
+<div class="section" id="what-are-the-system-requirements">
+<h2>What are the system requirements?<a class="headerlink" href="#what-are-the-system-requirements" title="Permalink to this headline">¶</a></h2>
+<p>Pygments only needs a standard Python installation: version 2.7 or higher for
+Python 2, or version 3.5 or higher for Python 3. No additional libraries are needed.</p>
+</div>
+<div class="section" id="how-can-i-use-pygments">
+<h2>How can I use Pygments?<a class="headerlink" href="#how-can-i-use-pygments" title="Permalink to this headline">¶</a></h2>
+<p>Pygments is usable as a command-line tool as well as a library.</p>
+<p>From the command-line, usage looks like this (assuming the pygmentize script is
+properly installed):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="n">pygmentize</span> <span class="o">-</span><span class="n">f</span> <span class="n">html</span> <span class="o">/</span><span class="n">path</span><span class="o">/</span><span class="n">to</span><span class="o">/</span><span class="n">file</span><span class="o">.</span><span class="n">py</span>
+</pre></div>
+</div>
+<p>This will print an HTML-highlighted version of /path/to/file.py to standard output.</p>
+<p>For complete help, please run <code class="docutils literal notranslate"><span class="pre">pygmentize</span> <span class="pre">-h</span></code>.</p>
+<p>Usage as a library is thoroughly demonstrated in the Documentation section.</p>
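+<p>As a minimal library example, highlighting a short Python snippet to HTML looks
+like this:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre>from pygments import highlight
+from pygments.lexers import PythonLexer
+from pygments.formatters import HtmlFormatter
+
+code = 'print(42)'
+# highlight() returns the formatted (here: HTML) version of the code.
+print(highlight(code, PythonLexer(), HtmlFormatter()))
+</pre></div>
+</div>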
+</div>
+<div class="section" id="how-do-i-make-a-new-style">
+<h2>How do I make a new style?<a class="headerlink" href="#how-do-i-make-a-new-style" title="Permalink to this headline">¶</a></h2>
+<p>Please see the <a class="reference internal" href="docs/styles.html"><span class="doc">documentation on styles</span></a>.</p>
+</div>
+<div class="section" id="how-can-i-report-a-bug-or-suggest-a-feature">
+<h2>How can I report a bug or suggest a feature?<a class="headerlink" href="#how-can-i-report-a-bug-or-suggest-a-feature" title="Permalink to this headline">¶</a></h2>
+<p>Please report bugs and feature requests in the issue tracker on GitHub.</p>
+<p>You can also e-mail the authors; see the contact details.</p>
+</div>
+<div class="section" id="i-want-this-support-for-this-language">
+<h2>I want support for this language!<a class="headerlink" href="#i-want-this-support-for-this-language" title="Permalink to this headline">¶</a></h2>
+<p>Instead of waiting for others to include language support, why not write it
+yourself? All you have to know is <a class="reference internal" href="docs/lexerdevelopment.html"><span class="doc">outlined in the docs</span></a>.</p>
+</div>
+<div class="section" id="can-i-use-pygments-for-programming-language-processing">
+<h2>Can I use Pygments for programming language processing?<a class="headerlink" href="#can-i-use-pygments-for-programming-language-processing" title="Permalink to this headline">¶</a></h2>
+<p>The Pygments lexing machinery is quite powerful and can be used to build lexers for
+basically all languages. However, parsing them is not possible, though some
+lexers go a few steps in this direction in order to, e.g., highlight function names
+differently.</p>
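+<p>If all you need is the raw token stream, a short sketch like the following shows
+the kind of output the lexing machinery produces:</p>
+<div class="highlight-python notranslate"><div class="highlight"><pre>from pygments import lex
+from pygments.lexers import PythonLexer
+
+# lex() yields (token type, text) pairs -- lexical information only, no parse tree.
+for token_type, text in lex('x = 42', PythonLexer()):
+    print(token_type, repr(text))
+</pre></div>
+</div>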
+<p>Also, error reporting is not within the scope of Pygments. It focuses on correctly
+highlighting syntactically valid documents, not on finding and compensating for errors.</p>
+</div>
+<div class="section" id="who-uses-pygments">
+<h2>Who uses Pygments?<a class="headerlink" href="#who-uses-pygments" title="Permalink to this headline">¶</a></h2>
+<p>This is an (incomplete) list of projects and sites known to use the Pygments highlighter.</p>
+<ul class="simple">
+<li><p><a class="reference external" href="http://en.wikipedia.org">Wikipedia</a></p></li>
+<li><p><a class="reference external" href="http://bitbucket.org/">BitBucket</a>, a Mercurial and Git hosting site</p></li>
+<li><p><a class="reference external" href="http://sphinx.pocoo.org/">The Sphinx documentation builder</a>, for embedded source examples</p></li>
+<li><p><a class="reference external" href="http://code.google.com/p/rst2pdf/">rst2pdf</a>, a reStructuredText to PDF converter</p></li>
+<li><p><a class="reference external" href="http://codecov.io/">Codecov</a>, a code coverage CI service</p></li>
+<li><p><a class="reference external" href="http://trac.edgewall.org/">Trac</a>, the universal project management tool</p></li>
+<li><p><a class="reference external" href="http://www.methods.co.nz/asciidoc/">AsciiDoc</a>, a text-based documentation generator</p></li>
+<li><p><a class="reference external" href="http://code.activestate.com/">ActiveState Code</a>, the Python Cookbook successor</p></li>
+<li><p><a class="reference external" href="http://viewvc.org/">ViewVC</a>, a web-based version control repository browser</p></li>
+<li><p><a class="reference external" href="http://repo.or.cz/w/bzrfruit.git">BzrFruit</a>, a Bazaar branch viewer</p></li>
+<li><p><a class="reference external" href="http://bazaar-vcs.org/QBzr">QBzr</a>, a cross-platform Qt-based GUI front end for Bazaar</p></li>
+<li><p><a class="reference external" href="http://www.review-board.org/">Review Board</a>, a collaborative code reviewing tool</p></li>
+<li><p><a class="reference external" href="http://code.google.com/p/diamanda/">Diamanda</a>, a Django powered wiki system with support for Pygments</p></li>
+<li><p><a class="reference external" href="http://progopedia.ru/">Progopedia</a> (<a class="reference external" href="http://progopedia.com/">English</a>),
+an encyclopedia of programming languages</p></li>
+<li><p><a class="reference external" href="http://r1chardj0n3s.googlepages.com/bruce">Bruce</a>, a reStructuredText presentation tool</p></li>
+<li><p><a class="reference external" href="http://pida.co.uk/">PIDA</a>, a universal IDE written in Python</p></li>
+<li><p><a class="reference external" href="http://www.noiseforfree.com/bpython/">BPython</a>, a curses-based intelligent Python shell</p></li>
+<li><p><a class="reference external" href="http://pypi.python.org/pypi/pudb">PuDB</a>, a console Python debugger</p></li>
+<li><p><a class="reference external" href="http://www.xwiki.org/">XWiki</a>, a wiki-based development framework in Java, using Jython</p></li>
+<li><p><a class="reference external" href="http://ananelson.com/software/roux/">roux</a>, a script for running R scripts
+and creating beautiful output including graphs</p></li>
+<li><p><a class="reference external" href="http://hurl.it/">hurl</a>, a web service for making HTTP requests</p></li>
+<li><p><a class="reference external" href="http://colinbarnette.net/projects/wxHTMLPygmentizer">wxHTMLPygmentizer</a> is
+a GUI utility, used to make code-colorization easier</p></li>
+<li><p><a class="reference external" href="http://code.google.com/p/postmarkup/">Postmarkup</a>, a BBCode to XHTML generator</p></li>
+<li><p><a class="reference external" href="http://blog.mirotin.net/?page_id=49">WpPygments</a>, and <a class="reference external" href="https://github.com/capynet/WPygments">WPygments</a>, highlighter plugins for WordPress</p></li>
+<li><p><a class="reference external" href="http://siafoo.net">Siafoo</a>, a tool for sharing and storing useful code and programming experience</p></li>
+<li><p><a class="reference external" href="http://www.dsource.org/">D source</a>, a community for the D programming language</p></li>
+<li><p><a class="reference external" href="http://dpaste.com/">dpaste.com</a>, another Django pastebin</p></li>
+<li><p><a class="reference external" href="http://www.djangosnippets.org/">Django snippets</a>, a pastebin for Django code</p></li>
+<li><p><a class="reference external" href="http://www.fayaa.com/code/">Fayaa</a>, a Chinese pastebin</p></li>
+<li><p><a class="reference external" href="http://incollo.com">Incollo.com</a>, a free collaborative debugging tool</p></li>
+<li><p><a class="reference external" href="http://p.boxnet.eu/">PasteBox</a>, a pastebin focused on privacy</p></li>
+<li><p><a class="reference external" href="http://www.hilite.me/">hilite.me</a>, a site to highlight code snippets</p></li>
+<li><p><a class="reference external" href="http://patx.me/paste">patx.me</a>, a pastebin</p></li>
+<li><p><a class="reference external" href="https://github.com/richsmith/fluidic">Fluidic</a>, an experiment in
+integrating shells with a GUI</p></li>
+<li><p><a class="reference external" href="https://github.com/tmm1/pygments.rb">pygments.rb</a>, a pygments wrapper for Ruby</p></li>
+<li><p><a class="reference external" href="https://github.com/bfontaine/clygments">Clygments</a>, a pygments wrapper for
+Clojure</p></li>
+<li><p><a class="reference external" href="https://github.com/capynet/PHPygments">PHPygments</a>, a pygments wrapper for PHP</p></li>
+<li><p><a class="reference external" href="https://www.spyder-ide.org/">Spyder</a>, the Scientific Python Development
+Environment, uses pygments for the multi-language syntax highlighting in its
+<a class="reference external" href="https://docs.spyder-ide.org/editor.html">editor</a>.</p></li>
+</ul>
+<p>If you have a project or web site using Pygments, drop me a line, and I’ll add a
+link here.</p>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/genindex.html b/doc/_build/html/genindex.html
new file mode 100644 (file)
index 0000000..9c1ed58
--- /dev/null
@@ -0,0 +1,1682 @@
+
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Index &#8212; Pygments</title>
+    <link rel="stylesheet" href="_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="./" src="_static/documentation_options.js"></script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <script type="text/javascript" src="_static/language_data.js"></script>
+    <link rel="shortcut icon" href="_static/favicon.ico"/>
+    <link rel="index" title="Index" href="#" />
+    <link rel="search" title="Search" href="search.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="index.html">Home</a></li>
+    
+    <li><a href="languages.html">Languages</a></li>
+    <li><a href="faq.html">FAQ</a></li>
+    <li><a href="download.html">Get it</a></li>
+    <li><a href="docs/index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="index.html">
+      <img src="_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+
+<h1 id="index">Index</h1>
+
+<div class="genindex-jumpbox">
+ <a href="#A"><strong>A</strong></a>
+ | <a href="#B"><strong>B</strong></a>
+ | <a href="#C"><strong>C</strong></a>
+ | <a href="#D"><strong>D</strong></a>
+ | <a href="#E"><strong>E</strong></a>
+ | <a href="#F"><strong>F</strong></a>
+ | <a href="#G"><strong>G</strong></a>
+ | <a href="#H"><strong>H</strong></a>
+ | <a href="#I"><strong>I</strong></a>
+ | <a href="#J"><strong>J</strong></a>
+ | <a href="#K"><strong>K</strong></a>
+ | <a href="#L"><strong>L</strong></a>
+ | <a href="#M"><strong>M</strong></a>
+ | <a href="#N"><strong>N</strong></a>
+ | <a href="#O"><strong>O</strong></a>
+ | <a href="#P"><strong>P</strong></a>
+ | <a href="#Q"><strong>Q</strong></a>
+ | <a href="#R"><strong>R</strong></a>
+ | <a href="#S"><strong>S</strong></a>
+ | <a href="#T"><strong>T</strong></a>
+ | <a href="#U"><strong>U</strong></a>
+ | <a href="#V"><strong>V</strong></a>
+ | <a href="#W"><strong>W</strong></a>
+ | <a href="#X"><strong>X</strong></a>
+ | <a href="#Y"><strong>Y</strong></a>
+ | <a href="#Z"><strong>Z</strong></a>
+</div>
+<h2 id="A">A</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.business.ABAPLexer">ABAPLexer (class in pygments.lexers.business)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.grammar_notation.AbnfLexer">AbnfLexer (class in pygments.lexers.grammar_notation)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.actionscript.ActionScript3Lexer">ActionScript3Lexer (class in pygments.lexers.actionscript)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.actionscript.ActionScriptLexer">ActionScriptLexer (class in pygments.lexers.actionscript)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.pascal.AdaLexer">AdaLexer (class in pygments.lexers.pascal)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.archetype.AdlLexer">AdlLexer (class in pygments.lexers.archetype)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.haskell.AgdaLexer">AgdaLexer (class in pygments.lexers.haskell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.esoteric.AheuiLexer">AheuiLexer (class in pygments.lexers.esoteric)</a>
+</li>
+      <li><a href="docs/api.html#pygments.lexer.Lexer.alias_filenames">alias_filenames (pygments.lexer.Lexer attribute)</a>
+</li>
+      <li><a href="docs/api.html#pygments.formatter.Formatter.aliases">aliases (pygments.formatter.Formatter attribute)</a>
+
+      <ul>
+        <li><a href="docs/api.html#pygments.lexer.Lexer.aliases">(pygments.lexer.Lexer attribute)</a>
+</li>
+      </ul></li>
+      <li><a href="docs/lexers.html#pygments.lexers.dsls.AlloyLexer">AlloyLexer (class in pygments.lexers.dsls)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.ambient.AmbientTalkLexer">AmbientTalkLexer (class in pygments.lexers.ambient)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.ampl.AmplLexer">AmplLexer (class in pygments.lexers.ampl)</a>
+</li>
+      <li><a href="docs/api.html#pygments.lexer.Lexer.analyse_text">analyse_text() (pygments.lexer.Lexer static method)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.Angular2HtmlLexer">Angular2HtmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.Angular2Lexer">Angular2Lexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.AntlrActionScriptLexer">AntlrActionScriptLexer (class in pygments.lexers.parsers)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.AntlrCppLexer">AntlrCppLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.AntlrCSharpLexer">AntlrCSharpLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.AntlrJavaLexer">AntlrJavaLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.AntlrLexer">AntlrLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.AntlrObjectiveCLexer">AntlrObjectiveCLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.AntlrPerlLexer">AntlrPerlLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.AntlrPythonLexer">AntlrPythonLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.AntlrRubyLexer">AntlrRubyLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.ApacheConfLexer">ApacheConfLexer (class in pygments.lexers.configs)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.apl.APLLexer">APLLexer (class in pygments.lexers.apl)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.scripting.AppleScriptLexer">AppleScriptLexer (class in pygments.lexers.scripting)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.c_like.ArduinoLexer">ArduinoLexer (class in pygments.lexers.c_like)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.jvm.AspectJLexer">AspectJLexer (class in pygments.lexers.jvm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.graphics.AsymptoteLexer">AsymptoteLexer (class in pygments.lexers.graphics)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.AugeasLexer">AugeasLexer (class in pygments.lexers.configs)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.automation.AutohotkeyLexer">AutohotkeyLexer (class in pygments.lexers.automation)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.automation.AutoItLexer">AutoItLexer (class in pygments.lexers.automation)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.textedit.AwkLexer">AwkLexer (class in pygments.lexers.textedit)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="B">B</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.make.BaseMakefileLexer">BaseMakefileLexer (class in pygments.lexers.make)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.shell.BashLexer">BashLexer (class in pygments.lexers.shell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.shell.BashSessionLexer">BashSessionLexer (class in pygments.lexers.shell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.shell.BatchLexer">BatchLexer (class in pygments.lexers.shell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.basic.BBCBasicLexer">BBCBasicLexer (class in pygments.lexers.basic)</a>
+</li>
+      <li><a href="docs/formatters.html#BBCodeFormatter">BBCodeFormatter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.markup.BBCodeLexer">BBCodeLexer (class in pygments.lexers.markup)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.algebra.BCLexer">BCLexer (class in pygments.lexers.algebra)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.esoteric.BefungeLexer">BefungeLexer (class in pygments.lexers.esoteric)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.bibtex.BibTeXLexer">BibTeXLexer (class in pygments.lexers.bibtex)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.basic.BlitzBasicLexer">BlitzBasicLexer (class in pygments.lexers.basic)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.basic.BlitzMaxLexer">BlitzMaxLexer (class in pygments.lexers.basic)</a>
+</li>
+      <li><a href="docs/formatters.html#BmpImageFormatter">BmpImageFormatter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.grammar_notation.BnfLexer">BnfLexer (class in pygments.lexers.grammar_notation)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.boa.BoaLexer">BoaLexer (class in pygments.lexers.boa)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.verification.BoogieLexer">BoogieLexer (class in pygments.lexers.verification)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dotnet.BooLexer">BooLexer (class in pygments.lexers.dotnet)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.esoteric.BrainfuckLexer">BrainfuckLexer (class in pygments.lexers.esoteric)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.bibtex.BSTLexer">BSTLexer (class in pygments.lexers.bibtex)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.modeling.BugsLexer">BugsLexer (class in pygments.lexers.modeling)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="C">C</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.asm.Ca65Lexer">Ca65Lexer (class in pygments.lexers.asm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.archetype.CadlLexer">CadlLexer (class in pygments.lexers.archetype)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.esoteric.CAmkESLexer">CAmkESLexer (class in pygments.lexers.esoteric)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.esoteric.CapDLLexer">CapDLLexer (class in pygments.lexers.esoteric)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.capnproto.CapnProtoLexer">CapnProtoLexer (class in pygments.lexers.capnproto)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.basic.CbmBasicV2Lexer">CbmBasicV2Lexer (class in pygments.lexers.basic)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.jvm.CeylonLexer">CeylonLexer (class in pygments.lexers.jvm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.Cfengine3Lexer">Cfengine3Lexer (class in pygments.lexers.configs)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.scripting.ChaiscriptLexer">ChaiscriptLexer (class in pygments.lexers.scripting)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.chapel.ChapelLexer">ChapelLexer (class in pygments.lexers.chapel)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.c_like.CharmciLexer">CharmciLexer (class in pygments.lexers.c_like)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.CheetahHtmlLexer">CheetahHtmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.CheetahJavascriptLexer">CheetahJavascriptLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.CheetahLexer">CheetahLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.CheetahXmlLexer">CheetahXmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.webmisc.CirruLexer">CirruLexer (class in pygments.lexers.webmisc)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.c_like.ClayLexer">ClayLexer (class in pygments.lexers.c_like)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.clean.CleanLexer">CleanLexer (class in pygments.lexers.clean)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.c_cpp.CLexer">CLexer (class in pygments.lexers.c_cpp)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.jvm.ClojureLexer">ClojureLexer (class in pygments.lexers.jvm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.jvm.ClojureScriptLexer">ClojureScriptLexer (class in pygments.lexers.jvm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.make.CMakeLexer">CMakeLexer (class in pygments.lexers.make)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.asm.CObjdumpLexer">CObjdumpLexer (class in pygments.lexers.asm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.business.CobolFreeformatLexer">CobolFreeformatLexer (class in pygments.lexers.business)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.business.CobolLexer">CobolLexer (class in pygments.lexers.business)</a>
+</li>
+      <li><a href="docs/filters.html#CodeTagFilter">CodeTagFilter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.javascript.CoffeeScriptLexer">CoffeeScriptLexer (class in pygments.lexers.javascript)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.ColdfusionCFCLexer">ColdfusionCFCLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.ColdfusionHtmlLexer">ColdfusionHtmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.ColdfusionLexer">ColdfusionLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.lisp.CommonLispLexer">CommonLispLexer (class in pygments.lexers.lisp)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.oberon.ComponentPascalLexer">ComponentPascalLexer (class in pygments.lexers.oberon)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.theorem.CoqLexer">CoqLexer (class in pygments.lexers.theorem)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.c_cpp.CppLexer">CppLexer (class in pygments.lexers.c_cpp)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.asm.CppObjdumpLexer">CppObjdumpLexer (class in pygments.lexers.asm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.lisp.CPSALexer">CPSALexer (class in pygments.lexers.lisp)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dsls.CrmshLexer">CrmshLexer (class in pygments.lexers.dsls)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.d.CrocLexer">CrocLexer (class in pygments.lexers.d)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.haskell.CryptolLexer">CryptolLexer (class in pygments.lexers.haskell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.crystal.CrystalLexer">CrystalLexer (class in pygments.lexers.crystal)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dotnet.CSharpAspxLexer">CSharpAspxLexer (class in pygments.lexers.dotnet)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dotnet.CSharpLexer">CSharpLexer (class in pygments.lexers.dotnet)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.csound.CsoundDocumentLexer">CsoundDocumentLexer (class in pygments.lexers.csound)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.csound.CsoundOrchestraLexer">CsoundOrchestraLexer (class in pygments.lexers.csound)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.csound.CsoundScoreLexer">CsoundScoreLexer (class in pygments.lexers.csound)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.CssDjangoLexer">CssDjangoLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.CssErbLexer">CssErbLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.CssGenshiLexer">CssGenshiLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.css.CssLexer">CssLexer (class in pygments.lexers.css)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.CssPhpLexer">CssPhpLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.CssSmartyLexer">CssSmartyLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.c_like.CudaLexer">CudaLexer (class in pygments.lexers.c_like)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.graph.CypherLexer">CypherLexer (class in pygments.lexers.graph)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.python.CythonLexer">CythonLexer (class in pygments.lexers.python)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="D">D</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.diff.DarcsPatchLexer">DarcsPatchLexer (class in pygments.lexers.diff)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.javascript.DartLexer">DartLexer (class in pygments.lexers.javascript)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.asm.Dasm16Lexer">Dasm16Lexer (class in pygments.lexers.asm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.installers.DebianControlLexer">DebianControlLexer (class in pygments.lexers.installers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.pascal.DelphiLexer">DelphiLexer (class in pygments.lexers.pascal)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.python.DgLexer">DgLexer (class in pygments.lexers.python)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.diff.DiffLexer">DiffLexer (class in pygments.lexers.diff)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.DjangoLexer">DjangoLexer (class in pygments.lexers.templates)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.d.DLexer">DLexer (class in pygments.lexers.d)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.asm.DObjdumpLexer">DObjdumpLexer (class in pygments.lexers.asm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.DockerLexer">DockerLexer (class in pygments.lexers.configs)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.html.DtdLexer">DtdLexer (class in pygments.lexers.html)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.webmisc.DuelLexer">DuelLexer (class in pygments.lexers.webmisc)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dylan.DylanConsoleLexer">DylanConsoleLexer (class in pygments.lexers.dylan)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dylan.DylanLexer">DylanLexer (class in pygments.lexers.dylan)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dylan.DylanLidLexer">DylanLidLexer (class in pygments.lexers.dylan)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="E">E</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.javascript.EarlGreyLexer">EarlGreyLexer (class in pygments.lexers.javascript)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.scripting.EasytrieveLexer">EasytrieveLexer (class in pygments.lexers.scripting)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.EbnfLexer">EbnfLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.c_like.ECLexer">ECLexer (class in pygments.lexers.c_like)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.ecl.ECLLexer">ECLLexer (class in pygments.lexers.ecl)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.eiffel.EiffelLexer">EiffelLexer (class in pygments.lexers.eiffel)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.erlang.ElixirConsoleLexer">ElixirConsoleLexer (class in pygments.lexers.erlang)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.erlang.ElixirLexer">ElixirLexer (class in pygments.lexers.erlang)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.elm.ElmLexer">ElmLexer (class in pygments.lexers.elm)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.lisp.EmacsLispLexer">EmacsLispLexer (class in pygments.lexers.lisp)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.email.EmailLexer">EmailLexer (class in pygments.lexers.email)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.ErbLexer">ErbLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.erlang.ErlangLexer">ErlangLexer (class in pygments.lexers.erlang)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.erlang.ErlangShellLexer">ErlangShellLexer (class in pygments.lexers.erlang)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.EvoqueHtmlLexer">EvoqueHtmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.EvoqueLexer">EvoqueLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.EvoqueXmlLexer">EvoqueXmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.ezhil.EzhilLexer">EzhilLexer (class in pygments.lexers.ezhil)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="F">F</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.factor.FactorLexer">FactorLexer (class in pygments.lexers.factor)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.ruby.FancyLexer">FancyLexer (class in pygments.lexers.ruby)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.fantom.FantomLexer">FantomLexer (class in pygments.lexers.fantom)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.felix.FelixLexer">FelixLexer (class in pygments.lexers.felix)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.lisp.FennelLexer">FennelLexer (class in pygments.lexers.lisp)</a>
+</li>
+      <li><a href="docs/api.html#pygments.formatter.Formatter.filenames">filenames (pygments.formatter.Formatter attribute)</a>
+
+      <ul>
+        <li><a href="docs/api.html#pygments.lexer.Lexer.filenames">(pygments.lexer.Lexer attribute)</a>
+</li>
+      </ul></li>
+      <li><a href="docs/api.html#pygments.lexers.find_lexer_class">find_lexer_class() (in module pygments.lexers)</a>
+</li>
+      <li><a href="docs/api.html#pygments.lexers.find_lexer_class_by_name">find_lexer_class_by_name() (in module pygments.lexers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.shell.FishShellLexer">FishShellLexer (class in pygments.lexers.shell)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.dsls.FlatlineLexer">FlatlineLexer (class in pygments.lexers.dsls)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.floscript.FloScriptLexer">FloScriptLexer (class in pygments.lexers.floscript)</a>
+</li>
+      <li><a href="docs/api.html#pygments.format">format() (in module pygments)</a>
+
+      <ul>
+        <li><a href="docs/api.html#pygments.formatter.Formatter.format">(pygments.formatter.Formatter method)</a>
+</li>
+      </ul></li>
+      <li><a href="docs/api.html#pygments.formatter.Formatter">Formatter (class in pygments.formatter)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.forth.ForthLexer">ForthLexer (class in pygments.lexers.forth)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.fortran.FortranFixedLexer">FortranFixedLexer (class in pygments.lexers.fortran)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.fortran.FortranLexer">FortranLexer (class in pygments.lexers.fortran)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.foxpro.FoxProLexer">FoxProLexer (class in pygments.lexers.foxpro)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.freefem.FreeFemLexer">FreeFemLexer (class in pygments.lexers.freefem)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dotnet.FSharpLexer">FSharpLexer (class in pygments.lexers.dotnet)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="G">G</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.algebra.GAPLexer">GAPLexer (class in pygments.lexers.algebra)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.asm.GasLexer">GasLexer (class in pygments.lexers.asm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.GenshiLexer">GenshiLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.GenshiTextLexer">GenshiTextLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/api.html#pygments.lexers.get_all_lexers">get_all_lexers() (in module pygments.lexers)</a>
+</li>
+      <li><a href="docs/api.html#pygments.styles.get_all_styles">get_all_styles() (in module pygments.styles)</a>
+</li>
+      <li><a href="docs/api.html#pygments.util.get_bool_opt">get_bool_opt() (in module pygments.util)</a>
+</li>
+      <li><a href="docs/api.html#pygments.util.get_choice_opt">get_choice_opt() (in module pygments.util)</a>
+</li>
+      <li><a href="docs/api.html#pygments.formatters.get_formatter_by_name">get_formatter_by_name() (in module pygments.formatters)</a>
+</li>
+      <li><a href="docs/api.html#pygments.formatters.get_formatter_for_filename">get_formatter_for_filename() (in module pygments.formatters)</a>
+</li>
+      <li><a href="docs/api.html#pygments.util.get_int_opt">get_int_opt() (in module pygments.util)</a>
+</li>
+      <li><a href="docs/api.html#pygments.lexers.get_lexer_by_name">get_lexer_by_name() (in module pygments.lexers)</a>
+</li>
+      <li><a href="docs/api.html#pygments.lexers.get_lexer_for_filename">get_lexer_for_filename() (in module pygments.lexers)</a>
+</li>
+      <li><a href="docs/api.html#pygments.lexers.get_lexer_for_mimetype">get_lexer_for_mimetype() (in module pygments.lexers)</a>
+</li>
+      <li><a href="docs/api.html#pygments.util.get_list_opt">get_list_opt() (in module pygments.util)</a>
+</li>
+      <li><a href="docs/api.html#pygments.styles.get_style_by_name">get_style_by_name() (in module pygments.styles)</a>
+</li>
+      <li><a href="docs/api.html#pygments.formatter.Formatter.get_style_defs">get_style_defs() (pygments.formatter.Formatter method)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/api.html#pygments.lexer.Lexer.get_tokens">get_tokens() (pygments.lexer.Lexer method)</a>
+</li>
+      <li><a href="docs/api.html#pygments.lexer.Lexer.get_tokens_unprocessed">get_tokens_unprocessed() (pygments.lexer.Lexer method)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.textfmts.GettextLexer">GettextLexer (class in pygments.lexers.textfmts)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.testing.GherkinLexer">GherkinLexer (class in pygments.lexers.testing)</a>
+</li>
+      <li><a href="docs/formatters.html#GifImageFormatter">GifImageFormatter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.graphics.GLShaderLexer">GLShaderLexer (class in pygments.lexers.graphics)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.graphics.GnuplotLexer">GnuplotLexer (class in pygments.lexers.graphics)</a>
+</li>
+      <li><a href="docs/filters.html#GobbleFilter">GobbleFilter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.go.GoLexer">GoLexer (class in pygments.lexers.go)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.jvm.GoloLexer">GoloLexer (class in pygments.lexers.jvm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.business.GoodDataCLLexer">GoodDataCLLexer (class in pygments.lexers.business)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.jvm.GosuLexer">GosuLexer (class in pygments.lexers.jvm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.jvm.GosuTemplateLexer">GosuTemplateLexer (class in pygments.lexers.jvm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.markup.GroffLexer">GroffLexer (class in pygments.lexers.markup)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.jvm.GroovyLexer">GroovyLexer (class in pygments.lexers.jvm)</a>
+</li>
+      <li><a href="docs/api.html#pygments.lexers.guess_lexer">guess_lexer() (in module pygments.lexers)</a>
+</li>
+      <li><a href="docs/api.html#pygments.lexers.guess_lexer_for_filename">guess_lexer_for_filename() (in module pygments.lexers)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="H">H</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.html.HamlLexer">HamlLexer (class in pygments.lexers.html)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.HandlebarsHtmlLexer">HandlebarsHtmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.HandlebarsLexer">HandlebarsLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.haskell.HaskellLexer">HaskellLexer (class in pygments.lexers.haskell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.haxe.HaxeLexer">HaxeLexer (class in pygments.lexers.haxe)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.hexdump.HexdumpLexer">HexdumpLexer (class in pygments.lexers.hexdump)</a>
+</li>
+      <li><a href="docs/api.html#pygments.highlight">highlight() (in module pygments)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.graphics.HLSLShaderLexer">HLSLShaderLexer (class in pygments.lexers.graphics)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.asm.HsailLexer">HsailLexer (class in pygments.lexers.asm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.haskell.HspecLexer">HspecLexer (class in pygments.lexers.haskell)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.HtmlDjangoLexer">HtmlDjangoLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/formatters.html#HtmlFormatter">HtmlFormatter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.HtmlGenshiLexer">HtmlGenshiLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.html.HtmlLexer">HtmlLexer (class in pygments.lexers.html)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.HtmlPhpLexer">HtmlPhpLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.HtmlSmartyLexer">HtmlSmartyLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.textfmts.HttpLexer">HttpLexer (class in pygments.lexers.textfmts)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.haxe.HxmlLexer">HxmlLexer (class in pygments.lexers.haxe)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.scripting.HybrisLexer">HybrisLexer (class in pygments.lexers.scripting)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.lisp.HyLexer">HyLexer (class in pygments.lexers.lisp)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="I">I</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.unicon.IconLexer">IconLexer (class in pygments.lexers.unicon)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.idl.IDLLexer">IDLLexer (class in pygments.lexers.idl)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.haskell.IdrisLexer">IdrisLexer (class in pygments.lexers.haskell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.igor.IgorLexer">IgorLexer (class in pygments.lexers.igor)</a>
+</li>
+      <li><a href="docs/formatters.html#ImageFormatter">ImageFormatter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.int_fiction.Inform6Lexer">Inform6Lexer (class in pygments.lexers.int_fiction)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.int_fiction.Inform6TemplateLexer">Inform6TemplateLexer (class in pygments.lexers.int_fiction)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.int_fiction.Inform7Lexer">Inform7Lexer (class in pygments.lexers.int_fiction)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.IniLexer">IniLexer (class in pygments.lexers.configs)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.jvm.IokeLexer">IokeLexer (class in pygments.lexers.jvm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.iolang.IoLexer">IoLexer (class in pygments.lexers.iolang)</a>
+</li>
+      <li><a href="docs/formatters.html#IRCFormatter">IRCFormatter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.textfmts.IrcLogsLexer">IrcLogsLexer (class in pygments.lexers.textfmts)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.theorem.IsabelleLexer">IsabelleLexer (class in pygments.lexers.theorem)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="J">J</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.modeling.JagsLexer">JagsLexer (class in pygments.lexers.modeling)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.jvm.JasminLexer">JasminLexer (class in pygments.lexers.jvm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.jvm.JavaLexer">JavaLexer (class in pygments.lexers.jvm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.JavascriptDjangoLexer">JavascriptDjangoLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.JavascriptErbLexer">JavascriptErbLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.JavascriptGenshiLexer">JavascriptGenshiLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.javascript.JavascriptLexer">JavascriptLexer (class in pygments.lexers.javascript)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.JavascriptPhpLexer">JavascriptPhpLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.JavascriptSmartyLexer">JavascriptSmartyLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.scripting.JclLexer">JclLexer (class in pygments.lexers.scripting)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.j.JLexer">JLexer (class in pygments.lexers.j)</a>
+</li>
+      <li><a href="docs/formatters.html#JpgImageFormatter">JpgImageFormatter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.grammar_notation.JsgfLexer">JsgfLexer (class in pygments.lexers.grammar_notation)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.data.JsonBareObjectLexer">JsonBareObjectLexer (class in pygments.lexers.data)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.data.JsonLdLexer">JsonLdLexer (class in pygments.lexers.data)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.data.JsonLexer">JsonLexer (class in pygments.lexers.data)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.JspLexer">JspLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.julia.JuliaConsoleLexer">JuliaConsoleLexer (class in pygments.lexers.julia)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.julia.JuliaLexer">JuliaLexer (class in pygments.lexers.julia)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.javascript.JuttleLexer">JuttleLexer (class in pygments.lexers.javascript)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="K">K</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.javascript.KalLexer">KalLexer (class in pygments.lexers.javascript)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.KconfigLexer">KconfigLexer (class in pygments.lexers.configs)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/filters.html#KeywordCaseFilter">KeywordCaseFilter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.haskell.KokaLexer">KokaLexer (class in pygments.lexers.haskell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.jvm.KotlinLexer">KotlinLexer (class in pygments.lexers.jvm)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="L">L</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.LassoCssLexer">LassoCssLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.LassoHtmlLexer">LassoHtmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.LassoJavascriptLexer">LassoJavascriptLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.javascript.LassoLexer">LassoLexer (class in pygments.lexers.javascript)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.LassoXmlLexer">LassoXmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/formatters.html#LatexFormatter">LatexFormatter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.theorem.LeanLexer">LeanLexer (class in pygments.lexers.theorem)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.css.LessCssLexer">LessCssLexer (class in pygments.lexers.css)</a>
+</li>
+      <li><a href="docs/api.html#pygments.lex">lex() (in module pygments)</a>
+</li>
+      <li><a href="docs/api.html#pygments.lexer.Lexer">Lexer (class in pygments.lexer)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.LighttpdConfLexer">LighttpdConfLexer (class in pygments.lexers.configs)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.inferno.LimboLexer">LimboLexer (class in pygments.lexers.inferno)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.LiquidLexer">LiquidLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.haskell.LiterateAgdaLexer">LiterateAgdaLexer (class in pygments.lexers.haskell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.haskell.LiterateCryptolLexer">LiterateCryptolLexer (class in pygments.lexers.haskell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.haskell.LiterateHaskellLexer">LiterateHaskellLexer (class in pygments.lexers.haskell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.haskell.LiterateIdrisLexer">LiterateIdrisLexer (class in pygments.lexers.haskell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.javascript.LiveScriptLexer">LiveScriptLexer (class in pygments.lexers.javascript)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.asm.LlvmLexer">LlvmLexer (class in pygments.lexers.asm)</a>
+</li>
+      <li><a href="docs/api.html#pygments.formatters.load_formatter_from_file">load_formatter_from_file() (in module pygments.formatters)</a>
+</li>
+      <li><a href="docs/api.html#pygments.lexers.load_lexer_from_file">load_lexer_from_file() (in module pygments.lexers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.objective.LogosLexer">LogosLexer (class in pygments.lexers.objective)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.prolog.LogtalkLexer">LogtalkLexer (class in pygments.lexers.prolog)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.scripting.LSLLexer">LSLLexer (class in pygments.lexers.scripting)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.scripting.LuaLexer">LuaLexer (class in pygments.lexers.scripting)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="M">M</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.make.MakefileLexer">MakefileLexer (class in pygments.lexers.make)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.MakoCssLexer">MakoCssLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.MakoHtmlLexer">MakoHtmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.MakoJavascriptLexer">MakoJavascriptLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.MakoLexer">MakoLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.MakoXmlLexer">MakoXmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.business.MaqlLexer">MaqlLexer (class in pygments.lexers.business)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.markup.MarkdownLexer">MarkdownLexer (class in pygments.lexers.markup)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.javascript.MaskLexer">MaskLexer (class in pygments.lexers.javascript)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.MasonLexer">MasonLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.algebra.MathematicaLexer">MathematicaLexer (class in pygments.lexers.algebra)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.matlab.MatlabLexer">MatlabLexer (class in pygments.lexers.matlab)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.matlab.MatlabSessionLexer">MatlabSessionLexer (class in pygments.lexers.matlab)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.mime.MIMELexer">MIMELexer (class in pygments.lexers.mime)</a>
+</li>
+      <li><a href="docs/api.html#pygments.lexer.Lexer.mimetypes">mimetypes (pygments.lexer.Lexer attribute)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.d.MiniDLexer">MiniDLexer (class in pygments.lexers.d)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.modeling.ModelicaLexer">ModelicaLexer (class in pygments.lexers.modeling)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.modula2.Modula2Lexer">Modula2Lexer (class in pygments.lexers.modula2)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.markup.MoinWikiLexer">MoinWikiLexer (class in pygments.lexers.markup)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.basic.MonkeyLexer">MonkeyLexer (class in pygments.lexers.basic)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.monte.MonteLexer">MonteLexer (class in pygments.lexers.monte)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.scripting.MOOCodeLexer">MOOCodeLexer (class in pygments.lexers.scripting)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.scripting.MoonScriptLexer">MoonScriptLexer (class in pygments.lexers.scripting)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.markup.MozPreprocCssLexer">MozPreprocCssLexer (class in pygments.lexers.markup)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.markup.MozPreprocHashLexer">MozPreprocHashLexer (class in pygments.lexers.markup)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.markup.MozPreprocJavascriptLexer">MozPreprocJavascriptLexer (class in pygments.lexers.markup)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.markup.MozPreprocPercentLexer">MozPreprocPercentLexer (class in pygments.lexers.markup)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.markup.MozPreprocXulLexer">MozPreprocXulLexer (class in pygments.lexers.markup)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.c_like.MqlLexer">MqlLexer (class in pygments.lexers.c_like)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dsls.MscgenLexer">MscgenLexer (class in pygments.lexers.dsls)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.shell.MSDOSSessionLexer">MSDOSSessionLexer (class in pygments.lexers.shell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.algebra.MuPADLexer">MuPADLexer (class in pygments.lexers.algebra)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.actionscript.MxmlLexer">MxmlLexer (class in pygments.lexers.actionscript)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.MyghtyCssLexer">MyghtyCssLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.MyghtyHtmlLexer">MyghtyHtmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.MyghtyJavascriptLexer">MyghtyJavascriptLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.MyghtyLexer">MyghtyLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.MyghtyXmlLexer">MyghtyXmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.sql.MySqlLexer">MySqlLexer (class in pygments.lexers.sql)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="N">N</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/api.html#pygments.formatter.Formatter.name">name (pygments.formatter.Formatter attribute)</a>
+
+      <ul>
+        <li><a href="docs/api.html#pygments.lexer.Lexer.name">(pygments.lexer.Lexer attribute)</a>
+</li>
+      </ul></li>
+      <li><a href="docs/filters.html#NameHighlightFilter">NameHighlightFilter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.asm.NasmLexer">NasmLexer (class in pygments.lexers.asm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.asm.NasmObjdumpLexer">NasmObjdumpLexer (class in pygments.lexers.asm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.ncl.NCLLexer">NCLLexer (class in pygments.lexers.ncl)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dotnet.NemerleLexer">NemerleLexer (class in pygments.lexers.dotnet)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.c_like.NesCLexer">NesCLexer (class in pygments.lexers.c_like)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.lisp.NewLispLexer">NewLispLexer (class in pygments.lexers.lisp)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.smalltalk.NewspeakLexer">NewspeakLexer (class in pygments.lexers.smalltalk)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.NginxConfLexer">NginxConfLexer (class in pygments.lexers.configs)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.nimrod.NimrodLexer">NimrodLexer (class in pygments.lexers.nimrod)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.nit.NitLexer">NitLexer (class in pygments.lexers.nit)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.nix.NixLexer">NixLexer (class in pygments.lexers.nix)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.textfmts.NotmuchLexer">NotmuchLexer (class in pygments.lexers.textfmts)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.installers.NSISLexer">NSISLexer (class in pygments.lexers.installers)</a>
+</li>
+      <li><a href="docs/formatters.html#NullFormatter">NullFormatter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.python.NumPyLexer">NumPyLexer (class in pygments.lexers.python)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.smv.NuSMVLexer">NuSMVLexer (class in pygments.lexers.smv)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="O">O</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.asm.ObjdumpLexer">ObjdumpLexer (class in pygments.lexers.asm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.objective.ObjectiveCLexer">ObjectiveCLexer (class in pygments.lexers.objective)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.objective.ObjectiveCppLexer">ObjectiveCppLexer (class in pygments.lexers.objective)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.javascript.ObjectiveJLexer">ObjectiveJLexer (class in pygments.lexers.javascript)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.ml.OcamlLexer">OcamlLexer (class in pygments.lexers.ml)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.matlab.OctaveLexer">OctaveLexer (class in pygments.lexers.matlab)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.archetype.OdinLexer">OdinLexer (class in pygments.lexers.archetype)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.ooc.OocLexer">OocLexer (class in pygments.lexers.ooc)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.ml.OpaLexer">OpaLexer (class in pygments.lexers.ml)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.business.OpenEdgeLexer">OpenEdgeLexer (class in pygments.lexers.business)</a>
+</li>
+      <li><a href="docs/api.html#pygments.util.OptionError">OptionError</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="P">P</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.PacmanConfLexer">PacmanConfLexer (class in pygments.lexers.configs)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dsls.PanLexer">PanLexer (class in pygments.lexers.dsls)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parasail.ParaSailLexer">ParaSailLexer (class in pygments.lexers.parasail)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.pawn.PawnLexer">PawnLexer (class in pygments.lexers.pawn)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.perl.Perl6Lexer">Perl6Lexer (class in pygments.lexers.perl)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.perl.PerlLexer">PerlLexer (class in pygments.lexers.perl)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.php.PhpLexer">PhpLexer (class in pygments.lexers.php)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.jvm.PigLexer">PigLexer (class in pygments.lexers.jvm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.c_like.PikeLexer">PikeLexer (class in pygments.lexers.c_like)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.PkgConfigLexer">PkgConfigLexer (class in pygments.lexers.configs)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.sql.PlPgsqlLexer">PlPgsqlLexer (class in pygments.lexers.sql)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.pony.PonyLexer">PonyLexer (class in pygments.lexers.pony)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.sql.PostgresConsoleLexer">PostgresConsoleLexer (class in pygments.lexers.sql)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.sql.PostgresLexer">PostgresLexer (class in pygments.lexers.sql)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.graphics.PostScriptLexer">PostScriptLexer (class in pygments.lexers.graphics)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.graphics.PovrayLexer">PovrayLexer (class in pygments.lexers.graphics)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.shell.PowerShellLexer">PowerShellLexer (class in pygments.lexers.shell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.shell.PowerShellSessionLexer">PowerShellSessionLexer (class in pygments.lexers.shell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.praat.PraatLexer">PraatLexer (class in pygments.lexers.praat)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.prolog.PrologLexer">PrologLexer (class in pygments.lexers.prolog)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.PropertiesLexer">PropertiesLexer (class in pygments.lexers.configs)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dsls.ProtoBufLexer">ProtoBufLexer (class in pygments.lexers.dsls)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.html.PugLexer">PugLexer (class in pygments.lexers.html)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dsls.PuppetLexer">PuppetLexer (class in pygments.lexers.dsls)</a>
+</li>
+      <li><a href="docs/api.html#module-pygments">pygments (module)</a>
+</li>
+      <li><a href="docs/api.html#module-pygments.formatter">pygments.formatter (module)</a>
+</li>
+      <li><a href="docs/api.html#module-pygments.formatters">pygments.formatters (module)</a>
+</li>
+      <li><a href="docs/api.html#module-pygments.lexer">pygments.lexer (module)</a>
+</li>
+      <li><a href="docs/api.html#module-pygments.lexers">pygments.lexers (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.actionscript">pygments.lexers.actionscript (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.algebra">pygments.lexers.algebra (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.ambient">pygments.lexers.ambient (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.ampl">pygments.lexers.ampl (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.apl">pygments.lexers.apl (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.archetype">pygments.lexers.archetype (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.asm">pygments.lexers.asm (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.automation">pygments.lexers.automation (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.basic">pygments.lexers.basic (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.bibtex">pygments.lexers.bibtex (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.boa">pygments.lexers.boa (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.business">pygments.lexers.business (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.c_cpp">pygments.lexers.c_cpp (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.c_like">pygments.lexers.c_like (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.capnproto">pygments.lexers.capnproto (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.chapel">pygments.lexers.chapel (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.clean">pygments.lexers.clean (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.configs">pygments.lexers.configs (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.console">pygments.lexers.console (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.crystal">pygments.lexers.crystal (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.csound">pygments.lexers.csound (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.css">pygments.lexers.css (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.d">pygments.lexers.d (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.dalvik">pygments.lexers.dalvik (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.data">pygments.lexers.data (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.diff">pygments.lexers.diff (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.dotnet">pygments.lexers.dotnet (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.dsls">pygments.lexers.dsls (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.dylan">pygments.lexers.dylan (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.ecl">pygments.lexers.ecl (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.eiffel">pygments.lexers.eiffel (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.elm">pygments.lexers.elm (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.email">pygments.lexers.email (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.erlang">pygments.lexers.erlang (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.esoteric">pygments.lexers.esoteric (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.ezhil">pygments.lexers.ezhil (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.factor">pygments.lexers.factor (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.fantom">pygments.lexers.fantom (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.felix">pygments.lexers.felix (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.floscript">pygments.lexers.floscript (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.forth">pygments.lexers.forth (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.fortran">pygments.lexers.fortran (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.foxpro">pygments.lexers.foxpro (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.freefem">pygments.lexers.freefem (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.go">pygments.lexers.go (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.grammar_notation">pygments.lexers.grammar_notation (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.graph">pygments.lexers.graph (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.graphics">pygments.lexers.graphics (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.haskell">pygments.lexers.haskell (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.haxe">pygments.lexers.haxe (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.hdl">pygments.lexers.hdl (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.hexdump">pygments.lexers.hexdump (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.html">pygments.lexers.html (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.idl">pygments.lexers.idl (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.igor">pygments.lexers.igor (module)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#module-pygments.lexers.inferno">pygments.lexers.inferno (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.installers">pygments.lexers.installers (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.int_fiction">pygments.lexers.int_fiction (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.iolang">pygments.lexers.iolang (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.j">pygments.lexers.j (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.javascript">pygments.lexers.javascript (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.julia">pygments.lexers.julia (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.jvm">pygments.lexers.jvm (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.lisp">pygments.lexers.lisp (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.make">pygments.lexers.make (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.markup">pygments.lexers.markup (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.matlab">pygments.lexers.matlab (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.mime">pygments.lexers.mime (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.ml">pygments.lexers.ml (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.modeling">pygments.lexers.modeling (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.modula2">pygments.lexers.modula2 (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.monte">pygments.lexers.monte (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.ncl">pygments.lexers.ncl (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.nimrod">pygments.lexers.nimrod (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.nit">pygments.lexers.nit (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.nix">pygments.lexers.nix (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.oberon">pygments.lexers.oberon (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.objective">pygments.lexers.objective (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.ooc">pygments.lexers.ooc (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.parasail">pygments.lexers.parasail (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.parsers">pygments.lexers.parsers (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.pascal">pygments.lexers.pascal (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.pawn">pygments.lexers.pawn (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.perl">pygments.lexers.perl (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.php">pygments.lexers.php (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.pony">pygments.lexers.pony (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.praat">pygments.lexers.praat (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.prolog">pygments.lexers.prolog (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.python">pygments.lexers.python (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.qvt">pygments.lexers.qvt (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.r">pygments.lexers.r (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.rdf">pygments.lexers.rdf (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.rebol">pygments.lexers.rebol (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.resource">pygments.lexers.resource (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.rnc">pygments.lexers.rnc (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.roboconf">pygments.lexers.roboconf (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.robotframework">pygments.lexers.robotframework (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.ruby">pygments.lexers.ruby (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.rust">pygments.lexers.rust (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.sas">pygments.lexers.sas (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.scdoc">pygments.lexers.scdoc (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.scripting">pygments.lexers.scripting (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.sgf">pygments.lexers.sgf (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.shell">pygments.lexers.shell (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.slash">pygments.lexers.slash (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.smalltalk">pygments.lexers.smalltalk (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.smv">pygments.lexers.smv (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.snobol">pygments.lexers.snobol (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.solidity">pygments.lexers.solidity (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.special">pygments.lexers.special (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.sql">pygments.lexers.sql (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.stata">pygments.lexers.stata (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.supercollider">pygments.lexers.supercollider (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.tcl">pygments.lexers.tcl (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.templates">pygments.lexers.templates (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.teraterm">pygments.lexers.teraterm (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.testing">pygments.lexers.testing (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.textedit">pygments.lexers.textedit (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.textfmts">pygments.lexers.textfmts (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.theorem">pygments.lexers.theorem (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.trafficscript">pygments.lexers.trafficscript (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.typoscript">pygments.lexers.typoscript (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.unicon">pygments.lexers.unicon (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.urbi">pygments.lexers.urbi (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.varnish">pygments.lexers.varnish (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.verification">pygments.lexers.verification (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.webmisc">pygments.lexers.webmisc (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.whiley">pygments.lexers.whiley (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.x10">pygments.lexers.x10 (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.xorg">pygments.lexers.xorg (module)</a>
+</li>
+      <li><a href="docs/lexers.html#module-pygments.lexers.zig">pygments.lexers.zig (module)</a>
+</li>
+      <li><a href="docs/api.html#module-pygments.styles">pygments.styles (module)</a>
+</li>
+      <li><a href="docs/tokens.html#module-pygments.token">pygments.token (module)</a>
+</li>
+      <li><a href="docs/api.html#module-pygments.util">pygments.util (module)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.console.PyPyLogLexer">PyPyLogLexer (class in pygments.lexers.console)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.python.Python2Lexer">Python2Lexer (class in pygments.lexers.python)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.python.Python2TracebackLexer">Python2TracebackLexer (class in pygments.lexers.python)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.python.PythonConsoleLexer">PythonConsoleLexer (class in pygments.lexers.python)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.python.PythonLexer">PythonLexer (class in pygments.lexers.python)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.python.PythonTracebackLexer">PythonTracebackLexer (class in pygments.lexers.python)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="Q">Q</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.basic.QBasicLexer">QBasicLexer (class in pygments.lexers.basic)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.webmisc.QmlLexer">QmlLexer (class in pygments.lexers.webmisc)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.qvt.QVToLexer">QVToLexer (class in pygments.lexers.qvt)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="R">R</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.lisp.RacketLexer">RacketLexer (class in pygments.lexers.lisp)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.RagelCLexer">RagelCLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.RagelCppLexer">RagelCppLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.RagelDLexer">RagelDLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.RagelEmbeddedLexer">RagelEmbeddedLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.RagelJavaLexer">RagelJavaLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.RagelLexer">RagelLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.RagelObjectiveCLexer">RagelObjectiveCLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.RagelRubyLexer">RagelRubyLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/filters.html#RaiseOnErrorTokenFilter">RaiseOnErrorTokenFilter (built-in class)</a>
+</li>
+      <li><a href="docs/formatters.html#RawTokenFormatter">RawTokenFormatter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.special.RawTokenLexer">RawTokenLexer (class in pygments.lexers.special)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.r.RConsoleLexer">RConsoleLexer (class in pygments.lexers.r)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.r.RdLexer">RdLexer (class in pygments.lexers.r)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.rebol.RebolLexer">RebolLexer (class in pygments.lexers.rebol)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.esoteric.RedcodeLexer">RedcodeLexer (class in pygments.lexers.esoteric)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.rebol.RedLexer">RedLexer (class in pygments.lexers.rebol)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.RegeditLexer">RegeditLexer (class in pygments.lexers.configs)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.resource.ResourceLexer">ResourceLexer (class in pygments.lexers.resource)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.scripting.RexxLexer">RexxLexer (class in pygments.lexers.scripting)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.RhtmlLexer">RhtmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.rnc.RNCCompactLexer">RNCCompactLexer (class in pygments.lexers.rnc)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.roboconf.RoboconfGraphLexer">RoboconfGraphLexer (class in pygments.lexers.roboconf)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.roboconf.RoboconfInstancesLexer">RoboconfInstancesLexer (class in pygments.lexers.roboconf)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.robotframework.RobotFrameworkLexer">RobotFrameworkLexer (class in pygments.lexers.robotframework)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.installers.RPMSpecLexer">RPMSpecLexer (class in pygments.lexers.installers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.sql.RqlLexer">RqlLexer (class in pygments.lexers.sql)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dsls.RslLexer">RslLexer (class in pygments.lexers.dsls)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.markup.RstLexer">RstLexer (class in pygments.lexers.markup)</a>
+</li>
+      <li><a href="docs/formatters.html#RtfFormatter">RtfFormatter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.trafficscript.RtsLexer">RtsLexer (class in pygments.lexers.trafficscript)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.ruby.RubyConsoleLexer">RubyConsoleLexer (class in pygments.lexers.ruby)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.ruby.RubyLexer">RubyLexer (class in pygments.lexers.ruby)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.rust.RustLexer">RustLexer (class in pygments.lexers.rust)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="S">S</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.jvm.SarlLexer">SarlLexer (class in pygments.lexers.jvm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.sas.SASLexer">SASLexer (class in pygments.lexers.sas)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.css.SassLexer">SassLexer (class in pygments.lexers.css)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.jvm.ScalaLexer">ScalaLexer (class in pygments.lexers.jvm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.html.ScamlLexer">ScamlLexer (class in pygments.lexers.html)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.scdoc.ScdocLexer">ScdocLexer (class in pygments.lexers.scdoc)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.lisp.SchemeLexer">SchemeLexer (class in pygments.lexers.lisp)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.matlab.ScilabLexer">ScilabLexer (class in pygments.lexers.matlab)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.css.ScssLexer">ScssLexer (class in pygments.lexers.css)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.lisp.ShenLexer">ShenLexer (class in pygments.lexers.lisp)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.rdf.ShExCLexer">ShExCLexer (class in pygments.lexers.rdf)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.verification.SilverLexer">SilverLexer (class in pygments.lexers.verification)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.slash.SlashLexer">SlashLexer (class in pygments.lexers.slash)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.r.SLexer">SLexer (class in pygments.lexers.r)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.webmisc.SlimLexer">SlimLexer (class in pygments.lexers.webmisc)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.shell.SlurmBashLexer">SlurmBashLexer (class in pygments.lexers.shell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dalvik.SmaliLexer">SmaliLexer (class in pygments.lexers.dalvik)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.smalltalk.SmalltalkLexer">SmalltalkLexer (class in pygments.lexers.smalltalk)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.sgf.SmartGameFormatLexer">SmartGameFormatLexer (class in pygments.lexers.sgf)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.SmartyLexer">SmartyLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.ml.SMLLexer">SMLLexer (class in pygments.lexers.ml)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.snobol.SnobolLexer">SnobolLexer (class in pygments.lexers.snobol)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dsls.SnowballLexer">SnowballLexer (class in pygments.lexers.dsls)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.solidity.SolidityLexer">SolidityLexer (class in pygments.lexers.solidity)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.pawn.SourcePawnLexer">SourcePawnLexer (class in pygments.lexers.pawn)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.installers.SourcesListLexer">SourcesListLexer (class in pygments.lexers.installers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.rdf.SparqlLexer">SparqlLexer (class in pygments.lexers.rdf)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.sql.SqliteConsoleLexer">SqliteConsoleLexer (class in pygments.lexers.sql)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.sql.SqlLexer">SqlLexer (class in pygments.lexers.sql)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.SquidConfLexer">SquidConfLexer (class in pygments.lexers.configs)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.SspLexer">SspLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.modeling.StanLexer">StanLexer (class in pygments.lexers.modeling)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.stata.StataLexer">StataLexer (class in pygments.lexers.stata)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.supercollider.SuperColliderLexer">SuperColliderLexer (class in pygments.lexers.supercollider)</a>
+</li>
+      <li><a href="docs/formatters.html#SvgFormatter">SvgFormatter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.objective.SwiftLexer">SwiftLexer (class in pygments.lexers.objective)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.c_like.SwigLexer">SwigLexer (class in pygments.lexers.c_like)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.hdl.SystemVerilogLexer">SystemVerilogLexer (class in pygments.lexers.hdl)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="T">T</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.int_fiction.Tads3Lexer">Tads3Lexer (class in pygments.lexers.int_fiction)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.testing.TAPLexer">TAPLexer (class in pygments.lexers.testing)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.asm.TasmLexer">TasmLexer (class in pygments.lexers.asm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.tcl.TclLexer">TclLexer (class in pygments.lexers.tcl)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.shell.TcshLexer">TcshLexer (class in pygments.lexers.shell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.shell.TcshSessionLexer">TcshSessionLexer (class in pygments.lexers.shell)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.TeaTemplateLexer">TeaTemplateLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.teraterm.TeraTermLexer">TeraTermLexer (class in pygments.lexers.teraterm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.TermcapLexer">TermcapLexer (class in pygments.lexers.configs)</a>
+</li>
+      <li><a href="docs/formatters.html#Terminal256Formatter">Terminal256Formatter (built-in class)</a>
+</li>
+      <li><a href="docs/formatters.html#TerminalFormatter">TerminalFormatter (built-in class)</a>
+</li>
+      <li><a href="docs/formatters.html#TerminalTrueColorFormatter">TerminalTrueColorFormatter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.TerminfoLexer">TerminfoLexer (class in pygments.lexers.configs)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.TerraformLexer">TerraformLexer (class in pygments.lexers.configs)</a>
+</li>
+      <li><a href="docs/formatters.html#TestcaseFormatter">TestcaseFormatter (built-in class)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.markup.TexLexer">TexLexer (class in pygments.lexers.markup)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.special.TextLexer">TextLexer (class in pygments.lexers.special)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dsls.ThriftLexer">ThriftLexer (class in pygments.lexers.dsls)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.textfmts.TodotxtLexer">TodotxtLexer (class in pygments.lexers.textfmts)</a>
+</li>
+      <li><a href="docs/filters.html#TokenMergeFilter">TokenMergeFilter (built-in class)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.configs.TOMLLexer">TOMLLexer (class in pygments.lexers.configs)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.sql.TransactSqlLexer">TransactSqlLexer (class in pygments.lexers.sql)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.parsers.TreetopLexer">TreetopLexer (class in pygments.lexers.parsers)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.rdf.TurtleLexer">TurtleLexer (class in pygments.lexers.rdf)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.TwigHtmlLexer">TwigHtmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.TwigLexer">TwigLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.javascript.TypeScriptLexer">TypeScriptLexer (class in pygments.lexers.javascript)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.typoscript.TypoScriptCssDataLexer">TypoScriptCssDataLexer (class in pygments.lexers.typoscript)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.typoscript.TypoScriptHtmlDataLexer">TypoScriptHtmlDataLexer (class in pygments.lexers.typoscript)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.typoscript.TypoScriptLexer">TypoScriptLexer (class in pygments.lexers.typoscript)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="U">U</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.unicon.UcodeLexer">UcodeLexer (class in pygments.lexers.unicon)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.unicon.UniconLexer">UniconLexer (class in pygments.lexers.unicon)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.urbi.UrbiscriptLexer">UrbiscriptLexer (class in pygments.lexers.urbi)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="V">V</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.c_like.ValaLexer">ValaLexer (class in pygments.lexers.c_like)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dotnet.VbNetAspxLexer">VbNetAspxLexer (class in pygments.lexers.dotnet)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dotnet.VbNetLexer">VbNetLexer (class in pygments.lexers.dotnet)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.basic.VBScriptLexer">VBScriptLexer (class in pygments.lexers.basic)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.varnish.VCLLexer">VCLLexer (class in pygments.lexers.varnish)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.varnish.VCLSnippetLexer">VCLSnippetLexer (class in pygments.lexers.varnish)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.console.VCTreeStatusLexer">VCTreeStatusLexer (class in pygments.lexers.console)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.VelocityHtmlLexer">VelocityHtmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.VelocityLexer">VelocityLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.VelocityXmlLexer">VelocityXmlLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.hdl.VerilogLexer">VerilogLexer (class in pygments.lexers.hdl)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.dsls.VGLLexer">VGLLexer (class in pygments.lexers.dsls)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.hdl.VhdlLexer">VhdlLexer (class in pygments.lexers.hdl)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.textedit.VimLexer">VimLexer (class in pygments.lexers.textedit)</a>
+</li>
+      <li><a href="docs/filters.html#VisibleWhitespaceFilter">VisibleWhitespaceFilter (built-in class)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="W">W</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.diff.WDiffLexer">WDiffLexer (class in pygments.lexers.diff)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.whiley.WhileyLexer">WhileyLexer (class in pygments.lexers.whiley)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="X">X</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.x10.X10Lexer">X10Lexer (class in pygments.lexers.x10)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.XmlDjangoLexer">XmlDjangoLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.XmlErbLexer">XmlErbLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.html.XmlLexer">XmlLexer (class in pygments.lexers.html)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.XmlPhpLexer">XmlPhpLexer (class in pygments.lexers.templates)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.XmlSmartyLexer">XmlSmartyLexer (class in pygments.lexers.templates)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.xorg.XorgLexer">XorgLexer (class in pygments.lexers.xorg)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.webmisc.XQueryLexer">XQueryLexer (class in pygments.lexers.webmisc)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.html.XsltLexer">XsltLexer (class in pygments.lexers.html)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.jvm.XtendLexer">XtendLexer (class in pygments.lexers.jvm)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.lisp.XtlangLexer">XtlangLexer (class in pygments.lexers.lisp)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="Y">Y</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.templates.YamlJinjaLexer">YamlJinjaLexer (class in pygments.lexers.templates)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.data.YamlLexer">YamlLexer (class in pygments.lexers.data)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+<h2 id="Z">Z</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.dsls.ZeekLexer">ZeekLexer (class in pygments.lexers.dsls)</a>
+</li>
+  </ul></td>
+  <td style="width: 33%; vertical-align: top;"><ul>
+      <li><a href="docs/lexers.html#pygments.lexers.php.ZephirLexer">ZephirLexer (class in pygments.lexers.php)</a>
+</li>
+      <li><a href="docs/lexers.html#pygments.lexers.zig.ZigLexer">ZigLexer (class in pygments.lexers.zig)</a>
+</li>
+  </ul></td>
+</tr></table>
+
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/index.html b/doc/_build/html/index.html
new file mode 100644 (file)
index 0000000..80f2259
--- /dev/null
@@ -0,0 +1,166 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Welcome! &#8212; Pygments</title>
+    <link rel="stylesheet" href="_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="./" src="_static/documentation_options.js"></script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <script type="text/javascript" src="_static/language_data.js"></script>
+    <link rel="shortcut icon" href="_static/favicon.ico"/>
+    <link rel="index" title="Index" href="genindex.html" />
+    <link rel="search" title="Search" href="search.html" />
+    <link rel="next" title="Pygments documentation" href="docs/index.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+      .related { display: none; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="#">Home</a></li>
+    
+    <li><a href="languages.html">Languages</a></li>
+    <li><a href="faq.html">FAQ</a></li>
+    <li><a href="download.html">Get it</a></li>
+    <li><a href="docs/index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="#">
+      <img src="_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper"><h3>Download</h3>
+
+<p>Current version: <b>2.4.2</b></p>
+<p>Get Pygments from the <a href="http://pypi.python.org/pypi/Pygments">Python Package
+    Index</a>, or install it with:</p>
+<pre>pip install Pygments</pre>
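+<p>Once installed, a quick way to try it is the bundled <code>pygmentize</code>
+    command.  The invocation below is only a minimal example; the file names
+    are placeholders:</p>
+<pre>pygmentize -f html -o example.html example.py</pre>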
+
+
+<h3>Questions? Suggestions?</h3>
+
+<p><img src="_static/github.png" width="24" />
+    Clone at <a href="https://github.com/pygments/pygments">GitHub</a>.</p>
+<p>You can also open an issue at the
+  <a href="https://github.com/pygments/pygments/issues">tracker</a>.</p>
+
+<p class="logo">A <a href="http://pocoo.org/">
+    <img src="_static/pocoo.png" /></a> project</p>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="welcome">
+<h1>Welcome!<a class="headerlink" href="#welcome" title="Permalink to this headline">¶</a></h1>
+<p>This is the home of Pygments.  It is a generic syntax highlighter suitable for
+use in code hosting, forums, wikis or other applications that need to prettify
+source code.  Highlights are:</p>
+<ul class="simple">
+<li><p>over 300 languages and other text formats are supported</p></li>
+<li><p>special attention is paid to details that increase highlighting quality</p></li>
+<li><p>support for new languages and formats is added easily; most languages use a
+simple regex-based lexing mechanism</p></li>
+<li><p>a number of output formats are available, among them HTML, RTF, LaTeX and ANSI
+sequences</p></li>
+<li><p>it is usable as a command-line tool and as a library (see the sketch below)</p></li>
+<li><p>… and it even highlights Perl 6!</p></li>
+</ul>
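+<p>To show what library use looks like, here is a minimal sketch: it feeds a
+short Python snippet through the standard <code>highlight()</code> function with a
+lexer and a formatter.  The code string and the final <code>print()</code> are purely
+illustrative; only <code>highlight</code>, <code>PythonLexer</code> and
+<code>HtmlFormatter</code> come from Pygments itself.</p>
+<pre>from pygments import highlight
+from pygments.lexers import PythonLexer
+from pygments.formatters import HtmlFormatter
+
+# Minimal sketch: render a one-line snippet to HTML markup with CSS classes.
+code = 'print("Hello, Pygments!")'
+html = highlight(code, PythonLexer(), HtmlFormatter())
+print(html)
+
+# A matching stylesheet for the generated markup can be produced with:
+# HtmlFormatter().get_style_defs('.highlight')</pre>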
+<p>Read more in the <a class="reference internal" href="faq.html"><span class="doc">FAQ list</span></a> or the <a class="reference internal" href="docs/index.html"><span class="doc">documentation</span></a>,
+or <a class="reference external" href="http://pypi.python.org/pypi/Pygments">download the latest release</a>.</p>
+<div class="section" id="contribute">
+<span id="id1"></span><h2>Contribute<a class="headerlink" href="#contribute" title="Permalink to this headline">¶</a></h2>
+<p>Like every open-source project, we are always looking for volunteers to help us
+with programming. Python knowledge is required, but don’t fear: Python is a very
+clear and easy to learn language.</p>
+<p>Development takes place on <a class="reference external" href="https://github.com/pygments/pygments">GitHub</a>.</p>
+<p>If you find a bug, just open a ticket in the GitHub tracker. Be sure to log
+in to be notified when the issue is fixed – development is not fast-paced, as
+the library is quite stable.  You can also send an e-mail to the developers; see
+below.</p>
+</div>
+<div class="section" id="the-authors">
+<h2>The authors<a class="headerlink" href="#the-authors" title="Permalink to this headline">¶</a></h2>
+<p>Pygments is maintained by <strong>Georg Brandl</strong> (e-mail address <em>georg</em><em>&#64;</em><em>python.org</em>)
+and <strong>Matthäus Chajdas</strong>.</p>
+<p>Many lexers and fixes have been contributed by <strong>Armin Ronacher</strong>, the rest of
+the <a class="reference external" href="http://dev.pocoo.org/">Pocoo</a> team and <strong>Tim Hatch</strong>.</p>
+<div class="toctree-wrapper compound">
+</div>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/languages.html b/doc/_build/html/languages.html
new file mode 100644 (file)
index 0000000..279da8c
--- /dev/null
@@ -0,0 +1,301 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Supported languages &#8212; Pygments</title>
+    <link rel="stylesheet" href="_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="./" src="_static/documentation_options.js"></script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <script type="text/javascript" src="_static/language_data.js"></script>
+    <link rel="shortcut icon" href="_static/favicon.ico"/>
+    <link rel="index" title="Index" href="genindex.html" />
+    <link rel="search" title="Search" href="search.html" />
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="index.html">Home</a></li>
+    
+    <li><a href="#">Languages</a></li>
+    <li><a href="faq.html">FAQ</a></li>
+    <li><a href="download.html">Get it</a></li>
+    <li><a href="docs/index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="index.html">
+      <img src="_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+  <h3><a href="index.html">Table of Contents</a></h3>
+  <ul>
+<li><a class="reference internal" href="#">Supported languages</a><ul>
+<li><a class="reference internal" href="#programming-languages">Programming languages</a></li>
+<li><a class="reference internal" href="#template-languages">Template languages</a></li>
+<li><a class="reference internal" href="#other-markup">Other markup</a></li>
+<li><a class="reference internal" href="#that-s-all">… that’s all?</a></li>
+</ul>
+</li>
+</ul>
+
+  <div role="note" aria-label="source link">
+    <h3>This Page</h3>
+    <ul class="this-page-menu">
+      <li><a href="_sources/languages.rst.txt"
+            rel="nofollow">Show Source</a></li>
+    </ul>
+   </div>
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <div class="section" id="supported-languages">
+<h1>Supported languages<a class="headerlink" href="#supported-languages" title="Permalink to this headline">¶</a></h1>
+<p>Pygments supports an ever-growing range of languages. Watch this space…</p>
+<div class="section" id="programming-languages">
+<h2>Programming languages<a class="headerlink" href="#programming-languages" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><p>ActionScript</p></li>
+<li><p>Ada</p></li>
+<li><p>ANTLR</p></li>
+<li><p>AppleScript</p></li>
+<li><p>Assembly (various)</p></li>
+<li><p>Asymptote</p></li>
+<li><p><a class="reference external" href="http://augeas.net">Augeas</a></p></li>
+<li><p>Awk</p></li>
+<li><p>BBC Basic</p></li>
+<li><p>Befunge</p></li>
+<li><p><a class="reference external" href="http://boa.cs.iastate.edu/docs/index.php">Boa</a></p></li>
+<li><p>Boo</p></li>
+<li><p>BrainFuck</p></li>
+<li><p>C, C++</p></li>
+<li><p>C#</p></li>
+<li><p><a class="reference external" href="http://charmplusplus.org/">Charm++ CI</a></p></li>
+<li><p>Clojure</p></li>
+<li><p>CoffeeScript</p></li>
+<li><p>ColdFusion</p></li>
+<li><p>Common Lisp</p></li>
+<li><p>Coq</p></li>
+<li><p>Cryptol (incl. Literate Cryptol)</p></li>
+<li><p><a class="reference external" href="http://crystal-lang.org">Crystal</a></p></li>
+<li><p><a class="reference external" href="http://cython.org">Cython</a></p></li>
+<li><p><a class="reference external" href="http://dlang.org">D</a></p></li>
+<li><p>Dart</p></li>
+<li><p>DCPU-16</p></li>
+<li><p>Delphi</p></li>
+<li><p>Dylan</p></li>
+<li><p><a class="reference external" href="http://elm-lang.org/">Elm</a></p></li>
+<li><p>Email</p></li>
+<li><p>Erlang</p></li>
+<li><p><a class="reference external" href="http://ezhillang.org">Ezhil</a>, a Tamil programming language</p></li>
+<li><p>Factor</p></li>
+<li><p>Fancy</p></li>
+<li><p><a class="reference external" href="https://fennel-lang.org/">Fennel</a></p></li>
+<li><p><a class="reference external" href="http://ioflo.com/">FloScript</a></p></li>
+<li><p>Fortran</p></li>
+<li><p><a class="reference external" href="https://freefem.org/">FreeFEM++</a></p></li>
+<li><p>F#</p></li>
+<li><p>GAP</p></li>
+<li><p>Gherkin (Cucumber)</p></li>
+<li><p>GL shaders</p></li>
+<li><p>Groovy</p></li>
+<li><p><a class="reference external" href="http://www.haskell.org">Haskell</a> (incl. Literate Haskell)</p></li>
+<li><p>HLSL</p></li>
+<li><p><a class="reference external" href="http://hackage.haskell.org/package/hspec">HSpec</a></p></li>
+<li><p>IDL</p></li>
+<li><p>Io</p></li>
+<li><p>Java</p></li>
+<li><p>JavaScript</p></li>
+<li><p>Lasso</p></li>
+<li><p>LLVM</p></li>
+<li><p>Logtalk</p></li>
+<li><p><a class="reference external" href="http://www.lua.org">Lua</a></p></li>
+<li><p>Matlab</p></li>
+<li><p>MiniD</p></li>
+<li><p>Modelica</p></li>
+<li><p>Modula-2</p></li>
+<li><p>MuPad</p></li>
+<li><p>Nemerle</p></li>
+<li><p>Nimrod</p></li>
+<li><p>Notmuch</p></li>
+<li><p>Objective-C</p></li>
+<li><p>Objective-J</p></li>
+<li><p>Octave</p></li>
+<li><p>OCaml</p></li>
+<li><p>PHP</p></li>
+<li><p><a class="reference external" href="http://perl.org">Perl 5</a> and <a class="reference external" href="https://perl6.org">Perl 6</a></p></li>
+<li><p><a class="reference external" href="https://www.ponylang.io/">Pony</a></p></li>
+<li><p>PovRay</p></li>
+<li><p>PostScript</p></li>
+<li><p>PowerShell</p></li>
+<li><p>Prolog</p></li>
+<li><p><a class="reference external" href="http://www.python.org">Python</a> 2.x and 3.x (incl. console sessions and tracebacks)</p></li>
+<li><p><a class="reference external" href="http://www.rebol.com">REBOL</a></p></li>
+<li><p><a class="reference external" href="http://www.red-lang.org">Red</a></p></li>
+<li><p>Redcode</p></li>
+<li><p><a class="reference external" href="http://www.ruby-lang.org">Ruby</a> (incl. irb sessions)</p></li>
+<li><p>Rust</p></li>
+<li><p>S, S-Plus, R</p></li>
+<li><p>Scala</p></li>
+<li><p><a class="reference external" href="https://git.sr.ht/~sircmpwn/scdoc">Scdoc</a></p></li>
+<li><p>Scheme</p></li>
+<li><p>Scilab</p></li>
+<li><p><a class="reference external" href="https://www.red-bean.com/sgf/">SGF</a></p></li>
+<li><p><a class="reference external" href="https://github.com/arturadib/Slash-A">Slash</a></p></li>
+<li><p><a class="reference external" href="https://slurm.schedmd.com/overview.html">Slurm</a></p></li>
+<li><p>Smalltalk</p></li>
+<li><p>SNOBOL</p></li>
+<li><p><a class="reference external" href="https://solidity.readthedocs.io/">Solidity</a></p></li>
+<li><p>Tcl</p></li>
+<li><p><a class="reference external" href="https://ttssh2.osdn.jp/">Tera Term language</a></p></li>
+<li><p><a class="reference external" href="https://github.com/toml-lang/toml">TOML</a></p></li>
+<li><p>Vala</p></li>
+<li><p>Verilog</p></li>
+<li><p>VHDL</p></li>
+<li><p>Visual Basic.NET</p></li>
+<li><p>Visual FoxPro</p></li>
+<li><p>XQuery</p></li>
+<li><p><a class="reference external" href="https://www.zeek.org">Zeek</a></p></li>
+<li><p>Zephir</p></li>
+<li><p><a class="reference external" href="https://ziglang.org/">Zig</a></p></li>
+</ul>
+</div>
+<div class="section" id="template-languages">
+<h2>Template languages<a class="headerlink" href="#template-languages" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><p>Cheetah templates</p></li>
+<li><p><a class="reference external" href="http://www.djangoproject.com">Django</a> / <a class="reference external" href="http://jinja.pocoo.org/jinja">Jinja</a> templates</p></li>
+<li><p>ERB (Ruby templating)</p></li>
+<li><p><a class="reference external" href="http://genshi.edgewall.org">Genshi</a> (the Trac template language)</p></li>
+<li><p>JSP (Java Server Pages)</p></li>
+<li><p><a class="reference external" href="http://www.myghty.org">Myghty</a> (the HTML::Mason based framework)</p></li>
+<li><p><a class="reference external" href="http://www.makotemplates.org">Mako</a> (the Myghty successor)</p></li>
+<li><p><a class="reference external" href="http://www.smarty.net">Smarty</a> templates (PHP templating)</p></li>
+<li><p>Tea</p></li>
+</ul>
+</div>
+<div class="section" id="other-markup">
+<h2>Other markup<a class="headerlink" href="#other-markup" title="Permalink to this headline">¶</a></h2>
+<ul class="simple">
+<li><p>Apache config files</p></li>
+<li><p>Bash shell scripts</p></li>
+<li><p>BBCode</p></li>
+<li><p>CMake</p></li>
+<li><p>CSS</p></li>
+<li><p>Debian control files</p></li>
+<li><p>Diff files</p></li>
+<li><p>DTD</p></li>
+<li><p>Gettext catalogs</p></li>
+<li><p>Gnuplot script</p></li>
+<li><p>Groff markup</p></li>
+<li><p>HTML</p></li>
+<li><p>HTTP sessions</p></li>
+<li><p>INI-style config files</p></li>
+<li><p>IRC logs (irssi style)</p></li>
+<li><p>Lighttpd config files</p></li>
+<li><p>Makefiles</p></li>
+<li><p>MoinMoin/Trac Wiki markup</p></li>
+<li><p>MySQL</p></li>
+<li><p>Nginx config files</p></li>
+<li><p>POV-Ray scenes</p></li>
+<li><p>Ragel</p></li>
+<li><p>Redcode</p></li>
+<li><p>ReST</p></li>
+<li><p>Robot Framework</p></li>
+<li><p>RPM spec files</p></li>
+<li><p>SQL, also MySQL, SQLite</p></li>
+<li><p>Squid configuration</p></li>
+<li><p>TeX</p></li>
+<li><p>tcsh</p></li>
+<li><p>Vim Script</p></li>
+<li><p>Windows batch files</p></li>
+<li><p>XML</p></li>
+<li><p>XSLT</p></li>
+<li><p>YAML</p></li>
+</ul>
+</div>
+<div class="section" id="that-s-all">
+<h2>… that’s all?<a class="headerlink" href="#that-s-all" title="Permalink to this headline">¶</a></h2>
+<p>Well, why not write your own? Contributing to Pygments is easy and fun.  Take a
+look at the <a class="reference internal" href="docs/lexerdevelopment.html"><span class="doc">docs on lexer development</span></a>.  Pull
+requests are welcome on <a class="reference external" href="https://github.com/pygments/pygments">GitHub</a>.</p>
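+<p>To give a flavour of what that involves, below is a deliberately small
+<code>RegexLexer</code> sketch.  The language, class name, aliases and token rules are
+made up for illustration and do not correspond to an existing Pygments lexer;
+only <code>RegexLexer</code> and the token types come from Pygments.</p>
+<pre>from pygments.lexer import RegexLexer
+from pygments.token import Comment, Keyword, Name, Punctuation, String, Text
+
+class MyConfigLexer(RegexLexer):
+    """Illustrative lexer for a made-up INI-like configuration format."""
+    name = 'MyConfig'
+    aliases = ['myconfig']
+    filenames = ['*.mycfg']
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),                     # whitespace
+            (r'#[^\n]*', Comment.Single),       # line comments
+            (r'\b(true|false)\b', Keyword.Constant),
+            (r'"[^"]*"', String.Double),        # quoted values
+            (r'[=\[\],]', Punctuation),
+            (r'[^\s=\[\],#"]+', Name),          # bare keys and values
+            (r'.', Text),                       # fallback so nothing becomes an Error token
+        ],
+    }</pre>
+<p>Hooking such a class into Pygments (for example as a plugin) is described in
+the linked lexer development documentation.</p>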
+<p>Note: the languages listed here are supported in the development version. The
+latest release may lack a few of them.</p>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/objects.inv b/doc/_build/html/objects.inv
new file mode 100644 (file)
index 0000000..63b10ba
Binary files /dev/null and b/doc/_build/html/objects.inv differ
diff --git a/doc/_build/html/py-modindex.html b/doc/_build/html/py-modindex.html
new file mode 100644 (file)
index 0000000..60a287e
--- /dev/null
@@ -0,0 +1,820 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Python Module Index &#8212; Pygments</title>
+    <link rel="stylesheet" href="_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    <script type="text/javascript" id="documentation_options" data-url_root="./" src="_static/documentation_options.js"></script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <script type="text/javascript" src="_static/language_data.js"></script>
+    <link rel="shortcut icon" href="_static/favicon.ico"/>
+    <link rel="index" title="Index" href="genindex.html" />
+    <link rel="search" title="Search" href="search.html" />
+
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="index.html">Home</a></li>
+    
+    <li><a href="languages.html">Languages</a></li>
+    <li><a href="faq.html">FAQ</a></li>
+    <li><a href="download.html">Get it</a></li>
+    <li><a href="docs/index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="index.html">
+      <img src="_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+<div id="searchbox" style="display: none" role="search">
+  <h3 id="searchlabel">Quick search</h3>
+    <div class="searchformwrapper">
+    <form class="search" action="search.html" method="get">
+      <input type="text" name="q" aria-labelledby="searchlabel" />
+      <input type="submit" value="Go" />
+    </form>
+    </div>
+</div>
+<script type="text/javascript">$('#searchbox').show(0);</script>
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+
+   <h1>Python Module Index</h1>
+
+   <div class="modindex-jumpbox">
+   <a href="#cap-p"><strong>p</strong></a>
+   </div>
+
+   <table class="indextable modindextable">
+     <tr class="pcap"><td></td><td>&#160;</td><td></td></tr>
+     <tr class="cap" id="cap-p"><td></td><td>
+       <strong>p</strong></td><td></td></tr>
+     <tr>
+       <td><img src="_static/minus.png" class="toggler"
+              id="toggle-1" style="display: none" alt="-" /></td>
+       <td>
+       <a href="docs/api.html#module-pygments"><code class="xref">pygments</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/api.html#module-pygments.formatter"><code class="xref">pygments.formatter</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/api.html#module-pygments.formatters"><code class="xref">pygments.formatters</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/api.html#module-pygments.lexer"><code class="xref">pygments.lexer</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/api.html#module-pygments.lexers"><code class="xref">pygments.lexers</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.actionscript"><code class="xref">pygments.lexers.actionscript</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.algebra"><code class="xref">pygments.lexers.algebra</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.ambient"><code class="xref">pygments.lexers.ambient</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.ampl"><code class="xref">pygments.lexers.ampl</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.apl"><code class="xref">pygments.lexers.apl</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.archetype"><code class="xref">pygments.lexers.archetype</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.asm"><code class="xref">pygments.lexers.asm</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.automation"><code class="xref">pygments.lexers.automation</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.basic"><code class="xref">pygments.lexers.basic</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.bibtex"><code class="xref">pygments.lexers.bibtex</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.boa"><code class="xref">pygments.lexers.boa</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.business"><code class="xref">pygments.lexers.business</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.c_cpp"><code class="xref">pygments.lexers.c_cpp</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.c_like"><code class="xref">pygments.lexers.c_like</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.capnproto"><code class="xref">pygments.lexers.capnproto</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.chapel"><code class="xref">pygments.lexers.chapel</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.clean"><code class="xref">pygments.lexers.clean</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.configs"><code class="xref">pygments.lexers.configs</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.console"><code class="xref">pygments.lexers.console</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.crystal"><code class="xref">pygments.lexers.crystal</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.csound"><code class="xref">pygments.lexers.csound</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.css"><code class="xref">pygments.lexers.css</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.d"><code class="xref">pygments.lexers.d</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.dalvik"><code class="xref">pygments.lexers.dalvik</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.data"><code class="xref">pygments.lexers.data</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.diff"><code class="xref">pygments.lexers.diff</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.dotnet"><code class="xref">pygments.lexers.dotnet</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.dsls"><code class="xref">pygments.lexers.dsls</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.dylan"><code class="xref">pygments.lexers.dylan</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.ecl"><code class="xref">pygments.lexers.ecl</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.eiffel"><code class="xref">pygments.lexers.eiffel</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.elm"><code class="xref">pygments.lexers.elm</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.email"><code class="xref">pygments.lexers.email</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.erlang"><code class="xref">pygments.lexers.erlang</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.esoteric"><code class="xref">pygments.lexers.esoteric</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.ezhil"><code class="xref">pygments.lexers.ezhil</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.factor"><code class="xref">pygments.lexers.factor</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.fantom"><code class="xref">pygments.lexers.fantom</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.felix"><code class="xref">pygments.lexers.felix</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.floscript"><code class="xref">pygments.lexers.floscript</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.forth"><code class="xref">pygments.lexers.forth</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.fortran"><code class="xref">pygments.lexers.fortran</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.foxpro"><code class="xref">pygments.lexers.foxpro</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.freefem"><code class="xref">pygments.lexers.freefem</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.go"><code class="xref">pygments.lexers.go</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.grammar_notation"><code class="xref">pygments.lexers.grammar_notation</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.graph"><code class="xref">pygments.lexers.graph</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.graphics"><code class="xref">pygments.lexers.graphics</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.haskell"><code class="xref">pygments.lexers.haskell</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.haxe"><code class="xref">pygments.lexers.haxe</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.hdl"><code class="xref">pygments.lexers.hdl</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.hexdump"><code class="xref">pygments.lexers.hexdump</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.html"><code class="xref">pygments.lexers.html</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.idl"><code class="xref">pygments.lexers.idl</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.igor"><code class="xref">pygments.lexers.igor</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.inferno"><code class="xref">pygments.lexers.inferno</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.installers"><code class="xref">pygments.lexers.installers</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.int_fiction"><code class="xref">pygments.lexers.int_fiction</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.iolang"><code class="xref">pygments.lexers.iolang</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.j"><code class="xref">pygments.lexers.j</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.javascript"><code class="xref">pygments.lexers.javascript</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.julia"><code class="xref">pygments.lexers.julia</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.jvm"><code class="xref">pygments.lexers.jvm</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.lisp"><code class="xref">pygments.lexers.lisp</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.make"><code class="xref">pygments.lexers.make</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.markup"><code class="xref">pygments.lexers.markup</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.matlab"><code class="xref">pygments.lexers.matlab</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.mime"><code class="xref">pygments.lexers.mime</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.ml"><code class="xref">pygments.lexers.ml</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.modeling"><code class="xref">pygments.lexers.modeling</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.modula2"><code class="xref">pygments.lexers.modula2</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.monte"><code class="xref">pygments.lexers.monte</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.ncl"><code class="xref">pygments.lexers.ncl</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.nimrod"><code class="xref">pygments.lexers.nimrod</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.nit"><code class="xref">pygments.lexers.nit</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.nix"><code class="xref">pygments.lexers.nix</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.oberon"><code class="xref">pygments.lexers.oberon</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.objective"><code class="xref">pygments.lexers.objective</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.ooc"><code class="xref">pygments.lexers.ooc</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.parasail"><code class="xref">pygments.lexers.parasail</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.parsers"><code class="xref">pygments.lexers.parsers</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.pascal"><code class="xref">pygments.lexers.pascal</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.pawn"><code class="xref">pygments.lexers.pawn</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.perl"><code class="xref">pygments.lexers.perl</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.php"><code class="xref">pygments.lexers.php</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.pony"><code class="xref">pygments.lexers.pony</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.praat"><code class="xref">pygments.lexers.praat</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.prolog"><code class="xref">pygments.lexers.prolog</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.python"><code class="xref">pygments.lexers.python</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.qvt"><code class="xref">pygments.lexers.qvt</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.r"><code class="xref">pygments.lexers.r</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.rdf"><code class="xref">pygments.lexers.rdf</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.rebol"><code class="xref">pygments.lexers.rebol</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.resource"><code class="xref">pygments.lexers.resource</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.rnc"><code class="xref">pygments.lexers.rnc</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.roboconf"><code class="xref">pygments.lexers.roboconf</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.robotframework"><code class="xref">pygments.lexers.robotframework</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.ruby"><code class="xref">pygments.lexers.ruby</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.rust"><code class="xref">pygments.lexers.rust</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.sas"><code class="xref">pygments.lexers.sas</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.scdoc"><code class="xref">pygments.lexers.scdoc</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.scripting"><code class="xref">pygments.lexers.scripting</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.sgf"><code class="xref">pygments.lexers.sgf</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.shell"><code class="xref">pygments.lexers.shell</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.slash"><code class="xref">pygments.lexers.slash</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.smalltalk"><code class="xref">pygments.lexers.smalltalk</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.smv"><code class="xref">pygments.lexers.smv</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.snobol"><code class="xref">pygments.lexers.snobol</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.solidity"><code class="xref">pygments.lexers.solidity</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.special"><code class="xref">pygments.lexers.special</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.sql"><code class="xref">pygments.lexers.sql</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.stata"><code class="xref">pygments.lexers.stata</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.supercollider"><code class="xref">pygments.lexers.supercollider</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.tcl"><code class="xref">pygments.lexers.tcl</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.templates"><code class="xref">pygments.lexers.templates</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.teraterm"><code class="xref">pygments.lexers.teraterm</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.testing"><code class="xref">pygments.lexers.testing</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.textedit"><code class="xref">pygments.lexers.textedit</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.textfmts"><code class="xref">pygments.lexers.textfmts</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.theorem"><code class="xref">pygments.lexers.theorem</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.trafficscript"><code class="xref">pygments.lexers.trafficscript</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.typoscript"><code class="xref">pygments.lexers.typoscript</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.unicon"><code class="xref">pygments.lexers.unicon</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.urbi"><code class="xref">pygments.lexers.urbi</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.varnish"><code class="xref">pygments.lexers.varnish</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.verification"><code class="xref">pygments.lexers.verification</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.webmisc"><code class="xref">pygments.lexers.webmisc</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.whiley"><code class="xref">pygments.lexers.whiley</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.x10"><code class="xref">pygments.lexers.x10</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.xorg"><code class="xref">pygments.lexers.xorg</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/lexers.html#module-pygments.lexers.zig"><code class="xref">pygments.lexers.zig</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/api.html#module-pygments.styles"><code class="xref">pygments.styles</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/tokens.html#module-pygments.token"><code class="xref">pygments.token</code></a></td><td>
+       <em></em></td></tr>
+     <tr class="cg-1">
+       <td></td>
+       <td>&#160;&#160;&#160;
+       <a href="docs/api.html#module-pygments.util"><code class="xref">pygments.util</code></a></td><td>
+       <em></em></td></tr>
+   </table>
+
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/search.html b/doc/_build/html/search.html
new file mode 100644 (file)
index 0000000..ab4abe5
--- /dev/null
@@ -0,0 +1,126 @@
+
+<!DOCTYPE html>
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta charset="utf-8" />
+    <title>Search &#8212; Pygments</title>
+    <link rel="stylesheet" href="_static/pygments14.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    
+    <script type="text/javascript" id="documentation_options" data-url_root="./" src="_static/documentation_options.js"></script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <script type="text/javascript" src="_static/language_data.js"></script>
+    <script type="text/javascript" src="_static/searchtools.js"></script>
+    <link rel="shortcut icon" href="_static/favicon.ico"/>
+    <link rel="index" title="Index" href="genindex.html" />
+    <link rel="search" title="Search" href="#" />
+  <script type="text/javascript" src="searchindex.js" defer></script>
+  
+    <link href='http://fonts.googleapis.com/css?family=PT+Sans:300,400,700'
+          rel='stylesheet' type='text/css'>
+    <style type="text/css">
+      table.right { float: right; margin-left: 20px; }
+      table.right td { border: 1px solid #ccc; }
+      
+    </style>
+    <script type="text/javascript">
+      // intelligent scrolling of the sidebar content
+      $(window).scroll(function() {
+        var sb = $('.sphinxsidebarwrapper');
+        var win = $(window);
+        var sbh = sb.height();
+        var offset = $('.sphinxsidebar').position()['top'];
+        var wintop = win.scrollTop();
+        var winbot = wintop + win.innerHeight();
+        var curtop = sb.position()['top'];
+        var curbot = curtop + sbh;
+        // does sidebar fit in window?
+        if (sbh < win.innerHeight()) {
+          // yes: easy case -- always keep at the top
+          sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+                                $(document).height() - sbh - 200]));
+        } else {
+          // no: only scroll if top/bottom edge of sidebar is at
+          // top/bottom edge of window
+          if (curtop > wintop && curbot > winbot) {
+            sb.css('top', $u.max([wintop - offset - 10, 0]));
+          } else if (curtop < wintop && curbot < winbot) {
+            sb.css('top', $u.min([winbot - sbh - offset - 20,
+                                  $(document).height() - sbh - 200]));
+          }
+        }
+      });
+    </script>
+
+
+  </head><body>
+<div class="outerwrapper">
+<div class="pageheader">
+  <ul>
+    <li><a href="index.html">Home</a></li>
+    
+    <li><a href="languages.html">Languages</a></li>
+    <li><a href="faq.html">FAQ</a></li>
+    <li><a href="download.html">Get it</a></li>
+    <li><a href="docs/index.html">Docs</a></li>
+  </ul>
+  <div>
+    <a href="index.html">
+      <img src="_static/logo.png" alt="Pygments logo" />
+    </a>
+  </div>
+</div>
+
+      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+        <div class="sphinxsidebarwrapper">
+        </div>
+      </div>
+
+    <div class="document">
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body" role="main">
+            
+  <h1 id="search-documentation">Search</h1>
+  <div id="fallback" class="admonition warning">
+  <script type="text/javascript">$('#fallback').hide();</script>
+  <p>
+    Please activate JavaScript to enable the search
+    functionality.
+  </p>
+  </div>
+  <p>
+    From here you can search these documents. Enter your search
+    words into the box below and click "search". Note that the search
+    function will automatically search for all of the words. Pages
+    containing fewer words won't appear in the result list.
+  </p>
+  <form action="" method="get">
+    <input type="text" name="q" aria-labelledby="search-documentation" value="" />
+    <input type="submit" value="search" />
+    <span id="search-progress" style="padding-left: 10px"></span>
+  </form>
+  
+  <div id="search-results">
+  
+  </div>
+
+          </div>
+        </div>
+      </div>
+      <div class="clearer"></div>
+    </div>
+    <div class="footer" role="contentinfo">
+      &copy; Copyright 2006-2019, Georg Brandl and Pygments contributors.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.2.1. <br/>
+      Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+      Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+    </div>
+  </div> 
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/doc/_build/html/searchindex.js b/doc/_build/html/searchindex.js
new file mode 100644 (file)
index 0000000..bba49a9
--- /dev/null
@@ -0,0 +1 @@
+Search.setIndex({docnames:["docs/api","docs/authors","docs/changelog","docs/cmdline","docs/filterdevelopment","docs/filters","docs/formatterdevelopment","docs/formatters","docs/index","docs/integrate","docs/java","docs/lexerdevelopment","docs/lexers","docs/moinmoin","docs/plugins","docs/quickstart","docs/rstdirective","docs/styles","docs/tokens","docs/unicode","download","faq","index","languages"],envversion:{"sphinx.domains.c":1,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":1,"sphinx.domains.javascript":1,"sphinx.domains.math":2,"sphinx.domains.python":1,"sphinx.domains.rst":1,"sphinx.domains.std":1,"sphinx.ext.intersphinx":1,sphinx:56},filenames:["docs/api.rst","docs/authors.rst","docs/changelog.rst","docs/cmdline.rst","docs/filterdevelopment.rst","docs/filters.rst","docs/formatterdevelopment.rst","docs/formatters.rst","docs/index.rst","docs/integrate.rst","docs/java.rst","docs/lexerdevelopment.rst","docs/lexers.rst","docs/moinmoin.rst","docs/plugins.rst","docs/quickstart.rst","docs/rstdirective.rst","docs/styles.rst","docs/tokens.rst","docs/unicode.rst","download.rst","faq.rst","index.rst","languages.rst"],objects:{"":{BBCodeFormatter:[7,0,1,""],BmpImageFormatter:[7,0,1,""],CodeTagFilter:[5,0,1,""],GifImageFormatter:[7,0,1,""],GobbleFilter:[5,0,1,""],HtmlFormatter:[7,0,1,""],IRCFormatter:[7,0,1,""],ImageFormatter:[7,0,1,""],JpgImageFormatter:[7,0,1,""],KeywordCaseFilter:[5,0,1,""],LatexFormatter:[7,0,1,""],NameHighlightFilter:[5,0,1,""],NullFormatter:[7,0,1,""],RaiseOnErrorTokenFilter:[5,0,1,""],RawTokenFormatter:[7,0,1,""],RtfFormatter:[7,0,1,""],SvgFormatter:[7,0,1,""],Terminal256Formatter:[7,0,1,""],TerminalFormatter:[7,0,1,""],TerminalTrueColorFormatter:[7,0,1,""],TestcaseFormatter:[7,0,1,""],TokenMergeFilter:[5,0,1,""],VisibleWhitespaceFilter:[5,0,1,""],pygments:[0,1,0,"-"]},"pygments.formatter":{Formatter:[0,0,1,""]},"pygments.formatter.Formatter":{aliases:[0,3,1,""],filenames:[0,3,1,""],format:[0,4,1,""],get_style_defs:[0,4,1,""],name:[0,3,1,""]},"pygments.formatters":{get_formatter_by_name:[0,2,1,""],get_formatter_for_filename:[0,2,1,""],load_formatter_from_file:[0,2,1,""]},"pygments.lexer":{Lexer:[0,0,1,""]},"pygments.lexer.Lexer":{alias_filenames:[0,3,1,""],aliases:[0,3,1,""],analyse_text:[0,4,1,""],filenames:[0,3,1,""],get_tokens:[0,4,1,""],get_tokens_unprocessed:[0,4,1,""],mimetypes:[0,3,1,""],name:[0,3,1,""]},"pygments.lexers":{actionscript:[12,1,0,"-"],algebra:[12,1,0,"-"],ambient:[12,1,0,"-"],ampl:[12,1,0,"-"],apl:[12,1,0,"-"],archetype:[12,1,0,"-"],asm:[12,1,0,"-"],automation:[12,1,0,"-"],basic:[12,1,0,"-"],bibtex:[12,1,0,"-"],boa:[12,1,0,"-"],business:[12,1,0,"-"],c_cpp:[12,1,0,"-"],c_like:[12,1,0,"-"],capnproto:[12,1,0,"-"],chapel:[12,1,0,"-"],clean:[12,1,0,"-"],configs:[12,1,0,"-"],console:[12,1,0,"-"],crystal:[12,1,0,"-"],csound:[12,1,0,"-"],css:[12,1,0,"-"],d:[12,1,0,"-"],dalvik:[12,1,0,"-"],data:[12,1,0,"-"],diff:[12,1,0,"-"],dotnet:[12,1,0,"-"],dsls:[12,1,0,"-"],dylan:[12,1,0,"-"],ecl:[12,1,0,"-"],eiffel:[12,1,0,"-"],elm:[12,1,0,"-"],email:[12,1,0,"-"],erlang:[12,1,0,"-"],esoteric:[12,1,0,"-"],ezhil:[12,1,0,"-"],factor:[12,1,0,"-"],fantom:[12,1,0,"-"],felix:[12,1,0,"-"],find_lexer_class:[0,2,1,""],find_lexer_class_by_name:[0,2,1,""],floscript:[12,1,0,"-"],forth:[12,1,0,"-"],fortran:[12,1,0,"-"],foxpro:[12,1,0,"-"],freefem:[12,1,0,"-"],get_all_lexers:[0,2,1,""],get_lexer_by_name:[0,2,1,""],get_lexer_for_filename:[0,2,1,""],get_lexer_for_mimetype:[0,2,1,""],go:[12,1,0,"-"],grammar_notation:[12,1,0,"-"],graph:[12,1,0,"-"],graphics:[12,
1,0,"-"],guess_lexer:[0,2,1,""],guess_lexer_for_filename:[0,2,1,""],haskell:[12,1,0,"-"],haxe:[12,1,0,"-"],hdl:[12,1,0,"-"],hexdump:[12,1,0,"-"],html:[12,1,0,"-"],idl:[12,1,0,"-"],igor:[12,1,0,"-"],inferno:[12,1,0,"-"],installers:[12,1,0,"-"],int_fiction:[12,1,0,"-"],iolang:[12,1,0,"-"],j:[12,1,0,"-"],javascript:[12,1,0,"-"],julia:[12,1,0,"-"],jvm:[12,1,0,"-"],lisp:[12,1,0,"-"],load_lexer_from_file:[0,2,1,""],make:[12,1,0,"-"],markup:[12,1,0,"-"],matlab:[12,1,0,"-"],mime:[12,1,0,"-"],ml:[12,1,0,"-"],modeling:[12,1,0,"-"],modula2:[12,1,0,"-"],monte:[12,1,0,"-"],ncl:[12,1,0,"-"],nimrod:[12,1,0,"-"],nit:[12,1,0,"-"],nix:[12,1,0,"-"],oberon:[12,1,0,"-"],objective:[12,1,0,"-"],ooc:[12,1,0,"-"],parasail:[12,1,0,"-"],parsers:[12,1,0,"-"],pascal:[12,1,0,"-"],pawn:[12,1,0,"-"],perl:[12,1,0,"-"],php:[12,1,0,"-"],pony:[12,1,0,"-"],praat:[12,1,0,"-"],prolog:[12,1,0,"-"],python:[12,1,0,"-"],qvt:[12,1,0,"-"],r:[12,1,0,"-"],rdf:[12,1,0,"-"],rebol:[12,1,0,"-"],resource:[12,1,0,"-"],rnc:[12,1,0,"-"],roboconf:[12,1,0,"-"],robotframework:[12,1,0,"-"],ruby:[12,1,0,"-"],rust:[12,1,0,"-"],sas:[12,1,0,"-"],scdoc:[12,1,0,"-"],scripting:[12,1,0,"-"],sgf:[12,1,0,"-"],shell:[12,1,0,"-"],slash:[12,1,0,"-"],smalltalk:[12,1,0,"-"],smv:[12,1,0,"-"],snobol:[12,1,0,"-"],solidity:[12,1,0,"-"],special:[12,1,0,"-"],sql:[12,1,0,"-"],stata:[12,1,0,"-"],supercollider:[12,1,0,"-"],tcl:[12,1,0,"-"],templates:[12,1,0,"-"],teraterm:[12,1,0,"-"],testing:[12,1,0,"-"],textedit:[12,1,0,"-"],textfmts:[12,1,0,"-"],theorem:[12,1,0,"-"],trafficscript:[12,1,0,"-"],typoscript:[12,1,0,"-"],unicon:[12,1,0,"-"],urbi:[12,1,0,"-"],varnish:[12,1,0,"-"],verification:[12,1,0,"-"],webmisc:[12,1,0,"-"],whiley:[12,1,0,"-"],x10:[12,1,0,"-"],xorg:[12,1,0,"-"],zig:[12,1,0,"-"]},"pygments.lexers.actionscript":{ActionScript3Lexer:[12,0,1,""],ActionScriptLexer:[12,0,1,""],MxmlLexer:[12,0,1,""]},"pygments.lexers.algebra":{BCLexer:[12,0,1,""],GAPLexer:[12,0,1,""],MathematicaLexer:[12,0,1,""],MuPADLexer:[12,0,1,""]},"pygments.lexers.ambient":{AmbientTalkLexer:[12,0,1,""]},"pygments.lexers.ampl":{AmplLexer:[12,0,1,""]},"pygments.lexers.apl":{APLLexer:[12,0,1,""]},"pygments.lexers.archetype":{AdlLexer:[12,0,1,""],CadlLexer:[12,0,1,""],OdinLexer:[12,0,1,""]},"pygments.lexers.asm":{CObjdumpLexer:[12,0,1,""],Ca65Lexer:[12,0,1,""],CppObjdumpLexer:[12,0,1,""],DObjdumpLexer:[12,0,1,""],Dasm16Lexer:[12,0,1,""],GasLexer:[12,0,1,""],HsailLexer:[12,0,1,""],LlvmLexer:[12,0,1,""],NasmLexer:[12,0,1,""],NasmObjdumpLexer:[12,0,1,""],ObjdumpLexer:[12,0,1,""],TasmLexer:[12,0,1,""]},"pygments.lexers.automation":{AutoItLexer:[12,0,1,""],AutohotkeyLexer:[12,0,1,""]},"pygments.lexers.basic":{BBCBasicLexer:[12,0,1,""],BlitzBasicLexer:[12,0,1,""],BlitzMaxLexer:[12,0,1,""],CbmBasicV2Lexer:[12,0,1,""],MonkeyLexer:[12,0,1,""],QBasicLexer:[12,0,1,""],VBScriptLexer:[12,0,1,""]},"pygments.lexers.bibtex":{BSTLexer:[12,0,1,""],BibTeXLexer:[12,0,1,""]},"pygments.lexers.boa":{BoaLexer:[12,0,1,""]},"pygments.lexers.business":{ABAPLexer:[12,0,1,""],CobolFreeformatLexer:[12,0,1,""],CobolLexer:[12,0,1,""],GoodDataCLLexer:[12,0,1,""],MaqlLexer:[12,0,1,""],OpenEdgeLexer:[12,0,1,""]},"pygments.lexers.c_cpp":{CLexer:[12,0,1,""],CppLexer:[12,0,1,""]},"pygments.lexers.c_like":{ArduinoLexer:[12,0,1,""],CharmciLexer:[12,0,1,""],ClayLexer:[12,0,1,""],CudaLexer:[12,0,1,""],ECLexer:[12,0,1,""],MqlLexer:[12,0,1,""],NesCLexer:[12,0,1,""],PikeLexer:[12,0,1,""],SwigLexer:[12,0,1,""],ValaLexer:[12,0,1,""]},"pygments.lexers.capnproto":{CapnProtoLexer:[12,0,1,""]},"pygments.lexers.chapel":{ChapelLexer:[12,0,1,""]},"py
gments.lexers.clean":{CleanLexer:[12,0,1,""]},"pygments.lexers.configs":{ApacheConfLexer:[12,0,1,""],AugeasLexer:[12,0,1,""],Cfengine3Lexer:[12,0,1,""],DockerLexer:[12,0,1,""],IniLexer:[12,0,1,""],KconfigLexer:[12,0,1,""],LighttpdConfLexer:[12,0,1,""],NginxConfLexer:[12,0,1,""],PacmanConfLexer:[12,0,1,""],PkgConfigLexer:[12,0,1,""],PropertiesLexer:[12,0,1,""],RegeditLexer:[12,0,1,""],SquidConfLexer:[12,0,1,""],TOMLLexer:[12,0,1,""],TermcapLexer:[12,0,1,""],TerminfoLexer:[12,0,1,""],TerraformLexer:[12,0,1,""]},"pygments.lexers.console":{PyPyLogLexer:[12,0,1,""],VCTreeStatusLexer:[12,0,1,""]},"pygments.lexers.crystal":{CrystalLexer:[12,0,1,""]},"pygments.lexers.csound":{CsoundDocumentLexer:[12,0,1,""],CsoundOrchestraLexer:[12,0,1,""],CsoundScoreLexer:[12,0,1,""]},"pygments.lexers.css":{CssLexer:[12,0,1,""],LessCssLexer:[12,0,1,""],SassLexer:[12,0,1,""],ScssLexer:[12,0,1,""]},"pygments.lexers.d":{CrocLexer:[12,0,1,""],DLexer:[12,0,1,""],MiniDLexer:[12,0,1,""]},"pygments.lexers.dalvik":{SmaliLexer:[12,0,1,""]},"pygments.lexers.data":{JsonBareObjectLexer:[12,0,1,""],JsonLdLexer:[12,0,1,""],JsonLexer:[12,0,1,""],YamlLexer:[12,0,1,""]},"pygments.lexers.diff":{DarcsPatchLexer:[12,0,1,""],DiffLexer:[12,0,1,""],WDiffLexer:[12,0,1,""]},"pygments.lexers.dotnet":{BooLexer:[12,0,1,""],CSharpAspxLexer:[12,0,1,""],CSharpLexer:[12,0,1,""],FSharpLexer:[12,0,1,""],NemerleLexer:[12,0,1,""],VbNetAspxLexer:[12,0,1,""],VbNetLexer:[12,0,1,""]},"pygments.lexers.dsls":{AlloyLexer:[12,0,1,""],CrmshLexer:[12,0,1,""],FlatlineLexer:[12,0,1,""],MscgenLexer:[12,0,1,""],PanLexer:[12,0,1,""],ProtoBufLexer:[12,0,1,""],PuppetLexer:[12,0,1,""],RslLexer:[12,0,1,""],SnowballLexer:[12,0,1,""],ThriftLexer:[12,0,1,""],VGLLexer:[12,0,1,""],ZeekLexer:[12,0,1,""]},"pygments.lexers.dylan":{DylanConsoleLexer:[12,0,1,""],DylanLexer:[12,0,1,""],DylanLidLexer:[12,0,1,""]},"pygments.lexers.ecl":{ECLLexer:[12,0,1,""]},"pygments.lexers.eiffel":{EiffelLexer:[12,0,1,""]},"pygments.lexers.elm":{ElmLexer:[12,0,1,""]},"pygments.lexers.email":{EmailLexer:[12,0,1,""]},"pygments.lexers.erlang":{ElixirConsoleLexer:[12,0,1,""],ElixirLexer:[12,0,1,""],ErlangLexer:[12,0,1,""],ErlangShellLexer:[12,0,1,""]},"pygments.lexers.esoteric":{AheuiLexer:[12,0,1,""],BefungeLexer:[12,0,1,""],BrainfuckLexer:[12,0,1,""],CAmkESLexer:[12,0,1,""],CapDLLexer:[12,0,1,""],RedcodeLexer:[12,0,1,""]},"pygments.lexers.ezhil":{EzhilLexer:[12,0,1,""]},"pygments.lexers.factor":{FactorLexer:[12,0,1,""]},"pygments.lexers.fantom":{FantomLexer:[12,0,1,""]},"pygments.lexers.felix":{FelixLexer:[12,0,1,""]},"pygments.lexers.floscript":{FloScriptLexer:[12,0,1,""]},"pygments.lexers.forth":{ForthLexer:[12,0,1,""]},"pygments.lexers.fortran":{FortranFixedLexer:[12,0,1,""],FortranLexer:[12,0,1,""]},"pygments.lexers.foxpro":{FoxProLexer:[12,0,1,""]},"pygments.lexers.freefem":{FreeFemLexer:[12,0,1,""]},"pygments.lexers.go":{GoLexer:[12,0,1,""]},"pygments.lexers.grammar_notation":{AbnfLexer:[12,0,1,""],BnfLexer:[12,0,1,""],JsgfLexer:[12,0,1,""]},"pygments.lexers.graph":{CypherLexer:[12,0,1,""]},"pygments.lexers.graphics":{AsymptoteLexer:[12,0,1,""],GLShaderLexer:[12,0,1,""],GnuplotLexer:[12,0,1,""],HLSLShaderLexer:[12,0,1,""],PostScriptLexer:[12,0,1,""],PovrayLexer:[12,0,1,""]},"pygments.lexers.haskell":{AgdaLexer:[12,0,1,""],CryptolLexer:[12,0,1,""],HaskellLexer:[12,0,1,""],HspecLexer:[12,0,1,""],IdrisLexer:[12,0,1,""],KokaLexer:[12,0,1,""],LiterateAgdaLexer:[12,0,1,""],LiterateCryptolLexer:[12,0,1,""],LiterateHaskellLexer:[12,0,1,""],LiterateIdrisLexer:[12,0,1,""]},"pygments.lexers.haxe":{HaxeL
exer:[12,0,1,""],HxmlLexer:[12,0,1,""]},"pygments.lexers.hdl":{SystemVerilogLexer:[12,0,1,""],VerilogLexer:[12,0,1,""],VhdlLexer:[12,0,1,""]},"pygments.lexers.hexdump":{HexdumpLexer:[12,0,1,""]},"pygments.lexers.html":{DtdLexer:[12,0,1,""],HamlLexer:[12,0,1,""],HtmlLexer:[12,0,1,""],PugLexer:[12,0,1,""],ScamlLexer:[12,0,1,""],XmlLexer:[12,0,1,""],XsltLexer:[12,0,1,""]},"pygments.lexers.idl":{IDLLexer:[12,0,1,""]},"pygments.lexers.igor":{IgorLexer:[12,0,1,""]},"pygments.lexers.inferno":{LimboLexer:[12,0,1,""]},"pygments.lexers.installers":{DebianControlLexer:[12,0,1,""],NSISLexer:[12,0,1,""],RPMSpecLexer:[12,0,1,""],SourcesListLexer:[12,0,1,""]},"pygments.lexers.int_fiction":{Inform6Lexer:[12,0,1,""],Inform6TemplateLexer:[12,0,1,""],Inform7Lexer:[12,0,1,""],Tads3Lexer:[12,0,1,""]},"pygments.lexers.iolang":{IoLexer:[12,0,1,""]},"pygments.lexers.j":{JLexer:[12,0,1,""]},"pygments.lexers.javascript":{CoffeeScriptLexer:[12,0,1,""],DartLexer:[12,0,1,""],EarlGreyLexer:[12,0,1,""],JavascriptLexer:[12,0,1,""],JuttleLexer:[12,0,1,""],KalLexer:[12,0,1,""],LassoLexer:[12,0,1,""],LiveScriptLexer:[12,0,1,""],MaskLexer:[12,0,1,""],ObjectiveJLexer:[12,0,1,""],TypeScriptLexer:[12,0,1,""]},"pygments.lexers.julia":{JuliaConsoleLexer:[12,0,1,""],JuliaLexer:[12,0,1,""]},"pygments.lexers.jvm":{AspectJLexer:[12,0,1,""],CeylonLexer:[12,0,1,""],ClojureLexer:[12,0,1,""],ClojureScriptLexer:[12,0,1,""],GoloLexer:[12,0,1,""],GosuLexer:[12,0,1,""],GosuTemplateLexer:[12,0,1,""],GroovyLexer:[12,0,1,""],IokeLexer:[12,0,1,""],JasminLexer:[12,0,1,""],JavaLexer:[12,0,1,""],KotlinLexer:[12,0,1,""],PigLexer:[12,0,1,""],SarlLexer:[12,0,1,""],ScalaLexer:[12,0,1,""],XtendLexer:[12,0,1,""]},"pygments.lexers.lisp":{CPSALexer:[12,0,1,""],CommonLispLexer:[12,0,1,""],EmacsLispLexer:[12,0,1,""],FennelLexer:[12,0,1,""],HyLexer:[12,0,1,""],NewLispLexer:[12,0,1,""],RacketLexer:[12,0,1,""],SchemeLexer:[12,0,1,""],ShenLexer:[12,0,1,""],XtlangLexer:[12,0,1,""]},"pygments.lexers.make":{BaseMakefileLexer:[12,0,1,""],CMakeLexer:[12,0,1,""],MakefileLexer:[12,0,1,""]},"pygments.lexers.markup":{BBCodeLexer:[12,0,1,""],GroffLexer:[12,0,1,""],MarkdownLexer:[12,0,1,""],MoinWikiLexer:[12,0,1,""],MozPreprocCssLexer:[12,0,1,""],MozPreprocHashLexer:[12,0,1,""],MozPreprocJavascriptLexer:[12,0,1,""],MozPreprocPercentLexer:[12,0,1,""],MozPreprocXulLexer:[12,0,1,""],RstLexer:[12,0,1,""],TexLexer:[12,0,1,""]},"pygments.lexers.matlab":{MatlabLexer:[12,0,1,""],MatlabSessionLexer:[12,0,1,""],OctaveLexer:[12,0,1,""],ScilabLexer:[12,0,1,""]},"pygments.lexers.mime":{MIMELexer:[12,0,1,""]},"pygments.lexers.ml":{OcamlLexer:[12,0,1,""],OpaLexer:[12,0,1,""],SMLLexer:[12,0,1,""]},"pygments.lexers.modeling":{BugsLexer:[12,0,1,""],JagsLexer:[12,0,1,""],ModelicaLexer:[12,0,1,""],StanLexer:[12,0,1,""]},"pygments.lexers.modula2":{Modula2Lexer:[12,0,1,""]},"pygments.lexers.monte":{MonteLexer:[12,0,1,""]},"pygments.lexers.ncl":{NCLLexer:[12,0,1,""]},"pygments.lexers.nimrod":{NimrodLexer:[12,0,1,""]},"pygments.lexers.nit":{NitLexer:[12,0,1,""]},"pygments.lexers.nix":{NixLexer:[12,0,1,""]},"pygments.lexers.oberon":{ComponentPascalLexer:[12,0,1,""]},"pygments.lexers.objective":{LogosLexer:[12,0,1,""],ObjectiveCLexer:[12,0,1,""],ObjectiveCppLexer:[12,0,1,""],SwiftLexer:[12,0,1,""]},"pygments.lexers.ooc":{OocLexer:[12,0,1,""]},"pygments.lexers.parasail":{ParaSailLexer:[12,0,1,""]},"pygments.lexers.parsers":{AntlrActionScriptLexer:[12,0,1,""],AntlrCSharpLexer:[12,0,1,""],AntlrCppLexer:[12,0,1,""],AntlrJavaLexer:[12,0,1,""],AntlrLexer:[12,0,1,""],AntlrObjectiveCLexer:[12,0,1,""],AntlrP
erlLexer:[12,0,1,""],AntlrPythonLexer:[12,0,1,""],AntlrRubyLexer:[12,0,1,""],EbnfLexer:[12,0,1,""],RagelCLexer:[12,0,1,""],RagelCppLexer:[12,0,1,""],RagelDLexer:[12,0,1,""],RagelEmbeddedLexer:[12,0,1,""],RagelJavaLexer:[12,0,1,""],RagelLexer:[12,0,1,""],RagelObjectiveCLexer:[12,0,1,""],RagelRubyLexer:[12,0,1,""],TreetopLexer:[12,0,1,""]},"pygments.lexers.pascal":{AdaLexer:[12,0,1,""],DelphiLexer:[12,0,1,""]},"pygments.lexers.pawn":{PawnLexer:[12,0,1,""],SourcePawnLexer:[12,0,1,""]},"pygments.lexers.perl":{Perl6Lexer:[12,0,1,""],PerlLexer:[12,0,1,""]},"pygments.lexers.php":{PhpLexer:[12,0,1,""],ZephirLexer:[12,0,1,""]},"pygments.lexers.pony":{PonyLexer:[12,0,1,""]},"pygments.lexers.praat":{PraatLexer:[12,0,1,""]},"pygments.lexers.prolog":{LogtalkLexer:[12,0,1,""],PrologLexer:[12,0,1,""]},"pygments.lexers.python":{CythonLexer:[12,0,1,""],DgLexer:[12,0,1,""],NumPyLexer:[12,0,1,""],Python2Lexer:[12,0,1,""],Python2TracebackLexer:[12,0,1,""],PythonConsoleLexer:[12,0,1,""],PythonLexer:[12,0,1,""],PythonTracebackLexer:[12,0,1,""]},"pygments.lexers.qvt":{QVToLexer:[12,0,1,""]},"pygments.lexers.r":{RConsoleLexer:[12,0,1,""],RdLexer:[12,0,1,""],SLexer:[12,0,1,""]},"pygments.lexers.rdf":{ShExCLexer:[12,0,1,""],SparqlLexer:[12,0,1,""],TurtleLexer:[12,0,1,""]},"pygments.lexers.rebol":{RebolLexer:[12,0,1,""],RedLexer:[12,0,1,""]},"pygments.lexers.resource":{ResourceLexer:[12,0,1,""]},"pygments.lexers.rnc":{RNCCompactLexer:[12,0,1,""]},"pygments.lexers.roboconf":{RoboconfGraphLexer:[12,0,1,""],RoboconfInstancesLexer:[12,0,1,""]},"pygments.lexers.robotframework":{RobotFrameworkLexer:[12,0,1,""]},"pygments.lexers.ruby":{FancyLexer:[12,0,1,""],RubyConsoleLexer:[12,0,1,""],RubyLexer:[12,0,1,""]},"pygments.lexers.rust":{RustLexer:[12,0,1,""]},"pygments.lexers.sas":{SASLexer:[12,0,1,""]},"pygments.lexers.scdoc":{ScdocLexer:[12,0,1,""]},"pygments.lexers.scripting":{AppleScriptLexer:[12,0,1,""],ChaiscriptLexer:[12,0,1,""],EasytrieveLexer:[12,0,1,""],HybrisLexer:[12,0,1,""],JclLexer:[12,0,1,""],LSLLexer:[12,0,1,""],LuaLexer:[12,0,1,""],MOOCodeLexer:[12,0,1,""],MoonScriptLexer:[12,0,1,""],RexxLexer:[12,0,1,""]},"pygments.lexers.sgf":{SmartGameFormatLexer:[12,0,1,""]},"pygments.lexers.shell":{BashLexer:[12,0,1,""],BashSessionLexer:[12,0,1,""],BatchLexer:[12,0,1,""],FishShellLexer:[12,0,1,""],MSDOSSessionLexer:[12,0,1,""],PowerShellLexer:[12,0,1,""],PowerShellSessionLexer:[12,0,1,""],SlurmBashLexer:[12,0,1,""],TcshLexer:[12,0,1,""],TcshSessionLexer:[12,0,1,""]},"pygments.lexers.slash":{SlashLexer:[12,0,1,""]},"pygments.lexers.smalltalk":{NewspeakLexer:[12,0,1,""],SmalltalkLexer:[12,0,1,""]},"pygments.lexers.smv":{NuSMVLexer:[12,0,1,""]},"pygments.lexers.snobol":{SnobolLexer:[12,0,1,""]},"pygments.lexers.solidity":{SolidityLexer:[12,0,1,""]},"pygments.lexers.special":{RawTokenLexer:[12,0,1,""],TextLexer:[12,0,1,""]},"pygments.lexers.sql":{MySqlLexer:[12,0,1,""],PlPgsqlLexer:[12,0,1,""],PostgresConsoleLexer:[12,0,1,""],PostgresLexer:[12,0,1,""],RqlLexer:[12,0,1,""],SqlLexer:[12,0,1,""],SqliteConsoleLexer:[12,0,1,""],TransactSqlLexer:[12,0,1,""]},"pygments.lexers.stata":{StataLexer:[12,0,1,""]},"pygments.lexers.supercollider":{SuperColliderLexer:[12,0,1,""]},"pygments.lexers.tcl":{TclLexer:[12,0,1,""]},"pygments.lexers.templates":{Angular2HtmlLexer:[12,0,1,""],Angular2Lexer:[12,0,1,""],CheetahHtmlLexer:[12,0,1,""],CheetahJavascriptLexer:[12,0,1,""],CheetahLexer:[12,0,1,""],CheetahXmlLexer:[12,0,1,""],ColdfusionCFCLexer:[12,0,1,""],ColdfusionHtmlLexer:[12,0,1,""],ColdfusionLexer:[12,0,1,""],CssDjangoLexer:[12,0,1,""],CssE
rbLexer:[12,0,1,""],CssGenshiLexer:[12,0,1,""],CssPhpLexer:[12,0,1,""],CssSmartyLexer:[12,0,1,""],DjangoLexer:[12,0,1,""],ErbLexer:[12,0,1,""],EvoqueHtmlLexer:[12,0,1,""],EvoqueLexer:[12,0,1,""],EvoqueXmlLexer:[12,0,1,""],GenshiLexer:[12,0,1,""],GenshiTextLexer:[12,0,1,""],HandlebarsHtmlLexer:[12,0,1,""],HandlebarsLexer:[12,0,1,""],HtmlDjangoLexer:[12,0,1,""],HtmlGenshiLexer:[12,0,1,""],HtmlPhpLexer:[12,0,1,""],HtmlSmartyLexer:[12,0,1,""],JavascriptDjangoLexer:[12,0,1,""],JavascriptErbLexer:[12,0,1,""],JavascriptGenshiLexer:[12,0,1,""],JavascriptPhpLexer:[12,0,1,""],JavascriptSmartyLexer:[12,0,1,""],JspLexer:[12,0,1,""],LassoCssLexer:[12,0,1,""],LassoHtmlLexer:[12,0,1,""],LassoJavascriptLexer:[12,0,1,""],LassoXmlLexer:[12,0,1,""],LiquidLexer:[12,0,1,""],MakoCssLexer:[12,0,1,""],MakoHtmlLexer:[12,0,1,""],MakoJavascriptLexer:[12,0,1,""],MakoLexer:[12,0,1,""],MakoXmlLexer:[12,0,1,""],MasonLexer:[12,0,1,""],MyghtyCssLexer:[12,0,1,""],MyghtyHtmlLexer:[12,0,1,""],MyghtyJavascriptLexer:[12,0,1,""],MyghtyLexer:[12,0,1,""],MyghtyXmlLexer:[12,0,1,""],RhtmlLexer:[12,0,1,""],SmartyLexer:[12,0,1,""],SspLexer:[12,0,1,""],TeaTemplateLexer:[12,0,1,""],TwigHtmlLexer:[12,0,1,""],TwigLexer:[12,0,1,""],VelocityHtmlLexer:[12,0,1,""],VelocityLexer:[12,0,1,""],VelocityXmlLexer:[12,0,1,""],XmlDjangoLexer:[12,0,1,""],XmlErbLexer:[12,0,1,""],XmlPhpLexer:[12,0,1,""],XmlSmartyLexer:[12,0,1,""],YamlJinjaLexer:[12,0,1,""]},"pygments.lexers.teraterm":{TeraTermLexer:[12,0,1,""]},"pygments.lexers.testing":{GherkinLexer:[12,0,1,""],TAPLexer:[12,0,1,""]},"pygments.lexers.textedit":{AwkLexer:[12,0,1,""],VimLexer:[12,0,1,""]},"pygments.lexers.textfmts":{GettextLexer:[12,0,1,""],HttpLexer:[12,0,1,""],IrcLogsLexer:[12,0,1,""],NotmuchLexer:[12,0,1,""],TodotxtLexer:[12,0,1,""]},"pygments.lexers.theorem":{CoqLexer:[12,0,1,""],IsabelleLexer:[12,0,1,""],LeanLexer:[12,0,1,""]},"pygments.lexers.trafficscript":{RtsLexer:[12,0,1,""]},"pygments.lexers.typoscript":{TypoScriptCssDataLexer:[12,0,1,""],TypoScriptHtmlDataLexer:[12,0,1,""],TypoScriptLexer:[12,0,1,""]},"pygments.lexers.unicon":{IconLexer:[12,0,1,""],UcodeLexer:[12,0,1,""],UniconLexer:[12,0,1,""]},"pygments.lexers.urbi":{UrbiscriptLexer:[12,0,1,""]},"pygments.lexers.varnish":{VCLLexer:[12,0,1,""],VCLSnippetLexer:[12,0,1,""]},"pygments.lexers.verification":{BoogieLexer:[12,0,1,""],SilverLexer:[12,0,1,""]},"pygments.lexers.webmisc":{CirruLexer:[12,0,1,""],DuelLexer:[12,0,1,""],QmlLexer:[12,0,1,""],SlimLexer:[12,0,1,""],XQueryLexer:[12,0,1,""]},"pygments.lexers.whiley":{WhileyLexer:[12,0,1,""]},"pygments.lexers.x10":{X10Lexer:[12,0,1,""]},"pygments.lexers.xorg":{XorgLexer:[12,0,1,""]},"pygments.lexers.zig":{ZigLexer:[12,0,1,""]},"pygments.styles":{get_all_styles:[0,2,1,""],get_style_by_name:[0,2,1,""]},"pygments.util":{OptionError:[0,5,1,""],get_bool_opt:[0,2,1,""],get_choice_opt:[0,2,1,""],get_int_opt:[0,2,1,""],get_list_opt:[0,2,1,""]},pygments:{format:[0,2,1,""],formatter:[0,1,0,"-"],formatters:[0,1,0,"-"],highlight:[0,2,1,""],lex:[0,2,1,""],lexer:[0,1,0,"-"],lexers:[0,1,0,"-"],styles:[0,1,0,"-"],token:[18,1,0,"-"],util:[0,1,0,"-"]}},objnames:{"0":["py","class","Python class"],"1":["py","module","Python module"],"2":["py","function","Python function"],"3":["py","attribute","Python attribute"],"4":["py","method","Python method"],"5":["py","exception","Python 
exception"]},objtypes:{"0":"py:class","1":"py:module","2":"py:function","3":"py:attribute","4":"py:method","5":"py:exception"},terms:{"0000aa":6,"00ff00":7,"00m":17,"01m":17,"01mhello":17,"0b101010":18,"0f0":17,"0x10c":12,"0x20":12,"0xb785decc":5,"0xdeadbeef":18,"14px":7,"16m":[2,7],"42d":18,"42l":18,"6pl":12,"6pm":12,"beno\u00eet":1,"boolean":[0,2],"break":7,"byte":[2,7,15,19],"case":[0,2,3,4,5,7,11,19,21],"char":[2,11,12,18],"cl\u00e9ment":1,"class":[0,2,3,4,5,6,10,12,13,14,15,17,18],"const":18,"dani\u00ebl":1,"default":[0,1,2,3,5,7,11,12,15,17,19],"enum":12,"export":2,"final":[2,11],"float":[0,2,18],"function":[0,2,4,5,6,11,12,14,15,17,18,21],"g\u00f3rny":1,"goto":18,"guti\u00e9rrez":1,"hegg\u00f8":1,"helles\u00f8i":1,"import":[2,4,5,6,7,10,11,12,15,17,18,19],"int":[5,18],"joaqu\u00edn":1,"jos\u00e9":1,"kl\u00e4rck":1,"ko\u017ear":1,"long":[2,18],"lyngst\u00f8l":1,"maik\u00e4fer":2,"matth\u00e4u":22,"micha\u0142":1,"new":[0,2,3,4,5,6,7,10,12,14,17,18,19,22],"null":[7,12],"pr\u00e9vost":1,"public":[2,12],"ren\u00e9":1,"return":[0,5,6,7,11,12,15],"s\u00e9bastien":1,"schneegl\u00f6ckchen":2,"short":[0,3,7,11,12],"st\u00e9phane":1,"static":[0,18],"strau\u00dfenei":2,"super":11,"switch":[2,12],"true":[0,2,5,6,7,11,12,13,15,18],"try":[6,12],"unsch\u00e4rf":2,"var":[2,12,18],"while":[2,3,6,11,12,15,21],Abe:1,Added:[2,12],And:11,BAS:12,But:11,DOS:12,EXE:12,For:[0,3,5,6,7,11,12,13,15,17,18,20,21],GAS:2,Gas:12,IDE:21,IDEs:2,Its:[3,21],One:11,SAS:[1,2],That:[0,4,7,11,14,17,21],The:[2,3,4,5,6,7,8,9,10,12,15,17,18,19,20,21,23],Then:[7,11],There:[2,5,11,14,15,17],These:[7,9,12],Use:[2,8,12],Used:7,Useful:12,Using:[7,8],VBS:12,Vos:1,Will:0,With:[2,7,18],XDS:12,__all__:11,__doc__:18,__init__:[0,4,6,11,18],_by_:2,_format_lin:7,_lua_builtin:12,_php_builtin:12,_style:6,_wrap_cod:7,aa22ff:15,aamann:12,aaron:1,abandon:11,abap:[1,2,12],abaplex:12,abil:2,abl:[1,2,12],abnf:12,abnflex:12,about:[2,3,8,11,12,14],abov:[3,11],absolut:7,abysm:2,academ:12,accept:[2,5,7,12,13,15,19],access:[4,6,7,12,18],accord:12,accordingli:7,accur:2,acm:12,aconf:12,acorn:12,act:12,action:11,actionscript3:12,actionscript3lex:12,actionscript:[2,23],actionscriptlex:12,activ:[1,12],activest:21,actual:[2,12],ada2005:12,ada95:12,ada:[1,2,12,23],adalex:12,adam:[1,2,12],adapt:[5,9,16],adb:12,add:[0,2,5,7,11,14,21],add_filt:[4,5],added:[2,3,4,7,11,17,21,22],adding:[11,14],addit:[2,6,7,11,12,15,21],addition:[4,6,7,11,13],address:22,adjust:4,adl:12,adlf:12,adllex:12,adlx:12,ado:12,adob:12,ads:12,adt:12,advanc:10,affect:[2,7,17],affix:[2,18],afshar:1,after:[2,3,7,11,12,18],again:[6,7,11],against:11,agda:[1,2,12],agdalex:12,aglassing:1,aglet:12,aheui:12,aheuilex:12,ahk:12,ahkl:12,aim:12,alain:1,alastair:1,alex:1,alexand:1,algol:[2,12],algol_nu:12,algorithm:[7,12],ali:1,alia:[0,2,12,14,15],alias:[0,2,11,12,14,18],alias_filenam:0,align:[7,17],all:[0,1,2,3,4,5,6,7,11,13,15,17,18,19,20,21],alloi:[2,12],allow:[0,2,5,7,9,11,12,17],alloylex:12,almost:[11,12],along:3,alphabet:[1,12],alreadi:[0,5,11,12,18],als:12,also:[0,2,3,4,7,11,12,13,16,17,18,19,21,22,23],altern:[2,7,11,12],alwai:[2,4,12,22],amann:[1,2,12],ambient:12,ambienttalk:2,ambienttalklex:12,ambigu:15,among:[0,15,22],amount:[5,11],ampl:[1,2],ampllex:12,ana:[1,2],analog:2,analys:[0,19],analyse_text:[0,2,15],analysi:2,anchor:7,anchorlineno:[1,2,7],andr:[1,2],andrea:[1,2,12],andrei:1,andrew:1,android:12,angl:5,angu:1,angular2:12,angular2htmllex:12,angular2lex:12,angular:12,ani:[0,2,3,7,11,12,17,18,19],annamalai:1,annot:[2,5,12],anonym:11,anoth:[2,5,11,18,19,21],ansi:[1,2,7,12,15,17,21,22],ansiblac
k:17,ansiblu:17,ansibrightblack:17,ansibrightblu:17,ansibrightcyan:17,ansibrightgreen:17,ansibrightmagenta:17,ansibrightr:17,ansibrightyellow:17,ansibrown:17,ansicolor:17,ansicyan:17,ansidarkblu:17,ansidarkgrai:17,ansidarkgreen:17,ansidarkr:17,ansifuchsia:17,ansigrai:17,ansigreen:17,ansilightgrai:17,ansimagenta:17,ansipurpl:17,ansir:17,ansit:17,ansiturquois:17,ansiwhit:17,ansiyellow:17,antlr:[1,2,12,23],antlractionscriptlex:12,antlrcpplex:12,antlrcsharplex:12,antlrjavalex:12,antlrlex:12,antlrobjectiveclex:12,antlrperllex:12,antlrpythonlex:12,antlrrubylex:12,antonio:9,anymor:2,anyth:[2,3,7,11,12],apach:[2,10,12,23],apache2:[2,12],apacheconf:12,apacheconflex:12,api:[8,11,15],apl:[1,2],apllex:12,apostroph:2,appaiah:1,appear:6,append:11,applescript:[1,2,12,23],applescriptlex:12,appli:5,applic:[2,12,22],approach:[11,12],appropri:[7,11,12],apt:12,arbitrari:11,archetyp:[1,2],arduino:[1,2,12],arduinolex:12,area:7,aren:[6,11,17,18],arexx:12,arg:[0,2,3,7],argument:[0,2,3,4,5,6,7,11,15],armin:[1,22],armstrong:1,arnold:1,around:[2,3,12],arrow:12,art:12,artem:1,articl:7,artifactid:10,as3:[2,12],asax:12,ascii:[3,7,12,19],asciidoc:21,ascx:12,ashkena:1,ashx:12,asi:12,ask:2,aslak:1,aslakhellesoi:12,asm:[2,12],asmx:12,asp:[2,12],aspectj:[1,2,12],aspectjlex:12,aspx:12,assembl:[1,2,23],assign:12,assist:12,associ:11,assum:[7,12,21],assumpt:12,asterisk:11,asymptot:[1,2,12,23],asymptotelex:12,async:2,atom:12,atria:1,attach:[13,18],attent:22,attribut:[0,2,3,6,7,11,12,14,15,18,19],au3:12,aug:[2,12],augea:[1,2,12,23],augeaslex:12,aust:[1,12],author:[12,18,21],autodeleg:12,autodetect:12,autohandl:12,autohotkei:[1,2,12],autohotkeylex:12,autoit:[1,2,12],autoitlex:12,autolisp:12,automat:[2,4,7,11,12,17,19],autopygment:2,aux:12,avail:[0,2,8,11,15,18,21,22],avoid:[2,15],await:2,awar:2,awk:[1,2,12,23],awklex:12,axd:12,b3d:12,background:[2,5,6,7,17],backquot:2,backreferenc:11,backslash:[2,12],backtick:18,backtrack:2,backward:[2,18],bajolet:1,bangert:[1,2],bar:[5,7,11],barfoo:11,baruchel:1,bas:12,base:[0,1,2,7,11,12,18,21,22,23],baselex:11,baselin:7,basemak:12,basemakefilelex:12,bash:[2,12,18,23],bashcomp:9,bashlex:[2,12],bashrc:12,bashsessionlex:[2,12],basic:[0,2,6,11,15,21,23],bat:12,batch:[2,12,23],batchlex:12,battcher:1,baumann:1,baumgart:1,bayer:1,baz:5,bazaar:21,bazbam:12,bazel:12,bb4444:15,bbc:[2,12,23],bbcbasic:12,bbcbasiclex:12,bbcode:[1,2,7,12,21,23],bbcodeformatt:7,bbcodelex:12,bclexer:12,beal:1,bean:12,beauti:21,becaus:[0,2,3,4,6,7,11,12,14,17,18,19],been:[2,5,7,10,11,12,22],befor:[2,7,12,19],befung:[2,12,23],befungelex:[11,12],begin:[7,11,12],behavior:[2,4,17,19],being:[2,11,15,17],below:[11,18,22],ben:[1,2],benediktsson:1,benjamin:1,bergeron:1,bernat:1,bertel:1,bertrand:1,best:19,better:[2,12],between:[2,7,11,12,17],bgcolor:6,bib:12,bibtex:1,bibtexlex:12,big:12,bigaret:1,billingslei:[1,2],bin:[15,18],binari:[2,12,18],bind:12,bird:12,bit:[2,7],bitbucket:21,bitmap:7,bitstream:7,black:17,blackwhitestyl:2,blame:12,blink:12,blinkinsop:[1,2,12],blitzbas:[1,2,12],blitzbasiclex:12,blitzmax:[1,2,12],blitzmaxlex:12,block:[2,7,12],blondon:1,blue:[6,17],bmax:12,bmp:[2,7],bmpimageformatt:7,bmx:12,bnflexer:12,boa:[2,23],boalex:12,board:[7,12,21],bob:1,bodi:12,body_lex:12,bold:[2,6,7,15,17,18],boldfac:[12,17],bom:19,bommel:1,boo:[2,12,18,23],boogi:[1,2,12],boogielex:12,bool:5,boolex:12,border:[6,7,17],borland:12,both:[11,12,15,17],boundari:12,bourdon:[1,2],bpl:12,bplu:12,bpython:21,brace:[2,12],bracket:12,brainfuck:[12,23],brainfucklex:[11,12],branch:21,brandl:[1,22],brian:1,bright:[2,17],brightblack:17,brightblu:17,brightcya
n:17,brightgreen:17,brightmagenta:17,brightr:17,brightyellow:17,brillouin:2,bro:[1,2,12],broken:7,browser:[7,11,21],bruce:[1,21],bruno:1,bryan:1,bsd:[2,12,21],bsdmake:12,bst:12,bstlexer:12,buck:12,buffer:[1,2,6,12],bug:[1,2,5,8,12,22],bugfix:[1,2],bugslex:12,build:[2,12,21],builder:21,built:[2,7,12,15],builtin:[0,2,7,8,11,12,15],builtinshighlight:12,bulletin:7,bump:2,bundl:[9,12],bussonni:1,bygroup:11,bz2:[7,12],bzl:12,bzrfruit:21,c99:[2,12],c_cpp:12,c_like:12,ca65:12,ca65lex:12,cacer:1,cach:[2,6,12],cadl:12,cadllex:12,call:[0,2,7,11,12,13,15,17,18,19,20],callaghan:1,caller:11,camil:1,camk:[1,2,12],camkeslex:12,can:[0,2,3,4,5,6,7,9,10,11,12,13,14,15,16,17,18,19,20,22],cangiano:9,cannot:11,canon:12,cap:2,capabl:2,capdl:[2,12],capdllex:12,capit:5,capnp:12,capnproto:12,capnprotolex:12,caption:7,captur:11,care:[0,11],carlo:1,cascad:12,cat:1,catalog:[2,12,23],catastroph:2,categori:[3,12],caus:[2,7,12],cbl:12,cbm:12,cbmba:12,cbmbasicv2lex:12,cdf:12,cdl:12,cell:7,central:10,certain:[2,7,11],cetera:16,ceylon:[1,2,12],ceylonlex:12,cf3:12,cfc:[2,12],cfengine3:[1,2,12],cfengine3lex:12,cfg:[11,12],cfm:12,cfml:12,cfs:12,chai:12,chain:[2,12],chaiscript:[2,12],chaiscriptlex:12,chajda:22,chanc:14,chang:[2,7,12,15,17,20],changelog:[8,20],chapel:[1,2],chapellex:12,charact:[2,3,5,7,11,12,18,19],chardet:[12,19],charl:1,charm:[2,12,23],charmci:12,charmcilex:12,charna:2,chdr:12,chebee7i:1,check:[2,4,11,12],checkspac:12,chee:1,cheetah:[1,2,12,23],cheetahhtmllex:12,cheetahjavascriptlex:12,cheetahlex:12,cheetahxmllex:12,child:[2,11],chines:21,chirino:1,chpl:12,christian:1,christoph:[1,2,12],cirru:[2,12],cirrulex:12,clai:[1,2,12],clariti:7,clash:7,class_too:4,classifi:2,classnotfound:[0,2],classprefix:7,classtoo:4,claylex:12,clean:[1,2],cleanlex:12,clear:22,clexer:[2,12],cli:18,click:7,clipper:12,clj:12,clobber:2,clojur:[1,2,9,12,21,23],clojurelex:12,clojurescript:12,clojurescriptlex:12,clone:[11,20],close:[6,11],closingcommentdelim:12,cls:12,cluster:12,clygment:[9,21],cmake:[2,12,23],cmakelex:12,cmakelist:12,cmd:12,cmdline:1,cob:12,cobjdumplex:12,cobol:12,cobolfre:12,cobolfreeformatlex:12,cobollex:12,cocoa:2,code:[0,2,5,7,9,10,11,13,15,16,17,18,20,21,22],codecov:21,codehtmlformatt:7,codenam:2,codetag:[2,5,7],codetagfilt:5,codetagifi:5,coffe:12,coffeescript:[1,2,12,23],coffeescriptlex:12,coldfus:[2,12,23],coldfusioncfclex:12,coldfusionhtmllex:12,coldfusionlex:12,colin:1,collabor:21,collis:2,colon:[2,3,11],color:[2,3,4,5,6,7,9,12,13,15,17,21],colorama:2,colorfulstyl:17,colornam:7,colorschem:7,colour:7,column:7,com:[1,2,8,12,20,21,23],combin:[0,2,7,11,12],come:[5,17],comma:[2,3],command:[0,2,7,8,9,11,14,18,19,20,21,22],commandchar:7,commandlin:12,commandprefix:[2,7],comment:[2,5,6,7,11,12,15,17],commit:2,common:[1,2,12,21,23],commonli:12,commonlisplex:12,commun:21,compar:[12,18],compat:[2,7,18],compens:21,compil:12,complet:[2,7,11,21],complex:[6,11,12],compon:[2,8,12,15],componentpasc:12,componentpascallex:12,compress:[0,7,12],comput:1,concaten:11,concept:15,concret:12,concurr:12,conf:[2,12],config:[1,2,13,23],configur:[2,13,18,23],conflict:2,confus:[4,11,12],consecut:[5,6],consid:[12,17],consider:[0,12],consist:[2,11],consol:[1,2,3,7,21,23],console16m:7,console256:7,constant:[12,18],constitut:2,construct:[2,7,11,12,18],constructor:[0,5],consum:12,contact:21,contain:[0,2,3,5,6,7,9,11,12,15,21],content:[0,2,7,10,11,12,15],context:[2,11,12],contextu:2,continu:[2,11],contribut:[1,12,23],contributor:8,control:[0,1,2,7,12,20,21,23],convers:12,convert:[0,2,4,5,7,12,15,17,18,21],cookbook:21,cooper:1,coordin:7,copi:[7,9,11,12
,13,16],coq:[2,12,23],coqlex:12,corbett:1,corbin:1,corcoran:1,corei:1,corner:2,coroutin:12,correct:[0,2],correctli:[2,7,11,21],correspond:[11,12,17],could:[3,4,7,11,12,15,17],count:12,coupl:2,courier:7,cours:[18,21],cover:[12,14],coverag:21,cpp:12,cppcommentlex:11,cpplexer:12,cppobjdumplex:12,cps:12,cpsa:[1,12],cpsalex:12,cpy:12,cpython:12,crash:[2,11],creat:[2,3,6,7,9,10,11,15,18,21],creation:12,creutzig:[1,2,12],crmsh:[1,2,12],crmshlexer:12,croc:[2,12],croclex:12,crocsrc:12,crompton:1,cross:21,crunchi:2,cry:12,cryptol2:12,cryptol:[1,2,12,23],cryptollex:12,crystal:[1,2,23],crystallex:12,csail:12,csd:12,csh:12,csharp:12,csharpaspxlex:12,csharplex:[2,12],csound:[1,2],csounddocumentlex:12,csoundorchestralex:12,csoundscorelex:12,csrc:12,css:[0,2,3,6,7,13,15,17,23],cssclass:[2,7,15],cssdjangolex:12,csserblex:12,cssfile:7,cssgenshilex:12,csslexer:12,cssphplexer:12,csssmartylex:12,cssstyle:7,ctag:[1,2,7],ctx:11,cucumb:[2,12,23],cuda:[1,2,12],cudalex:12,cuh:12,curli:12,current:[0,3,6,7,11,12,13,18,20],curri:1,curs:21,custom:[2,7,11,16,17],customformatt:[0,3],customlex:[0,3,11],cxx:12,cyan:17,cyp:12,cypher:[2,12],cypherlex:12,cython:[2,12,23],cythonlex:12,dalvik:2,dan:1,daniel:1,darc:[1,2,12],darcspatch:12,darcspatchlex:12,dark:[2,6,7,17],darkbg:7,darkgrei:17,dart:[1,2,12,23],dartlex:12,dash:[1,2],dasm16:[1,2,12],dasm16lex:12,dasm:12,data:[2,6,11,13,18,19],databas:12,date:18,davi:1,david:1,dba:12,dcl:12,dcpu:[12,23],debcontrol:12,debian:[1,2,12,23],debiancontrollex:12,debsourc:12,debug:[12,21],debugg:21,dec:2,decid:12,decis:2,decl:12,declar:[0,2,7,12,18],decod:19,decompress:12,decor:[2,18],deepcopi:2,deepli:11,def:[0,2,4,6,7,11,12],default_styl:17,deferrari:1,defin:[0,2,6,7,11,12,14,17,18],definit:[0,2,3,7,11,14,17,18],degener:2,dejan:1,dejavu:7,delai:2,deleg:12,delegatinglex:[11,12],delet:[11,18],delimit:[2,7,11,12,18],delphi:[12,23],delphilex:12,delroth:2,demonstr:21,denni:[1,2],depend:[0,2,7,10,12,17],deploi:10,deprec:17,deriv:[0,2,12],derivedlex:11,describ:[0,7],descript:12,design:12,desir:[5,7],desis:12,detail:[3,11,21,22],detect:[2,12],determin:[0,2,12,15],develop:[1,3,12,21,22,23],dglexer:12,dhandler:12,dialect:2,dialectopt:12,dialecttag:12,diamanda:21,dict:[6,18,19],dictionari:[0,7,11],didn:3,diego:1,dietmar:1,diff:[2,11,23],differ:[2,5,7,11,12,15,17,19,21],differenti:12,difflex:[11,12,18],digia:12,direct3d:12,direct:[2,7,11,12,13,16,21],directli:[2,11,12],directori:[0,2,3,11,13,17],disabl:7,disabled_modul:12,disabledmodul:12,disallow:2,disappoint:12,displai:[2,7,16],disrupt:5,distinct:7,distinguish:12,distribut:[2,9,11,13,14,16,17,20],div:[7,10,15],divis:12,django:[12,21,23],djangolex:12,dlexer:12,dmitri:1,do_insert:2,dobjdumplex:12,doc:[2,7,12,17,18,19,21,23],docclass:7,docker:[2,12],dockerfil:[2,12],dockerlex:12,docstr:[2,5],doctest:12,doctyp:[2,7],document:[2,3,7,10,11,12,14,15,18,21,22],docutil:2,doe:[2,7,11,12,19],doesn:[2,3,4,6,7,11,12,14,18],domen:1,dominik:1,don:[0,2,3,6,7,11,12,15,17,18,19,22],done:[2,11],doren:1,dos:12,dosbatch:12,doscon:12,dosini:12,dot:[2,5],dotal:11,dotnet:12,doubl:[5,6,18],doug:1,down:11,download:[8,22],dpast:21,dpatch:12,dpr:12,drawn:7,dreiundzwanzig:2,drop:[0,2,5,17,21],dsrc:12,dtd:[1,2,12,23],dtdlexer:12,dubi:12,dubinska:1,due:[0,2],duel:[1,2,12],duelengin:12,duellex:12,duplic:2,dure:12,durni:1,dustin:1,dutton:1,dyl:12,dylan:[1,2,23],dylanconsolelex:12,dylanlex:12,dylanlidlex:12,dynam:12,each:[7,11,18],earl:12,earlgrei:12,earlgreylex:12,earlier:[7,12],easi:[2,4,6,7,11,22,23],easier:[7,10,17,21],easiest:11,easili:[2,6,11,12,16,21,22],easytriev:[1,2,12],easy
trievelex:12,eat:[2,5],ebnf:[1,2,12],ebnflex:12,ebuild:12,echdr:12,ecl:2,eclass:12,eclex:12,ecllex:12,ecsrc:12,edg:7,edit:13,editor:21,edoardo:1,edp:12,edu:12,edward:1,eed:7,eee:17,eeeeeee:6,effect:7,efford:1,effting:1,egg:11,egorkin:1,eiffel:[1,2],eiffellex:12,either:[2,6,7,11,15,17],element:[7,11,18],elf:12,elia:1,elisp:12,elixir:[1,2,12],elixirconsolelex:12,elixirlex:12,ellipsi:2,elm:[2,23],elmlex:12,els:[6,7,11,12,15,18],elseif:11,elxir:2,emac:[1,2,3,12,17],emacslisplex:12,email:[2,12,23],emaillex:12,embed:21,emit:11,eml:12,emph:18,emphas:18,empti:[2,7,11,12],emul:17,enabl:[3,7,16],enclos:[3,7,12,18],encod:[2,7,8,10,12,15,17],encount:[11,19],encyclopedia:21,end:[2,4,6,7,11,12,18,21],enforc:5,engin:[2,11,21],english:21,enhanc:[1,2],enough:[7,11,12],enriqu:1,ensur:11,ensurenl:[2,12],enter:[11,12],entir:[11,12],entiti:[11,18],entri:[3,11,15],environ:[2,7,12,21],envnam:[2,7],eps:12,equal:[2,3,11],equal_sign:11,equival:[0,11,12],erb:[12,23],erblex:12,eric:1,erick:1,erl:12,erlang:[1,2,23],erlanglex:12,erlangshelllex:12,erron:11,error:[0,2,5,7,11,12,17,18,21],error_color:7,errortoken:5,es6:2,escap:[2,7,11,12,15,17,18],escapeinsid:7,escript:12,esoter:[2,11],especi:[12,18],etc:[2,3,5,6,18],eval:[0,11],even:[7,11,12,22],event:12,ever:23,everi:[0,2,5,7,11,15,17,22],everybodi:11,everyth:[11,12,14],evoqu:[1,2,12],evoquehtmllex:12,evoquelex:12,evoquexmllex:12,exact:17,exactli:[0,11],exampl:[0,3,4,5,6,7,10,11,12,13,17,18,19,21],examplefil:11,examplelex:11,excclass:5,except:[0,2,5,7,11,12,17,18],exclud:18,exec:10,exher:12,exhibit:2,exhypotheticlex:11,exist:[2,6,7,11,15,18],exlib:12,expand:[5,12],expans:3,expect:[0,3,11],experi:21,experiment:[2,7],explain:[6,14,15],explan:[3,18],explicit:[3,7],explicitli:[3,11],explor:7,express:[2,11,12,18],exrc:12,exs:12,ext:14,extempor:[1,2,12],extemporelang:12,extend:[2,7,11,12,17],extens:[2,3,11,15],extern:[2,7,9,12,13,16],extra:[7,12],extra_keyword:11,extract:[12,14],extrem:11,ezhil:[1,2,23],ezhillex:12,ezt:12,f00:17,f03:12,f90:12,facil:[1,12],fact:[0,12],factor:[1,2,23],factorlex:12,fail:2,failur:2,fallback:12,fallenstein:12,fals:[0,2,5,7,12,18],famili:[2,7],fan:12,fanci:[1,2,12,23],fancylex:12,fancypack:12,fancysrc:12,fancyvrb:7,fantom:[1,2],fantomlex:12,faq:22,far:11,faschingskrapfn:2,fast:[21,22],faster:2,fastest:12,favor:2,favorit:11,fayaa:21,fear:[11,22],featur:[2,7,11,12],feb:2,fed:5,feel:11,felix:1,felixlex:12,fenc:2,fennel:[1,2,12,23],fennellex:12,fernandez:1,few:[2,11,15,17,23],fext:7,ff0000:[6,17],ffffff:17,fhtml:12,ficarra:1,field:12,file:[0,1,2,3,6,7,9,10,11,13,15,17,18,19,21,23],filenam:[0,2,3,7,11,12,13,14,15],filetext:2,filetyp:12,filter:[2,8,12,14,15,18],find:[2,8,11,21],find_lexer_class:0,find_lexer_class_by_nam:[0,2],fine:12,finish:11,first:[0,2,5,6,7,11,12,15,18],firstchild:7,fish:[1,2,12],fisher:1,fishshel:12,fishshelllex:12,fix:[1,2,5,7,11,12,22],fixm:12,flag:[3,5,12,17],flatlin:[1,2,12],flatlinelex:12,flexibl:2,flo:12,florian:1,floscript:[1,2,23],floscriptlex:12,fluidic:21,flx:12,flxh:12,fmarc:2,fmter:7,fname:7,fnl:12,fnmatch:0,focus:21,fold:12,follow:[0,3,6,7,10,11,12,15,17,18,19],font:[2,6,7,15],font_nam:7,font_siz:[2,7],fontfac:7,fontfamili:7,fontsiz:7,foo:[2,5,7,11,12,18],foobar:[11,12],foreground:[7,17],forev:2,form:[0,1,3,6,7,11,12,17],formal:12,format:[0,2,3,6,7,15,21,22],formatt:[1,2,8,10,11,14,17,19],formatternam:0,former:2,forth:1,forthlex:12,fortran:[1,2,23],fortranfix:12,fortranfixedlex:12,fortranlex:12,forum:22,forward:[5,12],found:[0,2,3,7,9,11,12,15,22],four:[2,15],fourth:15,foxpro:[1,2,23],foxprolex:12,frag:12,fragment:[12,15]
,frame:2,framework:[1,2,21,23],free:[11,12,21],freefem:[1,2,23],freefemlex:12,freepasc:12,freewar:12,friendli:[7,12,17],frit:1,from:[0,1,2,3,4,5,6,7,10,12,13,14,15,16,17,18,19,20],front:21,frt:12,fruiti:2,fsharp:12,fsharplex:12,fsi:12,fulfil:11,full:[2,3,7,8,11,12,15],fulli:2,fulton:1,fun:[12,23],func_name_highlight:12,funcnamehighlight:12,funki:11,further:[7,18],furthermor:[12,17],fusesourc:12,futur:[6,12,18],futurewarn:2,galdino:1,galloi:1,gap:[1,2,12,23],gaplex:12,garg:1,garnotel:1,gas:12,gaslex:12,gautier:1,gave:2,gawk:12,gaynor:1,gdc:12,gemfil:12,gemspec:12,gener:[0,2,5,7,11,17,21,22],genshi:[1,12,23],genshilex:12,genshitext:12,genshitextlex:12,gentoo:2,geo:12,georg:[1,22],gerd:1,gerkin:1,gerwin:1,get:[0,2,5,7,10,11,12,13,18,19,20],get_:2,get_all_filt:5,get_all_lex:[0,12],get_all_styl:[0,17],get_bool_opt:[0,4],get_choice_opt:0,get_formatter_by_nam:[0,15],get_formatter_for_filenam:[0,15],get_int_opt:0,get_lexer_by_nam:[0,2,12,14,15],get_lexer_for_filenam:[0,15],get_lexer_for_mimetyp:[0,15],get_list_opt:0,get_style_by_nam:[0,17],get_style_def:[0,2,3,6,7,15],get_syntax_def:7,get_token:[0,2],get_tokens_unprocess:[0,11],getpreferredencod:19,gettext:[2,12,23],gettextlex:12,gherkin:[1,2,12,23],gherkinlex:12,giedriu:1,gif:[2,7],gifimageformatt:7,gilbert:1,gild:1,git:[2,12,20,21],github:[1,2,8,11,12,20,21,22,23],give:[2,3,5,7,11,15],given:[0,2,3,5,7,11,12,15,19],global:[11,18],glshaderlex:12,glsl:[2,12],glslsrc:12,glue:12,gm2:12,gnu:[2,12],gnumakefil:12,gnuplot:[2,12,23],gnuplotlex:12,gobbl:5,gobblefilt:5,goe:11,goetzmann:1,goj:[1,2],golda:1,golex:12,golo:[2,12],gololex:12,golovizin:1,good:[1,2,6],gooddata:[1,2,12],gooddatacllex:12,googl:[1,2],gordon:1,gosrc:12,goss:1,gosu:[2,12],gosulex:12,gosutemplatelex:12,gotthardt:1,govern:17,gracefulli:2,gradl:12,grai:[6,17],grammar:[11,12],grammar_not:12,graph:21,graphic:7,greater:12,greatli:2,green:17,greg:1,grei:[5,12],groff:[2,12,23],grofflex:12,groovi:[1,2,12,23],groovylex:12,group:11,groupid:10,grow:23,gsp:12,gst:12,gsx:12,guarante:18,guess:[0,2,3,7,12,19],guess_lex:[0,2,15],guess_lexer_for_filenam:[0,15],gui:[12,21],guib:1,guid:[5,12],gvimrc:12,hack:[8,14],hagelberg:1,hahn:1,half:7,haml:[1,2,12],hamllex:12,handl:[0,1,2,7,12,19],handlebar:[2,12],handlebarshtmllex:12,handlebarsj:12,handlebarslex:12,handlecodeblock:12,happen:11,harder:2,harriman:1,harrison:1,has:[0,2,3,4,6,7,9,10,11,12,17,19],hash:2,hashbang:18,haskel:[1,2,23],haskelllex:12,hatch:[1,2,22],have:[0,2,3,4,5,6,7,8,11,12,13,14,15,17,18,19,21,22],haven:[3,19],hax:[1,2],haxelex:12,haxeml:12,hazel:1,hbs:12,hdl:12,hdp:12,hdr:12,head:[3,11,12,18],header:[2,12],headlin:[11,18],headline_callback:11,height:7,hello:[3,10,15,17],help:[0,11,21,22],helper:[2,11],hendershott:1,hendrick:1,herbstzeitlos:2,here:[6,7,9,10,11,14,15,17,21,23],heredoc:[2,18],hermoso:1,hess:1,hex:[2,6,12,18],hexadecim:[6,18],hexcat:12,hexdump:[1,2,12],hexdumplex:12,hierarchi:18,high:[12,14],higher:[7,21],highest:[0,15],highlight:[0,2,3,5,7,9,10,11,12,13,14,15,17,18,21,22],highlightt:7,hilit:21,hint:8,hiram:1,hiremath:1,hiroaki:1,histori:12,hl_color:[2,7],hl_line:[2,7],hlsl:[1,2,12,23],hlsli:12,hlslshaderlex:12,hoelz:1,hogan:1,hold:11,holli:1,home:22,hong:1,horizont:7,horn:1,host:[12,21,22],houghton:1,how:[0,2,6,7,10,11,12,15,16,17],howard:1,howett:1,howev:[2,7,15,21],hpp:12,hrl:12,hsa:12,hsail:[1,2,12],hsaillex:12,hspec:[2,12,23],hspeclex:12,htaccess:12,htdoc:13,htm:[7,11,12],html5:[2,7],html:[0,1,2,3,7,11,13,15,17,18,21,22,23],htmlcheetah:12,htmldjango:12,htmldjangolex:12,htmlformatt:[0,2,6,7,10,15,17,19],htmlgenshilex:12,htm
llexer:[11,12],htmlphplexer:[11,12],htmlsmartylex:12,http:[1,2,8,10,12,20,21,23],httpd:12,httplexer:[2,12],huge:11,human:[0,12],hundr:11,hurl:21,hxml:12,hxmllexer:12,hxsl:12,hxx:12,hyb:12,hybri:[1,2,12],hybrislex:12,hylang:12,hylex:12,hyperlink:7,hypothet:11,hypotheticlex:11,i18n:2,i6t:12,i7x:12,iOS:2,ian:[1,2],icl:12,icn:12,iconlex:12,icu:12,icw:12,idc:12,idea:[2,10],ideal:12,identifi:[0,2,12,18],idl4:12,idl:[1,2,23],idllex:12,idr:12,idri:[1,2,12],idrislex:12,iec:12,ieee:12,ietf:12,iex:12,ignor:[6,7,12],ignorecas:11,igor:[1,2],igorexchang:12,igorlex:12,igorpro:12,ijs:12,imag:[1,2,7,12],image_format:7,image_pad:7,imageformatt:[2,7],img:7,immedi:0,implement:[0,2,6,7,12],implicit:18,imposs:11,improv:[1,2,5,7],inc:[1,12],incl:23,includ:[0,2,3,7,11,13,15,16,19,20,21],inclus:11,incollo:21,incompat:2,incomplet:[2,21],incorrect:2,incorrectli:12,increas:[7,11,22],incred:11,indent:[2,5,7,12],index:[0,7,11,20],indexerror:11,indic:[2,7,11],individu:7,industri:12,inencod:[2,3,19],inf:12,infer:3,inferno:1,infinit:2,influenc:11,info:12,inform6:12,inform6lex:12,inform6templatelex:12,inform7:12,inform7lex:12,inform:[1,2,3,7,11,12,14,15,18],ing:2,inherit:[1,2,11,17],ini:[2,11,12,23],inilex:[11,12],initi:[2,5,6,11],initialis:11,inkpot:2,inlin:[2,6,7,15],inlinestyl:13,ino:12,inozemtsev:1,input:[0,2,3,7,11,12,15,19],inputenc:7,insensit:[0,2],insert:[2,11,18],insid:[7,11,18],instal:[2,3,7,8,11,19,21],instanc:[0,2,4,5,10,11,12,13,17,18],instanti:[0,4,6,11],instead:[2,6,7,11,12,15,17,21],instruct:12,int_fict:12,integ:[0,12,18],integr:[2,7,12,21],intel:[2,12],intellig:21,interact:2,interchang:12,interfac:[0,2,8,12],intern:[4,11,15,19],internet:7,interpol:[2,18],interpret:[0,10],intr:12,introduc:[2,7,11,18],introduct:8,invalid:2,invari:11,invoc:7,iok:[1,2,12],iokelex:12,iokesrc:12,iolang:12,iolex:12,iosrc:12,ipf:12,ipython:2,irb:[12,23],irc:[2,7,12,23],ircformatt:7,irclog:12,irclogslex:12,irssi:[12,23],is_token_subtyp:18,isabel:[1,2,12],isabellelex:12,isn:[2,5,6,11,12],iso:12,issu:[2,22],ital:[6,7,12,17],italic:7,item:[6,11,14],iter:[0,2,5,6,7,11,17],itoh:1,its:[0,2,11,12,15,17,19,21],itself:11,ivan:1,jackson:1,jade:[1,12],jag:[1,2,12],jagslex:12,jame:1,jan:2,jann:1,januari:12,jar:10,jarrett:[1,2],jasmin:[1,2,12],jasminlex:12,jasminxt:12,java:[2,8,12,18,21,23],javalex:12,javascript:[2,7,11,18,23],javascriptdjangolex:12,javascripterblex:12,javascriptgenshilex:12,javascriptlex:[2,11,12],javascriptphplex:12,javascriptsmartylex:12,jbst:[1,2,12],jcl:[1,2,12],jcllexer:12,jeffrei:1,jeremi:[1,12],jerith:12,jerom:1,jesper:1,jinja2:2,jinja:[12,23],jlcon:12,jlexer:12,job:12,jochen:1,joe:1,joerg:1,john:1,join:[2,6],jon:1,jona:1,jordi:1,jpeg:7,jpg:[2,7],jpgimageformatt:7,jproperti:12,jsgf:[1,2,12],jsgflexer:12,jsm:12,json:[1,2,12],jsonbareobjectlex:12,jsonld:12,jsonldlex:12,jsonlex:12,jsonml:12,jsp:[12,23],jspeech:12,jsplexer:12,julia:[1,2],juliaconsolelex:12,julialex:12,jun:2,just:[2,6,7,11,12,17,18,22],justin:1,juttl:12,juttlelex:12,jython:[2,10,21],kaarsemak:[1,2],kabak:1,kal:[1,2,12],kallex:12,kalnitski:1,kashif:1,kconfig:[1,2,12],kconfiglex:12,keep:[2,7],kei:[0,6,11,12,17],ken:[1,12],kept:11,kernel:12,keyboardinterrupt:2,keyword:[0,2,4,5,6,12,15,17],keywordcas:[3,5],keywordcasefilt:5,kid:12,kif:12,kind:[2,18],kiril:[1,2],kirk:[1,2],kit:1,kki:12,klein:1,knibb:1,know:[2,15,18,21],knowledg:22,known:[0,3,5,11,17,21],koka:[2,12],kokalex:12,koltsov:1,konrad:1,koprowski:1,korean:12,kotlin:[1,2,12],kotlinlex:12,kowarsch:1,krekel:1,kriegisch:1,kristian:1,krzysiek:[1,2],kschutt:12,ksh:12,kubica:[1,2],kumar:1,kupperschmidt:1,kurt:1,k
urzbach:1,label:[2,18],lack:23,lagda:12,lambda:2,lang_builtin:11,languag:[0,2,11,15,18,22],language_lex:11,larger:[7,13],larim:1,lasso:[1,2,12,23],lassocsslex:12,lassohtmllex:12,lassojavascriptlex:12,lassolex:12,lassoscript:12,lassoxmllex:12,lassu:1,last:[3,6,11,12,19],lasttyp:6,lastval:6,later:[6,7,11,12],latest:[20,22,23],latex:[1,2,7,12,15,17,21,22],latexformatt:[2,6,7],latin1:[2,3,19],latin:12,latter:[2,6,7],laurent:1,layman:1,layout:12,lazi:12,lcry:12,lcryptol:12,lead:[2,11,12,15],leaf:1,lean:[2,12],leanlex:12,learn:22,ledru:1,lee:1,left:[6,12],length:[7,11,12],lenient:12,less:[2,4,12],lesscss:1,lesscsslex:12,lessfilt:2,let:[6,12,15],letter:[5,7,12],level:12,lex:[0,2,11,12,21,22],lexem:12,lexer:[1,2,4,5,6,7,8,10,13,14,17,18,19,21,22,23],lexercontext:11,lexernam:0,lgt:12,lhaskel:12,lhs:12,librari:[2,6,7,9,12,15,19,21,22],licens:[2,18,21],lid:[2,12],lidr:12,lidri:12,life:12,light:[6,7,17],lightbg:7,lighti:12,lighttpd:[2,12,23],lighttpdconflex:12,like:[0,2,3,6,7,9,10,11,15,16,18,21,22],limbo:[1,2,12],limbolex:12,linden:12,line:[0,2,5,7,8,11,12,14,18,19,21,22],line_numb:7,line_number_bg:7,line_number_bold:7,line_number_char:7,line_number_fg:7,line_number_ital:7,line_number_pad:7,line_number_separ:[2,7],line_number_start:[2,7],line_number_step:7,line_pad:7,lineanchor:[2,7],lineno:[2,3,7,15],linenospeci:7,linenostart:7,linenostep:7,linenumb:7,linesepar:7,linespan:[2,7],linewis:12,linh:1,link:[2,7,12,21],linux:[12,20],liquid:[2,12],liquidlex:12,lisp:[1,2,12,18,23],list:[0,2,3,5,7,8,12,14,15,20,21,22,23],listen:2,liter:[1,2,6,7,11,12,23],literateagdalex:12,literatecryptollex:12,literatehaskelllex:12,literateidrislex:12,litstyl:12,littl:[2,12],live:12,livescript:[1,2,12],livescriptlex:12,llvm:[2,12,23],llvmlexer:12,load:[0,1,2,3,11,12],load_formatter_from_fil:[0,2],load_lexer_from_fil:[0,2,11],local:[2,3,11,19],locat:2,log:[1,2,12,22,23],logo:[1,2,12],logoslex:12,logtalk:[1,2,12,23],logtalklex:12,longer:[2,11],longest:11,longnam:0,look:[0,3,5,7,10,11,12,15,17,18,21,22,23],lookup:[0,11,14],loop:[2,15],lorentz:1,lot:[2,10,11],loui:1,lovelac:[1,2],lower:5,lowercas:[5,12],lsl:[2,12],lsllexer:12,lsp:12,lua:[1,2,12,23],lualex:[11,12],lubomir:1,luca:1,luka:1,m2iso:12,m2pim:12,m2r10:12,mabei:1,mac:[2,7,12],macarthur:1,machineri:21,macro:[1,6,7],made:[2,11],magenta:17,magic:[2,18],mai:[0,2,3,5,7,11,12,15,17,23],mail:[21,22],main:[3,7,11,12],mainfram:12,mainli:12,maintain:[1,17,22],major:[1,2,20],mak:12,make:[2,4,7,10,11,12,15,17,19],makefil:[2,23],makefilelex:12,mako:[1,2,12,23],makocsslex:12,makohtmllex:12,makojavascriptlex:12,makolex:12,makoxmllex:12,malform:12,malzeug:2,manag:[12,20,21],mandatori:0,mandel:1,mani:[1,2,7,11,16,17,22],manpag:[2,12],manual:12,mao:12,map:[2,7,11,12,13,15,17],mapfil:11,maql:[1,12],maqllex:12,mar:2,marchand:1,marek:[1,2],margaritelli:1,margin:7,mario:1,mark:[1,5,11,18],markdown:[1,2,12],markdownlex:12,marker:12,markup:[2,6,7,13,15,18,21],martin:1,mask:12,maskj:[1,2],masklex:12,mason:[1,2,12,23],masonlex:12,master:12,match:[0,2,11,12,15,18],math:[2,7,12],mathematica:[2,12],mathematicalex:12,mathescap:7,matlab:[1,2,23],matlablex:12,matlabsess:12,matlabsessionlex:12,matt:[1,2],matteo:1,matter:14,matthew:1,matthia:[1,12],mauricio:1,maven:10,mawk:12,max:[1,12],maxim:12,maximum:11,mayb:12,mcdonald:[1,2],mcgregor:1,mckamei:1,mckee:1,mckenna:1,mclaughlin:1,mean:[0,2,3,5,7,11,12,14,16,19,21],meant:18,mechan:[0,7,15,22],media:2,member:[11,12],menlo:7,mention:14,menu:9,menuconfig:12,mercuri:21,merg:[5,11],messag:[12,18],meta:[7,12],metacharact:11,metaclass:[0,6],metadata:14,metamodel:
12,method:[0,2,3,4,5,6,7,11,12,15,18],meuser:1,mher:1,mhtml:12,michael:1,michiel:1,micro:12,microsoft:7,middl:5,might:[6,11,12,17,19],miikka:1,mike:1,miller:1,mime:[0,2],mimelex:12,mimetyp:[0,2,12],minhe:1,minid:[1,2,12,23],minidlex:12,minidsrc:12,minim:[6,12],minimum:2,minor:2,mior:1,mirc:1,misc:2,misdetect:2,mishandl:2,mishighlight:2,miss:[2,11,12],mit:12,mitchen:1,mix:12,mixtur:12,mli:12,mll:12,mly:12,mma:12,mod:12,mode:[2,7,12],modelica:[1,2,12,23],modelicalex:12,modelin:[1,2],modelvers:10,modif:11,modifi:[0,7,15],modul:[0,2,4,5,7,11,12,14,15,18],modula2:12,modula2lex:12,modula:[1,2,23],modulo:12,mof:12,moin:[2,12,13],moinmoin:[2,8,12,23],moinwikilex:12,mondrian:17,mondrianstyl:17,monkei:[2,12],monkeylex:12,mono:[2,7],monofont:7,monokai:[1,2],monospac:7,mont:[1,2],montelex:12,moo:12,moocod:[1,2,12],moocodelex:12,moon:12,moonscript:[1,2,12],moonscriptlex:12,morai:1,more:[2,3,6,7,10,11,12,15,22],morton:1,most:[0,2,6,7,11,12,17,20,22],moura:1,move:12,movsisyan:1,mozhashpreproc:12,mozilla:[2,12],mozpercentpreproc:12,mozpreproc:12,mozpreproccsslex:12,mozpreprochashlex:12,mozpreprocjavascriptlex:12,mozpreprocpercentlex:12,mozpreprocxullex:12,mq4:12,mq5:12,mqh:12,mql4:12,mql5:12,mql:[2,12],mqllexer:12,msc:12,mscgen:[1,2,12],mscgenlex:12,msdo:[1,2,12],msdossessionlex:12,much:[2,11,12],muhamedag:1,mulitpart:12,multi:[2,11,18,21],multilin:[2,11,18],multipart:12,multipl:[2,7,12],mupad:[1,2,12,23],mupadlex:12,must:[0,3,5,6,7,11,12,15,17],muthiah:1,mxml:[1,2],mxmllexer:12,myghti:[1,2,12,23],myghtycsslex:12,myghtyhtmllex:12,myghtyjavascriptlex:12,myghtylex:12,myghtyxmllex:12,mygthi:12,mylex:11,mynewlex:11,mypythonlex:11,mysql:[2,12,23],mysqllex:12,mystyl:17,myt:12,nafu:7,nam:1,name:[0,2,4,5,7,11,12,13,14,15,17],namehighlightfilt:[2,5],namespac:[1,2,18],nasm:[2,12],nasmlex:[2,12],nasmobjdumplex:12,nathan:1,nativ:[7,12],naveen:1,nawk:12,nbp:12,nbsp:18,ncar:2,ncl:12,ncllexer:12,nearest:7,nearli:2,necessari:7,need:[0,2,4,6,7,11,12,14,15,21,22],needl:11,neg:12,nelson:[1,2],nemerl:[1,2,12,23],nemerlelex:12,neo4j:12,nesc:[1,2,12],nesclex:12,nescsrc:12,nest:[2,11,12],net:[2,23],neufeld:1,neujahr:2,never:11,nevertheless:11,new_stat:11,newest:2,newli:[11,18],newlin:[2,5,7,11,12],newlisp:[2,12],newlisplex:12,newspeak:[2,12],newspeaklanguag:12,newspeaklex:12,next:[11,12],ng2:12,nginx:[2,12,23],nginxconflex:[2,12],nguyen:1,nick:1,nil:[1,12],nimrod:[1,2,23],nimrodlex:12,nit:[1,2],nitlex:12,nix:[1,2,7],nixlex:12,nobackground:7,nobodi:18,nobold:17,noclass:[2,7],noclobber_cssfil:[2,7],noehr:1,noinherit:17,noital:17,nolta:1,non:[2,3,7,11,17,19],none:[0,4,6,7,11,12,18],nonempti:7,nontermin:12,nonzero:5,normal:[0,3,4,5,11,12,18],norman:1,north:1,nose:2,notabl:12,note:[4,5,7,11,12,14,15,17,18,19,23],notebook:2,noth:11,notifi:22,notmuch:[2,12,23],notmuchlex:12,nounderlin:17,nov:2,now:[2,7,11,12,19],nowrap:7,nprint:15,nqp:12,nresult:10,nroff:12,ns2:12,nsh:12,nsi:[2,12],nsislex:12,nth:7,nullformatt:[6,7],number:[2,3,5,6,7,11,12,15,17,18,21,22],numer:[2,12],numpi:[1,2,12],numpylex:12,nusmv:[1,2,12],nusmvlex:12,nvidia:12,obei:7,obj:12,objc:[2,12],objdumb:12,objdump:[2,12],objdumplex:12,object:[0,1,2,5,6,7,11,15,18,19,23],objectivec:[1,2,12],objectiveclex:12,objectivecpplex:12,objectivej:12,objectivejlex:12,objectpasc:12,objj:12,objm2:12,obrist:1,obviou:11,ocaml:[2,12,23],ocamllex:12,occasion:19,occur:11,oct:[2,18],octal:18,octav:[1,2,12,23],octavelex:12,odbc:12,odd:2,odin:12,odinlex:12,off:[0,5,12],offload:2,offset:7,often:[11,16],old:[2,17,18],oldhtmlformatt:6,oleh:1,oliva:1,olivi:1,olov:1,omg:12,omit:3,onclick:7,one:[0,2,3,
5,6,7,11,12,15,17,19],ones:[0,11,12],onli:[0,2,3,7,11,12,15,18,21],onto:11,ooc:2,ooclex:12,opa:[1,2,12],opalang:12,opalex:12,open:[6,11,14,22],openbug:12,opencobol:[1,2,12],openedg:[1,2,12],openedgelex:12,opengl:12,openingcommentdelim:12,oper:[1,2,11,17],optim:[2,11],option:[1,2,4,5,6,9,11,12,13,17,19],optionerror:0,optnam:0,orc:12,orchestra:12,order:[2,12,15,21],ordinari:12,org:[1,10,12,22],origin:[2,12],other:[1,2,7,11,13,17,18,19,21,22],otherlex:11,otherst:11,otherwis:[0,7,11,12],out:[2,3,5,10],outencod:[2,3,7,19],outfil:[0,6,7,15],outlin:21,output:[0,2,3,4,5,6,7,11,13,15,18,19,21,22],outsid:[11,12],over:[0,2,6,7,11,15,17,22],overhaul:1,overload:18,overrid:[0,2,3,4,6,7,11,12,19],overridden:0,overview:[15,17],overwrit:6,overwritten:7,owen:1,own:[0,5,7,8,18,23],oxford:1,ozarowski:2,p6l:12,p6m:12,pace:22,pacemak:12,packag:[2,7,11,18],pacman:12,pacmanconf:12,pacmanconflex:12,pad:7,page:[0,2,7,10,11,13,14,15,23],paid:22,paint:21,pair:[0,11],pan:[2,12],panlex:12,pannuto:1,paper:12,paramet:[6,11],parasail:[1,2],parasaillex:12,paren:2,parent:[6,11,18],parenthesi:11,paris:1,pars:[2,6,12,21],parser:[2,13,18],part:[6,11,12,18],partial:2,particular:[3,7,12],partner:12,pas:[3,12],pascal:[2,3,5],pass:[2,3,4,6,7,11,12,15,17,19],past:[7,12],pastebin:[12,21],pastebox:21,pat:1,patch:[1,2,18],path:[7,11,12,18,21],patrick:1,pattern:[0,2,7,11,12,14,15],patx:21,paul:1,paulo:1,pawn:2,pawnlex:12,pcmk:12,pdf:[12,21],peculiar:2,pekka:1,peopl:16,pep:2,pepijn:1,per:[2,3,12,17,19],percent:12,perfect:7,perform:[2,11,12,18],perl6:12,perl6lex:12,perl:[1,2,15,18,22,23],perllex:[12,15],permit:12,persist:12,pete:1,peterson:1,pfannschmidt:1,pgsql:12,phil:1,philosophi:17,php3:12,php4:12,php5:12,php:[1,2,9,11,18,21,23],phpbb:7,phplexer:[11,12],phpygment:[9,21],phtml:12,picheta:1,pick:7,pida:21,piec:[7,15],pierr:[1,2],pig:[1,2,12],piglex:12,pigment:21,pike:[1,2,12],pikelex:12,pil:[2,7],pilcrow:5,pim:12,pinkham:1,piotr:2,pip:[2,20],pipe:[12,15],pixel:7,pixmap:7,pkg:12,pkg_resourc:2,pkgbuild:12,pkgconfig:12,pkgconfiglex:12,pl6:12,place:[2,12,20,22],plain:[2,12,15],platform:[12,20,21],player:12,pleas:[7,8,14,21],plot:2,plpgsql:12,plpgsqllexer:12,plrm:12,plt:12,plu:[2,7,11,12,23],plugin:[8,12,13,17,21],pm6:12,pmod:12,png:[2,7],pocoo:22,pod:2,point:[5,7,11],polici:12,pom:10,poni:[2,23],ponylex:12,pop:[2,11],popular:12,port:2,pos:11,posh:12,posit:[0,2,11],posix:12,possibl:[2,6,7,10,11,12,15,18,19,21],post:7,postgr:12,postgresconsolelex:12,postgreslex:12,postgresql:[1,2,12],postmarkup:21,postscr:12,postscript:[1,2,12,23],postscriptlex:12,pot:12,pov:[2,12,23],povrai:[12,23],povraylex:12,power:[11,21],powershel:[1,2,12,23],powershelllex:12,powershellsessionlex:12,praat:[1,2],praatlex:12,pre:[2,6,7,10,12,15],preambl:7,preced:[2,11],prefer:11,prefix:[2,7,11,12,14,18],preimplement:11,prepar:11,prepend:[3,7],preproc:18,preprocess:12,preprocessor:[2,9,11,12,18],present:[2,12,21],preserv:7,prestyl:[2,7],prettifi:22,prevent:[11,17],previou:11,previous:2,prg:12,primari:[12,15],primit:12,principl:18,print:[3,7,10,12,15,17,18,21],printabl:12,println:10,prioriti:2,privaci:21,pro:[1,2],probabl:[0,4,12],problem:[2,7],proc:12,procedur:[2,11,12],process:[2,3,11],processor:[1,2,9],produc:[0,2,7,11,12,15],profil:12,progopedia:21,program:[2,16,18,22],progress:12,project:[2,7,10,17,21,22],prolog:[2,23],prologlex:12,prompt:[2,12,18],proof:12,proper:11,properli:[2,21],properti:[2,7,12],propertieslex:12,propos:18,proprietari:12,proto:2,protobuf:12,protobuflex:12,protocol:[1,2,12],prototyp:12,prover:12,provid:[0,2,3,6,7,9,11,12,17],prs:12,prynn:1,prypin:1,p
s1:12,ps1con:12,psc:12,pseudo:[11,12,18],psi:12,psl:12,psm1:12,psql:12,pth:2,publicli:11,publish:12,pudb:21,pug:12,puglex:12,pull:[2,23],pumbaa80:2,punctuat:[2,12],puppet:[1,2,12],puppetlex:12,pure:12,purpos:[6,12,15],push:11,put:[7,11,12,13,17],pwn:12,pxd:12,pxi:12,py2:12,py2tb:12,py3:12,py3tb:12,pybtex:12,pycon:12,pygment:[1,3,4,5,6,7,11,14,15,17,18,19,20,22,23],pykleur:2,pypi:[1,2,7,12,20],pypylog:12,pypyloglex:12,pyrex:12,pytb:12,python2:12,python2lex:[2,12],python2tracebacklex:12,python3:[2,12],python3lex:[2,12,17],python3tracebacklex:[2,12],python:[1,2,3,7,9,10,11,13,15,16,18,20,21,22,23],pythonconsolelex:[2,12],pythoninterpret:10,pythonlex:[2,5,10,11,12,15,19],pythontracebacklex:[2,12],pyw:12,pyx:12,qbasic:[2,12],qbasiclex:12,qbs:12,qbzr:21,qdeclarativeintroduct:12,qml:[1,2,12],qmllexer:12,qualifi:12,qualiti:22,quickstart:[2,8],quit:[2,4,7,21,22],quot:[2,3,10,15,18],quotat:5,qvt:[1,2],qvto:12,qvtolex:12,r10:12,r5r:12,rabel:1,racket:[1,2,12],racketlex:12,ragel:[1,2,12,23],ragelclex:12,ragelcpplex:12,rageldlex:12,ragelembeddedlex:12,rageljavalex:12,ragellex:12,ragelobjectiveclex:12,ragelrubylex:12,rai:[2,23],raichoo:1,rainbow:[1,2],rais:[0,2,5,7,12],raiseonerror:5,raiseonerrortokenfilt:[2,5],rake:12,rakefil:12,rang:[0,12,21,22,23],rare:2,rasul:1,rather:11,raw:[2,7,18],rawtokenformatt:[2,7,12],rawtokenlex:[7,12],raytrac:12,rbcon:12,rbnf:12,rbw:12,rbx:12,rconsol:12,rconsolelex:12,rdlexer:12,rdoc:2,read:[7,11,12,22],readabl:0,readili:6,realli:[7,11,12,14,18],reason:12,reb:12,rebol:[1,2,23],rebollex:12,rebuild:11,receiv:2,recent:12,recogn:[0,2,3,11,12],recognis:[7,12,18],recognit:2,recommend:[7,20],record:12,recreat:12,recurs:11,recurss:12,red:[2,6,7,12,15,17,23],redcod:[1,2,12,23],redcodelex:12,redlex:12,reduc:2,redund:15,reed:1,refactor:1,refer:[2,4,6,7,8,11,12,15],reg:12,regard:3,regedit:12,regeditlex:12,regex:[2,18,22],regist:[0,2,4,5,8,12,13,17],registri:[2,12],regress:2,regular:[2,7,11,18,19],reidi:1,rel:[0,3,7,11],relas:2,relaxng:12,releas:[2,20,22,23],reli:14,remov:[2,7,11],renam:[12,13],render:[2,7,9,11,12,17],renviron:12,repeat:11,repl:12,replac:[2,5,7,11],report:12,repositori:21,repr:7,repres:[15,18],represent:7,request:[2,7,21,23],requir:[2,11,12,14,22],requiredelimit:12,reserv:[12,18],resolv:11,resort:[3,19],resourcebundl:[2,12],resourcelex:12,respect:[2,7,17],respons:13,rest:[2,5,8,11,12,22,23],restrict:12,restructur:2,restructuredtext:[2,12,21],result:[0,2,7,10,11,12,15,17,19],retain:2,reuben:1,review:21,revis:12,rewrit:[1,2],rewritten:12,rewrot:2,rex:12,rexx:[1,2,12],rexxlex:12,rfc822:12,rgb:17,rhistori:12,rhtml:12,rhtmllexer:12,richard:1,richardson:1,right:[5,19],rigor:12,rintel:1,risc:12,rkt:12,rktd:12,rktl:12,rnc:12,rnccompactlex:12,rng:12,rob:1,roberg:[1,2],robert:1,roboconf:[1,2],roboconfgraphlex:12,roboconfinstanceslex:12,robot:[1,2,23],robotframework:12,robotframeworklex:12,roff:12,rolling:1,roman:2,ronach:[1,22],ronni:1,roo:1,root:[7,11,12],root_lex:11,rostyslav:1,roughli:7,rout:12,roux:21,row:7,rpf:12,rpm:[2,12,23],rpmspeclex:12,rprofil:12,rql:[2,12],rqllexer:12,rrggbb:6,rrt:2,rsl:[1,2,12],rsllexer:12,rss:12,rst2pdf:21,rst:[12,16,18],rstlexer:12,rtf:[1,2,7,12,15,21,22],rtfformatt:7,rts:12,rtslexer:12,rubi:[1,2,9,11,18,21,23],rubiniu:12,rubyconsolelex:[2,12],rubylex:[12,15],rudolph:1,ruggier:1,rule:[2,3,7,11,12,13,18],run:[0,7,10,11,12,21],runtim:10,runtimeerror:[7,18],rust:[1,2,23],rustlex:12,rvt:12,sage:12,salminen:1,salt:12,saltstack:12,sam:1,same:[2,3,5,6,7,11,12,17,18],sampl:[9,11,13,16],samplemanag:12,san:[2,7],sandalski:1,sane:7,sap:12,sarl:12,sarllex:12,sas:
12,saslex:12,sass:[1,2,12],sasslex:12,sasso:1,save:[2,11],sbatch:12,sbl:12,scala:[1,2,12,23],scalalex:12,scalat:12,scaml:[1,12],scamllex:12,scd:12,scdoc:[2,23],scdoclex:12,sce:12,scenario:8,scene:[2,23],schafer:1,schemaloc:10,scheme:[1,2,12,23],schemelex:12,schutt:[1,12],schwaiger:1,schweizer:1,schweyer:1,sci:12,scientif:[12,21],scilab:[1,2,12,23],scilablex:12,scm:12,sco:12,sconscript:12,sconstruct:12,scope:[10,21],score:[2,12],screen:13,script:[2,3,11,14,15,16,21,23],scss:[2,12],scsslexer:12,search:[2,11],sebastian:1,second:[2,11,12],secondari:15,section:[11,21],see:[0,2,3,5,6,7,11,12,15,17,19,21,22],seem:11,seen:3,sel4:12,select:[0,2,3,7,11,12,15],selector:[2,3,7,15],self:[0,4,6,7,11,12,18],semant:15,semicolon:2,send:[6,11,12,19,22],sensit:2,sep:2,separ:[2,3,7,11,12],sequenc:[0,2,7,15,17,18,21,22],sequenti:12,serial:12,server:[2,12,23],servic:[1,21],session:[1,2,23],set:[2,6,7,9,10,11,12,13,15,17,18,19],setup:3,setuptool:14,sever:[2,11,12],sgf:[1,2,23],shade:17,shader:[12,23],shadow:12,shape:12,share:21,shaw:1,sheet:[3,12],shell:[1,2,3,21,23],shellscript:12,shellsess:[1,12],shellsessionlex:2,shen:[1,2,12],shenlex:12,shex:12,shexc:12,shexclex:12,ship:[6,9,11,13,16,17],shorten:12,should:[0,2,5,6,7,11,12,13,14,17],shouldn:[2,11],show:[2,7,11,12],shown:[7,11],siafoo:21,sieker:1,sig:12,sign:[2,3,5,11],signatur:18,sil:12,silent:2,silver:[2,12],silverlex:12,similar:[2,11,15,18],simmon:1,simon:1,simonov:[1,2],simpl:[2,9,10,11,22],simplefilt:4,simpli:11,simplifi:2,simplist:12,simpson:1,sinc:[0,2,3,4,7,9,10,11,12,15,17,19],sing:1,singl:[2,11,18,19],singlelin:11,singleton:18,sircmpwn:12,site:21,size:[6,7],skip:11,skylark:2,slash:[1,2,11,14,23],slashlex:12,slexer:[2,12],slightli:2,slim:[2,12],slimlex:12,slowdown:12,slowish:2,sls:12,slurm:[2,12,23],slurmbashlex:12,smali:[1,2,12],smalilex:12,small:[2,12,15,17],smaller:2,smalltalk:[1,2,23],smalltalklex:12,smart:[3,19],smarter:2,smartgameformatlex:12,smarti:[12,23],smartylex:12,smishlajev:1,sml:12,smllexer:[11,12],snapshot:10,snippet:[12,15,16,17,21],snobol4:12,snobol:[1,2,23],snobollex:12,snowbal:[1,2,12],snowballlex:12,softwar:[1,12],sol:12,solar:[1,2],solid:[2,23],soliditylex:12,solvabl:11,some:[0,2,4,6,7,11,12,15,17,18,21],somelex:[3,11],someth:[6,10,11,15,18],sometim:[7,11],somewhat:12,sound:11,sourc:[1,2,5,7,9,11,13,14,15,16,19,21,22],sourcecod:[2,7,12,16],sourcepawn:[1,2,12],sourcepawnlex:12,sourceslist:12,sourceslistlex:12,space:[2,3,5,7,11,12,17,23],spacehack:7,spam:[11,15],span:[2,3,7,10,15],sparql:[1,2,12],sparqllex:12,spec:[2,12,23],special:[2,3,5,7,11,18,22],specif:[2,3,7,11],specifi:[0,3,6,7,11,12,17,18],speed:2,spell:11,sphinx:[2,21],spigarelli:1,spitfir:[2,12],split:[0,2,3,12,15,17,18],splitlin:2,splu:12,spt:12,spyder:21,sql:[1,2,23],sqlite3:[2,12],sqlite:23,sqliteconsolelex:12,sqllexer:12,squeak:12,squid:[1,2,12,23],squidconf:12,squidconflex:12,squiggli:2,src:12,ssp:[2,12],ssplexer:12,stabl:22,stack:[2,11],stan:[1,2,12],stand:21,standalon:10,standard:[0,1,2,3,6,7,12,15,21],standard_typ:18,standardml:12,stanlex:12,stap:1,star:11,starlark:2,start:[0,2,6,7,8,11,12,18],starter:11,startinlin:12,stata:[1,2],statalex:12,state1:11,state2:11,state:[2,12],statement:[0,2,12],staticmethod:0,statist:2,statu:12,stdin:12,stdout:[3,19],stefan:[1,12],step:[7,11,21],stepan:1,stephen:1,steve:1,steven:1,still:[2,7,11,12,17,18,19],stingrai:12,stolen:12,store:[7,11,12,21],stou:1,strachan:1,straightforward:6,strang:11,stream:[0,2,3,4,5,6,7,12,15,19],strict:2,string:[0,2,3,5,6,7,10,11,12,15,17,18,19],string_to_tokentyp:18,stringio:7,strip:[11,12,15],stripal:[0,
12,15],stripnl:[0,2,12],strong:18,strongli:12,structur:[2,12],stuart:1,studio:[2,12],stuff:7,style:[0,1,2,5,7,8,11,14,15,18,23],style_map:[0,17],stylebegin:6,styleend:6,styleguid:5,stylemap:6,stylenameclass:17,stylesheet:[2,3,7,13,15],styleshet:12,subclass:[0,2,7,12,17],subdirectori:13,subhead:[11,18],subheadlin:18,submit:8,subpackag:[2,17],subsequ:[0,7,12],subset:12,subsystem:12,subtoken:17,subtyp:18,successor:[21,23],suffix:[2,11],suggest:8,suit:[1,2,11],suitabl:[0,2,7,22],sullivan:1,supercollid:[1,2],supercolliderlex:12,superset:12,supertoken:17,suppli:[12,18],support:[1,2,3,6,7,11,12,15,16,17,18,22],suppos:[7,15],suppress:2,sure:[3,11,12,22],surpris:2,sven:1,svg:[1,2,7,12],svgformatt:7,svh:12,svn:12,swallow:2,swg:12,swift:[1,2,12],swiftlex:12,swig:[1,2,12],swiglex:12,sybas:12,sylvestr:1,symbol:[2,12,18],synonym:11,syntact:[18,21],syntax:[2,3,7,11,17,18,21,22],syntaxerror:2,sys:[3,19],system:[3,7,10,11,20],systemverilog:[1,2,12],systemveriloglex:12,sysutil:12,tab:[2,5,7,12],tabl:[2,7],tabsiz:[0,5,12],tac:12,tad:[1,2,12],tads3:12,tads3lex:12,tag:[2,3,5,6,7,11,12,15,18],tagsfil:7,tagurlformat:7,tail:12,take:[0,2,5,7,11,12,15,20,21,22,23],taken:[0,2,12],tamil:[12,23],tango:[1,2],tanner:1,tap:[1,12],taplex:12,tarbal:[2,11],target:12,task:12,tasm:12,tasmlex:[2,12],tassilo:1,tcl:[1,2,23],tcllexer:12,tcsh:[1,2,12,23],tcshcon:12,tcshlexer:12,tcshsessionlex:12,tea:[1,2,12,23],team:[17,22],teatemplatelex:12,techniqu:11,ted:1,tell:[7,11],templat:[0,2,11,15],tenani:1,teng:[1,2],tera:[1,2,23],teraterm:[2,12],teratermlex:12,teratermmacro:12,term:[1,2,11,15,23],termcap:12,termcaplex:12,termin:[1,2,3,7,12,15,19],terminal16m:7,terminal256:[1,2,7],terminal256formatt:[7,17],terminalformatt:[2,3,7],terminaltruecolorformatt:7,terminfo:12,terminfolex:12,ternari:2,terraform:[1,2,12],terraformi:12,terraformlex:12,test:[1,2,3,15,18],testcas:7,testcaseformatt:7,tex:[7,12,23],texcom:7,texlex:12,text:[0,2,3,7,11,15,16,17,18,19,21,22],textedit:12,textfmt:12,textlex:12,than:[2,9,11,17],thank:[1,2,7,10],thei:[0,2,6,7,11,12,14,15,17,18],them:[0,2,5,7,8,11,12,15,18,21,22,23],theme:[2,17],themselv:11,theori:7,therefor:[3,7,12],thi:[0,2,3,5,6,7,10,11,12,13,14,15,16,17,18,19,20,22,23],thing:11,think:[0,19],third:11,thoma:1,thoroughli:21,those:[6,11,12,14,18],though:[12,21],three:2,thrift:[1,2,12],thriftlex:12,through:[3,11,15],thu:[7,17],thurgood:[1,12],thy:12,tiberiu:[1,2],ticket:[2,22],tiffin:1,tim:[1,2,22],time:[2,7,11],timhatch:1,timothi:1,titl:[0,7],tmp:11,tmpl:12,toc:12,todo:[2,5,12],todotxt:12,todotxtlex:12,togeth:[7,12,15],toggl:7,token:[0,2,3,4,5,6,7,8,12,15,17],token_typ:6,tokenmerg:5,tokenmergefilt:5,tokensourc:[0,6],tokenstr:7,tokentyp:[0,5,7,11],tolbert:1,toler:11,tom:1,toml:[1,2,12,23],tomllex:12,too:[2,3,11,12],tool:[2,12,21,22],top:[7,11,13],toplevel:11,topmost:11,total:[5,11],totaldownload:12,tpl:12,trac:[2,12,21,23],traceback:[1,2,12,18,23],tracker:[2,21,22],traffic:12,trafficscript:1,trail:[12,15],trailer:2,trait:12,transact:[1,12],transactsqllex:12,transcript:12,transfer:12,transform:12,translat:2,transpar:17,treat:[2,12,17],treat_stdlib_adts_as_builtin:12,tree:12,treetop:[1,2,12],treetoplex:12,trevor:1,tri:[2,3,11,19],trick:[8,15],tricki:11,trigger:7,troff:12,trove:2,trust:11,trute:1,tryzelaar:1,tspan:7,tsql:12,tst:12,tsx:12,ttl:12,ttype:[4,6],tupl:[0,6,7,11,12],turbo:12,turbopasc:12,turn:12,turtl:[1,2,12],turtlelex:12,tutori:10,twig:[2,12],twightmllex:12,twiglex:12,two:[6,7,11,12,15],twowaybind:12,txt:[2,7,12],type:[0,2,5,6,7,11,12,15,17,18],typescript:[1,2,12],typescriptlex:12,typeset:[7,12],typic:12
,typo3:12,typo3cm:12,typoscript:[1,2],typoscriptcssdata:12,typoscriptcssdatalex:12,typoscripthtmldata:12,typoscripthtmldatalex:12,typoscriptlex:12,typoscriptrefer:12,ucodelex:12,udalov:1,udiff:[2,12],udo:12,unabl:11,unbalanc:12,unchang:7,uncolor:4,uncolorfilt:4,under:[11,20,21],underlin:[6,7,12,17],underscor:2,understand:[7,14,17],undocu:12,unexpect:19,unfold:12,unfortun:11,unhandl:12,unicod:[2,5,7,8,11,12,15],unicodedecodeerror:19,unicodeerror:2,unicodelevel:12,unicon:2,uniconlex:12,unifi:[2,12],uniqu:[0,15],unistr:2,unit:[2,12],univers:[1,21],unix:12,unknown:12,unless:7,unlex:12,unlimit:[12,18],unmatch:12,unmodifi:11,unnecessari:11,unnecessarili:12,unpack:11,unquot:2,unsign:2,unstyl:18,until:11,untouch:12,unus:18,updat:[1,2,7,12],upper:[3,5],uppercas:[5,15,18],urbi:12,urbiscript:[1,2],urbiscriptlex:12,usabl:[0,6,17,21,22],usag:[0,3,21],use:[2,3,4,5,6,7,10,11,12,13,14,15,16,17,18,19,22],used:[0,2,3,4,5,6,7,11,12,13,14,15,17,18,21],useful:[2,5,7,12,18,21],usepackag:7,user:[0,7,9,12],uses:[0,2,4,7,9,10,11,15],usesyslog:12,using:[0,2,3,4,7,11,12,15,16,17,18,19,20,21],usr:15,usual:[7,15,20],utf8:2,utf:[2,7,10,12,19],util:[0,2,4,21],v4_0_0:10,vala:[1,2,12,23],valalex:12,valentin:1,valid:[0,12,21],vallentin:1,valu:[0,2,3,4,5,6,7,11,12,14,17,18],valueerror:2,van:1,vapi:12,vari:17,variabl:[2,7,10,11,12,18],variant:[12,17],varieti:17,variou:[0,2,8,23],vark:12,varnish:[1,2],varrazzo:1,varun:1,vba:12,vbnet:12,vbnetaspxlex:12,vbnetlex:12,vbs:12,vbscript:[1,2,12],vbscriptlex:12,vcl:12,vcllexer:12,vclsnippet:12,vclsnippetlex:12,vclsrc:12,vctreestatu:12,vctreestatuslex:12,veloc:[2,12],velocityhtmllex:12,velocitylex:12,velocityxmllex:12,vera:7,verbatim:[2,7],verbopt:7,verbosepkglist:12,veri:[0,4,11,12,22],verilog:[2,12,23],veriloglex:12,version:[0,3,4,5,7,10,11,12,15,17,18,19,21,23],versionad:12,vert:12,vfp:12,vgl:[1,2,12],vgllexer:12,vhd:12,vhdl:[1,2,12,23],vhdllexer:12,via:[7,9,12,19],view:[11,12],viewer:[7,21],viewvc:21,vim:[1,2,12,23],viml:12,vimlex:12,vimrc:12,vincent:1,vinot:1,virtualenv:2,visibl:[2,5],visiblewhitespacefilt:[2,5,18],vision:12,visit:15,visual:[1,2,23],vnd:12,voelker:1,volunt:22,vpr:12,wai:[2,7,11,17,18,19],wait:21,want:[3,5,7,10,11,12,13,14,15,17,18],wasn:11,watch:23,wavemetr:12,wdiff:[1,2,12],wdifflex:12,web:[11,21],webmisc:12,websit:2,weechat:[2,12],weechatlog:12,weight:[7,15],weizenbaum:1,welcom:[15,23],well:[2,6,7,12,21,23],were:[2,14],what:[2,3,15],wheel:2,when:[2,5,7,11,12,15,17,22],where:[0,6,7,11,13,15,17],whether:[0,2,7],whetsel:1,which:[0,2,3,5,7,10,11,12,14,15,17,18,19,20],whilei:[1,2],whileylex:12,white:17,whitespac:[0,2,5,7,11,12,15,17,18],whitnei:1,whole:[6,7,11],whose:[2,12,13,15],why:[4,21,23],wide:[12,21,22],width:7,wiki:[2,12,13,21,22,23],wikipedia:21,wildcard:3,william:1,willing:13,winbatch:12,winbug:12,window:[2,7,12,23],winkler:1,winner:2,winston:2,winter:[1,12],wish:21,within:[0,7,12],without:[0,2,7,11,12,14,18],wlua:12,wolfram:12,won:[3,4,7,11],word:[2,3,5,7,11,12,18],wordpress:21,work:[2,3,5,7,10,11,12,15,17,20],workaround:7,workspac:12,world:[3,10,15,17],would:[5,6,7,10,11,12,18],wouldn:7,wppygment:21,wpygment:21,wrap:[2,6,7,15],wrapcod:7,wrapper:[2,7,21],write:[0,2,5,7,8,12,14,15,18,19,21,23],written:[0,1,2,3,7,11,12,14,15,17,19,21],wrong:19,wsdl:12,wsf:12,wstokentyp:5,www:[10,12],wxhtmlpygment:21,wybir:1,x10:1,x10lexer:12,x1b:17,x1z:12,xbase:12,xchat:[2,12],xcode:2,xds:12,xhtml:[12,21],xmi:12,xml:[2,7,10,18,23],xmldjangolex:12,xmlerblex:12,xmllexer:12,xmln:10,xmlphplexer:12,xmlschema:10,xmlsmartylex:12,xoffset:7,xorglex:12,xpl:12,xql:12,xqm:12,xqueri:[1,2,1
2,23],xquerylex:12,xqy:12,xsd:[10,12],xsi:10,xsl:12,xslt:[1,2,12,23],xsltlexer:12,xten:12,xtend:[1,2,12],xtendlex:12,xtlang:12,xtlanglex:12,xtm:12,xul:12,xwiki:21,xxd:12,xxx:5,yai:2,yaml:[1,2,12,23],yamljinjalex:12,yamllex:12,yellow:17,yes:[0,11],yet:3,yield:[0,2,4,6,7,11,12],yml:12,yoffset:7,you:[0,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22],young:1,your:[3,5,7,8,9,12,13,14,16,17,18,23],your_formatt:3,your_lex:[3,11],your_lexer_fil:11,your_named_lex:11,yourapp:17,yourfilt:14,yourformatt:14,yourlex:14,yourmodul:[14,17],yourself:[11,21],yourstyl:[14,17],ystep:7,zamboni:1,zamudio:1,zeek:[2,12,23],zeeklex:12,zeitdilat:2,zep:12,zephir:[12,23],zephirlex:12,zero:12,zerodivisionerror:12,zig:[2,23],ziglang:12,ziglex:12,zimin:1,zimmerman:1,zimtstern:2,zip:12,zsh:12,zshrc:12,zurczak:1},titles:["The full Pygments API","Full contributor list","Pygments changelog","Command Line Interface","Write your own filter","Filters","Write your own formatter","Available formatters","Pygments documentation","Using Pygments in various scenarios","Use Pygments in Java","Write your own lexer","Available lexers","Using Pygments with MoinMoin","Register Plugins","Introduction and Quickstart","Using Pygments in ReST documents","Styles","Builtin Tokens","Unicode and Encodings","Download and installation","Pygments FAQ","Welcome!","Supported languages"],titleterms:{"0rc1":2,"6rc1":2,"class":[7,11],"new":[11,21],Adding:11,RTS:12,SAS:12,The:[0,11,14,22],Use:[10,14],Using:[4,9,11,13,16],actionscript:12,advanc:11,algebra:12,all:[12,23],ambienttalk:12,ampl:12,api:0,apl:12,archetyp:12,architectur:15,assembl:12,author:22,autom:12,avail:[7,12,17],bash:9,basic:12,bibliographi:12,bibtex:12,bnf:12,boa:12,bug:21,builtin:[5,17,18],busi:12,callback:11,can:21,cap:12,chang:11,changelog:2,chapel:12,clean:12,code:12,come:21,command:[3,12,15],comment:18,common:7,compact:12,complet:9,comput:12,config:12,configur:12,consol:12,contribut:22,contributor:1,core:14,creat:17,crystal:12,csound:12,css:12,custom:3,dalvik:12,data:12,decor:4,definit:[6,12],deleg:11,deriv:11,descriptor:12,develop:20,dialect:12,diff:12,document:[8,16],doe:21,domain:12,download:20,dsl:12,dump:12,dylan:12,ecl:12,eiffel:12,elm:12,embed:12,encod:[3,19],engin:12,entrypoint:14,erlang:12,esoter:12,exampl:15,extend:14,extendedregexlex:11,extens:12,ezhil:12,factor:12,famili:12,fantom:12,faq:21,featur:21,felix:12,fiction:12,file:12,filter:[3,4,5],flag:11,floscript:12,format:12,formatt:[0,3,6,7,15],formerli:12,forth:12,fortran:12,foxpro:12,framework:12,freefem:12,from:[11,21],full:[0,1],game:12,gener:[3,6,12,18],get:[3,17],googl:12,grammer:12,graph:12,graphic:12,guess:15,handl:11,hardwar:12,haskel:12,hax:12,help:3,hexadecim:12,high:0,how:[14,21],html:[6,12],icon:12,idl:12,igor:12,includ:12,inferno:12,instal:[12,20],interact:12,interfac:3,intermedi:12,internet:12,introduct:15,iter:12,ivl:12,java:10,javascript:12,julia:12,jvm:12,keyword:[11,18],known:12,languag:[9,12,21,23],level:0,lexer:[0,3,11,12,15],like:12,line:[3,15],lispi:12,list:[1,11,17],liter:18,lookup:15,macro:12,mail:12,make:21,makefil:12,man:12,markdown:9,markup:[12,23],matlab:12,microsoft:12,mime:12,misc:12,model:12,modifi:11,modula:12,moinmoin:13,mont:12,multi:12,multipl:11,multipurpos:12,mxml:12,name:[3,18,21],ncar:12,net:12,nim:12,nimrod:12,nit:12,nix:12,nixo:12,non:12,notat:12,note:3,oberon:12,object:12,onc:11,ooc:12,oper:[12,18],option:[0,3,7,15],orient:12,other:[9,12,23],output:12,over:12,own:[4,6,11,17],packag:[12,20],page:12,parasail:12,parser:12,pascal:12,patch:12,pawn:12,perl:12,php:12,plot:12,plugin:
14,poni:12,praat:12,pro:12,process:[0,12,21],program:[12,21,23],prolog:12,proto:12,prove:12,punctuat:18,pygment:[0,2,8,9,10,12,13,16,21],python:12,queri:12,quickstart:[6,15],qvt:12,raw:12,rdf:12,rebol:12,regex:11,regexlex:11,regist:14,relat:12,relax:12,report:21,requir:21,resourc:12,rest:16,riverb:12,roboconf:12,robot:12,rubi:12,rule:17,rust:12,scan:11,scdoc:12,scenario:9,schema:12,script:12,semant:12,session:12,sgf:12,shell:12,similar:12,simpl:12,slash:12,smalltalk:12,smart:12,smv:12,snobol:12,solid:12,sourc:[12,20],special:12,specif:12,sql:12,stata:12,state:11,stream:11,stuff:12,style:[3,6,12,17,21],stylesheet:12,subclass:[4,11],suggest:21,supercollid:12,support:[21,23],syntax:12,system:[12,21],tcl:12,templat:[12,23],tera:12,term:12,termin:17,test:[11,12],text:12,textmat:9,than:12,theorem:12,thi:21,token:[11,18],trafficscript:12,trick:11,typoscript:12,ucod:12,unicod:19,unicon:12,urbiscript:12,usag:15,use:21,uses:21,variou:[9,12],varnish:12,verif:12,version:[2,20],visual:12,want:21,web:12,welcom:22,what:21,where:21,whilei:12,who:21,wrapper:9,write:[4,6,11],x10:12,xml:12,xorg:12,your:[4,6,11],zig:12}})
\ No newline at end of file
diff --git a/doc/_static/demo.css b/doc/_static/demo.css
new file mode 100644 (file)
index 0000000..9344291
--- /dev/null
@@ -0,0 +1,38 @@
+#try {
+    background-color: #f6f6f6;
+    border-radius: 0;
+    border: 1px solid #ccc;
+    margin-top: 15px;
+    padding: 10px 15px 5px 10px;
+    position: relative;
+}
+
+#try h2 {
+    margin-top: 0;
+}
+
+#try textarea {
+    border: 1px solid #999;
+    padding: 2px;
+    width: 100%;
+    min-height: 150px;
+}
+
+#hlcode pre {
+    background-color: transparent;
+    border-radius: 0;
+}
+
+#loading {
+    position: absolute;
+    top: 0;
+    left: 0;
+    width: 100%;
+    height: 100%;
+    margin: auto auto;
+    background-color: #cccccccc;
+    display: flex;
+    flex-direction: column;
+    justify-content: center;
+    text-align: center;
+}
diff --git a/doc/_static/demo.js b/doc/_static/demo.js
new file mode 100644 (file)
index 0000000..f538492
--- /dev/null
@@ -0,0 +1,100 @@
+languagePluginLoader.then(() => {
+    // pyodide is now ready to use...
+    pyodide.loadPackage('Pygments').then(() => {
+        pyodide.runPython('import pygments.lexers, pygments.formatters.html, pygments.styles');
+
+        var lexerlist = pyodide.runPython('list(pygments.lexers.get_all_lexers())');
+        var sel = document.getElementById("lang");
+        for (const lex of lexerlist) {
+            var opt = document.createElement("option");
+            opt.text = lex[0];
+            opt.value = lex[1][0];
+            sel.add(opt);
+        }
+
+        var stylelist = pyodide.runPython('list(pygments.styles.get_all_styles())');
+        var sel = document.getElementById("style");
+            for (const sty of stylelist) {
+            if (sty != "default") {
+                var opt = document.createElement("option");
+                opt.text = sty;
+                opt.value = sty;
+                sel.add(opt);
+            }
+        }
+
+        document.getElementById("hlbtn").disabled = false;
+        document.getElementById("loading").style.display = "none";
+    });
+});
+
+function new_file() {
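+    // Guess a lexer from the uploaded file's name and preselect it in the language dropdown.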
+    pyodide.globals['fname'] = document.getElementById("file").files[0].name;
+    var alias = pyodide.runPython('pygments.lexers.find_lexer_class_for_filename(fname).aliases[0]');
+    var sel = document.getElementById("lang");
+    for (var i = 0; i < sel.length; i++) {
+        if (sel.options[i].value == alias) {
+            sel.selectedIndex = i;
+            reset_err_hl();
+            break;
+        }
+    }
+}
+
+function reset_err_hl() {
+    document.getElementById("aroundlang").style.backgroundColor = null;
+}
+
+function highlight() {
+    var select = document.getElementById("lang");
+    var alias = select.options.item(select.selectedIndex).value;
+
+    if (alias == "") {
+        document.getElementById("aroundlang").style.backgroundColor = "#ffcccc";
+        return;
+    }
+    pyodide.globals['alias'] = alias;
+
+    var select = document.getElementById("style");
+    pyodide.globals['style'] = select.options.item(select.selectedIndex).value;
+
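+    // Build the lexer and an HtmlFormatter; noclasses=True inlines all styling, so the generated HTML needs no external stylesheet.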
+    pyodide.runPython('lexer = pygments.lexers.get_lexer_by_name(alias)');
+    pyodide.runPython('fmter = pygments.formatters.html.HtmlFormatter(noclasses=True, style=style)');
+
+    var file = document.getElementById("file").files[0];
+    if (file) {
+        file.arrayBuffer().then(function(buf) {
+            pyodide.globals['code_mem'] = buf;
+            pyodide.runPython('code = bytes(code_mem)');
+            highlight_now();
+        });
+    } else {
+        pyodide.globals['code'] = document.getElementById("code").value;
+        highlight_now();
+    }
+}
+
+function highlight_now() {
+    var out = document.getElementById("hlcode");
+    out.innerHTML = pyodide.runPython('pygments.highlight(code, lexer, fmter)');
+    document.location.hash = "#try";
+    document.getElementById("hlcodedl").style.display = "block";
+}
+
+function download_code() {
+    var filename = "highlighted.html";
+    var hlcode = document.getElementById("hlcode").innerHTML;
+    var blob = new Blob([hlcode], {type: 'text/html'});
+    if (window.navigator.msSaveOrOpenBlob) {
+        window.navigator.msSaveBlob(blob, filename);
+    }
+    else{
+        var elem = window.document.createElement('a');
+        elem.href = window.URL.createObjectURL(blob);
+        elem.download = filename;
+        document.body.appendChild(elem);
+        elem.click();
+        document.body.removeChild(elem);
+        window.URL.revokeObjectURL(elem.href);
+    }
+}
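For reference, the embedded runPython() strings above reduce to the standard three-step Pygments pipeline. A minimal plain-Python sketch (the code, alias, and style values here are placeholders for whatever the page collects):

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters.html import HtmlFormatter

    code = 'print("hello")'                     # textarea contents or uploaded file
    lexer = get_lexer_by_name('python')         # from the language dropdown
    fmter = HtmlFormatter(noclasses=True, style='default')  # inline styles, chosen style
    html = highlight(code, lexer, fmter)        # HTML fragment placed into #hlcode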
diff --git a/doc/_static/github.png b/doc/_static/github.png
new file mode 100644 (file)
index 0000000..5d146ad
Binary files /dev/null and b/doc/_static/github.png differ
diff --git a/doc/_static/spinner.gif b/doc/_static/spinner.gif
new file mode 100644 (file)
index 0000000..2212db9
Binary files /dev/null and b/doc/_static/spinner.gif differ
diff --git a/doc/_templates/demo.html b/doc/_templates/demo.html
new file mode 100644 (file)
index 0000000..bc788d1
--- /dev/null
@@ -0,0 +1,53 @@
+{% extends "layout.html" %}
+{% set sidebars = sidebars + ["demo_sidebar.html"] %}
+
+{% block extrahead %}
+{{ super() }}
+<link rel="stylesheet" type="text/css" href="{{ pathto("_static/demo.css", 1) }}">
+<script type="text/javascript">var languagePluginUrl = "{{ pathto("_static/pyodide/", 1) }}";</script>
+<script type="text/javascript" src="{{ pathto("_static/pyodide/pyodide.js", 1) }}"></script>
+<script type="text/javascript" src="{{ pathto("_static/demo.js", 1) }}"></script>
+{% endblock %}
+
+{% block htmltitle %}<title>Demo{{ titlesuffix }}</title>{% endblock %}
+
+{% block body %}
+{{ body }}
+
+<h1>Demo - Try it out!</h1>
+<p>The highlighting here is performed in-browser using
+    a WebAssembly translation of Pygments, courtesy of
+    <a href="https://github.com/iodide-project/pyodide">Pyodide</a>.</p>
+<p>Your content is neither sent over the web nor stored anywhere.</p>
+
+<div id="try">
+    <h2>Enter code and select a language</h2>
+    <form>
+        <p><span id="aroundlang">
+            <label for="lang">Language:</label> &nbsp;<select id="lang" onchange="reset_err_hl()">
+                <option value="">&nbsp;&nbsp;Select a lexer&nbsp;&nbsp;</option>
+            </select>
+           &nbsp;&nbsp;</span>
+            &middot;&nbsp;&nbsp;
+            <label for="style">Style:</label> &nbsp;<select id="style">
+                <option value="default">default&nbsp;&nbsp;&nbsp;&nbsp;</option>
+            </select></p>
+        <p><label for="file">Upload a file here:</label> &nbsp;
+           <input type="file" id="file" onchange="new_file()"> &nbsp; or enter code below:</p>
+       <p><textarea id="code" rows="1" cols="60"></textarea></p>
+       <p style="text-align: right">
+            <input type="button" value="Highlight!" onclick="highlight()" id="hlbtn" disabled>
+           &nbsp;&nbsp;&nbsp; <input type="reset" value="Reset"></p>
+    </form>
+    <div id="loading">
+        <p><img src="{{ pathto("_static/spinner.gif", 1) }}" style="vertical-align: middle"></p>
+        <p>Loading Python...</p>
+    </div>
+</div>
+
+<div id="hlcode"></div>
+
+<div id="hlcodedl" style="display: none">
+    <input type="button" value="Download" onclick="download_code()">
+</div>
+{% endblock %}
diff --git a/doc/_templates/demo_sidebar.html b/doc/_templates/demo_sidebar.html
new file mode 100644 (file)
index 0000000..3f2a86c
--- /dev/null
@@ -0,0 +1 @@
+<p><a href="#try">Back to top</a></p>
diff --git a/doc/_templates/index_with_try.html b/doc/_templates/index_with_try.html
new file mode 100644 (file)
index 0000000..e69de29
index 299545541c04a366d184ebefd9d3069b5087f4bf..5544f98ad46d9e614bbb557f78b337360be5c1f2 100644 (file)
@@ -3,23 +3,22 @@
 <p>This documentation is for version <b>{{ version }}</b>, which is
   not released yet.</p>
 <p>You can use it from the
-  <a href="http://bitbucket.org/birkenfeld/sphinx/">Mercurial repo</a> or look for
-  released versions in the <a href="http://pypi.python.org/pypi/Sphinx">Python
+  <a href="http://github.com/pygments/pygments/">Git repo</a> or look for
+  released versions in the <a href="http://pypi.python.org/pypi/Pygments">Python
     Package Index</a>.</p>
 {% else %}
 <p>Current version: <b>{{ version }}</b></p>
 <p>Get Pygments from the <a href="http://pypi.python.org/pypi/Pygments">Python Package
-Index</a>, or install it with:</p>
+    Index</a>, or install it with:</p>
 <pre>pip install Pygments</pre>
 {% endif %}
 
 <h3>Questions? Suggestions?</h3>
 
-<p>Clone at <a href="https://bitbucket.org/birkenfeld/pygments-main">Bitbucket</a>
-or come to the <tt>#pocoo</tt> channel on FreeNode.</p>
+<p><img src="{{ pathto("_static/github.png", 1) }}" width="24" />
+    Clone at <a href="https://github.com/pygments/pygments">GitHub</a>.</p>
 <p>You can also open an issue at the
-  <a href="https://www.bitbucket.org/birkenfeld/pygments-main/issues/">tracker</a>.</p>
+  <a href="https://github.com/pygments/pygments/issues">tracker</a>.</p>
 
 <p class="logo">A <a href="http://pocoo.org/">
-  <img src="{{ pathto("_static/pocoo.png", 1) }}" /></a> project</a></p>
-
+    <img src="{{ pathto("_static/pocoo.png", 1) }}" /></a> project</p>
index 00db7d9b015a88898ce6cf56d16cae9d6223acb0..3ab5c2e2d9b08df2712c002e3a3062595b452f1f 100644 (file)
@@ -35,7 +35,7 @@ master_doc = 'index'
 
 # General information about the project.
 project = u'Pygments'
-copyright = u'2015, Georg Brandl'
+copyright = u'2006-2019, Georg Brandl and Pygments contributors'
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -97,7 +97,7 @@ html_theme_path = ['_themes']
 
 # The name for this set of Sphinx documents.  If None, it defaults to
 # "<project> v<release> documentation".
-#html_title = None
+html_title = 'Pygments'
 
 # A shorter title for the navigation bar.  Default is the same as html_title.
 #html_short_title = None
@@ -125,12 +125,14 @@ html_static_path = ['_static']
 #html_use_smartypants = True
 
 # Custom sidebar templates, maps document names to template names.
-html_sidebars = {'index': ['indexsidebar.html'],
-                 'docs/*': ['docssidebar.html']}
+html_sidebars = {'index': ['indexsidebar.html', 'searchbox.html']}
 
 # Additional templates that should be rendered to pages, maps page names to
 # template names.
-#html_additional_pages = {}
+if os.environ.get('WEBSITE_BUILD'):
+    html_additional_pages = {
+        'demo': 'demo.html',
+    }
 
 # If false, no module index is generated.
 #html_domain_indices = True
@@ -159,7 +161,7 @@ html_sidebars = {'index': ['indexsidebar.html'],
 #html_file_suffix = None
 
 # Output file base name for HTML help builder.
-htmlhelp_basename = 'Pygmentsdoc'
+htmlhelp_basename = 'Pygments'
 
 
 # -- Options for LaTeX output --------------------------------------------------
@@ -178,8 +180,8 @@ latex_elements = {
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-  ('index', 'Pygments.tex', u'Pygments Documentation',
-   u'Georg Brandl', 'manual'),
+  ('docs/index', 'Pygments.tex', u'Pygments Documentation',
+   u'Pygments authors', 'manual'),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
@@ -208,34 +210,21 @@ latex_documents = [
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-    ('index', 'pygments', u'Pygments Documentation',
-     [u'Georg Brandl'], 1)
+    ('docs/index', 'pygments', u'Pygments Documentation',
+     [u'Pygments authors'], 1)
 ]
 
 # If true, show URL addresses after external links.
 #man_show_urls = False
 
 
-# -- Options for Texinfo output ------------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-#  dir menu entry, description, category)
-texinfo_documents = [
-  ('index', 'Pygments', u'Pygments Documentation',
-   u'Georg Brandl', 'Pygments', 'One line description of project.',
-   'Miscellaneous'),
-]
-
-# Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
+# Example configuration for intersphinx: refer to the Python standard library.
+#intersphinx_mapping = {'http://docs.python.org/': None}
 
-# If false, no module index is generated.
-#texinfo_domain_indices = True
 
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
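+# Make the WEBSITE_BUILD flag available to every page template via the html-page-context event.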
+def pg_context(app, pagename, templatename, ctx, event_arg):
+    ctx['demo_active'] = bool(os.environ.get('WEBSITE_BUILD'))
 
 
-# Example configuration for intersphinx: refer to the Python standard library.
-#intersphinx_mapping = {'http://docs.python.org/': None}
+def setup(app):
+    app.connect('html-page-context', pg_context)
index 30d5c0851d9c52feba695e97745c10459962fc57..4cf710f8ab5693c81ce106d9fc07e43fe7d6015b 100644 (file)
@@ -50,17 +50,12 @@ Pygments documentation
    integrate
 
 **About Pygments**
-              
+
 .. toctree::
    :maxdepth: 1
 
    changelog
    authors
 
-
-If you find bugs or have suggestions for the documentation, please look
-:ref:`here <contribute>` for info on how to contact the team.
-
-.. XXX You can download an offline version of this documentation from the
-   :doc:`download page </download>`.
-
+If you find bugs or have suggestions for the documentation, please submit them
+on `GitHub <https://github.com/pygments/pygments>`_.
index 63bd01a3e098a8c4b59e4d2d4c5a39deb2bdf854..5b6813fb5177a6fc931d5c4e101ae4f91f6cb7a0 100644 (file)
@@ -136,7 +136,7 @@ have to perform the following steps.
 
 First, change to the directory containing the Pygments source code.  You
 will need to have either an unpacked source tarball, or (preferably) a copy
-cloned from BitBucket.
+cloned from GitHub.
 
 .. code-block:: console
 
@@ -611,7 +611,7 @@ possibility to influence the position.
 There are not really any simple examples for lexer callbacks, but you can see
 them in action e.g. in the `SMLLexer` class in `ml.py`_.
 
-.. _ml.py: http://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/ml.py
+.. _ml.py: https://github.com/pygments/pygments/blob/master/pygments/lexers/ml.py
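For orientation, a callback is just a function that takes the lexer and the regex match and yields ``(index, tokentype, value)`` tuples in place of a fixed token, which lets a rule pick token types at runtime. A minimal, hypothetical sketch (the lexer and the keyword choice are invented for illustration)::

    from pygments.lexer import RegexLexer
    from pygments.token import Keyword, Name, Text

    def ident_callback(lexer, match):
        # choose the token type at lexing time instead of hard-coding it
        value = match.group(0)
        token = Keyword if value in ('begin', 'end') else Name
        yield match.start(), token, value

    class CallbackDemoLexer(RegexLexer):
        name = 'CallbackDemo'
        tokens = {
            'root': [
                (r'\s+', Text),
                (r'\w+', ident_callback),
            ],
        }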
 
 
 The ExtendedRegexLexer class
@@ -667,7 +667,7 @@ For example, this is how the hypothetical lexer above would be written with the
 This might sound confusing (and it can really be). But it is needed, and for an
 example look at the Ruby lexer in `ruby.py`_.
 
-.. _ruby.py: https://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/ruby.py
+.. _ruby.py: https://github.com/pygments/pygments/blob/master/pygments/lexers/ruby.py
 
 
 Handling Lists of Keywords
index cf32f481a3baf4701a3023fbeee722e09506b520..975c41b080ec585cd6adee7346cd755faeceef5d 100644 (file)
@@ -18,17 +18,15 @@ manager as usual.
 Development sources
 -------------------
 
-We're using the `Mercurial <http://selenic.com/mercurial>`_ version control
-system.  You can get the development source using this command::
+We're using the Git version control system.  You can get the development source
+using this command::
 
-    hg clone http://bitbucket.org/birkenfeld/pygments-main pygments
+    git clone https://github.com/pygments/pygments
 
-Development takes place at `Bitbucket
-<http://bitbucket.org/birkenfeld/pygments-main>`_, you can browse the source
-online `here <http://bitbucket.org/birkenfeld/pygments-main/src>`_.
+Development takes place at `GitHub <https://github.com/pygments/pygments>`_.
 
 The latest changes in the development source code are listed in the `changelog
-<http://bitbucket.org/birkenfeld/pygments-main/src/tip/CHANGES>`_.
+<https://github.com/pygments/pygments/blob/master/CHANGES>`_.
 
 .. Documentation
    -------------
@@ -36,6 +34,6 @@ The latest changes in the development source code are listed in the `changelog
 .. XXX todo
 
    You can download the <a href="/docs/">documentation</a> either as
-      a bunch of rst files from the Mercurial repository, see above, or
+      a bunch of rst files from the Git repository, see above, or
       as a tar.gz containing rendered HTML files:</p>
       <p><a href="/docs/download/pygmentsdocs.tar.gz">pygmentsdocs.tar.gz</a></p>
index 172929e099e2350eb34ca6a8aa3f6b3af7c885af..108cef4422c2ce36ebac0e95d84d8832791f43bf 100644 (file)
@@ -62,9 +62,9 @@ Please see the :doc:`documentation on styles <docs/styles>`.
 How can I report a bug or suggest a feature?
 --------------------------------------------
 
-Please report bugs and feature wishes in the tracker at Bitbucket.
+Please report bugs and feature wishes in the tracker at GitHub.
 
-You can also e-mail the author or use IRC, see the contact details.
+You can also e-mail the authors; see the contact details.
 
 I want support for this language!
 --------------------------------------
@@ -132,8 +132,9 @@ This is an (incomplete) list of projects and sites known to use the Pygments hig
 * `Clygments <https://github.com/bfontaine/clygments>`_, a pygments wrapper for
   Clojure
 * `PHPygments <https://github.com/capynet/PHPygments>`_, a pygments wrapper for PHP
-
+* `Spyder <https://www.spyder-ide.org/>`_, the Scientific Python Development
+  Environment, uses Pygments for the multi-language syntax highlighting in its
+  `editor <https://docs.spyder-ide.org/editor.html>`_.
 
 If you have a project or web site using Pygments, drop me a line, and I'll add a
 link here.
-
index 261140459f636e6e204bb78c663d117de52e391f..d89277ec0f83f28bc0382b4b338e41ca96900fbf 100644 (file)
@@ -26,15 +26,9 @@ Like every open-source project, we are always looking for volunteers to help us
 with programming. Python knowledge is required, but don't fear: Python is a very
 clear and easy to learn language.
 
-Development takes place on `Bitbucket
-<https://bitbucket.org/birkenfeld/pygments-main>`_, where the Mercurial
-repository, tickets and pull requests can be viewed.
+Development takes place on `GitHub <https://github.com/pygments/pygments>`_.
 
-Our primary communication instrument is the IRC channel **#pocoo** on the
-Freenode network. To join it, let your IRC client connect to
-``irc.freenode.net`` and do ``/join #pocoo``.
-
-If you found a bug, just open a ticket in the Bitbucket tracker. Be sure to log
+If you found a bug, just open a ticket in the GitHub tracker. Be sure to log
 in to be notified when the issue is fixed -- development is not fast-paced as
 the library is quite stable.  You can also send an e-mail to the developers, see
 below.
@@ -42,7 +36,8 @@ below.
 The authors
 -----------
 
-Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python.org*.
+Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python.org*,
+and **Matthäus Chajdas**.
 
 Many lexers and fixes have been contributed by **Armin Ronacher**, the rest of
 the `Pocoo <http://dev.pocoo.org/>`_ team and **Tim Hatch**.
index b06ccc55d3e4353a510b47c33c01ea2c1fd8cc12..a91664c6f7156da47c9447b0a85e207d9cdb3956 100644 (file)
@@ -38,6 +38,7 @@ Programming languages
 * Delphi
 * Dylan
 * `Elm <http://elm-lang.org/>`_
+* Email
 * Erlang
 * `Ezhil <http://ezhillang.org>`_ Ezhil - A Tamil programming language
 * Factor
@@ -69,6 +70,7 @@ Programming languages
 * MuPad
 * Nemerle
 * Nimrod
+* Notmuch
 * Objective-C
 * Objective-J
 * Octave
@@ -88,6 +90,7 @@ Programming languages
 * Rust
 * S, S-Plus, R
 * Scala
+* `Scdoc <https://git.sr.ht/~sircmpwn/scdoc>`_
 * Scheme
 * Scilab
 * `SGF <https://www.red-bean.com/sgf/>`_
@@ -95,6 +98,7 @@ Programming languages
 * `Slurm <https://slurm.schedmd.com/overview.html>`_
 * Smalltalk
 * SNOBOL
+* `Solidity <https://solidity.readthedocs.io/>`_
 * Tcl
 * `Tera Term language <https://ttssh2.osdn.jp/>`_
 * `TOML <https://github.com/toml-lang/toml>`_
@@ -104,7 +108,9 @@ Programming languages
 * Visual Basic.NET
 * Visual FoxPro
 * XQuery
+* `Zeek <https://www.zeek.org>`_
 * Zephir
+* `Zig <https://ziglang.org/>`_
 
 Template languages
 ------------------
@@ -162,9 +168,9 @@ Other markup
 ... that's all?
 ---------------
 
-Well, why not write your own? Contributing to Pygments is easy and fun.  Take a look at the
-:doc:`docs on lexer development <docs/lexerdevelopment>` and
-:ref:`contact details <contribute>`.
+Well, why not write your own? Contributing to Pygments is easy and fun.  Take a
+look at the :doc:`docs on lexer development <docs/lexerdevelopment>`.  Pull
+requests are welcome on `GitHub <https://github.com/pygments/pygments>`_.
 
 Note: the languages listed here are supported in the development version. The
 latest release may lack a few of them.
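
For a sense of what writing your own lexer involves, here is a minimal sketch built on pygments' RegexLexer; the language name, aliases, filename pattern and token rules are purely illustrative and do not correspond to any existing lexer:

    from pygments.lexer import RegexLexer
    from pygments.token import Comment, Keyword, Name, Text

    class MyConfigLexer(RegexLexer):
        """Hypothetical lexer for a made-up configuration format."""
        name = 'MyConfig'
        aliases = ['myconfig']
        filenames = ['*.mycfg']

        tokens = {
            'root': [
                (r'\s+', Text),                       # whitespace
                (r'#.*$', Comment.Single),            # line comments
                (r'\b(set|unset)\b', Keyword),        # the two "keywords"
                (r'[A-Za-z_][A-Za-z0-9_]*', Name),    # everything else named
            ],
        }
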
index d2d05970b6d72655aff724dcdba45c83624864e0..8a2e7a6dbef52a409376f5fc436712d1add5c3fc 100755 (executable)
@@ -1,6 +1,6 @@
 #!/bin/bash
 # Best effort auto-pygmentization with transparent decompression
-# by Reuben Thomas 2008-2016
+# by Reuben Thomas 2008-2019
 # This program is in the public domain.
 
 # Strategy: first see if pygmentize can find a lexer; if not, ask file; if that finds nothing, fail
@@ -15,7 +15,7 @@ file_common_opts="--brief --dereference"
 
 lexer=$(pygmentize -N "$file")
 if [[ "$lexer" == text ]]; then
-    unset lexer
+    # Try to do better than just "text"
     case $(file --mime-type --uncompress $file_common_opts "$file") in
         application/xml|image/svg+xml) lexer=xml;;
         application/javascript) lexer=javascript;;
@@ -66,36 +66,42 @@ if [[ "$lexer" == text ]]; then
     esac
 fi
 
-# Find a preprocessor for compressed files
+# Find a concatenator for compressed files
 concat=cat
 case $(file $file_common_opts --mime-type "$file") in
-    application/x-gzip)  concat=zcat;;
+    application/gzip)    concat=zcat;;
     application/x-bzip2) concat=bzcat;;
     application/x-xz)    concat=xzcat;;
 esac
 
-# Find a suitable lexer, preceded by a hex dump for binary files
+# Find a suitable reader, preceded by a hex dump for binary files,
+# or fmt for text with very long lines
 prereader=""
+reader=cat
 encoding=$(file --mime-encoding --uncompress $file_common_opts "$file")
-if [[ $encoding == "binary" ]]; then
-    prereader="od -x" # POSIX fallback
-    if [[ -n $(which hd) ]]; then
-        prereader="hd" # preferred
-    fi
-    lexer=hexdump
-    encoding=latin1
-fi
-if [[ -n "$lexer" ]]; then
+# FIXME: need a way to switch between hex and text view, as file often
+# misdiagnoses files when they contain a few control characters
+# if [[ $encoding == "binary" ]]; then
+#     prereader="od -x" # POSIX fallback
+#     if [[ -n $(which hd) ]]; then
+#         prereader=hd # preferred
+#     fi
+#     lexer=hexdump
+#     encoding=latin1
+#el
+# FIXME: Using fmt does not work well for system logs
+# if [[ "$lexer" == "text" ]]; then
+#    if file "$file" | grep -ql "text, with very long lines"; then
+#        reader=fmt
+#    fi
+# fi
+if [[ "$lexer" != "text" ]]; then
     reader="pygmentize -O inencoding=$encoding $PYGMENTIZE_OPTS $options -l $lexer"
 fi
 
-# If we found a reader, run it
-if [[ -n "$reader" ]]; then
-    if [[ -n "$prereader" ]]; then
-        exec $concat "$file" | $prereader | $reader
-    else
-        exec $concat "$file" | $reader
-    fi
+# Run the reader
+if [[ -n "$prereader" ]]; then
+    exec $concat "$file" | $prereader | $reader
+else
+    exec $concat "$file" | $reader
 fi
-
-exit 1
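
The script's first step, pygmentize -N "$file", maps the filename to a lexer alias before falling back to file(1). The same lookup is available from Python; a rough sketch (the filename is illustrative):

    from pygments.lexers import get_lexer_for_filename
    from pygments.util import ClassNotFound

    def pick_lexer(filename):
        # Rough Python counterpart of `pygmentize -N "$file"` in the script above.
        try:
            return get_lexer_for_filename(filename)
        except ClassNotFound:
            return None  # the script then falls back to file(1)'s MIME type

    print(pick_lexer('setup.py'))  # e.g. a PythonLexer instance
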
index 0b7831f7f97ee6dc31c3735b2eece6c75f18640b..e0c39b32bacc5fd2901e5d379d9937de847e3a48 100644 (file)
@@ -64,7 +64,7 @@ class Pygments(Directive):
     required_arguments = 1
     optional_arguments = 0
     final_argument_whitespace = True
-    option_spec = dict([(key, directives.flag) for key in VARIANTS])
+    option_spec = {key: directives.flag for key in VARIANTS}
     has_content = True
 
     def run(self):
diff --git a/pygmentize b/pygmentize
deleted file mode 100755 (executable)
index aea3872..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env python2
-
-import sys
-import pygments.cmdline
-try:
-    sys.exit(pygments.cmdline.main(sys.argv))
-except KeyboardInterrupt:
-    sys.exit(1)
index 15c226a3684d090fc0d0e7c90005ea8e58dfef01..b28da13c6a020cb6e23ed54ead2c4258a73f45cd 100644 (file)
     * it is usable as a command-line tool and as a library
     * ... and it highlights even Brainfuck!
 
-    The `Pygments tip`_ is installable with ``easy_install Pygments==dev``.
+    The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``.
 
-    .. _Pygments tip:
-       http://bitbucket.org/birkenfeld/pygments-main/get/tip.zip#egg=Pygments-dev
+    .. _Pygments master branch:
+       https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
 
     :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
@@ -29,7 +29,7 @@ import sys
 
 from pygments.util import StringIO, BytesIO
 
-__version__ = '2.4.2'
+__version__ = '2.5.1'
 __docformat__ = 'restructuredtext'
 
 __all__ = ['lex', 'format', 'highlight']
diff --git a/pygments/__main__.py b/pygments/__main__.py
new file mode 100644 (file)
index 0000000..cd80a2d
--- /dev/null
@@ -0,0 +1,7 @@
+import sys
+import pygments.cmdline
+
+try:
+    sys.exit(pygments.cmdline.main(sys.argv))
+except KeyboardInterrupt:
+    sys.exit(1)
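
The new pygments/__main__.py makes the package runnable as "python -m pygments"; its body simply hands sys.argv to pygments.cmdline.main(). The same entry point can be driven directly; a small sketch that only lists the available lexers, so no input file is needed:

    import pygments.cmdline

    # argv[0] is taken as the program name; the rest are ordinary
    # pygmentize options. '-L lexers' prints the known lexers to stdout.
    pygments.cmdline.main(['pygmentize', '-L', 'lexers'])
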
index 292cb87030a46b7704d5caa442a40495a349b319..34752d66b2e9966f45e8796c557ee74ce055e599 100644 (file)
@@ -554,7 +554,7 @@ def main(args=sys.argv):
                   file=sys.stderr)
             print('Please report the whole traceback to the issue tracker at',
                   file=sys.stderr)
-            print('<https://bitbucket.org/birkenfeld/pygments-main/issues>.',
+            print('<https://github.com/pygments/pygments/issues>.',
                   file=sys.stderr)
             print('*' * 65, file=sys.stderr)
             print(file=sys.stderr)
index d65c09cedde6a81f9c13c80b2167c4498fb2af84..042f04cfb1e5f1bf1b5b3a9fdb422885471ff36a 100644 (file)
@@ -435,7 +435,7 @@ class HtmlFormatter(Formatter):
         self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
         self.linenospecial = abs(get_int_opt(options, 'linenospecial', 0))
         self.nobackground = get_bool_opt(options, 'nobackground', False)
-        self.lineseparator = options.get('lineseparator', '\n')
+        self.lineseparator = options.get('lineseparator', u'\n')
         self.lineanchors = options.get('lineanchors', '')
         self.linespans = options.get('linespans', '')
         self.anchorlinenos = options.get('anchorlinenos', False)
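
The lineseparator default is now an explicit unicode string; it remains an ordinary per-instance option. A short sketch of overriding it:

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    # lineseparator defaults to u'\n'; override it to join lines with <br> tags.
    print(highlight(u'x = 1\ny = 2\n', PythonLexer(),
                    HtmlFormatter(lineseparator=u'<br>\n')))
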
index de0ea0a4eff4e544281c64bb9591fbcbb3501509..6bb3364458ecfc79d86b983613e12ff205a05607 100644 (file)
@@ -46,9 +46,9 @@ STYLES = {
 }
 
 # A sane default for modern systems
-DEFAULT_FONT_NAME_NIX = 'Bitstream Vera Sans Mono'
+DEFAULT_FONT_NAME_NIX = 'DejaVu Sans Mono'
 DEFAULT_FONT_NAME_WIN = 'Courier New'
-DEFAULT_FONT_NAME_MAC = 'Courier New'
+DEFAULT_FONT_NAME_MAC = 'Menlo'
 
 
 class PilNotAvailable(ImportError):
@@ -125,8 +125,8 @@ class FontManager(object):
         for font_dir in (os.path.join(os.getenv("HOME"), 'Library/Fonts/'),
                          '/Library/Fonts/', '/System/Library/Fonts/'):
             font_map.update(
-                ((os.path.splitext(f)[0].lower(), os.path.join(font_dir, f))
-                    for f in os.listdir(font_dir) if f.lower().endswith('ttf')))
+                (os.path.splitext(f)[0].lower(), os.path.join(font_dir, f))
+                for f in os.listdir(font_dir) if f.lower().endswith('ttf'))
 
         for name in STYLES['NORMAL']:
             path = self._get_mac_font_path(font_map, self.font_name, name)
@@ -237,7 +237,8 @@ class ImageFormatter(Formatter):
         bold and italic fonts will be generated.  This really should be a
         monospace font to look sane.
 
-        Default: "Bitstream Vera Sans Mono" on Windows, Courier New on \\*nix
+        Default: "Courier New" on Windows, "Menlo" on Mac OS, and
+                 "DejaVu Sans Mono" on \\*nix
 
     `font_size`
         The font size in points to be used.
@@ -521,7 +522,8 @@ class ImageFormatter(Formatter):
         rectw = self.image_pad + self.line_number_width - self.line_number_pad
         draw.rectangle([(0, 0), (rectw, recth)],
                        fill=self.line_number_bg)
-        draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg)
+        if self.line_number_separator:
+            draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg)
         del draw
 
     def format(self, tokensource, outfile):
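
With the drawing call now guarded by self.line_number_separator, the formatter's line_number_separator option actually controls whether the separator line is rendered. A sketch, assuming Pillow is installed; the output path is illustrative:

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import ImageFormatter

    # line_number_separator=False skips the line between numbers and code.
    with open('out.png', 'wb') as f:
        highlight(u'x = 1\ny = 2\n', PythonLexer(),
                  ImageFormatter(line_numbers=True, line_number_separator=False),
                  f)
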
index a2eca91a35a6e1a81462b54b84a15809ccec7763..c09eff0cb443524fef909d85043aeee9bd4a84c2 100644 (file)
@@ -10,7 +10,7 @@
 """
 
 from pygments.formatter import Formatter
-from pygments.util import OptionError, get_choice_opt
+from pygments.util import get_choice_opt
 from pygments.token import Token
 from pygments.console import colorize
 
@@ -87,14 +87,17 @@ class RawTokenFormatter(Formatter):
         if self.compress == 'gz':
             import gzip
             outfile = gzip.GzipFile('', 'wb', 9, outfile)
+
             def write(text):
                 outfile.write(text.encode())
             flush = outfile.flush
         elif self.compress == 'bz2':
             import bz2
             compressor = bz2.BZ2Compressor(9)
+
             def write(text):
                 outfile.write(compressor.compress(text.encode()))
+
             def flush():
                 outfile.write(compressor.flush())
                 outfile.flush()
@@ -115,14 +118,15 @@ class RawTokenFormatter(Formatter):
                 write("%s\t%r\n" % (ttype, value))
         flush()
 
+
 TESTCASE_BEFORE = u'''\
-    def testNeedsName(self):
+    def testNeedsName(lexer):
         fragment = %r
         tokens = [
 '''
 TESTCASE_AFTER = u'''\
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+        assert list(lexer.get_tokens(fragment)) == tokens
 '''
 
 
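
The gzip and bz2 writer closures above are selected by the formatter's compress option. A sketch exercising the 'gz' path; since RawTokenFormatter emits bytes, a binary buffer is used:

    from io import BytesIO
    from pygments import format
    from pygments.lexers import PythonLexer
    from pygments.formatters import RawTokenFormatter

    buf = BytesIO()
    # compress='gz' routes every write through the GzipFile wrapper shown above.
    format(PythonLexer().get_tokens(u'print("hi")'),
           RawTokenFormatter(compress='gz'), buf)
    raw_gz = buf.getvalue()  # gzip-compressed "tokentype<TAB>repr(value)" lines
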
index 0dae6aabb2f6a8b9a73f38de148f675055a1097c..1246db2a734db02694dd9bafb1141b825e610f31 100644 (file)
@@ -35,7 +35,7 @@ class RtfFormatter(Formatter):
         ``'default'``).
 
     `fontface`
-        The used font famliy, for example ``Bitstream Vera Sans``. Defaults to
+        The used font family, for example ``Bitstream Vera Sans``. Defaults to
         some generic font which is supposed to have fixed width.
 
     `fontsize`
@@ -70,7 +70,7 @@ class RtfFormatter(Formatter):
                    .replace(u'}', u'\\}')
 
     def _escape_text(self, text):
-        # empty strings, should give a small performance improvment
+        # empty strings, should give a small performance improvement
         if not text:
             return u''
 
index 1baf93b276ddd5096e007d8b41fc46dad6ad1b94..1cdf25d21e97db66314d756a93e4d3b4ebb63540 100644 (file)
@@ -20,9 +20,13 @@ from pygments.modeline import get_filetype_from_buffer
 from pygments.plugin import find_plugin_lexers
 from pygments.util import ClassNotFound, itervalues, guess_decode, text_type
 
+COMPAT = {
+    'Python3Lexer': 'PythonLexer',
+    'Python3TracebackLexer': 'PythonTracebackLexer',
+}
 
 __all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
-           'guess_lexer', 'load_lexer_from_file'] + list(LEXERS)
+           'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + list(COMPAT)
 
 _lexer_cache = {}
 _pattern_cache = {}
@@ -327,6 +331,8 @@ class _automodule(types.ModuleType):
             cls = _lexer_cache[info[1]]
             setattr(self, name, cls)
             return cls
+        if name in COMPAT:
+            return getattr(self, COMPAT[name])
         raise AttributeError(name)
 
 
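
The COMPAT table, together with the _automodule fallback, keeps the renamed lexers importable under their old names. A sketch of what that is expected to buy, assuming the aliasing works as the hunk suggests:

    from pygments.lexers import Python3Lexer, PythonLexer

    # The module-level attribute fallback resolves the legacy name to the
    # new class, so existing Python3Lexer imports keep working.
    assert Python3Lexer is PythonLexer
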
index 6ac79ad77c505050dd6be89921837744742062ed..b76c22aba8855c44a43a2743fb57c052427ae97a 100644 (file)
@@ -14,7 +14,7 @@
     :license: BSD, see LICENSE for details.
 """
 
-ASYFUNCNAME = set((
+ASYFUNCNAME = {
     'AND',
     'Arc',
     'ArcArrow',
@@ -1038,9 +1038,9 @@ ASYFUNCNAME = set((
     'ztick',
     'ztick3',
     'ztrans'
-))
+}
 
-ASYVARNAME = set((
+ASYVARNAME = {
     'AliceBlue',
     'Align',
     'Allow',
@@ -1642,4 +1642,4 @@ ASYVARNAME = set((
     'ylabelwidth',
     'zerotickfuzz',
     'zerowinding'
-))
+}
index d0306fab687c839bc3c40e9576ecbdf204af1bbb..7722e81f530a41aa366c34d1b6c48480a4991d3d 100644 (file)
@@ -9,7 +9,7 @@
     :license: BSD, see LICENSE for details.
 """
 
-BUILTIN_FUNCTIONS = set((  # 638 functions
+BUILTIN_FUNCTIONS = {  # 638 functions
     '<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+',
     'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin',
     'adjustable-array-p', 'adjust-array', 'allocate-instance',
@@ -157,17 +157,17 @@ BUILTIN_FUNCTIONS = set((  # 638 functions
     'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line',
     'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p',
     'y-or-n-p', 'zerop',
-))
+}
 
-SPECIAL_FORMS = set((
+SPECIAL_FORMS = {
     'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if',
     'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet',
     'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote',
     'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw',
     'unwind-protect',
-))
+}
 
-MACROS = set((
+MACROS = {
     'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond',
     'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric',
     'define-compiler-macro', 'define-condition', 'define-method-combination',
@@ -188,19 +188,19 @@ MACROS = set((
     'with-input-from-string', 'with-open-file', 'with-open-stream',
     'with-output-to-string', 'with-package-iterator', 'with-simple-restart',
     'with-slots', 'with-standard-io-syntax',
-))
+}
 
-LAMBDA_LIST_KEYWORDS = set((
+LAMBDA_LIST_KEYWORDS = {
     '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
     '&rest', '&whole',
-))
+}
 
-DECLARATIONS = set((
+DECLARATIONS = {
     'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special',
     'ignorable', 'notinline', 'type',
-))
+}
 
-BUILTIN_TYPES = set((
+BUILTIN_TYPES = {
     'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit',
     'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil',
     'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float',
@@ -217,9 +217,9 @@ BUILTIN_TYPES = set((
     'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition',
     'style-warning', 'type-error', 'unbound-variable', 'unbound-slot',
     'undefined-function', 'warning',
-))
+}
 
-BUILTIN_CLASSES = set((
+BUILTIN_CLASSES = {
     'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character',
     'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream',
     'file-stream', 'float', 'function', 'generic-function', 'hash-table',
@@ -229,4 +229,4 @@ BUILTIN_CLASSES = set((
     'standard-generic-function', 'standard-method', 'standard-object',
     'string-stream', 'stream', 'string', 'structure-class', 'structure-object',
     'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector',
-))
+}
index f7c55c611713e2eca2d5077088cdc5410b412685..2cf444385183a9145e0508e3e72afa5af332d11b 100644 (file)
@@ -14,9 +14,9 @@
 
 from __future__ import print_function
 
-COCOA_INTERFACES = set(['UITableViewCell', 'HKCorrelationQuery', 'NSURLSessionDataTask', 'PHFetchOptions', 'NSLinguisticTagger', 'NSStream', 'AVAudioUnitDelay', 'GCMotion', 'SKPhysicsWorld', 'NSString', 'CMAttitude', 'AVAudioEnvironmentDistanceAttenuationParameters', 'HKStatisticsCollection', 'SCNPlane', 'CBPeer', 'JSContext', 'SCNTransaction', 'SCNTorus', 'AVAudioUnitEffect', 'UICollectionReusableView', 'MTLSamplerDescriptor', 'AVAssetReaderSampleReferenceOutput', 'AVMutableCompositionTrack', 'GKLeaderboard', 'NSFetchedResultsController', 'SKRange', 'MKTileOverlayRenderer', 'MIDINetworkSession', 'UIVisualEffectView', 'CIWarpKernel', 'PKObject', 'MKRoute', 'MPVolumeView', 'UIPrintInfo', 'SCNText', 'ADClient', 'PKPayment', 'AVMutableAudioMix', 'GLKEffectPropertyLight', 'WKScriptMessage', 'AVMIDIPlayer', 'PHCollectionListChangeRequest', 'UICollectionViewLayout', 'NSMutableCharacterSet', 'SKPaymentTransaction', 'NEOnDemandRuleConnect', 'NSShadow', 'SCNView', 'NSURLSessionConfiguration', 'MTLVertexAttributeDescriptor', 'CBCharacteristic', 'HKQuantityType', 'CKLocationSortDescriptor', 'NEVPNIKEv2SecurityAssociationParameters', 'CMStepCounter', 'NSNetService', 'AVAssetWriterInputMetadataAdaptor', 'UICollectionView', 'UIViewPrintFormatter', 'SCNLevelOfDetail', 'CAShapeLayer', 'MCPeerID', 'MPRatingCommand', 'WKNavigation', 'NSDictionary', 'NSFileVersion', 'CMGyroData', 'AVAudioUnitDistortion', 'CKFetchRecordsOperation', 'SKPhysicsJointSpring', 'SCNHitTestResult', 'AVAudioTime', 'CIFilter', 'UIView', 'SCNConstraint', 'CAPropertyAnimation', 'MKMapItem', 'MPRemoteCommandCenter', 'PKPaymentSummaryItem', 'UICollectionViewFlowLayoutInvalidationContext', 'UIInputViewController', 'PKPass', 'SCNPhysicsBehavior', 'MTLRenderPassColorAttachmentDescriptor', 'MKPolygonRenderer', 'CKNotification', 'JSValue', 'PHCollectionList', 'CLGeocoder', 'NSByteCountFormatter', 'AVCaptureScreenInput', 'MPFeedbackCommand', 'CAAnimation', 'MKOverlayPathView', 'UIActionSheet', 'UIMotionEffectGroup', 'NSLengthFormatter', 'UIBarItem', 'SKProduct', 'AVAssetExportSession', 'NSKeyedUnarchiver', 'NSMutableSet', 'SCNPyramid', 'PHAssetCollection', 'MKMapView', 'HMHomeManager', 'CATransition', 'MTLCompileOptions', 'UIVibrancyEffect', 'CLCircularRegion', 'MKTileOverlay', 'SCNShape', 'ACAccountCredential', 'SKPhysicsJointLimit', 'MKMapSnapshotter', 'AVMediaSelectionGroup', 'NSIndexSet', 'CBPeripheralManager', 'CKRecordZone', 'AVAudioRecorder', 'NSURL', 'CBCentral', 'NSNumber', 'AVAudioOutputNode', 'MTLVertexAttributeDescriptorArray', 'MKETAResponse', 'SKTransition', 'SSReadingList', 'HKSourceQuery', 'UITableViewRowAction', 'UITableView', 'SCNParticlePropertyController', 'AVCaptureStillImageOutput', 'GCController', 'AVAudioPlayerNode', 'AVAudioSessionPortDescription', 'NSHTTPURLResponse', 'NEOnDemandRuleEvaluateConnection', 'SKEffectNode', 'HKQuantity', 'GCControllerElement', 'AVPlayerItemAccessLogEvent', 'SCNBox', 'NSExtensionContext', 'MKOverlayRenderer', 'SCNPhysicsVehicle', 'NSDecimalNumber', 'EKReminder', 'MKPolylineView', 'CKQuery', 'AVAudioMixerNode', 'GKAchievementDescription', 'EKParticipant', 'NSBlockOperation', 'UIActivityItemProvider', 'CLLocation', 'NSBatchUpdateRequest', 'PHContentEditingOutput', 'PHObjectChangeDetails', 'HKWorkoutType', 'MPMoviePlayerController', 'AVAudioFormat', 'HMTrigger', 'MTLRenderPassDepthAttachmentDescriptor', 'SCNRenderer', 'GKScore', 'UISplitViewController', 'HKSource', 'NSURLConnection', 'ABUnknownPersonViewController', 'SCNTechnique', 'UIMenuController', 'NSEvent', 'SKTextureAtlas', 
'NSKeyedArchiver', 'GKLeaderboardSet', 'NSSimpleCString', 'AVAudioPCMBuffer', 'CBATTRequest', 'GKMatchRequest', 'AVMetadataObject', 'SKProductsRequest', 'UIAlertView', 'NSIncrementalStore', 'MFMailComposeViewController', 'SCNFloor', 'NSSortDescriptor', 'CKFetchNotificationChangesOperation', 'MPMovieAccessLog', 'NSManagedObjectContext', 'AVAudioUnitGenerator', 'WKBackForwardList', 'SKMutableTexture', 'AVCaptureAudioDataOutput', 'ACAccount', 'AVMetadataItem', 'MPRatingCommandEvent', 'AVCaptureDeviceInputSource', 'CLLocationManager', 'MPRemoteCommand', 'AVCaptureSession', 'UIStepper', 'UIRefreshControl', 'NEEvaluateConnectionRule', 'CKModifyRecordsOperation', 'UICollectionViewTransitionLayout', 'CBCentralManager', 'NSPurgeableData', 'PKShippingMethod', 'SLComposeViewController', 'NSHashTable', 'MKUserTrackingBarButtonItem', 'UILexiconEntry', 'CMMotionActivity', 'SKAction', 'SKShader', 'AVPlayerItemOutput', 'MTLRenderPassAttachmentDescriptor', 'UIDocumentInteractionController', 'UIDynamicItemBehavior', 'NSMutableDictionary', 'UILabel', 'AVCaptureInputPort', 'NSExpression', 'CAInterAppAudioTransportView', 'SKMutablePayment', 'UIImage', 'PHCachingImageManager', 'SCNTransformConstraint', 'HKCorrelationType', 'UIColor', 'SCNGeometrySource', 'AVCaptureAutoExposureBracketedStillImageSettings', 'UIPopoverBackgroundView', 'UIToolbar', 'NSNotificationCenter', 'UICollectionViewLayoutAttributes', 'AVAssetReaderOutputMetadataAdaptor', 'NSEntityMigrationPolicy', 'HMUser', 'NSLocale', 'NSURLSession', 'SCNCamera', 'NSTimeZone', 'UIManagedDocument', 'AVMutableVideoCompositionLayerInstruction', 'AVAssetTrackGroup', 'NSInvocationOperation', 'ALAssetRepresentation', 'AVQueuePlayer', 'HMServiceGroup', 'UIPasteboard', 'PHContentEditingInput', 'NSLayoutManager', 'EKCalendarChooser', 'EKObject', 'CATiledLayer', 'GLKReflectionMapEffect', 'NSManagedObjectID', 'NSEnergyFormatter', 'SLRequest', 'HMCharacteristic', 'AVPlayerLayer', 'MTLRenderPassDescriptor', 'SKPayment', 'NSPointerArray', 'AVAudioMix', 'SCNLight', 'MCAdvertiserAssistant', 'MKMapSnapshotOptions', 'HKCategorySample', 'AVAudioEnvironmentReverbParameters', 'SCNMorpher', 'AVTimedMetadataGroup', 'CBMutableCharacteristic', 'NSFetchRequest', 'UIDevice', 'NSManagedObject', 'NKAssetDownload', 'AVOutputSettingsAssistant', 'SKPhysicsJointPin', 'UITabBar', 'UITextInputMode', 'NSFetchRequestExpression', 'HMActionSet', 'CTSubscriber', 'PHAssetChangeRequest', 'NSPersistentStoreRequest', 'UITabBarController', 'HKQuantitySample', 'AVPlayerItem', 'AVSynchronizedLayer', 'MKDirectionsRequest', 'NSMetadataItem', 'UIPresentationController', 'UINavigationItem', 'PHFetchResultChangeDetails', 'PHImageManager', 'AVCaptureManualExposureBracketedStillImageSettings', 'UIStoryboardPopoverSegue', 'SCNLookAtConstraint', 'UIGravityBehavior', 'UIWindow', 'CBMutableDescriptor', 'NEOnDemandRuleDisconnect', 'UIBezierPath', 'UINavigationController', 'ABPeoplePickerNavigationController', 'EKSource', 'AVAssetWriterInput', 'AVPlayerItemTrack', 'GLKEffectPropertyTexture', 'NSHTTPCookie', 'NSURLResponse', 'SKPaymentQueue', 'NSAssertionHandler', 'MKReverseGeocoder', 'GCControllerAxisInput', 'NSArray', 'NSOrthography', 'NSURLSessionUploadTask', 'NSCharacterSet', 'AVMutableVideoCompositionInstruction', 'AVAssetReaderOutput', 'EAGLContext', 'WKFrameInfo', 'CMPedometer', 'MyClass', 'CKModifyBadgeOperation', 'AVCaptureAudioFileOutput', 'SKEmitterNode', 'NSMachPort', 'AVVideoCompositionCoreAnimationTool', 'PHCollection', 'SCNPhysicsWorld', 'NSURLRequest', 'CMAccelerometerData', 'NSNetServiceBrowser', 
'CLFloor', 'AVAsynchronousVideoCompositionRequest', 'SCNGeometry', 'SCNIKConstraint', 'CIKernel', 'CAGradientLayer', 'HKCharacteristicType', 'NSFormatter', 'SCNAction', 'CATransaction', 'CBUUID', 'UIStoryboard', 'MPMediaLibrary', 'UITapGestureRecognizer', 'MPMediaItemArtwork', 'NSURLSessionTask', 'AVAudioUnit', 'MCBrowserViewController', 'UIFontDescriptor', 'NSRelationshipDescription', 'HKSample', 'WKWebView', 'NSMutableAttributedString', 'NSPersistentStoreAsynchronousResult', 'MPNowPlayingInfoCenter', 'MKLocalSearch', 'EAAccessory', 'HKCorrelation', 'CATextLayer', 'NSNotificationQueue', 'UINib', 'GLKTextureLoader', 'HKObjectType', 'NSValue', 'NSMutableIndexSet', 'SKPhysicsContact', 'NSProgress', 'AVPlayerViewController', 'CAScrollLayer', 'GKSavedGame', 'NSTextCheckingResult', 'PHObjectPlaceholder', 'SKConstraint', 'EKEventEditViewController', 'NSEntityDescription', 'NSURLCredentialStorage', 'UIApplication', 'SKDownload', 'SCNNode', 'MKLocalSearchRequest', 'SKScene', 'UISearchDisplayController', 'NEOnDemandRule', 'MTLRenderPassStencilAttachmentDescriptor', 'CAReplicatorLayer', 'UIPrintPageRenderer', 'EKCalendarItem', 'NSUUID', 'EAAccessoryManager', 'NEOnDemandRuleIgnore', 'SKRegion', 'AVAssetResourceLoader', 'EAWiFiUnconfiguredAccessoryBrowser', 'NSUserActivity', 'CTCall', 'UIPrinterPickerController', 'CIVector', 'UINavigationBar', 'UIPanGestureRecognizer', 'MPMediaQuery', 'ABNewPersonViewController', 'CKRecordZoneID', 'HKAnchoredObjectQuery', 'CKFetchRecordZonesOperation', 'UIStoryboardSegue', 'ACAccountType', 'GKSession', 'SKVideoNode', 'PHChange', 'SKReceiptRefreshRequest', 'GCExtendedGamepadSnapshot', 'MPSeekCommandEvent', 'GCExtendedGamepad', 'CAValueFunction', 'SCNCylinder', 'NSNotification', 'NSBatchUpdateResult', 'PKPushCredentials', 'SCNPhysicsSliderJoint', 'AVCaptureDeviceFormat', 'AVPlayerItemErrorLog', 'NSMapTable', 'NSSet', 'CMMotionManager', 'GKVoiceChatService', 'UIPageControl', 'UILexicon', 'MTLArrayType', 'AVAudioUnitReverb', 'MKGeodesicPolyline', 'AVMutableComposition', 'NSLayoutConstraint', 'UIPrinter', 'NSOrderedSet', 'CBAttribute', 'PKPushPayload', 'NSIncrementalStoreNode', 'EKEventStore', 'MPRemoteCommandEvent', 'UISlider', 'UIBlurEffect', 'CKAsset', 'AVCaptureInput', 'AVAudioEngine', 'MTLVertexDescriptor', 'SKPhysicsBody', 'NSOperation', 'PKPaymentPass', 'UIImageAsset', 'MKMapCamera', 'SKProductsResponse', 'GLKEffectPropertyMaterial', 'AVCaptureDevice', 'CTCallCenter', 'CABTMIDILocalPeripheralViewController', 'NEVPNManager', 'HKQuery', 'SCNPhysicsContact', 'CBMutableService', 'AVSampleBufferDisplayLayer', 'SCNSceneSource', 'SKLightNode', 'CKDiscoveredUserInfo', 'NSMutableArray', 'MTLDepthStencilDescriptor', 'MTLArgument', 'NSMassFormatter', 'CIRectangleFeature', 'PKPushRegistry', 'NEVPNConnection', 'MCNearbyServiceBrowser', 'NSOperationQueue', 'MKPolylineRenderer', 'HKWorkout', 'NSValueTransformer', 'UICollectionViewFlowLayout', 'MPChangePlaybackRateCommandEvent', 'NSEntityMapping', 'SKTexture', 'NSMergePolicy', 'UITextInputStringTokenizer', 'NSRecursiveLock', 'AVAsset', 'NSUndoManager', 'AVAudioUnitSampler', 'NSItemProvider', 'SKUniform', 'MPMediaPickerController', 'CKOperation', 'MTLRenderPipelineDescriptor', 'EAWiFiUnconfiguredAccessory', 'NSFileCoordinator', 'SKRequest', 'NSFileHandle', 'NSConditionLock', 'UISegmentedControl', 'NSManagedObjectModel', 'UITabBarItem', 'SCNCone', 'MPMediaItem', 'SCNMaterial', 'EKRecurrenceRule', 'UIEvent', 'UITouch', 'UIPrintInteractionController', 'CMDeviceMotion', 'NEVPNProtocol', 'NSCompoundPredicate', 'HKHealthStore', 
'MKMultiPoint', 'HKSampleType', 'UIPrintFormatter', 'AVAudioUnitEQFilterParameters', 'SKView', 'NSConstantString', 'UIPopoverController', 'CKDatabase', 'AVMetadataFaceObject', 'UIAccelerometer', 'EKEventViewController', 'CMAltitudeData', 'MTLStencilDescriptor', 'UISwipeGestureRecognizer', 'NSPort', 'MKCircleRenderer', 'AVCompositionTrack', 'NSAsynchronousFetchRequest', 'NSUbiquitousKeyValueStore', 'NSMetadataQueryResultGroup', 'AVAssetResourceLoadingDataRequest', 'UITableViewHeaderFooterView', 'CKNotificationID', 'AVAudioSession', 'HKUnit', 'NSNull', 'NSPersistentStoreResult', 'MKCircleView', 'AVAudioChannelLayout', 'NEVPNProtocolIKEv2', 'WKProcessPool', 'UIAttachmentBehavior', 'CLBeacon', 'NSInputStream', 'NSURLCache', 'GKPlayer', 'NSMappingModel', 'CIQRCodeFeature', 'AVMutableVideoComposition', 'PHFetchResult', 'NSAttributeDescription', 'AVPlayer', 'MKAnnotationView', 'PKPaymentRequest', 'NSTimer', 'CBDescriptor', 'MKOverlayView', 'AVAudioUnitTimePitch', 'NSSaveChangesRequest', 'UIReferenceLibraryViewController', 'SKPhysicsJointFixed', 'UILocalizedIndexedCollation', 'UIInterpolatingMotionEffect', 'UIDocumentPickerViewController', 'AVAssetWriter', 'NSBundle', 'SKStoreProductViewController', 'GLKViewController', 'NSMetadataQueryAttributeValueTuple', 'GKTurnBasedMatch', 'AVAudioFile', 'UIActivity', 'NSPipe', 'MKShape', 'NSMergeConflict', 'CIImage', 'HKObject', 'UIRotationGestureRecognizer', 'AVPlayerItemLegibleOutput', 'AVAssetImageGenerator', 'GCControllerButtonInput', 'CKMarkNotificationsReadOperation', 'CKSubscription', 'MPTimedMetadata', 'NKIssue', 'UIScreenMode', 'HMAccessoryBrowser', 'GKTurnBasedEventHandler', 'UIWebView', 'MKPolyline', 'JSVirtualMachine', 'AVAssetReader', 'NSAttributedString', 'GKMatchmakerViewController', 'NSCountedSet', 'UIButton', 'WKNavigationResponse', 'GKLocalPlayer', 'MPMovieErrorLog', 'AVSpeechUtterance', 'HKStatistics', 'UILocalNotification', 'HKBiologicalSexObject', 'AVURLAsset', 'CBPeripheral', 'NSDateComponentsFormatter', 'SKSpriteNode', 'UIAccessibilityElement', 'AVAssetWriterInputGroup', 'HMZone', 'AVAssetReaderAudioMixOutput', 'NSEnumerator', 'UIDocument', 'MKLocalSearchResponse', 'UISimpleTextPrintFormatter', 'PHPhotoLibrary', 'CBService', 'UIDocumentMenuViewController', 'MCSession', 'QLPreviewController', 'CAMediaTimingFunction', 'UITextPosition', 'ASIdentifierManager', 'AVAssetResourceLoadingRequest', 'SLComposeServiceViewController', 'UIPinchGestureRecognizer', 'PHObject', 'NSExtensionItem', 'HKSampleQuery', 'MTLRenderPipelineColorAttachmentDescriptorArray', 'MKRouteStep', 'SCNCapsule', 'NSMetadataQuery', 'AVAssetResourceLoadingContentInformationRequest', 'UITraitCollection', 'CTCarrier', 'NSFileSecurity', 'UIAcceleration', 'UIMotionEffect', 'MTLRenderPipelineReflection', 'CLHeading', 'CLVisit', 'MKDirectionsResponse', 'HMAccessory', 'MTLStructType', 'UITextView', 'CMMagnetometerData', 'UICollisionBehavior', 'UIProgressView', 'CKServerChangeToken', 'UISearchBar', 'MKPlacemark', 'AVCaptureConnection', 'NSPropertyMapping', 'ALAssetsFilter', 'SK3DNode', 'AVPlayerItemErrorLogEvent', 'NSJSONSerialization', 'AVAssetReaderVideoCompositionOutput', 'ABPersonViewController', 'CIDetector', 'GKTurnBasedMatchmakerViewController', 'MPMediaItemCollection', 'SCNSphere', 'NSCondition', 'NSURLCredential', 'MIDINetworkConnection', 'NSFileProviderExtension', 'NSDecimalNumberHandler', 'NSAtomicStoreCacheNode', 'NSAtomicStore', 'EKAlarm', 'CKNotificationInfo', 'AVAudioUnitEQ', 'UIPercentDrivenInteractiveTransition', 'MKPolygon', 'AVAssetTrackSegment', 
'MTLVertexAttribute', 'NSExpressionDescription', 'HKStatisticsCollectionQuery', 'NSURLAuthenticationChallenge', 'NSDirectoryEnumerator', 'MKDistanceFormatter', 'UIAlertAction', 'NSPropertyListSerialization', 'GKPeerPickerController', 'UIUserNotificationSettings', 'UITableViewController', 'GKNotificationBanner', 'MKPointAnnotation', 'MTLRenderPassColorAttachmentDescriptorArray', 'NSCache', 'SKPhysicsJoint', 'NSXMLParser', 'UIViewController', 'PKPaymentToken', 'MFMessageComposeViewController', 'AVAudioInputNode', 'NSDataDetector', 'CABTMIDICentralViewController', 'AVAudioUnitMIDIInstrument', 'AVCaptureVideoPreviewLayer', 'AVAssetWriterInputPassDescription', 'MPChangePlaybackRateCommand', 'NSURLComponents', 'CAMetalLayer', 'UISnapBehavior', 'AVMetadataMachineReadableCodeObject', 'CKDiscoverUserInfosOperation', 'NSTextAttachment', 'NSException', 'UIMenuItem', 'CMMotionActivityManager', 'SCNGeometryElement', 'NCWidgetController', 'CAEmitterLayer', 'MKUserLocation', 'UIImagePickerController', 'CIFeature', 'AVCaptureDeviceInput', 'ALAsset', 'NSURLSessionDownloadTask', 'SCNPhysicsHingeJoint', 'MPMoviePlayerViewController', 'NSMutableOrderedSet', 'SCNMaterialProperty', 'UIFont', 'AVCaptureVideoDataOutput', 'NSCachedURLResponse', 'ALAssetsLibrary', 'NSInvocation', 'UILongPressGestureRecognizer', 'NSTextStorage', 'WKWebViewConfiguration', 'CIFaceFeature', 'MKMapSnapshot', 'GLKEffectPropertyFog', 'AVComposition', 'CKDiscoverAllContactsOperation', 'AVAudioMixInputParameters', 'CAEmitterBehavior', 'PKPassLibrary', 'UIMutableUserNotificationCategory', 'NSLock', 'NEVPNProtocolIPSec', 'ADBannerView', 'UIDocumentPickerExtensionViewController', 'UIActivityIndicatorView', 'AVPlayerMediaSelectionCriteria', 'CALayer', 'UIAccessibilityCustomAction', 'UIBarButtonItem', 'AVAudioSessionRouteDescription', 'CLBeaconRegion', 'HKBloodTypeObject', 'MTLVertexBufferLayoutDescriptorArray', 'CABasicAnimation', 'AVVideoCompositionInstruction', 'AVMutableTimedMetadataGroup', 'EKRecurrenceEnd', 'NSTextContainer', 'TWTweetComposeViewController', 'PKPaymentAuthorizationViewController', 'UIScrollView', 'WKNavigationAction', 'AVPlayerItemMetadataOutput', 'EKRecurrenceDayOfWeek', 'NSNumberFormatter', 'MTLComputePipelineReflection', 'UIScreen', 'CLRegion', 'NSProcessInfo', 'GLKTextureInfo', 'SCNSkinner', 'AVCaptureMetadataOutput', 'SCNAnimationEvent', 'NSTextTab', 'JSManagedValue', 'NSDate', 'UITextChecker', 'WKBackForwardListItem', 'NSData', 'NSParagraphStyle', 'AVMutableMetadataItem', 'EKCalendar', 'HKWorkoutEvent', 'NSMutableURLRequest', 'UIVideoEditorController', 'HMTimerTrigger', 'AVAudioUnitVarispeed', 'UIDynamicAnimator', 'AVCompositionTrackSegment', 'GCGamepadSnapshot', 'MPMediaEntity', 'GLKSkyboxEffect', 'UISwitch', 'EKStructuredLocation', 'UIGestureRecognizer', 'NSProxy', 'GLKBaseEffect', 'UIPushBehavior', 'GKScoreChallenge', 'NSCoder', 'MPMediaPlaylist', 'NSDateComponents', 'WKUserScript', 'EKEvent', 'NSDateFormatter', 'NSAsynchronousFetchResult', 'AVAssetWriterInputPixelBufferAdaptor', 'UIVisualEffect', 'UICollectionViewCell', 'UITextField', 'CLPlacemark', 'MPPlayableContentManager', 'AVCaptureOutput', 'HMCharacteristicWriteAction', 'CKModifySubscriptionsOperation', 'NSPropertyDescription', 'GCGamepad', 'UIMarkupTextPrintFormatter', 'SCNTube', 'NSPersistentStoreCoordinator', 'AVAudioEnvironmentNode', 'GKMatchmaker', 'CIContext', 'NSThread', 'SLComposeSheetConfigurationItem', 'SKPhysicsJointSliding', 'NSPredicate', 'GKVoiceChat', 'SKCropNode', 'AVCaptureAudioPreviewOutput', 'NSStringDrawingContext', 
'GKGameCenterViewController', 'UIPrintPaper', 'SCNPhysicsBallSocketJoint', 'UICollectionViewLayoutInvalidationContext', 'GLKEffectPropertyTransform', 'AVAudioIONode', 'UIDatePicker', 'MKDirections', 'ALAssetsGroup', 'CKRecordZoneNotification', 'SCNScene', 'MPMovieAccessLogEvent', 'CKFetchSubscriptionsOperation', 'CAEmitterCell', 'AVAudioUnitTimeEffect', 'HMCharacteristicMetadata', 'MKPinAnnotationView', 'UIPickerView', 'UIImageView', 'UIUserNotificationCategory', 'SCNPhysicsVehicleWheel', 'HKCategoryType', 'MPMediaQuerySection', 'GKFriendRequestComposeViewController', 'NSError', 'MTLRenderPipelineColorAttachmentDescriptor', 'SCNPhysicsShape', 'UISearchController', 'SCNPhysicsBody', 'CTSubscriberInfo', 'AVPlayerItemAccessLog', 'MPMediaPropertyPredicate', 'CMLogItem', 'NSAutoreleasePool', 'NSSocketPort', 'AVAssetReaderTrackOutput', 'SKNode', 'UIMutableUserNotificationAction', 'SCNProgram', 'AVSpeechSynthesisVoice', 'CMAltimeter', 'AVCaptureAudioChannel', 'GKTurnBasedExchangeReply', 'AVVideoCompositionLayerInstruction', 'AVSpeechSynthesizer', 'GKChallengeEventHandler', 'AVCaptureFileOutput', 'UIControl', 'SCNPhysicsField', 'CKReference', 'LAContext', 'CKRecordID', 'ADInterstitialAd', 'AVAudioSessionDataSourceDescription', 'AVAudioBuffer', 'CIColorKernel', 'GCControllerDirectionPad', 'NSFileManager', 'AVMutableAudioMixInputParameters', 'UIScreenEdgePanGestureRecognizer', 'CAKeyframeAnimation', 'CKQueryNotification', 'PHAdjustmentData', 'EASession', 'AVAssetResourceRenewalRequest', 'UIInputView', 'NSFileWrapper', 'UIResponder', 'NSPointerFunctions', 'UIKeyCommand', 'NSHTTPCookieStorage', 'AVMediaSelectionOption', 'NSRunLoop', 'NSFileAccessIntent', 'CAAnimationGroup', 'MKCircle', 'UIAlertController', 'NSMigrationManager', 'NSDateIntervalFormatter', 'UICollectionViewUpdateItem', 'CKDatabaseOperation', 'PHImageRequestOptions', 'SKReachConstraints', 'CKRecord', 'CAInterAppAudioSwitcherView', 'WKWindowFeatures', 'GKInvite', 'NSMutableData', 'PHAssetCollectionChangeRequest', 'NSMutableParagraphStyle', 'UIDynamicBehavior', 'GLKEffectProperty', 'CKFetchRecordChangesOperation', 'SKShapeNode', 'MPMovieErrorLogEvent', 'MKPolygonView', 'MPContentItem', 'HMAction', 'NSScanner', 'GKAchievementChallenge', 'AVAudioPlayer', 'CKContainer', 'AVVideoComposition', 'NKLibrary', 'NSPersistentStore', 'AVCaptureMovieFileOutput', 'HMRoom', 'GKChallenge', 'UITextRange', 'NSURLProtectionSpace', 'ACAccountStore', 'MPSkipIntervalCommand', 'NSComparisonPredicate', 'HMHome', 'PHVideoRequestOptions', 'NSOutputStream', 'MPSkipIntervalCommandEvent', 'PKAddPassesViewController', 'UITextSelectionRect', 'CTTelephonyNetworkInfo', 'AVTextStyleRule', 'NSFetchedPropertyDescription', 'UIPageViewController', 'CATransformLayer', 'UICollectionViewController', 'AVAudioNode', 'MCNearbyServiceAdvertiser', 'NSObject', 'PHAsset', 'GKLeaderboardViewController', 'CKQueryCursor', 'MPMusicPlayerController', 'MKOverlayPathRenderer', 'CMPedometerData', 'HMService', 'SKFieldNode', 'GKAchievement', 'WKUserContentController', 'AVAssetTrack', 'TWRequest', 'SKLabelNode', 'AVCaptureBracketedStillImageSettings', 'MIDINetworkHost', 'MPMediaPredicate', 'AVFrameRateRange', 'MTLTextureDescriptor', 'MTLVertexBufferLayoutDescriptor', 'MPFeedbackCommandEvent', 'UIUserNotificationAction', 'HKStatisticsQuery', 'SCNParticleSystem', 'NSIndexPath', 'AVVideoCompositionRenderContext', 'CADisplayLink', 'HKObserverQuery', 'UIPopoverPresentationController', 'CKQueryOperation', 'CAEAGLLayer', 'NSMutableString', 'NSMessagePort', 'NSURLQueryItem', 'MTLStructMember', 
'AVAudioSessionChannelDescription', 'GLKView', 'UIActivityViewController', 'GKAchievementViewController', 'GKTurnBasedParticipant', 'NSURLProtocol', 'NSUserDefaults', 'NSCalendar', 'SKKeyframeSequence', 'AVMetadataItemFilter', 'CKModifyRecordZonesOperation', 'WKPreferences', 'NSMethodSignature', 'NSRegularExpression', 'EAGLSharegroup', 'AVPlayerItemVideoOutput', 'PHContentEditingInputRequestOptions', 'GKMatch', 'CIColor', 'UIDictationPhrase'])
-COCOA_PROTOCOLS = set(['SKStoreProductViewControllerDelegate', 'AVVideoCompositionInstruction', 'AVAudioSessionDelegate', 'GKMatchDelegate', 'NSFileManagerDelegate', 'UILayoutSupport', 'NSCopying', 'UIPrintInteractionControllerDelegate', 'QLPreviewControllerDataSource', 'SKProductsRequestDelegate', 'NSTextStorageDelegate', 'MCBrowserViewControllerDelegate', 'MTLComputeCommandEncoder', 'SCNSceneExportDelegate', 'UISearchResultsUpdating', 'MFMailComposeViewControllerDelegate', 'MTLBlitCommandEncoder', 'NSDecimalNumberBehaviors', 'PHContentEditingController', 'NSMutableCopying', 'UIActionSheetDelegate', 'UIViewControllerTransitioningDelegate', 'UIAlertViewDelegate', 'AVAudioPlayerDelegate', 'MKReverseGeocoderDelegate', 'NSCoding', 'UITextInputTokenizer', 'GKFriendRequestComposeViewControllerDelegate', 'UIActivityItemSource', 'NSCacheDelegate', 'UIAdaptivePresentationControllerDelegate', 'GKAchievementViewControllerDelegate', 'UIViewControllerTransitionCoordinator', 'EKEventEditViewDelegate', 'NSURLConnectionDelegate', 'UITableViewDelegate', 'GKPeerPickerControllerDelegate', 'UIGuidedAccessRestrictionDelegate', 'AVSpeechSynthesizerDelegate', 'AVAudio3DMixing', 'AVPlayerItemLegibleOutputPushDelegate', 'ADInterstitialAdDelegate', 'HMAccessoryBrowserDelegate', 'AVAssetResourceLoaderDelegate', 'UITabBarControllerDelegate', 'CKRecordValue', 'SKPaymentTransactionObserver', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'UIInputViewAudioFeedback', 'GKChallengeListener', 'SKSceneDelegate', 'UIPickerViewDelegate', 'UIWebViewDelegate', 'UIApplicationDelegate', 'GKInviteEventListener', 'MPMediaPlayback', 'MyClassJavaScriptMethods', 'AVAsynchronousKeyValueLoading', 'QLPreviewItem', 'SCNBoundingVolume', 'NSPortDelegate', 'UIContentContainer', 'SCNNodeRendererDelegate', 'SKRequestDelegate', 'SKPhysicsContactDelegate', 'HMAccessoryDelegate', 'UIPageViewControllerDataSource', 'SCNSceneRendererDelegate', 'SCNPhysicsContactDelegate', 'MKMapViewDelegate', 'AVPlayerItemOutputPushDelegate', 'UICollectionViewDelegate', 'UIImagePickerControllerDelegate', 'MTLRenderCommandEncoder', 'PKPaymentAuthorizationViewControllerDelegate', 'UIToolbarDelegate', 'WKUIDelegate', 'SCNActionable', 'NSURLConnectionDataDelegate', 'MKOverlay', 'CBCentralManagerDelegate', 'JSExport', 'NSTextLayoutOrientationProvider', 'UIPickerViewDataSource', 'PKPushRegistryDelegate', 'UIViewControllerTransitionCoordinatorContext', 'NSLayoutManagerDelegate', 'MTLLibrary', 'NSFetchedResultsControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'MTLResource', 'NSDiscardableContent', 'UITextFieldDelegate', 'MTLBuffer', 'MTLSamplerState', 'GKGameCenterControllerDelegate', 'MPMediaPickerControllerDelegate', 'UISplitViewControllerDelegate', 'UIAppearance', 'UIPickerViewAccessibilityDelegate', 'UITraitEnvironment', 'UIScrollViewAccessibilityDelegate', 'ADBannerViewDelegate', 'MPPlayableContentDataSource', 'MTLComputePipelineState', 'NSURLSessionDelegate', 'MTLCommandBuffer', 'NSXMLParserDelegate', 'UIViewControllerRestoration', 'UISearchBarDelegate', 'UIBarPositioning', 'CBPeripheralDelegate', 'UISearchDisplayDelegate', 'CAAction', 'PKAddPassesViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'MTLDepthStencilState', 'GKTurnBasedMatchmakerViewControllerDelegate', 'MPPlayableContentDelegate', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'UIAppearanceContainer', 'UIStateRestoring', 'UITextDocumentProxy', 'MTLDrawable', 'NSURLSessionTaskDelegate', 'NSFilePresenter', 'AVAudioStereoMixing', 'UIViewControllerContextTransitioning', 
'UITextInput', 'CBPeripheralManagerDelegate', 'UITextInputDelegate', 'NSFastEnumeration', 'NSURLAuthenticationChallengeSender', 'SCNProgramDelegate', 'AVVideoCompositing', 'SCNAnimatable', 'NSSecureCoding', 'MCAdvertiserAssistantDelegate', 'GKLocalPlayerListener', 'GLKNamedEffect', 'UIPopoverControllerDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'NSExtensionRequestHandling', 'UITextSelecting', 'UIPrinterPickerControllerDelegate', 'NCWidgetProviding', 'MTLCommandEncoder', 'NSURLProtocolClient', 'MFMessageComposeViewControllerDelegate', 'UIVideoEditorControllerDelegate', 'WKNavigationDelegate', 'GKSavedGameListener', 'UITableViewDataSource', 'MTLFunction', 'EKCalendarChooserDelegate', 'NSUserActivityDelegate', 'UICollisionBehaviorDelegate', 'NSStreamDelegate', 'MCNearbyServiceBrowserDelegate', 'HMHomeDelegate', 'UINavigationControllerDelegate', 'MCSessionDelegate', 'UIDocumentPickerDelegate', 'UIViewControllerInteractiveTransitioning', 'GKTurnBasedEventListener', 'SCNSceneRenderer', 'MTLTexture', 'GLKViewDelegate', 'EAAccessoryDelegate', 'WKScriptMessageHandler', 'PHPhotoLibraryChangeObserver', 'NSKeyedUnarchiverDelegate', 'AVPlayerItemMetadataOutputPushDelegate', 'NSMachPortDelegate', 'SCNShadable', 'UIPopoverBackgroundViewMethods', 'UIDocumentMenuDelegate', 'UIBarPositioningDelegate', 'ABPersonViewControllerDelegate', 'NSNetServiceBrowserDelegate', 'EKEventViewDelegate', 'UIScrollViewDelegate', 'NSURLConnectionDownloadDelegate', 'UIGestureRecognizerDelegate', 'UINavigationBarDelegate', 'AVAudioMixing', 'NSFetchedResultsSectionInfo', 'UIDocumentInteractionControllerDelegate', 'MTLParallelRenderCommandEncoder', 'QLPreviewControllerDelegate', 'UIAccessibilityReadingContent', 'ABUnknownPersonViewControllerDelegate', 'GLKViewControllerDelegate', 'UICollectionViewDelegateFlowLayout', 'UIPopoverPresentationControllerDelegate', 'UIDynamicAnimatorDelegate', 'NSTextAttachmentContainer', 'MKAnnotation', 'UIAccessibilityIdentification', 'UICoordinateSpace', 'ABNewPersonViewControllerDelegate', 'MTLDevice', 'CAMediaTiming', 'AVCaptureFileOutputRecordingDelegate', 'HMHomeManagerDelegate', 'UITextViewDelegate', 'UITabBarDelegate', 'GKLeaderboardViewControllerDelegate', 'UISearchControllerDelegate', 'EAWiFiUnconfiguredAccessoryBrowserDelegate', 'UITextInputTraits', 'MTLRenderPipelineState', 'GKVoiceChatClient', 'UIKeyInput', 'UICollectionViewDataSource', 'SCNTechniqueSupport', 'NSLocking', 'AVCaptureFileOutputDelegate', 'GKChallengeEventHandlerDelegate', 'UIObjectRestoration', 'CIFilterConstructor', 'AVPlayerItemOutputPullDelegate', 'EAGLDrawable', 'AVVideoCompositionValidationHandling', 'UIViewControllerAnimatedTransitioning', 'NSURLSessionDownloadDelegate', 'UIAccelerometerDelegate', 'UIPageViewControllerDelegate', 'MTLCommandQueue', 'UIDataSourceModelAssociation', 'AVAudioRecorderDelegate', 'GKSessionDelegate', 'NSKeyedArchiverDelegate', 'CAMetalDrawable', 'UIDynamicItem', 'CLLocationManagerDelegate', 'NSMetadataQueryDelegate', 'NSNetServiceDelegate', 'GKMatchmakerViewControllerDelegate', 'NSURLSessionDataDelegate'])
-COCOA_PRIMITIVES = set(['ROTAHeader', '__CFBundle', 'MortSubtable', 'AudioFilePacketTableInfo', 'CGPDFOperatorTable', 'KerxStateEntry', 'ExtendedTempoEvent', 'CTParagraphStyleSetting', 'OpaqueMIDIPort', '_GLKMatrix3', '_GLKMatrix2', '_GLKMatrix4', 'ExtendedControlEvent', 'CAFAudioDescription', 'OpaqueCMBlockBuffer', 'CGTextDrawingMode', 'EKErrorCode', 'gss_buffer_desc_struct', 'AudioUnitParameterInfo', '__SCPreferences', '__CTFrame', '__CTLine', 'AudioFile_SMPTE_Time', 'gss_krb5_lucid_context_v1', 'OpaqueJSValue', 'TrakTableEntry', 'AudioFramePacketTranslation', 'CGImageSource', 'OpaqueJSPropertyNameAccumulator', 'JustPCGlyphRepeatAddAction', '__CFBinaryHeap', 'OpaqueMIDIThruConnection', 'opaqueCMBufferQueue', 'OpaqueMusicSequence', 'MortRearrangementSubtable', 'MixerDistanceParams', 'MorxSubtable', 'MIDIObjectPropertyChangeNotification', 'SFNTLookupSegment', 'CGImageMetadataErrors', 'CGPath', 'OpaqueMIDIEndpoint', 'AudioComponentPlugInInterface', 'gss_ctx_id_t_desc_struct', 'sfntFontFeatureSetting', 'OpaqueJSContextGroup', '__SCNetworkConnection', 'AudioUnitParameterValueTranslation', 'CGImageMetadataType', 'CGPattern', 'AudioFileTypeAndFormatID', 'CGContext', 'AUNodeInteraction', 'SFNTLookupTable', 'JustPCDecompositionAction', 'KerxControlPointHeader', 'AudioStreamPacketDescription', 'KernSubtableHeader', '__SecCertificate', 'AUMIDIOutputCallbackStruct', 'MIDIMetaEvent', 'AudioQueueChannelAssignment', 'AnchorPoint', 'JustTable', '__CFNetService', 'CF_BRIDGED_TYPE', 'gss_krb5_lucid_key', 'CGPDFDictionary', 'KerxSubtableHeader', 'CAF_UUID_ChunkHeader', 'gss_krb5_cfx_keydata', 'OpaqueJSClass', 'CGGradient', 'OpaqueMIDISetup', 'JustPostcompTable', '__CTParagraphStyle', 'AudioUnitParameterHistoryInfo', 'OpaqueJSContext', 'CGShading', 'MIDIThruConnectionParams', 'BslnFormat0Part', 'SFNTLookupSingle', '__CFHost', '__SecRandom', '__CTFontDescriptor', '_NSRange', 'sfntDirectory', 'AudioQueueLevelMeterState', 'CAFPositionPeak', 'PropLookupSegment', '__CVOpenGLESTextureCache', 'sfntInstance', '_GLKQuaternion', 'AnkrTable', '__SCNetworkProtocol', 'CAFFileHeader', 'KerxOrderedListHeader', 'CGBlendMode', 'STXEntryOne', 'CAFRegion', 'SFNTLookupTrimmedArrayHeader', 'SCNMatrix4', 'KerxControlPointEntry', 'OpaqueMusicTrack', '_GLKVector4', 'gss_OID_set_desc_struct', 'OpaqueMusicPlayer', '_CFHTTPAuthentication', 'CGAffineTransform', 'CAFMarkerChunk', 'AUHostIdentifier', 'ROTAGlyphEntry', 'BslnTable', 'gss_krb5_lucid_context_version', '_GLKMatrixStack', 'CGImage', 'KernStateEntry', 'SFNTLookupSingleHeader', 'MortLigatureSubtable', 'CAFUMIDChunk', 'SMPTETime', 'CAFDataChunk', 'CGPDFStream', 'AudioFileRegionList', 'STEntryTwo', 'SFNTLookupBinarySearchHeader', 'OpbdTable', '__CTGlyphInfo', 'BslnFormat2Part', 'KerxIndexArrayHeader', 'TrakTable', 'KerxKerningPair', '__CFBitVector', 'KernVersion0SubtableHeader', 'OpaqueAudioComponentInstance', 'AudioChannelLayout', '__CFUUID', 'MIDISysexSendRequest', '__CFNumberFormatter', 'CGImageSourceStatus', 'AudioFileMarkerList', 'AUSamplerBankPresetData', 'CGDataProvider', 'AudioFormatInfo', '__SecIdentity', 'sfntCMapExtendedSubHeader', 'MIDIChannelMessage', 'KernOffsetTable', 'CGColorSpaceModel', 'MFMailComposeErrorCode', 'CGFunction', '__SecTrust', 'AVAudio3DAngularOrientation', 'CGFontPostScriptFormat', 'KernStateHeader', 'AudioUnitCocoaViewInfo', 'CGDataConsumer', 'OpaqueMIDIDevice', 'KernVersion0Header', 'AnchorPointTable', 'CGImageDestination', 'CAFInstrumentChunk', 'AudioUnitMeterClipping', 'MorxChain', '__CTFontCollection', 'STEntryOne', 'STXEntryTwo', 
'ExtendedNoteOnEvent', 'CGColorRenderingIntent', 'KerxSimpleArrayHeader', 'MorxTable', '_GLKVector3', '_GLKVector2', 'MortTable', 'CGPDFBox', 'AudioUnitParameterValueFromString', '__CFSocket', 'ALCdevice_struct', 'MIDINoteMessage', 'sfntFeatureHeader', 'CGRect', '__SCNetworkInterface', '__CFTree', 'MusicEventUserData', 'TrakTableData', 'GCQuaternion', 'MortContextualSubtable', '__CTRun', 'AudioUnitFrequencyResponseBin', 'MortChain', 'MorxInsertionSubtable', 'CGImageMetadata', 'gss_auth_identity', 'AudioUnitMIDIControlMapping', 'CAFChunkHeader', 'CGImagePropertyOrientation', 'CGPDFScanner', 'OpaqueMusicEventIterator', 'sfntDescriptorHeader', 'AudioUnitNodeConnection', 'OpaqueMIDIDeviceList', 'ExtendedAudioFormatInfo', 'BslnFormat1Part', 'sfntFontDescriptor', 'KernSimpleArrayHeader', '__CFRunLoopObserver', 'CGPatternTiling', 'MIDINotification', 'MorxLigatureSubtable', 'MessageComposeResult', 'MIDIThruConnectionEndpoint', 'MusicDeviceStdNoteParams', 'opaqueCMSimpleQueue', 'ALCcontext_struct', 'OpaqueAudioQueue', 'PropLookupSingle', 'CGInterpolationQuality', 'CGColor', 'AudioOutputUnitStartAtTimeParams', 'gss_name_t_desc_struct', 'CGFunctionCallbacks', 'CAFPacketTableHeader', 'AudioChannelDescription', 'sfntFeatureName', 'MorxContextualSubtable', 'CVSMPTETime', 'AudioValueRange', 'CGTextEncoding', 'AudioStreamBasicDescription', 'AUNodeRenderCallback', 'AudioPanningInfo', 'KerxOrderedListEntry', '__CFAllocator', 'OpaqueJSPropertyNameArray', '__SCDynamicStore', 'OpaqueMIDIEntity', '__CTRubyAnnotation', 'SCNVector4', 'CFHostClientContext', 'CFNetServiceClientContext', 'AudioUnitPresetMAS_SettingData', 'opaqueCMBufferQueueTriggerToken', 'AudioUnitProperty', 'CAFRegionChunk', 'CGPDFString', '__GLsync', '__CFStringTokenizer', 'JustWidthDeltaEntry', 'sfntVariationAxis', '__CFNetDiagnostic', 'CAFOverviewSample', 'sfntCMapEncoding', 'CGVector', '__SCNetworkService', 'opaqueCMSampleBuffer', 'AUHostVersionIdentifier', 'AudioBalanceFade', 'sfntFontRunFeature', 'KerxCoordinateAction', 'sfntCMapSubHeader', 'CVPlanarPixelBufferInfo', 'AUNumVersion', 'AUSamplerInstrumentData', 'AUPreset', '__CTRunDelegate', 'OpaqueAudioQueueProcessingTap', 'KerxTableHeader', '_NSZone', 'OpaqueExtAudioFile', '__CFRunLoopSource', '__CVMetalTextureCache', 'KerxAnchorPointAction', 'OpaqueJSString', 'AudioQueueParameterEvent', '__CFHTTPMessage', 'OpaqueCMClock', 'ScheduledAudioFileRegion', 'STEntryZero', 'AVAudio3DPoint', 'gss_channel_bindings_struct', 'sfntVariationHeader', 'AUChannelInfo', 'UIOffset', 'GLKEffectPropertyPrv', 'KerxStateHeader', 'CGLineJoin', 'CGPDFDocument', '__CFBag', 'KernOrderedListHeader', '__SCNetworkSet', '__SecKey', 'MIDIObjectAddRemoveNotification', 'AudioUnitParameter', 'JustPCActionSubrecord', 'AudioComponentDescription', 'AudioUnitParameterValueName', 'AudioUnitParameterEvent', 'KerxControlPointAction', 'AudioTimeStamp', 'KernKerningPair', 'gss_buffer_set_desc_struct', 'MortFeatureEntry', 'FontVariation', 'CAFStringID', 'LcarCaretClassEntry', 'AudioUnitParameterStringFromValue', 'ACErrorCode', 'ALMXGlyphEntry', 'LtagTable', '__CTTypesetter', 'AuthorizationOpaqueRef', 'UIEdgeInsets', 'CGPathElement', 'CAFMarker', 'KernTableHeader', 'NoteParamsControlValue', 'SSLContext', 'gss_cred_id_t_desc_struct', 'AudioUnitParameterNameInfo', 'CGDataConsumerCallbacks', 'ALMXHeader', 'CGLineCap', 'MIDIControlTransform', 'CGPDFArray', '__SecPolicy', 'AudioConverterPrimeInfo', '__CTTextTab', '__CFNetServiceMonitor', 'AUInputSamplesInOutputCallbackStruct', '__CTFramesetter', 'CGPDFDataFormat', 'STHeader', 
'CVPlanarPixelBufferInfo_YCbCrPlanar', 'MIDIValueMap', 'JustDirectionTable', '__SCBondStatus', 'SFNTLookupSegmentHeader', 'OpaqueCMMemoryPool', 'CGPathDrawingMode', 'CGFont', '__SCNetworkReachability', 'AudioClassDescription', 'CGPoint', 'AVAudio3DVectorOrientation', 'CAFStrings', '__CFNetServiceBrowser', 'opaqueMTAudioProcessingTap', 'sfntNameRecord', 'CGPDFPage', 'CGLayer', 'ComponentInstanceRecord', 'CAFInfoStrings', 'HostCallbackInfo', 'MusicDeviceNoteParams', 'OpaqueVTCompressionSession', 'KernIndexArrayHeader', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'MusicTrackLoopInfo', 'opaqueCMFormatDescription', 'STClassTable', 'sfntDirectoryEntry', 'OpaqueCMTimebase', 'CGDataProviderDirectCallbacks', 'MIDIPacketList', 'CAFOverviewChunk', 'MIDIPacket', 'ScheduledAudioSlice', 'CGDataProviderSequentialCallbacks', 'AudioBuffer', 'MorxRearrangementSubtable', 'CGPatternCallbacks', 'AUDistanceAttenuationData', 'MIDIIOErrorNotification', 'CGPDFContentStream', 'IUnknownVTbl', 'MIDITransform', 'MortInsertionSubtable', 'CABarBeatTime', 'AudioBufferList', '__CVBuffer', 'AURenderCallbackStruct', 'STXEntryZero', 'JustPCDuctilityAction', 'OpaqueAudioQueueTimeline', 'VTDecompressionOutputCallbackRecord', 'OpaqueMIDIClient', '__CFPlugInInstance', 'AudioQueueBuffer', '__CFFileDescriptor', 'AudioUnitConnection', '_GKTurnBasedExchangeStatus', 'LcarCaretTable', 'CVPlanarComponentInfo', 'JustWidthDeltaGroup', 'OpaqueAudioComponent', 'ParameterEvent', '__CVPixelBufferPool', '__CTFont', 'CGColorSpace', 'CGSize', 'AUDependentParameter', 'MIDIDriverInterface', 'gss_krb5_rfc1964_keydata', '__CFDateFormatter', 'LtagStringRange', 'OpaqueVTDecompressionSession', 'gss_iov_buffer_desc_struct', 'AUPresetEvent', 'PropTable', 'KernOrderedListEntry', 'CF_BRIDGED_MUTABLE_TYPE', 'gss_OID_desc_struct', 'AudioUnitPresetMAS_Settings', 'AudioFileMarker', 'JustPCConditionalAddAction', 'BslnFormat3Part', '__CFNotificationCenter', 'MortSwashSubtable', 'AUParameterMIDIMapping', 'SCNVector3', 'OpaqueAudioConverter', 'MIDIRawData', 'sfntNameHeader', '__CFRunLoop', 'MFMailComposeResult', 'CATransform3D', 'OpbdSideValues', 'CAF_SMPTE_Time', '__SecAccessControl', 'JustPCAction', 'OpaqueVTFrameSilo', 'OpaqueVTMultiPassStorage', 'CGPathElementType', 'AudioFormatListItem', 'AudioUnitExternalBuffer', 'AudioFileRegion', 'AudioValueTranslation', 'CGImageMetadataTag', 'CAFPeakChunk', 'AudioBytePacketTranslation', 'sfntCMapHeader', '__CFURLEnumerator', 'STXHeader', 'CGPDFObjectType', 'SFNTLookupArrayHeader'])
+COCOA_INTERFACES = {'UITableViewCell', 'HKCorrelationQuery', 'NSURLSessionDataTask', 'PHFetchOptions', 'NSLinguisticTagger', 'NSStream', 'AVAudioUnitDelay', 'GCMotion', 'SKPhysicsWorld', 'NSString', 'CMAttitude', 'AVAudioEnvironmentDistanceAttenuationParameters', 'HKStatisticsCollection', 'SCNPlane', 'CBPeer', 'JSContext', 'SCNTransaction', 'SCNTorus', 'AVAudioUnitEffect', 'UICollectionReusableView', 'MTLSamplerDescriptor', 'AVAssetReaderSampleReferenceOutput', 'AVMutableCompositionTrack', 'GKLeaderboard', 'NSFetchedResultsController', 'SKRange', 'MKTileOverlayRenderer', 'MIDINetworkSession', 'UIVisualEffectView', 'CIWarpKernel', 'PKObject', 'MKRoute', 'MPVolumeView', 'UIPrintInfo', 'SCNText', 'ADClient', 'PKPayment', 'AVMutableAudioMix', 'GLKEffectPropertyLight', 'WKScriptMessage', 'AVMIDIPlayer', 'PHCollectionListChangeRequest', 'UICollectionViewLayout', 'NSMutableCharacterSet', 'SKPaymentTransaction', 'NEOnDemandRuleConnect', 'NSShadow', 'SCNView', 'NSURLSessionConfiguration', 'MTLVertexAttributeDescriptor', 'CBCharacteristic', 'HKQuantityType', 'CKLocationSortDescriptor', 'NEVPNIKEv2SecurityAssociationParameters', 'CMStepCounter', 'NSNetService', 'AVAssetWriterInputMetadataAdaptor', 'UICollectionView', 'UIViewPrintFormatter', 'SCNLevelOfDetail', 'CAShapeLayer', 'MCPeerID', 'MPRatingCommand', 'WKNavigation', 'NSDictionary', 'NSFileVersion', 'CMGyroData', 'AVAudioUnitDistortion', 'CKFetchRecordsOperation', 'SKPhysicsJointSpring', 'SCNHitTestResult', 'AVAudioTime', 'CIFilter', 'UIView', 'SCNConstraint', 'CAPropertyAnimation', 'MKMapItem', 'MPRemoteCommandCenter', 'PKPaymentSummaryItem', 'UICollectionViewFlowLayoutInvalidationContext', 'UIInputViewController', 'PKPass', 'SCNPhysicsBehavior', 'MTLRenderPassColorAttachmentDescriptor', 'MKPolygonRenderer', 'CKNotification', 'JSValue', 'PHCollectionList', 'CLGeocoder', 'NSByteCountFormatter', 'AVCaptureScreenInput', 'MPFeedbackCommand', 'CAAnimation', 'MKOverlayPathView', 'UIActionSheet', 'UIMotionEffectGroup', 'NSLengthFormatter', 'UIBarItem', 'SKProduct', 'AVAssetExportSession', 'NSKeyedUnarchiver', 'NSMutableSet', 'SCNPyramid', 'PHAssetCollection', 'MKMapView', 'HMHomeManager', 'CATransition', 'MTLCompileOptions', 'UIVibrancyEffect', 'CLCircularRegion', 'MKTileOverlay', 'SCNShape', 'ACAccountCredential', 'SKPhysicsJointLimit', 'MKMapSnapshotter', 'AVMediaSelectionGroup', 'NSIndexSet', 'CBPeripheralManager', 'CKRecordZone', 'AVAudioRecorder', 'NSURL', 'CBCentral', 'NSNumber', 'AVAudioOutputNode', 'MTLVertexAttributeDescriptorArray', 'MKETAResponse', 'SKTransition', 'SSReadingList', 'HKSourceQuery', 'UITableViewRowAction', 'UITableView', 'SCNParticlePropertyController', 'AVCaptureStillImageOutput', 'GCController', 'AVAudioPlayerNode', 'AVAudioSessionPortDescription', 'NSHTTPURLResponse', 'NEOnDemandRuleEvaluateConnection', 'SKEffectNode', 'HKQuantity', 'GCControllerElement', 'AVPlayerItemAccessLogEvent', 'SCNBox', 'NSExtensionContext', 'MKOverlayRenderer', 'SCNPhysicsVehicle', 'NSDecimalNumber', 'EKReminder', 'MKPolylineView', 'CKQuery', 'AVAudioMixerNode', 'GKAchievementDescription', 'EKParticipant', 'NSBlockOperation', 'UIActivityItemProvider', 'CLLocation', 'NSBatchUpdateRequest', 'PHContentEditingOutput', 'PHObjectChangeDetails', 'HKWorkoutType', 'MPMoviePlayerController', 'AVAudioFormat', 'HMTrigger', 'MTLRenderPassDepthAttachmentDescriptor', 'SCNRenderer', 'GKScore', 'UISplitViewController', 'HKSource', 'NSURLConnection', 'ABUnknownPersonViewController', 'SCNTechnique', 'UIMenuController', 'NSEvent', 'SKTextureAtlas', 
'NSKeyedArchiver', 'GKLeaderboardSet', 'NSSimpleCString', 'AVAudioPCMBuffer', 'CBATTRequest', 'GKMatchRequest', 'AVMetadataObject', 'SKProductsRequest', 'UIAlertView', 'NSIncrementalStore', 'MFMailComposeViewController', 'SCNFloor', 'NSSortDescriptor', 'CKFetchNotificationChangesOperation', 'MPMovieAccessLog', 'NSManagedObjectContext', 'AVAudioUnitGenerator', 'WKBackForwardList', 'SKMutableTexture', 'AVCaptureAudioDataOutput', 'ACAccount', 'AVMetadataItem', 'MPRatingCommandEvent', 'AVCaptureDeviceInputSource', 'CLLocationManager', 'MPRemoteCommand', 'AVCaptureSession', 'UIStepper', 'UIRefreshControl', 'NEEvaluateConnectionRule', 'CKModifyRecordsOperation', 'UICollectionViewTransitionLayout', 'CBCentralManager', 'NSPurgeableData', 'PKShippingMethod', 'SLComposeViewController', 'NSHashTable', 'MKUserTrackingBarButtonItem', 'UILexiconEntry', 'CMMotionActivity', 'SKAction', 'SKShader', 'AVPlayerItemOutput', 'MTLRenderPassAttachmentDescriptor', 'UIDocumentInteractionController', 'UIDynamicItemBehavior', 'NSMutableDictionary', 'UILabel', 'AVCaptureInputPort', 'NSExpression', 'CAInterAppAudioTransportView', 'SKMutablePayment', 'UIImage', 'PHCachingImageManager', 'SCNTransformConstraint', 'HKCorrelationType', 'UIColor', 'SCNGeometrySource', 'AVCaptureAutoExposureBracketedStillImageSettings', 'UIPopoverBackgroundView', 'UIToolbar', 'NSNotificationCenter', 'UICollectionViewLayoutAttributes', 'AVAssetReaderOutputMetadataAdaptor', 'NSEntityMigrationPolicy', 'HMUser', 'NSLocale', 'NSURLSession', 'SCNCamera', 'NSTimeZone', 'UIManagedDocument', 'AVMutableVideoCompositionLayerInstruction', 'AVAssetTrackGroup', 'NSInvocationOperation', 'ALAssetRepresentation', 'AVQueuePlayer', 'HMServiceGroup', 'UIPasteboard', 'PHContentEditingInput', 'NSLayoutManager', 'EKCalendarChooser', 'EKObject', 'CATiledLayer', 'GLKReflectionMapEffect', 'NSManagedObjectID', 'NSEnergyFormatter', 'SLRequest', 'HMCharacteristic', 'AVPlayerLayer', 'MTLRenderPassDescriptor', 'SKPayment', 'NSPointerArray', 'AVAudioMix', 'SCNLight', 'MCAdvertiserAssistant', 'MKMapSnapshotOptions', 'HKCategorySample', 'AVAudioEnvironmentReverbParameters', 'SCNMorpher', 'AVTimedMetadataGroup', 'CBMutableCharacteristic', 'NSFetchRequest', 'UIDevice', 'NSManagedObject', 'NKAssetDownload', 'AVOutputSettingsAssistant', 'SKPhysicsJointPin', 'UITabBar', 'UITextInputMode', 'NSFetchRequestExpression', 'HMActionSet', 'CTSubscriber', 'PHAssetChangeRequest', 'NSPersistentStoreRequest', 'UITabBarController', 'HKQuantitySample', 'AVPlayerItem', 'AVSynchronizedLayer', 'MKDirectionsRequest', 'NSMetadataItem', 'UIPresentationController', 'UINavigationItem', 'PHFetchResultChangeDetails', 'PHImageManager', 'AVCaptureManualExposureBracketedStillImageSettings', 'UIStoryboardPopoverSegue', 'SCNLookAtConstraint', 'UIGravityBehavior', 'UIWindow', 'CBMutableDescriptor', 'NEOnDemandRuleDisconnect', 'UIBezierPath', 'UINavigationController', 'ABPeoplePickerNavigationController', 'EKSource', 'AVAssetWriterInput', 'AVPlayerItemTrack', 'GLKEffectPropertyTexture', 'NSHTTPCookie', 'NSURLResponse', 'SKPaymentQueue', 'NSAssertionHandler', 'MKReverseGeocoder', 'GCControllerAxisInput', 'NSArray', 'NSOrthography', 'NSURLSessionUploadTask', 'NSCharacterSet', 'AVMutableVideoCompositionInstruction', 'AVAssetReaderOutput', 'EAGLContext', 'WKFrameInfo', 'CMPedometer', 'MyClass', 'CKModifyBadgeOperation', 'AVCaptureAudioFileOutput', 'SKEmitterNode', 'NSMachPort', 'AVVideoCompositionCoreAnimationTool', 'PHCollection', 'SCNPhysicsWorld', 'NSURLRequest', 'CMAccelerometerData', 'NSNetServiceBrowser', 
'CLFloor', 'AVAsynchronousVideoCompositionRequest', 'SCNGeometry', 'SCNIKConstraint', 'CIKernel', 'CAGradientLayer', 'HKCharacteristicType', 'NSFormatter', 'SCNAction', 'CATransaction', 'CBUUID', 'UIStoryboard', 'MPMediaLibrary', 'UITapGestureRecognizer', 'MPMediaItemArtwork', 'NSURLSessionTask', 'AVAudioUnit', 'MCBrowserViewController', 'UIFontDescriptor', 'NSRelationshipDescription', 'HKSample', 'WKWebView', 'NSMutableAttributedString', 'NSPersistentStoreAsynchronousResult', 'MPNowPlayingInfoCenter', 'MKLocalSearch', 'EAAccessory', 'HKCorrelation', 'CATextLayer', 'NSNotificationQueue', 'UINib', 'GLKTextureLoader', 'HKObjectType', 'NSValue', 'NSMutableIndexSet', 'SKPhysicsContact', 'NSProgress', 'AVPlayerViewController', 'CAScrollLayer', 'GKSavedGame', 'NSTextCheckingResult', 'PHObjectPlaceholder', 'SKConstraint', 'EKEventEditViewController', 'NSEntityDescription', 'NSURLCredentialStorage', 'UIApplication', 'SKDownload', 'SCNNode', 'MKLocalSearchRequest', 'SKScene', 'UISearchDisplayController', 'NEOnDemandRule', 'MTLRenderPassStencilAttachmentDescriptor', 'CAReplicatorLayer', 'UIPrintPageRenderer', 'EKCalendarItem', 'NSUUID', 'EAAccessoryManager', 'NEOnDemandRuleIgnore', 'SKRegion', 'AVAssetResourceLoader', 'EAWiFiUnconfiguredAccessoryBrowser', 'NSUserActivity', 'CTCall', 'UIPrinterPickerController', 'CIVector', 'UINavigationBar', 'UIPanGestureRecognizer', 'MPMediaQuery', 'ABNewPersonViewController', 'CKRecordZoneID', 'HKAnchoredObjectQuery', 'CKFetchRecordZonesOperation', 'UIStoryboardSegue', 'ACAccountType', 'GKSession', 'SKVideoNode', 'PHChange', 'SKReceiptRefreshRequest', 'GCExtendedGamepadSnapshot', 'MPSeekCommandEvent', 'GCExtendedGamepad', 'CAValueFunction', 'SCNCylinder', 'NSNotification', 'NSBatchUpdateResult', 'PKPushCredentials', 'SCNPhysicsSliderJoint', 'AVCaptureDeviceFormat', 'AVPlayerItemErrorLog', 'NSMapTable', 'NSSet', 'CMMotionManager', 'GKVoiceChatService', 'UIPageControl', 'UILexicon', 'MTLArrayType', 'AVAudioUnitReverb', 'MKGeodesicPolyline', 'AVMutableComposition', 'NSLayoutConstraint', 'UIPrinter', 'NSOrderedSet', 'CBAttribute', 'PKPushPayload', 'NSIncrementalStoreNode', 'EKEventStore', 'MPRemoteCommandEvent', 'UISlider', 'UIBlurEffect', 'CKAsset', 'AVCaptureInput', 'AVAudioEngine', 'MTLVertexDescriptor', 'SKPhysicsBody', 'NSOperation', 'PKPaymentPass', 'UIImageAsset', 'MKMapCamera', 'SKProductsResponse', 'GLKEffectPropertyMaterial', 'AVCaptureDevice', 'CTCallCenter', 'CABTMIDILocalPeripheralViewController', 'NEVPNManager', 'HKQuery', 'SCNPhysicsContact', 'CBMutableService', 'AVSampleBufferDisplayLayer', 'SCNSceneSource', 'SKLightNode', 'CKDiscoveredUserInfo', 'NSMutableArray', 'MTLDepthStencilDescriptor', 'MTLArgument', 'NSMassFormatter', 'CIRectangleFeature', 'PKPushRegistry', 'NEVPNConnection', 'MCNearbyServiceBrowser', 'NSOperationQueue', 'MKPolylineRenderer', 'HKWorkout', 'NSValueTransformer', 'UICollectionViewFlowLayout', 'MPChangePlaybackRateCommandEvent', 'NSEntityMapping', 'SKTexture', 'NSMergePolicy', 'UITextInputStringTokenizer', 'NSRecursiveLock', 'AVAsset', 'NSUndoManager', 'AVAudioUnitSampler', 'NSItemProvider', 'SKUniform', 'MPMediaPickerController', 'CKOperation', 'MTLRenderPipelineDescriptor', 'EAWiFiUnconfiguredAccessory', 'NSFileCoordinator', 'SKRequest', 'NSFileHandle', 'NSConditionLock', 'UISegmentedControl', 'NSManagedObjectModel', 'UITabBarItem', 'SCNCone', 'MPMediaItem', 'SCNMaterial', 'EKRecurrenceRule', 'UIEvent', 'UITouch', 'UIPrintInteractionController', 'CMDeviceMotion', 'NEVPNProtocol', 'NSCompoundPredicate', 'HKHealthStore', 
'MKMultiPoint', 'HKSampleType', 'UIPrintFormatter', 'AVAudioUnitEQFilterParameters', 'SKView', 'NSConstantString', 'UIPopoverController', 'CKDatabase', 'AVMetadataFaceObject', 'UIAccelerometer', 'EKEventViewController', 'CMAltitudeData', 'MTLStencilDescriptor', 'UISwipeGestureRecognizer', 'NSPort', 'MKCircleRenderer', 'AVCompositionTrack', 'NSAsynchronousFetchRequest', 'NSUbiquitousKeyValueStore', 'NSMetadataQueryResultGroup', 'AVAssetResourceLoadingDataRequest', 'UITableViewHeaderFooterView', 'CKNotificationID', 'AVAudioSession', 'HKUnit', 'NSNull', 'NSPersistentStoreResult', 'MKCircleView', 'AVAudioChannelLayout', 'NEVPNProtocolIKEv2', 'WKProcessPool', 'UIAttachmentBehavior', 'CLBeacon', 'NSInputStream', 'NSURLCache', 'GKPlayer', 'NSMappingModel', 'CIQRCodeFeature', 'AVMutableVideoComposition', 'PHFetchResult', 'NSAttributeDescription', 'AVPlayer', 'MKAnnotationView', 'PKPaymentRequest', 'NSTimer', 'CBDescriptor', 'MKOverlayView', 'AVAudioUnitTimePitch', 'NSSaveChangesRequest', 'UIReferenceLibraryViewController', 'SKPhysicsJointFixed', 'UILocalizedIndexedCollation', 'UIInterpolatingMotionEffect', 'UIDocumentPickerViewController', 'AVAssetWriter', 'NSBundle', 'SKStoreProductViewController', 'GLKViewController', 'NSMetadataQueryAttributeValueTuple', 'GKTurnBasedMatch', 'AVAudioFile', 'UIActivity', 'NSPipe', 'MKShape', 'NSMergeConflict', 'CIImage', 'HKObject', 'UIRotationGestureRecognizer', 'AVPlayerItemLegibleOutput', 'AVAssetImageGenerator', 'GCControllerButtonInput', 'CKMarkNotificationsReadOperation', 'CKSubscription', 'MPTimedMetadata', 'NKIssue', 'UIScreenMode', 'HMAccessoryBrowser', 'GKTurnBasedEventHandler', 'UIWebView', 'MKPolyline', 'JSVirtualMachine', 'AVAssetReader', 'NSAttributedString', 'GKMatchmakerViewController', 'NSCountedSet', 'UIButton', 'WKNavigationResponse', 'GKLocalPlayer', 'MPMovieErrorLog', 'AVSpeechUtterance', 'HKStatistics', 'UILocalNotification', 'HKBiologicalSexObject', 'AVURLAsset', 'CBPeripheral', 'NSDateComponentsFormatter', 'SKSpriteNode', 'UIAccessibilityElement', 'AVAssetWriterInputGroup', 'HMZone', 'AVAssetReaderAudioMixOutput', 'NSEnumerator', 'UIDocument', 'MKLocalSearchResponse', 'UISimpleTextPrintFormatter', 'PHPhotoLibrary', 'CBService', 'UIDocumentMenuViewController', 'MCSession', 'QLPreviewController', 'CAMediaTimingFunction', 'UITextPosition', 'ASIdentifierManager', 'AVAssetResourceLoadingRequest', 'SLComposeServiceViewController', 'UIPinchGestureRecognizer', 'PHObject', 'NSExtensionItem', 'HKSampleQuery', 'MTLRenderPipelineColorAttachmentDescriptorArray', 'MKRouteStep', 'SCNCapsule', 'NSMetadataQuery', 'AVAssetResourceLoadingContentInformationRequest', 'UITraitCollection', 'CTCarrier', 'NSFileSecurity', 'UIAcceleration', 'UIMotionEffect', 'MTLRenderPipelineReflection', 'CLHeading', 'CLVisit', 'MKDirectionsResponse', 'HMAccessory', 'MTLStructType', 'UITextView', 'CMMagnetometerData', 'UICollisionBehavior', 'UIProgressView', 'CKServerChangeToken', 'UISearchBar', 'MKPlacemark', 'AVCaptureConnection', 'NSPropertyMapping', 'ALAssetsFilter', 'SK3DNode', 'AVPlayerItemErrorLogEvent', 'NSJSONSerialization', 'AVAssetReaderVideoCompositionOutput', 'ABPersonViewController', 'CIDetector', 'GKTurnBasedMatchmakerViewController', 'MPMediaItemCollection', 'SCNSphere', 'NSCondition', 'NSURLCredential', 'MIDINetworkConnection', 'NSFileProviderExtension', 'NSDecimalNumberHandler', 'NSAtomicStoreCacheNode', 'NSAtomicStore', 'EKAlarm', 'CKNotificationInfo', 'AVAudioUnitEQ', 'UIPercentDrivenInteractiveTransition', 'MKPolygon', 'AVAssetTrackSegment', 
'MTLVertexAttribute', 'NSExpressionDescription', 'HKStatisticsCollectionQuery', 'NSURLAuthenticationChallenge', 'NSDirectoryEnumerator', 'MKDistanceFormatter', 'UIAlertAction', 'NSPropertyListSerialization', 'GKPeerPickerController', 'UIUserNotificationSettings', 'UITableViewController', 'GKNotificationBanner', 'MKPointAnnotation', 'MTLRenderPassColorAttachmentDescriptorArray', 'NSCache', 'SKPhysicsJoint', 'NSXMLParser', 'UIViewController', 'PKPaymentToken', 'MFMessageComposeViewController', 'AVAudioInputNode', 'NSDataDetector', 'CABTMIDICentralViewController', 'AVAudioUnitMIDIInstrument', 'AVCaptureVideoPreviewLayer', 'AVAssetWriterInputPassDescription', 'MPChangePlaybackRateCommand', 'NSURLComponents', 'CAMetalLayer', 'UISnapBehavior', 'AVMetadataMachineReadableCodeObject', 'CKDiscoverUserInfosOperation', 'NSTextAttachment', 'NSException', 'UIMenuItem', 'CMMotionActivityManager', 'SCNGeometryElement', 'NCWidgetController', 'CAEmitterLayer', 'MKUserLocation', 'UIImagePickerController', 'CIFeature', 'AVCaptureDeviceInput', 'ALAsset', 'NSURLSessionDownloadTask', 'SCNPhysicsHingeJoint', 'MPMoviePlayerViewController', 'NSMutableOrderedSet', 'SCNMaterialProperty', 'UIFont', 'AVCaptureVideoDataOutput', 'NSCachedURLResponse', 'ALAssetsLibrary', 'NSInvocation', 'UILongPressGestureRecognizer', 'NSTextStorage', 'WKWebViewConfiguration', 'CIFaceFeature', 'MKMapSnapshot', 'GLKEffectPropertyFog', 'AVComposition', 'CKDiscoverAllContactsOperation', 'AVAudioMixInputParameters', 'CAEmitterBehavior', 'PKPassLibrary', 'UIMutableUserNotificationCategory', 'NSLock', 'NEVPNProtocolIPSec', 'ADBannerView', 'UIDocumentPickerExtensionViewController', 'UIActivityIndicatorView', 'AVPlayerMediaSelectionCriteria', 'CALayer', 'UIAccessibilityCustomAction', 'UIBarButtonItem', 'AVAudioSessionRouteDescription', 'CLBeaconRegion', 'HKBloodTypeObject', 'MTLVertexBufferLayoutDescriptorArray', 'CABasicAnimation', 'AVVideoCompositionInstruction', 'AVMutableTimedMetadataGroup', 'EKRecurrenceEnd', 'NSTextContainer', 'TWTweetComposeViewController', 'PKPaymentAuthorizationViewController', 'UIScrollView', 'WKNavigationAction', 'AVPlayerItemMetadataOutput', 'EKRecurrenceDayOfWeek', 'NSNumberFormatter', 'MTLComputePipelineReflection', 'UIScreen', 'CLRegion', 'NSProcessInfo', 'GLKTextureInfo', 'SCNSkinner', 'AVCaptureMetadataOutput', 'SCNAnimationEvent', 'NSTextTab', 'JSManagedValue', 'NSDate', 'UITextChecker', 'WKBackForwardListItem', 'NSData', 'NSParagraphStyle', 'AVMutableMetadataItem', 'EKCalendar', 'HKWorkoutEvent', 'NSMutableURLRequest', 'UIVideoEditorController', 'HMTimerTrigger', 'AVAudioUnitVarispeed', 'UIDynamicAnimator', 'AVCompositionTrackSegment', 'GCGamepadSnapshot', 'MPMediaEntity', 'GLKSkyboxEffect', 'UISwitch', 'EKStructuredLocation', 'UIGestureRecognizer', 'NSProxy', 'GLKBaseEffect', 'UIPushBehavior', 'GKScoreChallenge', 'NSCoder', 'MPMediaPlaylist', 'NSDateComponents', 'WKUserScript', 'EKEvent', 'NSDateFormatter', 'NSAsynchronousFetchResult', 'AVAssetWriterInputPixelBufferAdaptor', 'UIVisualEffect', 'UICollectionViewCell', 'UITextField', 'CLPlacemark', 'MPPlayableContentManager', 'AVCaptureOutput', 'HMCharacteristicWriteAction', 'CKModifySubscriptionsOperation', 'NSPropertyDescription', 'GCGamepad', 'UIMarkupTextPrintFormatter', 'SCNTube', 'NSPersistentStoreCoordinator', 'AVAudioEnvironmentNode', 'GKMatchmaker', 'CIContext', 'NSThread', 'SLComposeSheetConfigurationItem', 'SKPhysicsJointSliding', 'NSPredicate', 'GKVoiceChat', 'SKCropNode', 'AVCaptureAudioPreviewOutput', 'NSStringDrawingContext', 
'GKGameCenterViewController', 'UIPrintPaper', 'SCNPhysicsBallSocketJoint', 'UICollectionViewLayoutInvalidationContext', 'GLKEffectPropertyTransform', 'AVAudioIONode', 'UIDatePicker', 'MKDirections', 'ALAssetsGroup', 'CKRecordZoneNotification', 'SCNScene', 'MPMovieAccessLogEvent', 'CKFetchSubscriptionsOperation', 'CAEmitterCell', 'AVAudioUnitTimeEffect', 'HMCharacteristicMetadata', 'MKPinAnnotationView', 'UIPickerView', 'UIImageView', 'UIUserNotificationCategory', 'SCNPhysicsVehicleWheel', 'HKCategoryType', 'MPMediaQuerySection', 'GKFriendRequestComposeViewController', 'NSError', 'MTLRenderPipelineColorAttachmentDescriptor', 'SCNPhysicsShape', 'UISearchController', 'SCNPhysicsBody', 'CTSubscriberInfo', 'AVPlayerItemAccessLog', 'MPMediaPropertyPredicate', 'CMLogItem', 'NSAutoreleasePool', 'NSSocketPort', 'AVAssetReaderTrackOutput', 'SKNode', 'UIMutableUserNotificationAction', 'SCNProgram', 'AVSpeechSynthesisVoice', 'CMAltimeter', 'AVCaptureAudioChannel', 'GKTurnBasedExchangeReply', 'AVVideoCompositionLayerInstruction', 'AVSpeechSynthesizer', 'GKChallengeEventHandler', 'AVCaptureFileOutput', 'UIControl', 'SCNPhysicsField', 'CKReference', 'LAContext', 'CKRecordID', 'ADInterstitialAd', 'AVAudioSessionDataSourceDescription', 'AVAudioBuffer', 'CIColorKernel', 'GCControllerDirectionPad', 'NSFileManager', 'AVMutableAudioMixInputParameters', 'UIScreenEdgePanGestureRecognizer', 'CAKeyframeAnimation', 'CKQueryNotification', 'PHAdjustmentData', 'EASession', 'AVAssetResourceRenewalRequest', 'UIInputView', 'NSFileWrapper', 'UIResponder', 'NSPointerFunctions', 'UIKeyCommand', 'NSHTTPCookieStorage', 'AVMediaSelectionOption', 'NSRunLoop', 'NSFileAccessIntent', 'CAAnimationGroup', 'MKCircle', 'UIAlertController', 'NSMigrationManager', 'NSDateIntervalFormatter', 'UICollectionViewUpdateItem', 'CKDatabaseOperation', 'PHImageRequestOptions', 'SKReachConstraints', 'CKRecord', 'CAInterAppAudioSwitcherView', 'WKWindowFeatures', 'GKInvite', 'NSMutableData', 'PHAssetCollectionChangeRequest', 'NSMutableParagraphStyle', 'UIDynamicBehavior', 'GLKEffectProperty', 'CKFetchRecordChangesOperation', 'SKShapeNode', 'MPMovieErrorLogEvent', 'MKPolygonView', 'MPContentItem', 'HMAction', 'NSScanner', 'GKAchievementChallenge', 'AVAudioPlayer', 'CKContainer', 'AVVideoComposition', 'NKLibrary', 'NSPersistentStore', 'AVCaptureMovieFileOutput', 'HMRoom', 'GKChallenge', 'UITextRange', 'NSURLProtectionSpace', 'ACAccountStore', 'MPSkipIntervalCommand', 'NSComparisonPredicate', 'HMHome', 'PHVideoRequestOptions', 'NSOutputStream', 'MPSkipIntervalCommandEvent', 'PKAddPassesViewController', 'UITextSelectionRect', 'CTTelephonyNetworkInfo', 'AVTextStyleRule', 'NSFetchedPropertyDescription', 'UIPageViewController', 'CATransformLayer', 'UICollectionViewController', 'AVAudioNode', 'MCNearbyServiceAdvertiser', 'NSObject', 'PHAsset', 'GKLeaderboardViewController', 'CKQueryCursor', 'MPMusicPlayerController', 'MKOverlayPathRenderer', 'CMPedometerData', 'HMService', 'SKFieldNode', 'GKAchievement', 'WKUserContentController', 'AVAssetTrack', 'TWRequest', 'SKLabelNode', 'AVCaptureBracketedStillImageSettings', 'MIDINetworkHost', 'MPMediaPredicate', 'AVFrameRateRange', 'MTLTextureDescriptor', 'MTLVertexBufferLayoutDescriptor', 'MPFeedbackCommandEvent', 'UIUserNotificationAction', 'HKStatisticsQuery', 'SCNParticleSystem', 'NSIndexPath', 'AVVideoCompositionRenderContext', 'CADisplayLink', 'HKObserverQuery', 'UIPopoverPresentationController', 'CKQueryOperation', 'CAEAGLLayer', 'NSMutableString', 'NSMessagePort', 'NSURLQueryItem', 'MTLStructMember', 
'AVAudioSessionChannelDescription', 'GLKView', 'UIActivityViewController', 'GKAchievementViewController', 'GKTurnBasedParticipant', 'NSURLProtocol', 'NSUserDefaults', 'NSCalendar', 'SKKeyframeSequence', 'AVMetadataItemFilter', 'CKModifyRecordZonesOperation', 'WKPreferences', 'NSMethodSignature', 'NSRegularExpression', 'EAGLSharegroup', 'AVPlayerItemVideoOutput', 'PHContentEditingInputRequestOptions', 'GKMatch', 'CIColor', 'UIDictationPhrase'}
+COCOA_PROTOCOLS = {'SKStoreProductViewControllerDelegate', 'AVVideoCompositionInstruction', 'AVAudioSessionDelegate', 'GKMatchDelegate', 'NSFileManagerDelegate', 'UILayoutSupport', 'NSCopying', 'UIPrintInteractionControllerDelegate', 'QLPreviewControllerDataSource', 'SKProductsRequestDelegate', 'NSTextStorageDelegate', 'MCBrowserViewControllerDelegate', 'MTLComputeCommandEncoder', 'SCNSceneExportDelegate', 'UISearchResultsUpdating', 'MFMailComposeViewControllerDelegate', 'MTLBlitCommandEncoder', 'NSDecimalNumberBehaviors', 'PHContentEditingController', 'NSMutableCopying', 'UIActionSheetDelegate', 'UIViewControllerTransitioningDelegate', 'UIAlertViewDelegate', 'AVAudioPlayerDelegate', 'MKReverseGeocoderDelegate', 'NSCoding', 'UITextInputTokenizer', 'GKFriendRequestComposeViewControllerDelegate', 'UIActivityItemSource', 'NSCacheDelegate', 'UIAdaptivePresentationControllerDelegate', 'GKAchievementViewControllerDelegate', 'UIViewControllerTransitionCoordinator', 'EKEventEditViewDelegate', 'NSURLConnectionDelegate', 'UITableViewDelegate', 'GKPeerPickerControllerDelegate', 'UIGuidedAccessRestrictionDelegate', 'AVSpeechSynthesizerDelegate', 'AVAudio3DMixing', 'AVPlayerItemLegibleOutputPushDelegate', 'ADInterstitialAdDelegate', 'HMAccessoryBrowserDelegate', 'AVAssetResourceLoaderDelegate', 'UITabBarControllerDelegate', 'CKRecordValue', 'SKPaymentTransactionObserver', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'UIInputViewAudioFeedback', 'GKChallengeListener', 'SKSceneDelegate', 'UIPickerViewDelegate', 'UIWebViewDelegate', 'UIApplicationDelegate', 'GKInviteEventListener', 'MPMediaPlayback', 'MyClassJavaScriptMethods', 'AVAsynchronousKeyValueLoading', 'QLPreviewItem', 'SCNBoundingVolume', 'NSPortDelegate', 'UIContentContainer', 'SCNNodeRendererDelegate', 'SKRequestDelegate', 'SKPhysicsContactDelegate', 'HMAccessoryDelegate', 'UIPageViewControllerDataSource', 'SCNSceneRendererDelegate', 'SCNPhysicsContactDelegate', 'MKMapViewDelegate', 'AVPlayerItemOutputPushDelegate', 'UICollectionViewDelegate', 'UIImagePickerControllerDelegate', 'MTLRenderCommandEncoder', 'PKPaymentAuthorizationViewControllerDelegate', 'UIToolbarDelegate', 'WKUIDelegate', 'SCNActionable', 'NSURLConnectionDataDelegate', 'MKOverlay', 'CBCentralManagerDelegate', 'JSExport', 'NSTextLayoutOrientationProvider', 'UIPickerViewDataSource', 'PKPushRegistryDelegate', 'UIViewControllerTransitionCoordinatorContext', 'NSLayoutManagerDelegate', 'MTLLibrary', 'NSFetchedResultsControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'MTLResource', 'NSDiscardableContent', 'UITextFieldDelegate', 'MTLBuffer', 'MTLSamplerState', 'GKGameCenterControllerDelegate', 'MPMediaPickerControllerDelegate', 'UISplitViewControllerDelegate', 'UIAppearance', 'UIPickerViewAccessibilityDelegate', 'UITraitEnvironment', 'UIScrollViewAccessibilityDelegate', 'ADBannerViewDelegate', 'MPPlayableContentDataSource', 'MTLComputePipelineState', 'NSURLSessionDelegate', 'MTLCommandBuffer', 'NSXMLParserDelegate', 'UIViewControllerRestoration', 'UISearchBarDelegate', 'UIBarPositioning', 'CBPeripheralDelegate', 'UISearchDisplayDelegate', 'CAAction', 'PKAddPassesViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'MTLDepthStencilState', 'GKTurnBasedMatchmakerViewControllerDelegate', 'MPPlayableContentDelegate', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'UIAppearanceContainer', 'UIStateRestoring', 'UITextDocumentProxy', 'MTLDrawable', 'NSURLSessionTaskDelegate', 'NSFilePresenter', 'AVAudioStereoMixing', 'UIViewControllerContextTransitioning', 
'UITextInput', 'CBPeripheralManagerDelegate', 'UITextInputDelegate', 'NSFastEnumeration', 'NSURLAuthenticationChallengeSender', 'SCNProgramDelegate', 'AVVideoCompositing', 'SCNAnimatable', 'NSSecureCoding', 'MCAdvertiserAssistantDelegate', 'GKLocalPlayerListener', 'GLKNamedEffect', 'UIPopoverControllerDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'NSExtensionRequestHandling', 'UITextSelecting', 'UIPrinterPickerControllerDelegate', 'NCWidgetProviding', 'MTLCommandEncoder', 'NSURLProtocolClient', 'MFMessageComposeViewControllerDelegate', 'UIVideoEditorControllerDelegate', 'WKNavigationDelegate', 'GKSavedGameListener', 'UITableViewDataSource', 'MTLFunction', 'EKCalendarChooserDelegate', 'NSUserActivityDelegate', 'UICollisionBehaviorDelegate', 'NSStreamDelegate', 'MCNearbyServiceBrowserDelegate', 'HMHomeDelegate', 'UINavigationControllerDelegate', 'MCSessionDelegate', 'UIDocumentPickerDelegate', 'UIViewControllerInteractiveTransitioning', 'GKTurnBasedEventListener', 'SCNSceneRenderer', 'MTLTexture', 'GLKViewDelegate', 'EAAccessoryDelegate', 'WKScriptMessageHandler', 'PHPhotoLibraryChangeObserver', 'NSKeyedUnarchiverDelegate', 'AVPlayerItemMetadataOutputPushDelegate', 'NSMachPortDelegate', 'SCNShadable', 'UIPopoverBackgroundViewMethods', 'UIDocumentMenuDelegate', 'UIBarPositioningDelegate', 'ABPersonViewControllerDelegate', 'NSNetServiceBrowserDelegate', 'EKEventViewDelegate', 'UIScrollViewDelegate', 'NSURLConnectionDownloadDelegate', 'UIGestureRecognizerDelegate', 'UINavigationBarDelegate', 'AVAudioMixing', 'NSFetchedResultsSectionInfo', 'UIDocumentInteractionControllerDelegate', 'MTLParallelRenderCommandEncoder', 'QLPreviewControllerDelegate', 'UIAccessibilityReadingContent', 'ABUnknownPersonViewControllerDelegate', 'GLKViewControllerDelegate', 'UICollectionViewDelegateFlowLayout', 'UIPopoverPresentationControllerDelegate', 'UIDynamicAnimatorDelegate', 'NSTextAttachmentContainer', 'MKAnnotation', 'UIAccessibilityIdentification', 'UICoordinateSpace', 'ABNewPersonViewControllerDelegate', 'MTLDevice', 'CAMediaTiming', 'AVCaptureFileOutputRecordingDelegate', 'HMHomeManagerDelegate', 'UITextViewDelegate', 'UITabBarDelegate', 'GKLeaderboardViewControllerDelegate', 'UISearchControllerDelegate', 'EAWiFiUnconfiguredAccessoryBrowserDelegate', 'UITextInputTraits', 'MTLRenderPipelineState', 'GKVoiceChatClient', 'UIKeyInput', 'UICollectionViewDataSource', 'SCNTechniqueSupport', 'NSLocking', 'AVCaptureFileOutputDelegate', 'GKChallengeEventHandlerDelegate', 'UIObjectRestoration', 'CIFilterConstructor', 'AVPlayerItemOutputPullDelegate', 'EAGLDrawable', 'AVVideoCompositionValidationHandling', 'UIViewControllerAnimatedTransitioning', 'NSURLSessionDownloadDelegate', 'UIAccelerometerDelegate', 'UIPageViewControllerDelegate', 'MTLCommandQueue', 'UIDataSourceModelAssociation', 'AVAudioRecorderDelegate', 'GKSessionDelegate', 'NSKeyedArchiverDelegate', 'CAMetalDrawable', 'UIDynamicItem', 'CLLocationManagerDelegate', 'NSMetadataQueryDelegate', 'NSNetServiceDelegate', 'GKMatchmakerViewControllerDelegate', 'NSURLSessionDataDelegate'}
+COCOA_PRIMITIVES = {'ROTAHeader', '__CFBundle', 'MortSubtable', 'AudioFilePacketTableInfo', 'CGPDFOperatorTable', 'KerxStateEntry', 'ExtendedTempoEvent', 'CTParagraphStyleSetting', 'OpaqueMIDIPort', '_GLKMatrix3', '_GLKMatrix2', '_GLKMatrix4', 'ExtendedControlEvent', 'CAFAudioDescription', 'OpaqueCMBlockBuffer', 'CGTextDrawingMode', 'EKErrorCode', 'gss_buffer_desc_struct', 'AudioUnitParameterInfo', '__SCPreferences', '__CTFrame', '__CTLine', 'AudioFile_SMPTE_Time', 'gss_krb5_lucid_context_v1', 'OpaqueJSValue', 'TrakTableEntry', 'AudioFramePacketTranslation', 'CGImageSource', 'OpaqueJSPropertyNameAccumulator', 'JustPCGlyphRepeatAddAction', '__CFBinaryHeap', 'OpaqueMIDIThruConnection', 'opaqueCMBufferQueue', 'OpaqueMusicSequence', 'MortRearrangementSubtable', 'MixerDistanceParams', 'MorxSubtable', 'MIDIObjectPropertyChangeNotification', 'SFNTLookupSegment', 'CGImageMetadataErrors', 'CGPath', 'OpaqueMIDIEndpoint', 'AudioComponentPlugInInterface', 'gss_ctx_id_t_desc_struct', 'sfntFontFeatureSetting', 'OpaqueJSContextGroup', '__SCNetworkConnection', 'AudioUnitParameterValueTranslation', 'CGImageMetadataType', 'CGPattern', 'AudioFileTypeAndFormatID', 'CGContext', 'AUNodeInteraction', 'SFNTLookupTable', 'JustPCDecompositionAction', 'KerxControlPointHeader', 'AudioStreamPacketDescription', 'KernSubtableHeader', '__SecCertificate', 'AUMIDIOutputCallbackStruct', 'MIDIMetaEvent', 'AudioQueueChannelAssignment', 'AnchorPoint', 'JustTable', '__CFNetService', 'CF_BRIDGED_TYPE', 'gss_krb5_lucid_key', 'CGPDFDictionary', 'KerxSubtableHeader', 'CAF_UUID_ChunkHeader', 'gss_krb5_cfx_keydata', 'OpaqueJSClass', 'CGGradient', 'OpaqueMIDISetup', 'JustPostcompTable', '__CTParagraphStyle', 'AudioUnitParameterHistoryInfo', 'OpaqueJSContext', 'CGShading', 'MIDIThruConnectionParams', 'BslnFormat0Part', 'SFNTLookupSingle', '__CFHost', '__SecRandom', '__CTFontDescriptor', '_NSRange', 'sfntDirectory', 'AudioQueueLevelMeterState', 'CAFPositionPeak', 'PropLookupSegment', '__CVOpenGLESTextureCache', 'sfntInstance', '_GLKQuaternion', 'AnkrTable', '__SCNetworkProtocol', 'CAFFileHeader', 'KerxOrderedListHeader', 'CGBlendMode', 'STXEntryOne', 'CAFRegion', 'SFNTLookupTrimmedArrayHeader', 'SCNMatrix4', 'KerxControlPointEntry', 'OpaqueMusicTrack', '_GLKVector4', 'gss_OID_set_desc_struct', 'OpaqueMusicPlayer', '_CFHTTPAuthentication', 'CGAffineTransform', 'CAFMarkerChunk', 'AUHostIdentifier', 'ROTAGlyphEntry', 'BslnTable', 'gss_krb5_lucid_context_version', '_GLKMatrixStack', 'CGImage', 'KernStateEntry', 'SFNTLookupSingleHeader', 'MortLigatureSubtable', 'CAFUMIDChunk', 'SMPTETime', 'CAFDataChunk', 'CGPDFStream', 'AudioFileRegionList', 'STEntryTwo', 'SFNTLookupBinarySearchHeader', 'OpbdTable', '__CTGlyphInfo', 'BslnFormat2Part', 'KerxIndexArrayHeader', 'TrakTable', 'KerxKerningPair', '__CFBitVector', 'KernVersion0SubtableHeader', 'OpaqueAudioComponentInstance', 'AudioChannelLayout', '__CFUUID', 'MIDISysexSendRequest', '__CFNumberFormatter', 'CGImageSourceStatus', 'AudioFileMarkerList', 'AUSamplerBankPresetData', 'CGDataProvider', 'AudioFormatInfo', '__SecIdentity', 'sfntCMapExtendedSubHeader', 'MIDIChannelMessage', 'KernOffsetTable', 'CGColorSpaceModel', 'MFMailComposeErrorCode', 'CGFunction', '__SecTrust', 'AVAudio3DAngularOrientation', 'CGFontPostScriptFormat', 'KernStateHeader', 'AudioUnitCocoaViewInfo', 'CGDataConsumer', 'OpaqueMIDIDevice', 'KernVersion0Header', 'AnchorPointTable', 'CGImageDestination', 'CAFInstrumentChunk', 'AudioUnitMeterClipping', 'MorxChain', '__CTFontCollection', 'STEntryOne', 'STXEntryTwo', 
'ExtendedNoteOnEvent', 'CGColorRenderingIntent', 'KerxSimpleArrayHeader', 'MorxTable', '_GLKVector3', '_GLKVector2', 'MortTable', 'CGPDFBox', 'AudioUnitParameterValueFromString', '__CFSocket', 'ALCdevice_struct', 'MIDINoteMessage', 'sfntFeatureHeader', 'CGRect', '__SCNetworkInterface', '__CFTree', 'MusicEventUserData', 'TrakTableData', 'GCQuaternion', 'MortContextualSubtable', '__CTRun', 'AudioUnitFrequencyResponseBin', 'MortChain', 'MorxInsertionSubtable', 'CGImageMetadata', 'gss_auth_identity', 'AudioUnitMIDIControlMapping', 'CAFChunkHeader', 'CGImagePropertyOrientation', 'CGPDFScanner', 'OpaqueMusicEventIterator', 'sfntDescriptorHeader', 'AudioUnitNodeConnection', 'OpaqueMIDIDeviceList', 'ExtendedAudioFormatInfo', 'BslnFormat1Part', 'sfntFontDescriptor', 'KernSimpleArrayHeader', '__CFRunLoopObserver', 'CGPatternTiling', 'MIDINotification', 'MorxLigatureSubtable', 'MessageComposeResult', 'MIDIThruConnectionEndpoint', 'MusicDeviceStdNoteParams', 'opaqueCMSimpleQueue', 'ALCcontext_struct', 'OpaqueAudioQueue', 'PropLookupSingle', 'CGInterpolationQuality', 'CGColor', 'AudioOutputUnitStartAtTimeParams', 'gss_name_t_desc_struct', 'CGFunctionCallbacks', 'CAFPacketTableHeader', 'AudioChannelDescription', 'sfntFeatureName', 'MorxContextualSubtable', 'CVSMPTETime', 'AudioValueRange', 'CGTextEncoding', 'AudioStreamBasicDescription', 'AUNodeRenderCallback', 'AudioPanningInfo', 'KerxOrderedListEntry', '__CFAllocator', 'OpaqueJSPropertyNameArray', '__SCDynamicStore', 'OpaqueMIDIEntity', '__CTRubyAnnotation', 'SCNVector4', 'CFHostClientContext', 'CFNetServiceClientContext', 'AudioUnitPresetMAS_SettingData', 'opaqueCMBufferQueueTriggerToken', 'AudioUnitProperty', 'CAFRegionChunk', 'CGPDFString', '__GLsync', '__CFStringTokenizer', 'JustWidthDeltaEntry', 'sfntVariationAxis', '__CFNetDiagnostic', 'CAFOverviewSample', 'sfntCMapEncoding', 'CGVector', '__SCNetworkService', 'opaqueCMSampleBuffer', 'AUHostVersionIdentifier', 'AudioBalanceFade', 'sfntFontRunFeature', 'KerxCoordinateAction', 'sfntCMapSubHeader', 'CVPlanarPixelBufferInfo', 'AUNumVersion', 'AUSamplerInstrumentData', 'AUPreset', '__CTRunDelegate', 'OpaqueAudioQueueProcessingTap', 'KerxTableHeader', '_NSZone', 'OpaqueExtAudioFile', '__CFRunLoopSource', '__CVMetalTextureCache', 'KerxAnchorPointAction', 'OpaqueJSString', 'AudioQueueParameterEvent', '__CFHTTPMessage', 'OpaqueCMClock', 'ScheduledAudioFileRegion', 'STEntryZero', 'AVAudio3DPoint', 'gss_channel_bindings_struct', 'sfntVariationHeader', 'AUChannelInfo', 'UIOffset', 'GLKEffectPropertyPrv', 'KerxStateHeader', 'CGLineJoin', 'CGPDFDocument', '__CFBag', 'KernOrderedListHeader', '__SCNetworkSet', '__SecKey', 'MIDIObjectAddRemoveNotification', 'AudioUnitParameter', 'JustPCActionSubrecord', 'AudioComponentDescription', 'AudioUnitParameterValueName', 'AudioUnitParameterEvent', 'KerxControlPointAction', 'AudioTimeStamp', 'KernKerningPair', 'gss_buffer_set_desc_struct', 'MortFeatureEntry', 'FontVariation', 'CAFStringID', 'LcarCaretClassEntry', 'AudioUnitParameterStringFromValue', 'ACErrorCode', 'ALMXGlyphEntry', 'LtagTable', '__CTTypesetter', 'AuthorizationOpaqueRef', 'UIEdgeInsets', 'CGPathElement', 'CAFMarker', 'KernTableHeader', 'NoteParamsControlValue', 'SSLContext', 'gss_cred_id_t_desc_struct', 'AudioUnitParameterNameInfo', 'CGDataConsumerCallbacks', 'ALMXHeader', 'CGLineCap', 'MIDIControlTransform', 'CGPDFArray', '__SecPolicy', 'AudioConverterPrimeInfo', '__CTTextTab', '__CFNetServiceMonitor', 'AUInputSamplesInOutputCallbackStruct', '__CTFramesetter', 'CGPDFDataFormat', 'STHeader', 
'CVPlanarPixelBufferInfo_YCbCrPlanar', 'MIDIValueMap', 'JustDirectionTable', '__SCBondStatus', 'SFNTLookupSegmentHeader', 'OpaqueCMMemoryPool', 'CGPathDrawingMode', 'CGFont', '__SCNetworkReachability', 'AudioClassDescription', 'CGPoint', 'AVAudio3DVectorOrientation', 'CAFStrings', '__CFNetServiceBrowser', 'opaqueMTAudioProcessingTap', 'sfntNameRecord', 'CGPDFPage', 'CGLayer', 'ComponentInstanceRecord', 'CAFInfoStrings', 'HostCallbackInfo', 'MusicDeviceNoteParams', 'OpaqueVTCompressionSession', 'KernIndexArrayHeader', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'MusicTrackLoopInfo', 'opaqueCMFormatDescription', 'STClassTable', 'sfntDirectoryEntry', 'OpaqueCMTimebase', 'CGDataProviderDirectCallbacks', 'MIDIPacketList', 'CAFOverviewChunk', 'MIDIPacket', 'ScheduledAudioSlice', 'CGDataProviderSequentialCallbacks', 'AudioBuffer', 'MorxRearrangementSubtable', 'CGPatternCallbacks', 'AUDistanceAttenuationData', 'MIDIIOErrorNotification', 'CGPDFContentStream', 'IUnknownVTbl', 'MIDITransform', 'MortInsertionSubtable', 'CABarBeatTime', 'AudioBufferList', '__CVBuffer', 'AURenderCallbackStruct', 'STXEntryZero', 'JustPCDuctilityAction', 'OpaqueAudioQueueTimeline', 'VTDecompressionOutputCallbackRecord', 'OpaqueMIDIClient', '__CFPlugInInstance', 'AudioQueueBuffer', '__CFFileDescriptor', 'AudioUnitConnection', '_GKTurnBasedExchangeStatus', 'LcarCaretTable', 'CVPlanarComponentInfo', 'JustWidthDeltaGroup', 'OpaqueAudioComponent', 'ParameterEvent', '__CVPixelBufferPool', '__CTFont', 'CGColorSpace', 'CGSize', 'AUDependentParameter', 'MIDIDriverInterface', 'gss_krb5_rfc1964_keydata', '__CFDateFormatter', 'LtagStringRange', 'OpaqueVTDecompressionSession', 'gss_iov_buffer_desc_struct', 'AUPresetEvent', 'PropTable', 'KernOrderedListEntry', 'CF_BRIDGED_MUTABLE_TYPE', 'gss_OID_desc_struct', 'AudioUnitPresetMAS_Settings', 'AudioFileMarker', 'JustPCConditionalAddAction', 'BslnFormat3Part', '__CFNotificationCenter', 'MortSwashSubtable', 'AUParameterMIDIMapping', 'SCNVector3', 'OpaqueAudioConverter', 'MIDIRawData', 'sfntNameHeader', '__CFRunLoop', 'MFMailComposeResult', 'CATransform3D', 'OpbdSideValues', 'CAF_SMPTE_Time', '__SecAccessControl', 'JustPCAction', 'OpaqueVTFrameSilo', 'OpaqueVTMultiPassStorage', 'CGPathElementType', 'AudioFormatListItem', 'AudioUnitExternalBuffer', 'AudioFileRegion', 'AudioValueTranslation', 'CGImageMetadataTag', 'CAFPeakChunk', 'AudioBytePacketTranslation', 'sfntCMapHeader', '__CFURLEnumerator', 'STXHeader', 'CGPDFObjectType', 'SFNTLookupArrayHeader'}
 
 if __name__ == '__main__':  # pragma: no cover
     import os
index 16a483964587a2e28f391343cb70e4685d35842a..72e1fe39e96ae283347b28625b6f269a99231b8d 100644 (file)
@@ -7,48 +7,52 @@
     :license: BSD, see LICENSE for details.
 """
 
-# Opcodes in Csound 6.12.0 at commit 6ca322bd31f1ca907c008616b40a5f237ff449db using
-#   python -c "
-#   import re, subprocess
-#   output = subprocess.Popen(['csound', '--list-opcodes0'], stderr=subprocess.PIPE).communicate()[1]
-#   opcodes = output[re.search(r'^$', output, re.M).end():re.search(r'^\d+ opcodes$', output, re.M).start()].split()
-#   output = subprocess.Popen(['csound', '--list-opcodes2'], stderr=subprocess.PIPE).communicate()[1]
-#   all_opcodes = output[re.search(r'^$', output, re.M).end():re.search(r'^\d+ opcodes$', output, re.M).start()].split()
+# Opcodes in Csound 6.13.0 using:
+#   python3 -c "
+#   import re
+#   from subprocess import Popen, PIPE
+#   output = Popen(['csound', '--list-opcodes0'], stderr=PIPE, text=True).communicate()[1]
+#   opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split()
+#   output = Popen(['csound', '--list-opcodes2'], stderr=PIPE, text=True).communicate()[1]
+#   all_opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split()
 #   deprecated_opcodes = [opcode for opcode in all_opcodes if opcode not in opcodes]
-#   print '''OPCODES = set(\'''
-#   {}
+#   # Remove opcodes that csound.py treats as keywords.
+#   keyword_opcodes = [
+#       'cggoto',   # https://csound.com/docs/manual/cggoto.html
+#       'cigoto',   # https://csound.com/docs/manual/cigoto.html
+#       'cingoto',  # (undocumented)
+#       'ckgoto',   # https://csound.com/docs/manual/ckgoto.html
+#       'cngoto',   # https://csound.com/docs/manual/cngoto.html
+#       'cnkgoto',  # (undocumented)
+#       'endin',    # https://csound.com/docs/manual/endin.html
+#       'endop',    # https://csound.com/docs/manual/endop.html
+#       'goto',     # https://csound.com/docs/manual/goto.html
+#       'igoto',    # https://csound.com/docs/manual/igoto.html
+#       'instr',    # https://csound.com/docs/manual/instr.html
+#       'kgoto',    # https://csound.com/docs/manual/kgoto.html
+#       'loop_ge',  # https://csound.com/docs/manual/loop_ge.html
+#       'loop_gt',  # https://csound.com/docs/manual/loop_gt.html
+#       'loop_le',  # https://csound.com/docs/manual/loop_le.html
+#       'loop_lt',  # https://csound.com/docs/manual/loop_lt.html
+#       'opcode',   # https://csound.com/docs/manual/opcode.html
+#       'reinit',   # https://csound.com/docs/manual/reinit.html
+#       'return',   # https://csound.com/docs/manual/return.html
+#       'rireturn', # https://csound.com/docs/manual/rireturn.html
+#       'rigoto',   # https://csound.com/docs/manual/rigoto.html
+#       'tigoto',   # https://csound.com/docs/manual/tigoto.html
+#       'timout'    # https://csound.com/docs/manual/timout.html
+#   ]
+#   opcodes = [opcode for opcode in opcodes if opcode not in keyword_opcodes]
+#   newline = '\n'
+#   print(f'''OPCODES = set(\'''
+#   {newline.join(opcodes)}
 #   \'''.split())
 #
 #   DEPRECATED_OPCODES = set(\'''
-#   {}
+#   {newline.join(deprecated_opcodes)}
 #   \'''.split())
-#   '''.format('\n'.join(opcodes), '\n'.join(deprecated_opcodes))
+#   ''')
 #   "
-# except for
-#   cggoto   csound.com/docs/manual/cggoto.html
-#   cigoto   csound.com/docs/manual/cigoto.html
-#   cingoto  (undocumented)
-#   ckgoto   csound.com/docs/manual/ckgoto.html
-#   cngoto   csound.com/docs/manual/cngoto.html
-#   cnkgoto  (undocumented)
-#   endin    csound.com/docs/manual/endin.html
-#   endop    csound.com/docs/manual/endop.html
-#   goto     csound.com/docs/manual/goto.html
-#   igoto    csound.com/docs/manual/igoto.html
-#   instr    csound.com/docs/manual/instr.html
-#   kgoto    csound.com/docs/manual/kgoto.html
-#   loop_ge  csound.com/docs/manual/loop_ge.html
-#   loop_gt  csound.com/docs/manual/loop_gt.html
-#   loop_le  csound.com/docs/manual/loop_le.html
-#   loop_lt  csound.com/docs/manual/loop_lt.html
-#   opcode   csound.com/docs/manual/opcode.html
-#   reinit   csound.com/docs/manual/reinit.html
-#   return   csound.com/docs/manual/return.html
-#   rireturn csound.com/docs/manual/rireturn.html
-#   rigoto   csound.com/docs/manual/rigoto.html
-#   tigoto   csound.com/docs/manual/tigoto.html
-#   timout   csound.com/docs/manual/timout.html
-# which are treated as keywords in csound.py.
 
 OPCODES = set('''
 ATSadd
@@ -169,8 +173,8 @@ STKBowed
 STKBrass
 STKClarinet
 STKDrummer
-STKFlute
 STKFMVoices
+STKFlute
 STKHevyMetl
 STKMandolin
 STKModalBar
@@ -201,6 +205,7 @@ alwayson
 ampdb
 ampdbfs
 ampmidi
+ampmidicurve
 ampmidid
 areson
 aresonk
@@ -249,7 +254,6 @@ centroid
 ceps
 cepsinv
 chanctrl
-changed
 changed2
 chani
 chano
@@ -418,6 +422,17 @@ flashtxt
 flooper
 flooper2
 floor
+fluidAllOut
+fluidCCi
+fluidCCk
+fluidControl
+fluidEngine
+fluidInfo
+fluidLoad
+fluidNote
+fluidOut
+fluidProgramSelect
+fluidSetInterpMethod
 fmanal
 fmax
 fmb3
@@ -492,6 +507,7 @@ grain
 grain2
 grain3
 granule
+gtf
 guiro
 harmon
 harmon2
@@ -599,6 +615,10 @@ la_i_multiply_mc
 la_i_multiply_mr
 la_i_multiply_vc
 la_i_multiply_vr
+la_i_norm1_mc
+la_i_norm1_mr
+la_i_norm1_vc
+la_i_norm1_vr
 la_i_norm_euclid_mc
 la_i_norm_euclid_mr
 la_i_norm_euclid_vc
@@ -609,10 +629,6 @@ la_i_norm_inf_vc
 la_i_norm_inf_vr
 la_i_norm_max_mc
 la_i_norm_max_mr
-la_i_norm1_mc
-la_i_norm1_mr
-la_i_norm1_vc
-la_i_norm1_vr
 la_i_print_mc
 la_i_print_mr
 la_i_print_vc
@@ -697,6 +713,10 @@ la_k_multiply_mc
 la_k_multiply_mr
 la_k_multiply_vc
 la_k_multiply_vr
+la_k_norm1_mc
+la_k_norm1_mr
+la_k_norm1_vc
+la_k_norm1_vr
 la_k_norm_euclid_mc
 la_k_norm_euclid_mr
 la_k_norm_euclid_vc
@@ -707,10 +727,6 @@ la_k_norm_inf_vc
 la_k_norm_inf_vr
 la_k_norm_max_mc
 la_k_norm_max_mr
-la_k_norm1_mc
-la_k_norm1_mr
-la_k_norm1_vc
-la_k_norm1_vr
 la_k_qr_eigen_mc
 la_k_qr_eigen_mr
 la_k_qr_factor_mc
@@ -900,6 +916,8 @@ nrpn
 nsamp
 nstance
 nstrnum
+nstrstr
+ntof
 ntom
 ntrpol
 nxtpow2
@@ -1030,7 +1048,6 @@ pset
 ptable
 ptable3
 ptablei
-ptableiw
 ptablew
 ptrack
 puts
@@ -1337,6 +1354,7 @@ strfromurl
 strget
 strindex
 strindexk
+string2array
 strlen
 strlenk
 strlower
@@ -1380,7 +1398,6 @@ tableicopy
 tableigpw
 tableikt
 tableimix
-tableiw
 tablekt
 tablemix
 tableng
@@ -1589,6 +1606,7 @@ DEPRECATED_OPCODES = set('''
 array
 bformdec
 bformenc
+changed
 copy2ftab
 copy2ttab
 hrtfer
@@ -1598,6 +1616,7 @@ maxtab
 mintab
 pop
 pop_f
+ptableiw
 push
 push_f
 scalet
@@ -1616,6 +1635,7 @@ spectrum
 stack
 sumtab
 tabgen
+tableiw
 tabmap
 tabmap_i
 tabslice
index b2b46a6f298ce94960b6e910c591da25e26312c7..ca3acb1c10d0204969726d2218262f39298bf61b 100644 (file)
@@ -288,7 +288,7 @@ if __name__ == '__main__':  # pragma: no cover
             print('>> %s' % full_function_name)
             m = get_function_module(full_function_name)
             modules.setdefault(m, []).append(full_function_name)
-        modules = dict((k, tuple(v)) for k, v in modules.iteritems())
+        modules = {k: tuple(v) for k, v in modules.items()}
 
         regenerate(__file__, modules)
 
index ce1b6dfc16c4373adac5ce4de9881895e76660b4..acb71ad94b755718ca5cf38d3d706fa3cea7c72d 100644 (file)
@@ -65,7 +65,6 @@ LEXERS = {
     'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
     'BoogieLexer': ('pygments.lexers.verification', 'Boogie', ('boogie',), ('*.bpl',), ()),
     'BrainfuckLexer': ('pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
-    'BroLexer': ('pygments.lexers.dsls', 'Bro', ('bro',), ('*.bro',), ()),
     'BugsLexer': ('pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
     'CAmkESLexer': ('pygments.lexers.esoteric', 'CAmkES', ('camkes', 'idl4'), ('*.camkes', '*.idl4'), ()),
     'CLexer': ('pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')),
@@ -146,6 +145,7 @@ LEXERS = {
     'ElixirLexer': ('pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)),
     'ElmLexer': ('pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)),
     'EmacsLispLexer': ('pygments.lexers.lisp', 'EmacsLisp', ('emacs', 'elisp', 'emacs-lisp'), ('*.el',), ('text/x-elisp', 'application/x-elisp')),
+    'EmailLexer': ('pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)),
     'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
     'ErlangLexer': ('pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
     'ErlangShellLexer': ('pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
@@ -255,6 +255,7 @@ LEXERS = {
     'LogosLexer': ('pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
     'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)),
     'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
+    'MIMELexer': ('pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')),
     'MOOCodeLexer': ('pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
     'MSDOSSessionLexer': ('pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()),
     'MakefileLexer': ('pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
@@ -304,6 +305,7 @@ LEXERS = {
     'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nim', 'nimrod'), ('*.nim', '*.nimrod'), ('text/x-nim',)),
     'NitLexer': ('pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
     'NixLexer': ('pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
+    'NotmuchLexer': ('pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()),
     'NuSMVLexer': ('pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()),
     'NumPyLexer': ('pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
     'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
@@ -341,11 +343,11 @@ LEXERS = {
     'PugLexer': ('pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')),
     'PuppetLexer': ('pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()),
     'PyPyLogLexer': ('pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
-    'Python3Lexer': ('pygments.lexers.python', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')),
-    'Python3TracebackLexer': ('pygments.lexers.python', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)),
+    'Python2Lexer': ('pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')),
+    'Python2TracebackLexer': ('pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)),
     'PythonConsoleLexer': ('pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
-    'PythonLexer': ('pygments.lexers.python', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')),
-    'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)),
+    'PythonLexer': ('pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')),
+    'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')),
     'QBasicLexer': ('pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
     'QVToLexer': ('pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()),
     'QmlLexer': ('pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')),
@@ -367,12 +369,12 @@ LEXERS = {
     'RedLexer': ('pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')),
     'RedcodeLexer': ('pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()),
     'RegeditLexer': ('pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
-    'ResourceLexer': ('pygments.lexers.resource', 'ResourceBundle', ('resource', 'resourcebundle'), ('*.txt',), ()),
+    'ResourceLexer': ('pygments.lexers.resource', 'ResourceBundle', ('resource', 'resourcebundle'), (), ()),
     'RexxLexer': ('pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
     'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
     'RoboconfGraphLexer': ('pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()),
     'RoboconfInstancesLexer': ('pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()),
-    'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.txt', '*.robot'), ('text/x-robotframework',)),
+    'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.robot',), ('text/x-robotframework',)),
     'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)),
     'RslLexer': ('pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)),
     'RstLexer': ('pygments.lexers.markup', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
@@ -387,9 +389,11 @@ LEXERS = {
     'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
     'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
     'ScamlLexer': ('pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)),
+    'ScdocLexer': ('pygments.lexers.scdoc', 'scdoc', ('scdoc', 'scd'), ('*.scd', '*.scdoc'), ()),
     'SchemeLexer': ('pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
     'ScilabLexer': ('pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
     'ScssLexer': ('pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
+    'ShExCLexer': ('pygments.lexers.rdf', 'ShExC', ('shexc', 'shex'), ('*.shex',), ('text/shex',)),
     'ShenLexer': ('pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')),
     'SilverLexer': ('pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()),
     'SlashLexer': ('pygments.lexers.slash', 'Slash', ('slash',), ('*.sl',), ()),
@@ -401,6 +405,7 @@ LEXERS = {
     'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
     'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
     'SnowballLexer': ('pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()),
+    'SolidityLexer': ('pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()),
     'SourcePawnLexer': ('pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
     'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()),
     'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)),
@@ -471,7 +476,9 @@ LEXERS = {
     'XtlangLexer': ('pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()),
     'YamlJinjaLexer': ('pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')),
     'YamlLexer': ('pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
+    'ZeekLexer': ('pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()),
     'ZephirLexer': ('pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()),
+    'ZigLexer': ('pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)),
 }
 
 if __name__ == '__main__':  # pragma: no cover
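As a quick sanity check on the remapped aliases above, here is a minimal sketch (assuming Pygments 2.5.x is installed) of resolving lexers through the public API; the expected names come straight from the entries in this hunk.

# Minimal sketch: resolving lexers through the updated LEXERS table.
# get_lexer_by_name() is the public entry point that consults this mapping.
from pygments.lexers import get_lexer_by_name

print(get_lexer_by_name('python3').name)  # 'Python' - alias now served by PythonLexer
print(get_lexer_by_name('python2').name)  # 'Python 2.x' - the new Python2Lexer
print(get_lexer_by_name('py3tb').name)    # 'Python Traceback' - merged traceback lexer
print(get_lexer_by_name('zeek').name)     # 'Zeek' - replaces the removed BroLexer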
index 8c803791394ee46ddc3fa96e4ee93224acec38a7..e59fd910d1b339f1e46af9e47deffe68ee7ed259 100644 (file)
@@ -884,7 +884,7 @@ constants = (
     'PERIOD_W1',
     'POINTER_AUTOMATIC',
     'POINTER_DYNAMIC',
-    'POINTER_INVALID'
+    'POINTER_INVALID',
     'PRICE_CLOSE',
     'PRICE_HIGH',
     'PRICE_LOW',
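The added comma is more than style: without it, Python's implicit string concatenation silently fuses the two adjacent literals into one bogus constant name. A minimal illustration:

# Without the trailing comma, adjacent string literals are concatenated.
broken = ('POINTER_INVALID'
          'PRICE_CLOSE')    # one string: 'POINTER_INVALIDPRICE_CLOSE'
fixed = ('POINTER_INVALID',
         'PRICE_CLOSE')     # two strings, as intended
print(broken)
print(fixed)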
index 75d65f9fb85c25d55c2a51c501b7db322fcc6084..32ac936127e0bb6eca44c2f4b774769f435bbcd1 100644 (file)
@@ -37,6 +37,7 @@ class GasLexer(RegexLexer):
     char = r'[\w$.@-]'
     identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' + char + '+)'
     number = r'(?:0[xX][a-zA-Z0-9]+|\d+)'
+    register = '%' + identifier
 
     tokens = {
         'root': [
@@ -52,6 +53,7 @@ class GasLexer(RegexLexer):
             (string, String),
             ('@' + identifier, Name.Attribute),
             (number, Number.Integer),
+            (register, Name.Variable),
             (r'[\r\n]+', Text, '#pop'),
             (r'[;#].*?\n', Comment, '#pop'),
 
@@ -72,7 +74,7 @@ class GasLexer(RegexLexer):
             (identifier, Name.Constant),
             (number, Number.Integer),
             # Registers
-            ('%' + identifier, Name.Variable),
+            (register, Name.Variable),
             # Numeric constants
             ('$'+number, Number.Integer),
             (r"$'(.|\\')'", String.Char),
@@ -455,6 +457,10 @@ class NasmLexer(RegexLexer):
     filenames = ['*.asm', '*.ASM']
     mimetypes = ['text/x-nasm']
 
+    # TASM uses the same file endings, but TASM is not as common as NASM, so
+    # we prioritize NASM higher by default.
+    priority = 1.0
+
     identifier = r'[a-z$._?][\w$.?#@~]*'
     hexn = r'(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)'
     octn = r'[0-7]+q'
@@ -520,6 +526,11 @@ class NasmLexer(RegexLexer):
         ],
     }
 
+    def analyse_text(text):
+        # Probably TASM
+        if re.match(r'PROC', text, re.IGNORECASE):
+            return False
+
 
 class NasmObjdumpLexer(ObjdumpLexer):
     """
@@ -614,6 +625,11 @@ class TasmLexer(RegexLexer):
         ],
     }
 
+    def analyse_text(text):
+        # See above
+        if re.match(r'PROC', text, re.I):
+            return True
+
 
 class Ca65Lexer(RegexLexer):
     """
index 855254efe42538d4b63f8eb58ab743cbc46f4e6b..6d15c230d2640d82630c3ccea7127a8a78efdc4f 100644 (file)
@@ -33,7 +33,7 @@ class BibTeXLexer(ExtendedRegexLexer):
     flags = re.IGNORECASE
 
     ALLOWED_CHARS = r'@!$&*+\-./:;<>?\[\\\]^`|~'
-    IDENTIFIER = '[{0}][{1}]*'.format('a-z_' + ALLOWED_CHARS, r'\w' + ALLOWED_CHARS)
+    IDENTIFIER = '[{}][{}]*'.format('a-z_' + ALLOWED_CHARS, r'\w' + ALLOWED_CHARS)
 
     def open_brace_callback(self, match, ctx):
         opening_brace = match.group()
index e676a8af5f98eab54827e95ae1ade1651502cb93..5d84a377580a8b4d475e026ff9df4bfdf29105e5 100644 (file)
@@ -144,21 +144,21 @@ class CFamilyLexer(RegexLexer):
         ]
     }
 
-    stdlib_types = set((
+    stdlib_types = {
         'size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t', 'sig_atomic_t', 'fpos_t',
         'clock_t', 'time_t', 'va_list', 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t',
-        'mbstate_t', 'wctrans_t', 'wint_t', 'wctype_t'))
-    c99_types = set((
+        'mbstate_t', 'wctrans_t', 'wint_t', 'wctype_t'}
+    c99_types = {
         '_Bool', '_Complex', 'int8_t', 'int16_t', 'int32_t', 'int64_t', 'uint8_t',
         'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t', 'int_least16_t',
         'int_least32_t', 'int_least64_t', 'uint_least8_t', 'uint_least16_t',
         'uint_least32_t', 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t',
         'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t', 'uint_fast64_t',
-        'intptr_t', 'uintptr_t', 'intmax_t', 'uintmax_t'))
-    linux_types = set((
+        'intptr_t', 'uintptr_t', 'intmax_t', 'uintmax_t'}
+    linux_types = {
         'clockid_t', 'cpu_set_t', 'cpumask_t', 'dev_t', 'gid_t', 'id_t', 'ino_t', 'key_t',
         'mode_t', 'nfds_t', 'pid_t', 'rlim_t', 'sig_t', 'sighandler_t', 'siginfo_t',
-        'sigset_t', 'sigval_t', 'socklen_t', 'timer_t', 'uid_t'))
+        'sigset_t', 'sigval_t', 'socklen_t', 'timer_t', 'uid_t'}
 
     def __init__(self, **options):
         self.stdlibhighlighting = get_bool_opt(options, 'stdlibhighlighting', True)
index cc88dfb99a6f731e6bc193bc7b020c69c424f537..82dee35ad3a7450515b74048e0867d13020ac5d9 100644 (file)
@@ -291,23 +291,23 @@ class CudaLexer(CLexer):
     aliases = ['cuda', 'cu']
     mimetypes = ['text/x-cuda']
 
-    function_qualifiers = set(('__device__', '__global__', '__host__',
-                               '__noinline__', '__forceinline__'))
-    variable_qualifiers = set(('__device__', '__constant__', '__shared__',
-                               '__restrict__'))
-    vector_types = set(('char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3',
-                        'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2',
-                        'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1',
-                        'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1',
-                        'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4',
-                        'ulong4', 'longlong1', 'ulonglong1', 'longlong2',
-                        'ulonglong2', 'float1', 'float2', 'float3', 'float4',
-                        'double1', 'double2', 'dim3'))
-    variables = set(('gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize'))
-    functions = set(('__threadfence_block', '__threadfence', '__threadfence_system',
-                     '__syncthreads', '__syncthreads_count', '__syncthreads_and',
-                     '__syncthreads_or'))
-    execution_confs = set(('<<<', '>>>'))
+    function_qualifiers = {'__device__', '__global__', '__host__',
+                           '__noinline__', '__forceinline__'}
+    variable_qualifiers = {'__device__', '__constant__', '__shared__',
+                           '__restrict__'}
+    vector_types = {'char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3',
+                    'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2',
+                    'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1',
+                    'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1',
+                    'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4',
+                    'ulong4', 'longlong1', 'ulonglong1', 'longlong2',
+                    'ulonglong2', 'float1', 'float2', 'float3', 'float4',
+                    'double1', 'double2', 'dim3'}
+    variables = {'gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize'}
+    functions = {'__threadfence_block', '__threadfence', '__threadfence_system',
+                 '__syncthreads', '__syncthreads_count', '__syncthreads_and',
+                 '__syncthreads_or'}
+    execution_confs = {'<<<', '>>>'}
 
     def get_tokens_unprocessed(self, text):
         for index, token, value in CLexer.get_tokens_unprocessed(self, text):
@@ -352,7 +352,7 @@ class SwigLexer(CppLexer):
     }
 
     # This is a far from complete set of SWIG directives
-    swig_directives = set((
+    swig_directives = {
         # Most common directives
         '%apply', '%define', '%director', '%enddef', '%exception', '%extend',
         '%feature', '%fragment', '%ignore', '%immutable', '%import', '%include',
@@ -371,7 +371,7 @@ class SwigLexer(CppLexer):
         '%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall',
         '%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof',
         '%trackobjects', '%types', '%unrefobject', '%varargs', '%warn',
-        '%warnfilter'))
+        '%warnfilter'}
 
     def analyse_text(text):
         rv = 0
@@ -429,13 +429,13 @@ class ArduinoLexer(CppLexer):
     mimetypes = ['text/x-arduino']
 
     # Language sketch main structure functions
-    structure = set(('setup', 'loop'))
+    structure = {'setup', 'loop'}
 
     # Language operators
-    operators = set(('not', 'or', 'and', 'xor'))
+    operators = {'not', 'or', 'and', 'xor'}
 
     # Language 'variables'
-    variables = set((
+    variables = {
         'DIGITAL_MESSAGE', 'FIRMATA_STRING', 'ANALOG_MESSAGE', 'REPORT_DIGITAL',
         'REPORT_ANALOG', 'INPUT_PULLUP', 'SET_PIN_MODE', 'INTERNAL2V56', 'SYSTEM_RESET',
         'LED_BUILTIN', 'INTERNAL1V1', 'SYSEX_START', 'INTERNAL', 'EXTERNAL', 'HIGH',
@@ -452,10 +452,10 @@ class ArduinoLexer(CppLexer):
         'signed', 'inline', 'delete', '_Bool', 'complex', '_Complex', '_Imaginary',
         'atomic_bool', 'atomic_char', 'atomic_schar', 'atomic_uchar', 'atomic_short',
         'atomic_ushort', 'atomic_int', 'atomic_uint', 'atomic_long', 'atomic_ulong',
-        'atomic_llong', 'atomic_ullong', 'PROGMEM'))
+        'atomic_llong', 'atomic_ullong', 'PROGMEM'}
 
     # Language shipped functions and class ( )
-    functions = set((
+    functions = {
         'KeyboardController', 'MouseController', 'SoftwareSerial', 'EthernetServer',
         'EthernetClient', 'LiquidCrystal', 'RobotControl', 'GSMVoiceCall',
         'EthernetUDP', 'EsploraTFT', 'HttpClient', 'RobotMotor', 'WiFiClient',
@@ -517,13 +517,13 @@ class ArduinoLexer(CppLexer):
         'cos', 'sin', 'pow', 'map', 'abs', 'max', 'min', 'get', 'run', 'put',
         'isAlphaNumeric', 'isAlpha', 'isAscii', 'isWhitespace', 'isControl', 'isDigit',
         'isGraph', 'isLowerCase', 'isPrintable', 'isPunct', 'isSpace', 'isUpperCase',
-        'isHexadecimalDigit'))
+        'isHexadecimalDigit'}
 
     # do not highlight
-    suppress_highlight = set((
+    suppress_highlight = {
         'namespace', 'template', 'mutable', 'using', 'asm', 'typeid',
         'typename', 'this', 'alignof', 'constexpr', 'decltype', 'noexcept',
-        'static_assert', 'thread_local', 'restrict'))
+        'static_assert', 'thread_local', 'restrict'}
 
     def get_tokens_unprocessed(self, text):
         for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
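
The CUDA, SWIG, Arduino and FreeFem lexers all follow the same post-processing pattern: the word sets above are consulted while re-tagging the parent lexer's token stream. A minimal self-contained sketch of that pattern (hypothetical class name and word set, not the verbatim upstream method body)::

    from pygments import lex
    from pygments.lexers import CppLexer
    from pygments.token import Name


    class HighlightBuiltinsLexer(CppLexer):
        """Sketch of the shared pattern: re-tag Name tokens via word sets."""

        # Illustrative set only; the real subclasses define several such sets.
        functions = {'__syncthreads', 'digitalWrite'}

        def get_tokens_unprocessed(self, text):
            for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
                if token is Name and value in self.functions:
                    yield index, Name.Function, value
                else:
                    yield index, token, value


    for token, value in lex('__syncthreads();', HighlightBuiltinsLexer()):
        print(token, repr(value))
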
index 717246af00a13d48a36fd1bcd737aa4b4ea6b626..a18285af610445fa407178457e590556220c728b 100644 (file)
@@ -300,7 +300,7 @@ class ApacheConfLexer(RegexLexer):
     tokens = {
         'root': [
             (r'\s+', Text),
-            (r'(#.*?)$', Comment),
+            (r'#(.*\\\n)+.*$|(#.*?)$', Comment),
             (r'(<[^\s>]+)(?:(\s+)(.*))?(>)',
              bygroups(Name.Tag, Text, String, Name.Tag)),
             (r'([a-z]\w*)(\s+)',
@@ -319,7 +319,7 @@ class ApacheConfLexer(RegexLexer):
              r'os|productonly|full|emerg|alert|crit|error|warn|'
              r'notice|info|debug|registry|script|inetd|standalone|'
              r'user|group)\b', Keyword),
-            (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
+            (r'"([^"\\]*(?:\\(.|[\n])[^"\\]*)*)"', String.Double),
             (r'[^\s"\\]+', Text)
         ],
     }
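
Since the new ApacheConf patterns accept backslash line continuations in comments and double-quoted strings, a continued comment now stays a single Comment token. A quick, throwaway way to inspect the token stream::

    from pygments import lex
    from pygments.lexers import ApacheConfLexer

    conf = (
        "# A comment that is continued \\\n"
        "  onto a second line\n"
        "ServerName example.com\n"
    )
    for token, value in lex(conf, ApacheConfLexer()):
        print(token, repr(value))
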
@@ -540,14 +540,16 @@ class DockerLexer(RegexLexer):
     filenames = ['Dockerfile', '*.docker']
     mimetypes = ['text/x-dockerfile-config']
 
-    _keywords = (r'(?:FROM|MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)')
+    _keywords = (r'(?:MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)')
     _bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)')
-    _lb = r'(?:\s*\\?\s*)' # dockerfile line break regex
+    _lb = r'(?:\s*\\?\s*)'  # dockerfile line break regex
     flags = re.IGNORECASE | re.MULTILINE
 
     tokens = {
         'root': [
             (r'#.*', Comment),
+            (r'(FROM)([ \t]*)(\S*)([ \t]*)(?:(AS)([ \t]*)(\S*))?',
+             bygroups(Keyword, Text, String, Text, Keyword, Text, String)),
             (r'(ONBUILD)(%s)' % (_lb,), bygroups(Keyword, using(BashLexer))),
             (r'(HEALTHCHECK)((%s--\w+=\w+%s)*)' % (_lb, _lb),
                 bygroups(Keyword, using(BashLexer))),
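
With the dedicated FROM rule, multi-stage Dockerfiles get their own tokens for the base image, the AS keyword and the stage name. For example (TerminalFormatter is just a convenient choice here)::

    from pygments import highlight
    from pygments.lexers import DockerLexer
    from pygments.formatters import TerminalFormatter

    dockerfile = (
        "FROM python:3.8 AS builder\n"
        "RUN pip install pygments\n"
        "FROM python:3.8-slim\n"
        "COPY --from=builder /usr/local /usr/local\n"
    )
    print(highlight(dockerfile, DockerLexer(), TerminalFormatter()))
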
index 4f754d4524d48019e5fdedc6457a3b3812d21db0..c35bd94b0ee78d53f2078da01e39acda852de5fa 100644 (file)
@@ -35,6 +35,7 @@ class CsoundLexer(RegexLexer):
 
         'preprocessor directives': [
             (r'#(?:e(?:nd(?:if)?|lse)\b|##)|@@?[ \t]*\d+', Comment.Preproc),
+            (r'#includestr', Comment.Preproc, 'includestr directive'),
             (r'#include', Comment.Preproc, 'include directive'),
             (r'#[ \t]*define', Comment.Preproc, 'define directive'),
             (r'#(?:ifn?def|undef)\b', Comment.Preproc, 'macro directive')
@@ -44,6 +45,10 @@ class CsoundLexer(RegexLexer):
             include('whitespace'),
             (r'([^ \t]).*?\1', String, '#pop')
         ],
+        'includestr directive': [
+            include('whitespace'),
+            (r'"', String, ('#pop', 'quoted string'))
+        ],
 
         'define directive': [
             (r'\n', Text),
@@ -114,6 +119,13 @@ class CsoundLexer(RegexLexer):
             (r'\d+', Number.Integer)
         ],
 
+        'quoted string': [
+            (r'"', String, '#pop'),
+            (r'[^"$]+', String),
+            include('macro uses'),
+            (r'[$]', String)
+        ],
+
         'braced string': [
             # Do nothing. This must be defined in subclasses.
         ]
@@ -122,7 +134,7 @@ class CsoundLexer(RegexLexer):
 
 class CsoundScoreLexer(CsoundLexer):
     """
-    For `Csound <https://csound.github.io>`_ scores.
+    For `Csound <https://csound.com>`_ scores.
 
     .. versionadded:: 2.1
     """
@@ -144,7 +156,7 @@ class CsoundScoreLexer(CsoundLexer):
             (r'z', Keyword.Constant),
             # z is a constant equal to 800,000,000,000. 800 billion seconds is about
             # 25,367.8 years. See also
-            # https://csound.github.io/docs/manual/ScoreTop.html and
+            # https://csound.com/docs/manual/ScoreTop.html and
             # https://github.com/csound/csound/search?q=stof+path%3AEngine+filename%3Asread.c.
 
             (r'([nNpP][pP])(\d+)', bygroups(Keyword, Number.Integer)),
@@ -164,13 +176,6 @@ class CsoundScoreLexer(CsoundLexer):
             (r'\n', Text, '#pop')
         ],
 
-        'quoted string': [
-            (r'"', String, '#pop'),
-            (r'[^"$]+', String),
-            include('macro uses'),
-            (r'[$]', String)
-        ],
-
         'loop after left brace': [
             include('whitespace and macro uses'),
             (r'\d+', Number.Integer, ('#pop', 'loop after repeat count')),
@@ -195,7 +200,7 @@ class CsoundScoreLexer(CsoundLexer):
 
 class CsoundOrchestraLexer(CsoundLexer):
     """
-    For `Csound <https://csound.github.io>`_ orchestras.
+    For `Csound <https://csound.com>`_ orchestras.
 
     .. versionadded:: 2.1
     """
@@ -212,28 +217,25 @@ class CsoundOrchestraLexer(CsoundLexer):
         yield match.start(), Name.Function, opcode
 
     def name_callback(lexer, match):
+        type_annotation_token = Keyword.Type
+
         name = match.group(1)
         if name in OPCODES or name in DEPRECATED_OPCODES:
             yield match.start(), Name.Builtin, name
-            if match.group(2):
-                yield match.start(2), Punctuation, match.group(2)
-                yield match.start(3), Keyword.Type, match.group(3)
         elif name in lexer.user_defined_opcodes:
             yield match.start(), Name.Function, name
         else:
-            nameMatch = re.search(r'^(g?[afikSw])(\w+)', name)
-            if nameMatch:
-                yield nameMatch.start(1), Keyword.Type, nameMatch.group(1)
-                yield nameMatch.start(2), Name, nameMatch.group(2)
+            type_annotation_token = Name
+            name_match = re.search(r'^(g?[afikSw])(\w+)', name)
+            if name_match:
+                yield name_match.start(1), Keyword.Type, name_match.group(1)
+                yield name_match.start(2), Name, name_match.group(2)
             else:
                 yield match.start(), Name, name
 
-            # If there's a trailing :V, for example, we want to keep this around
-            # and emit it as well, otherwise this lexer will not pass round-trip
-            # testing
-            if match.group(2):
-                yield match.start(2), Punctuation, match.group(2)
-                yield match.start(3), Name, match.group(3)
+        if match.group(2):
+            yield match.start(2), Punctuation, match.group(2)
+            yield match.start(3), type_annotation_token, match.group(3)
 
     tokens = {
         'root': [
@@ -328,13 +330,13 @@ class CsoundOrchestraLexer(CsoundLexer):
             (r'\\(?:[\\abnrt"]|[0-7]{1,3})', String.Escape)
         ],
         # Format specifiers are highlighted in all strings, even though only
-        #   fprintks        https://csound.github.io/docs/manual/fprintks.html
-        #   fprints         https://csound.github.io/docs/manual/fprints.html
-        #   printf/printf_i https://csound.github.io/docs/manual/printf.html
-        #   printks         https://csound.github.io/docs/manual/printks.html
-        #   prints          https://csound.github.io/docs/manual/prints.html
-        #   sprintf         https://csound.github.io/docs/manual/sprintf.html
-        #   sprintfk        https://csound.github.io/docs/manual/sprintfk.html
+        #   fprintks        https://csound.com/docs/manual/fprintks.html
+        #   fprints         https://csound.com/docs/manual/fprints.html
+        #   printf/printf_i https://csound.com/docs/manual/printf.html
+        #   printks         https://csound.com/docs/manual/printks.html
+        #   prints          https://csound.com/docs/manual/prints.html
+        #   sprintf         https://csound.com/docs/manual/sprintf.html
+        #   sprintfk        https://csound.com/docs/manual/sprintfk.html
         # work with strings that contain format specifiers. In addition, these
         # opcodes’ handling of format specifiers is inconsistent:
         #   - fprintks, fprints, printks, and prints do accept %a and %A
@@ -371,6 +373,7 @@ class CsoundOrchestraLexer(CsoundLexer):
 
         'Csound score opcode': [
             include('whitespace and macro uses'),
+            (r'"', String, 'quoted string'),
             (r'\{\{', String, 'Csound score'),
             (r'\n', Text, '#pop')
         ],
@@ -381,6 +384,7 @@ class CsoundOrchestraLexer(CsoundLexer):
 
         'Python opcode': [
             include('whitespace and macro uses'),
+            (r'"', String, 'quoted string'),
             (r'\{\{', String, 'Python'),
             (r'\n', Text, '#pop')
         ],
@@ -391,6 +395,7 @@ class CsoundOrchestraLexer(CsoundLexer):
 
         'Lua opcode': [
             include('whitespace and macro uses'),
+            (r'"', String, 'quoted string'),
             (r'\{\{', String, 'Lua'),
             (r'\n', Text, '#pop')
         ],
@@ -403,7 +408,7 @@ class CsoundOrchestraLexer(CsoundLexer):
 
 class CsoundDocumentLexer(RegexLexer):
     """
-    For `Csound <https://csound.github.io>`_ documents.
+    For `Csound <https://csound.com>`_ documents.
 
     .. versionadded:: 2.1
     """
index b3253542f0f61b2d595cecca8d7b02a0c95368c9..46ca734006834eeb5eb1541a165cc06651ac41c5 100644 (file)
@@ -233,7 +233,7 @@ class YamlLexer(ExtendedRegexLexer):
             # whitespaces separating tokens
             (r'[ ]+', Text),
             # key with colon
-            (r'''([^,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''',
+            (r'''([^#,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''',
              bygroups(Name.Tag, set_indent(Punctuation, implicit=True))),
             # tags, anchors and aliases,
             include('descriptors'),
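
The tightened key pattern keeps ``#`` out of the ``Name.Tag`` span, presumably so that comment text containing a colon is no longer picked up as a mapping key. A throwaway check::

    from pygments import lex
    from pygments.lexers import YamlLexer

    doc = "build: yes  # note: this trailing part is a comment\n"
    for token, value in lex(doc, YamlLexer()):
        print(token, repr(value))
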
index 38a805e01151083105624f718d7892ead5860fec..0af3c6c2730ceb90af50b4ea8358eb293d32282d 100644 (file)
@@ -16,7 +16,7 @@ from pygments.lexer import ExtendedRegexLexer, RegexLexer, bygroups, words, \
 from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
     Number, Punctuation, Literal, Whitespace
 
-__all__ = ['ProtoBufLexer', 'BroLexer', 'PuppetLexer', 'RslLexer',
+__all__ = ['ProtoBufLexer', 'ZeekLexer', 'PuppetLexer', 'RslLexer',
            'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer',
            'CrmshLexer', 'ThriftLexer', 'FlatlineLexer', 'SnowballLexer']
 
@@ -40,9 +40,9 @@ class ProtoBufLexer(RegexLexer):
             (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
             (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
             (words((
-                'import', 'option', 'optional', 'required', 'repeated', 'default',
-                'packed', 'ctype', 'extensions', 'to', 'max', 'rpc', 'returns',
-                'oneof'), prefix=r'\b', suffix=r'\b'),
+                'import', 'option', 'optional', 'required', 'repeated',
+                'reserved', 'default', 'packed', 'ctype', 'extensions', 'to',
+                'max', 'rpc', 'returns', 'oneof'), prefix=r'\b', suffix=r'\b'),
              Keyword),
             (words((
                 'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64',
@@ -188,84 +188,166 @@ class ThriftLexer(RegexLexer):
     }
 
 
-class BroLexer(RegexLexer):
+class ZeekLexer(RegexLexer):
     """
-    For `Bro <http://bro-ids.org/>`_ scripts.
+    For `Zeek <https://www.zeek.org/>`_ scripts.
 
-    .. versionadded:: 1.5
+    .. versionadded:: 2.5
     """
-    name = 'Bro'
-    aliases = ['bro']
-    filenames = ['*.bro']
+    name = 'Zeek'
+    aliases = ['zeek', 'bro']
+    filenames = ['*.zeek', '*.bro']
 
-    _hex = r'[0-9a-fA-F_]'
+    _hex = r'[0-9a-fA-F]'
     _float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
     _h = r'[A-Za-z0-9][-A-Za-z0-9]*'
 
     tokens = {
         'root': [
-            # Whitespace
-            (r'^@.*?\n', Comment.Preproc),
-            (r'#.*?\n', Comment.Single),
+            include('whitespace'),
+            include('comments'),
+            include('directives'),
+            include('attributes'),
+            include('types'),
+            include('keywords'),
+            include('literals'),
+            include('operators'),
+            include('punctuation'),
+            (r'((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(?=\s*\()',
+                Name.Function),
+            include('identifiers'),
+        ],
+
+        'whitespace': [
             (r'\n', Text),
             (r'\s+', Text),
             (r'\\\n', Text),
-            # Keywords
-            (r'(add|alarm|break|case|const|continue|delete|do|else|enum|event'
-             r'|export|for|function|if|global|hook|local|module|next'
-             r'|of|print|redef|return|schedule|switch|type|when|while)\b', Keyword),
-            (r'(addr|any|bool|count|counter|double|file|int|interval|net'
-             r'|pattern|port|record|set|string|subnet|table|time|timer'
-             r'|vector)\b', Keyword.Type),
+        ],
+
+        'comments': [
+            (r'#.*$', Comment),
+        ],
+
+        'directives': [
+            (r'@(load-plugin|load-sigs|load|unload)\b.*$', Comment.Preproc),
+            (r'@(DEBUG|DIR|FILENAME|deprecated|if|ifdef|ifndef|else|endif)\b', Comment.Preproc),
+            (r'(@prefixes)\s*(\+?=).*$', Comment.Preproc),
+        ],
+
+        'attributes': [
+            (words(('redef', 'priority', 'log', 'optional', 'default', 'add_func',
+                    'delete_func', 'expire_func', 'read_expire', 'write_expire',
+                    'create_expire', 'synchronized', 'persistent', 'rotate_interval',
+                    'rotate_size', 'encrypt', 'raw_output', 'mergeable', 'error_handler',
+                    'type_column', 'deprecated'),
+                prefix=r'&', suffix=r'\b'),
+             Keyword.Pseudo),
+        ],
+
+        'types': [
+            (words(('any',
+                    'enum', 'record', 'set', 'table', 'vector',
+                    'function', 'hook', 'event',
+                    'addr', 'bool', 'count', 'double', 'file', 'int', 'interval',
+                    'pattern', 'port', 'string', 'subnet', 'time'),
+                suffix=r'\b'),
+             Keyword.Type),
+
+            (r'(opaque)(\s+)(of)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)\b',
+                bygroups(Keyword.Type, Text, Operator.Word, Text, Keyword.Type)),
+
+            (r'(type)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(\s*)(:)(\s*)\b(record|enum)\b',
+                bygroups(Keyword, Text, Name.Class, Text, Operator, Text, Keyword.Type)),
+
+            (r'(type)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(\s*)(:)',
+                bygroups(Keyword, Text, Name, Text, Operator)),
+
+            (r'(redef)(\s+)(record|enum)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)\b',
+                bygroups(Keyword, Text, Keyword.Type, Text, Name.Class)),
+        ],
+
+        'keywords': [
+            (words(('redef', 'export', 'if', 'else', 'for', 'while',
+                    'return', 'break', 'next', 'continue', 'fallthrough',
+                    'switch', 'default', 'case',
+                    'add', 'delete',
+                    'when', 'timeout', 'schedule'),
+                suffix=r'\b'),
+             Keyword),
+            (r'(print)\b', Keyword),
+            (r'(global|local|const|option)\b', Keyword.Declaration),
+            (r'(module)(\s+)(([A-Za-z_]\w*)(?:::([A-Za-z_]\w*))*)\b',
+                bygroups(Keyword.Namespace, Text, Name.Namespace)),
+        ],
+
+        'literals': [
+            (r'"', String, 'string'),
+
+            # Not the greatest match for patterns, but generally helps
+            # disambiguate between start of a pattern and just a division
+            # operator.
+            (r'/(?=.*/)', String.Regex, 'regex'),
+
             (r'(T|F)\b', Keyword.Constant),
-            (r'(&)((?:add|delete|expire)_func|attr|(?:create|read|write)_expire'
-             r'|default|disable_print_hook|raw_output|encrypt|group|log'
-             r'|mergeable|optional|persistent|priority|redef'
-             r'|rotate_(?:interval|size)|synchronized)\b',
-             bygroups(Punctuation, Keyword)),
-            (r'\s+module\b', Keyword.Namespace),
-            # Addresses, ports and networks
-            (r'\d+/(tcp|udp|icmp|unknown)\b', Number),
-            (r'(\d+\.){3}\d+', Number),
-            (r'(' + _hex + r'){7}' + _hex, Number),
-            (r'0x' + _hex + r'(' + _hex + r'|:)*::(' + _hex + r'|:)*', Number),
-            (r'((\d+|:)(' + _hex + r'|:)*)?::(' + _hex + r'|:)*', Number),
-            (r'(\d+\.\d+\.|(\d+\.){2}\d+)', Number),
+
+            # Port
+            (r'\d{1,5}/(udp|tcp|icmp|unknown)\b', Number),
+
+            # IPv4 Address
+            (r'(\d{1,3}.){3}(\d{1,3})\b', Number),
+
+            # IPv6 Address
+            (r'\[([0-9a-fA-F]{0,4}:){2,7}([0-9a-fA-F]{0,4})?((\d{1,3}.){3}(\d{1,3}))?\]', Number),
+
+            # Numeric
+            (r'0[xX]' + _hex + r'+\b', Number.Hex),
+            (_float + r'\s*(day|hr|min|sec|msec|usec)s?\b', Number.Float),
+            (_float + r'\b', Number.Float),
+            (r'(\d+)\b', Number.Integer),
+
             # Hostnames
             (_h + r'(\.' + _h + r')+', String),
-            # Numeric
-            (_float + r'\s+(day|hr|min|sec|msec|usec)s?\b', Literal.Date),
-            (r'0[xX]' + _hex, Number.Hex),
-            (_float, Number.Float),
-            (r'\d+', Number.Integer),
-            (r'/', String.Regex, 'regex'),
-            (r'"', String, 'string'),
-            # Operators
-            (r'[!%*/+:<=>?~|-]', Operator),
+        ],
+
+        'operators': [
+            (r'[!%*/+<=>~|&^-]', Operator),
             (r'([-+=&|]{2}|[+=!><-]=)', Operator),
-            (r'(in|match)\b', Operator.Word),
-            (r'[{}()\[\]$.,;]', Punctuation),
-            # Identfier
-            (r'([_a-zA-Z]\w*)(::)', bygroups(Name, Name.Namespace)),
+            (r'(in|as|is|of)\b', Operator.Word),
+            (r'\??\$', Operator),
+        ],
+
+        'punctuation': [
+            (r'[{}()\[\],;.]', Punctuation),
+            # The "ternary if", which uses '?' and ':', could instead be
+            # treated as an Operator, but colons are more frequently used to
+            # separate field/identifier names from their types, so the (often)
+            # less-prominent Punctuation is used even with '?' for consistency.
+            (r'[?:]', Punctuation),
+        ],
+
+        'identifiers': [
+            (r'([a-zA-Z_]\w*)(::)', bygroups(Name, Punctuation)),
             (r'[a-zA-Z_]\w*', Name)
         ],
+
         'string': [
+            (r'\\.', String.Escape),
+            (r'%-?[0-9]*(\.[0-9]+)?[DTdxsefg]', String.Escape),
             (r'"', String, '#pop'),
-            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
-            (r'[^\\"\n]+', String),
-            (r'\\\n', String),
-            (r'\\', String)
+            (r'.', String),
         ],
+
         'regex': [
+            (r'\\.', String.Escape),
             (r'/', String.Regex, '#pop'),
-            (r'\\[\\nt/]', String.Regex),  # String.Escape is too intense here.
-            (r'[^\\/\n]+', String.Regex),
-            (r'\\\n', String.Regex),
-            (r'\\', String.Regex)
-        ]
+            (r'.', String.Regex),
+        ],
     }
 
 
+BroLexer = ZeekLexer
+
+
 class PuppetLexer(RegexLexer):
     """
     For `Puppet <http://puppetlabs.com/>`__ configuration DSL.
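
Because the rename keeps ``bro`` in the alias list and binds ``BroLexer = ZeekLexer`` at module level, existing lookups and imports should keep resolving::

    from pygments.lexers.dsls import BroLexer, ZeekLexer
    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

    print(BroLexer is ZeekLexer)                                 # True -- plain alias
    print(type(get_lexer_by_name('bro')).__name__)               # expected: ZeekLexer
    print(type(get_lexer_for_filename('local.zeek')).__name__)   # expected: ZeekLexer
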
index b9a13e270b3cab27a8b1d654f8845d84948e033d..dd972bf4d6e7b5affe2b90fe87f055f7f0366f45 100644 (file)
@@ -32,27 +32,27 @@ class DylanLexer(RegexLexer):
 
     flags = re.IGNORECASE
 
-    builtins = set((
+    builtins = {
         'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
         'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
         'each-subclass', 'exception', 'exclude', 'function', 'generic',
         'handler', 'inherited', 'inline', 'inline-only', 'instance',
         'interface', 'import', 'keyword', 'library', 'macro', 'method',
         'module', 'open', 'primary', 'required', 'sealed', 'sideways',
-        'singleton', 'slot', 'thread', 'variable', 'virtual'))
+        'singleton', 'slot', 'thread', 'variable', 'virtual'}
 
-    keywords = set((
+    keywords = {
         'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
         'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
         'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
         'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
-        'while'))
+        'while'}
 
-    operators = set((
+    operators = {
         '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
-        '>', '>=', '&', '|'))
+        '>', '>=', '&', '|'}
 
-    functions = set((
+    functions = {
         'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
         'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
         'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
@@ -86,7 +86,7 @@ class DylanLexer(RegexLexer):
         'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
         'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
         'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
-        'vector', 'zero?'))
+        'vector', 'zero?'}
 
     valid_name = '\\\\?[\\w!&*<>|^$%@\\-+~?/=]+'
 
index 86dafc7c3ed0c49e6a4f6580ba8eb2d65436399a..ee941d7dbbb99def0ba2e027a2a5a0d97397929b 100644 (file)
@@ -77,7 +77,7 @@ class ElmLexer(RegexLexer):
             (words((builtinOps), prefix=r'\(', suffix=r'\)'), Name.Function),
 
             # Infix Operators
-            (words((builtinOps)), Name.Function),
+            (words(builtinOps), Name.Function),
 
             # Numbers
             include('numbers'),
diff --git a/pygments/lexers/email.py b/pygments/lexers/email.py
new file mode 100644 (file)
index 0000000..5ad225b
--- /dev/null
@@ -0,0 +1,154 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.lexers.email
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for raw E-mail.
+
+    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, DelegatingLexer, bygroups
+from pygments.lexers.mime import MIMELexer
+from pygments.token import Text, Keyword, Name, String, Number, Comment
+from pygments.util import get_bool_opt
+
+__all__ = ["EmailLexer"]
+
+
+class EmailHeaderLexer(RegexLexer):
+    """
+    Sub-lexer for raw E-mail. This lexer only processes the header part of an e-mail.
+
+    .. versionadded:: 2.5
+    """
+
+    def __init__(self, **options):
+        super(EmailHeaderLexer, self).__init__(**options)
+        self.highlight_x = get_bool_opt(options, "highlight-X-header", False)
+
+    def get_x_header_tokens(self, match):
+        if self.highlight_x:
+            # field
+            yield match.start(1), Name.Tag, match.group(1)
+
+            # content
+            default_actions = self.get_tokens_unprocessed(
+                match.group(2), stack=("root", "header"))
+            for item in default_actions:
+                yield item
+        else:
+            # lowlight
+            yield match.start(1), Comment.Special, match.group(1)
+            yield match.start(2), Comment.Multiline, match.group(2)
+
+    tokens = {
+        "root": [
+            (r"^(?:[A-WYZ]|X400)[\w\-]*:", Name.Tag, "header"),
+            (r"^(X-(?:\w[\w\-]*:))([\s\S]*?\n)(?![ \t])", get_x_header_tokens),
+        ],
+        "header": [
+            # folding
+            (r"\n[ \t]", Text.Whitespace),
+            (r"\n(?![ \t])", Text.Whitespace, "#pop"),
+
+            # keywords
+            (r"\bE?SMTPS?\b", Keyword),
+            (r"\b(?:HE|EH)LO\b", Keyword),
+
+            # mailbox
+            (r"[\w\.\-\+=]+@[\w\.\-]+", Name.Label),
+            (r"<[\w\.\-\+=]+@[\w\.\-]+>", Name.Label),
+
+            # domain
+            (r"\b(\w[\w\.-]*\.[\w\.-]*\w[a-zA-Z]+)\b", Name.Function),
+
+            # IPv4
+            (
+                r"(?<=\b)(?:(?:25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)\.){3}(?:25[0"
+                r"-5]|2[0-4][0-9]|1?[0-9][0-9]?)(?=\b)",
+                Number.Integer,
+            ),
+
+            # IPv6
+            (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,7}:(?!\b)", Number.Hex),
+            (r"(?<=\b):((:[0-9a-fA-F]{1,4}){1,7}|:)(?=\b)", Number.Hex),
+            (r"(?<=\b)([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}(?=\b)", Number.Hex),
+            (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}(?=\b)", Number.Hex),
+            (r"(?<=\b)[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})(?=\b)", Number.Hex),
+            (r"(?<=\b)fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}(?=\b)", Number.Hex),
+            (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}(?=\b)", Number.Hex),
+            (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}(?=\b)",
+             Number.Hex),
+            (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}(?=\b)",
+             Number.Hex),
+            (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}(?=\b)",
+             Number.Hex),
+            (
+                r"(?<=\b)::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}"
+                r"[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}"
+                r"[0-9])(?=\b)",
+                Number.Hex,
+            ),
+            (
+                r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-"
+                r"9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-"
+                r"9])(?=\b)",
+                Number.Hex,
+            ),
+
+            # Date time
+            (
+                r"(?:(Sun|Mon|Tue|Wed|Thu|Fri|Sat),\s+)?(0[1-9]|[1-2]?[0-9]|3["
+                r"01])\s+(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s+("
+                r"19[0-9]{2}|[2-9][0-9]{3})\s+(2[0-3]|[0-1][0-9]):([0-5][0-9])"
+                r"(?::(60|[0-5][0-9]))?(?:\.\d{1,5})?\s+([-\+][0-9]{2}[0-5][0-"
+                r"9]|\(?(?:UTC?|GMT|(?:E|C|M|P)(?:ST|ET|DT)|[A-IK-Z])\)?)",
+                Name.Decorator,
+            ),
+
+            # RFC-2047 encoded string
+            (
+                r"(=\?)([\w-]+)(\?)([BbQq])(\?)([\[\w!\"#$%&\'()*+,-./:;<=>@[\\"
+                r"\]^_`{|}~]+)(\?=)",
+                bygroups(
+                    String.Affix,
+                    Name.Constant,
+                    String.Affix,
+                    Keyword.Constant,
+                    String.Affix,
+                    Number.Hex,
+                    String.Affix
+                )
+            ),
+
+            # others
+            (r'[\s]+', Text.Whitespace),
+            (r'[\S]', Text),
+        ],
+    }
+
+
+class EmailLexer(DelegatingLexer):
+    """
+    Lexer for raw E-mail.
+
+    Additional options accepted:
+
+    `highlight-X-header`
+        Highlight the fields of user-defined ``X-`` email headers. (default:
+        ``False``).
+
+    .. versionadded:: 2.5
+    """
+
+    name = "E-mail"
+    aliases = ["email", "eml"]
+    filenames = ["*.eml"]
+    mimetypes = ["message/rfc822"]
+
+    def __init__(self, **options):
+        super(EmailLexer, self).__init__(
+            EmailHeaderLexer, MIMELexer, Comment, **options
+        )
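
A short usage sketch for the new lexer; because the option name contains hyphens, it has to be passed through a dict rather than as a keyword argument::

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.email import EmailLexer

    message = (
        "From: alice@example.com\n"
        "To: bob@example.com\n"
        "X-Mailer: demo 1.0\n"
        "Subject: hello\n"
        "\n"
        "Body text.\n"
    )
    # Without the option, X- headers are low-lighted as comments.
    lexer = EmailLexer(**{"highlight-X-header": True})
    print(highlight(message, lexer, TerminalFormatter()))
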
index 3d9b0fd7c4619e7787618a7660ddebd7beffb86f..07a46c80ff0700851e008aaaa40013865c0c4414 100644 (file)
@@ -163,7 +163,7 @@ class ErlangShellLexer(Lexer):
     filenames = ['*.erl-sh']
     mimetypes = ['text/x-erl-shellsession']
 
-    _prompt_re = re.compile(r'\d+>(?=\s|\Z)')
+    _prompt_re = re.compile(r'(?:\([\w@_.]+\))?\d+>(?=\s|\Z)')
 
     def get_tokens_unprocessed(self, text):
         erlexer = ErlangLexer(**self.options)
@@ -495,7 +495,7 @@ class ElixirConsoleLexer(Lexer):
     aliases = ['iex']
     mimetypes = ['text/x-elixir-shellsession']
 
-    _prompt_re = re.compile(r'(iex|\.{3})(\(\d+\))?> ')
+    _prompt_re = re.compile(r'(iex|\.{3})((?:\([\w@_.]+\))?\d+|\(\d+\))?> ')
 
     def get_tokens_unprocessed(self, text):
         exlexer = ElixirLexer(**self.options)
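
Both console lexers now accept node-qualified prompts such as ``(demo@localhost)1>`` (as produced by, e.g., ``erl -sname demo``). A quick smoke test for the Erlang side::

    from pygments import lex
    from pygments.lexers import ErlangShellLexer

    session = (
        "(demo@localhost)1> lists:seq(1, 3).\n"
        "[1,2,3]\n"
    )
    for token, value in lex(session, ErlangShellLexer()):
        print(token, repr(value))
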
index c43b285df364f375cad3e106e9c5f51cb61fe823..3e9ac8e840602a658743ba31518fc658c063b213 100644 (file)
@@ -36,27 +36,27 @@ class FreeFemLexer(CppLexer):
     mimetypes = ['text/x-freefem']
 
     # Language operators
-    operators = set(('+', '-', '*', '.*', '/', './', '%', '^', '^-1', ':', '\''))
+    operators = {'+', '-', '*', '.*', '/', './', '%', '^', '^-1', ':', '\''}
 
     # types
-    types = set(('bool', 'border', 'complex', 'dmatrix', 'fespace', 'func', 'gslspline',
-                 'ifstream', 'int', 'macro', 'matrix', 'mesh', 'mesh3', 'mpiComm',
-                 'mpiGroup', 'mpiRequest', 'NewMacro', 'EndMacro', 'ofstream', 'Pmmap',
-                 'problem', 'Psemaphore', 'real', 'solve', 'string', 'varf'))
+    types = {'bool', 'border', 'complex', 'dmatrix', 'fespace', 'func', 'gslspline',
+             'ifstream', 'int', 'macro', 'matrix', 'mesh', 'mesh3', 'mpiComm',
+             'mpiGroup', 'mpiRequest', 'NewMacro', 'EndMacro', 'ofstream', 'Pmmap',
+             'problem', 'Psemaphore', 'real', 'solve', 'string', 'varf'}
 
     # finite element spaces
-    fespaces = set(('BDM1', 'BDM1Ortho', 'Edge03d', 'Edge13d', 'Edge23d', 'FEQF', 'HCT',
-                    'P0', 'P03d', 'P0Edge', 'P1', 'P13d', 'P1b', 'P1b3d', 'P1bl', 'P1bl3d',
-                    'P1dc', 'P1Edge', 'P1nc', 'P2', 'P23d', 'P2b', 'P2BR', 'P2dc', 'P2Edge',
-                    'P2h', 'P2Morley', 'P2pnc', 'P3', 'P3dc', 'P3Edge', 'P4', 'P4dc',
-                    'P4Edge', 'P5Edge', 'RT0', 'RT03d', 'RT0Ortho', 'RT1', 'RT1Ortho',
-                    'RT2', 'RT2Ortho'))
+    fespaces = {'BDM1', 'BDM1Ortho', 'Edge03d', 'Edge13d', 'Edge23d', 'FEQF', 'HCT',
+                'P0', 'P03d', 'P0Edge', 'P1', 'P13d', 'P1b', 'P1b3d', 'P1bl', 'P1bl3d',
+                'P1dc', 'P1Edge', 'P1nc', 'P2', 'P23d', 'P2b', 'P2BR', 'P2dc', 'P2Edge',
+                'P2h', 'P2Morley', 'P2pnc', 'P3', 'P3dc', 'P3Edge', 'P4', 'P4dc',
+                'P4Edge', 'P5Edge', 'RT0', 'RT03d', 'RT0Ortho', 'RT1', 'RT1Ortho',
+                'RT2', 'RT2Ortho'}
 
     # preprocessor
-    preprocessor = set(('ENDIFMACRO', 'include', 'IFMACRO', 'load'))
+    preprocessor = {'ENDIFMACRO', 'include', 'IFMACRO', 'load'}
 
     # Language keywords
-    keywords = set((
+    keywords = {
                 'adj',
                 'append',
                 'area',
@@ -169,10 +169,10 @@ class FreeFemLexer(CppLexer):
                 'x',
                 'y',
                 'z'
-    ))
+    }
 
     # Functions and classes shipped with the language
-    functions = set((
+    functions = {
                 'abs',
                 'acos',
                 'acosh',
@@ -702,10 +702,10 @@ class FreeFemLexer(CppLexer):
                 'y0',
                 'y1',
                 'yn'
-    ))
+    }
 
     # function parameters
-    parameters = set((
+    parameters = {
                 'A',
                 'A1',
                 'abserror',
@@ -849,13 +849,13 @@ class FreeFemLexer(CppLexer):
                 'WindowIndex',
                 'which',
                 'zbound'
-    ))
+    }
 
     # deprecated
-    deprecated = set(('fixeborder',))
+    deprecated = {'fixeborder'}
 
     # do not highlight
-    suppress_highlight = set((
+    suppress_highlight = {
                 'alignof',
                 'asm',
                 'constexpr',
@@ -874,7 +874,7 @@ class FreeFemLexer(CppLexer):
                 'typeid',
                 'typename',
                 'using'
-    ))
+    }
 
     def get_tokens_unprocessed(self, text):
         for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
index d9eecaafdc45e41b6db839115d1552fe6c1ebd6b..0c0917e7212a4d25639cf58be198dd3d2ddc8fa7 100644 (file)
@@ -325,10 +325,10 @@ class AgdaLexer(RegexLexer):
             #  Identifiers
             (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
             (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'),
-            (r'\b(Set|Prop)\b', Keyword.Type),
+            (u'\\b(Set|Prop)[\u2080-\u2089]*\\b', Keyword.Type),
             #  Special Symbols
             (r'(\(|\)|\{|\})', Operator),
-            (u'(\\.{1,3}|\\||\u039B|\u2200|\u2192|:|=|->)', Operator.Word),
+            (u'(\\.{1,3}|\\||\u03BB|\u2200|\u2192|:|=|->)', Operator.Word),
             #  Numbers
             (r'\d+[eE][+-]?\d+', Number.Float),
             (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
@@ -481,10 +481,10 @@ class CryptolLexer(RegexLexer):
         ],
     }
 
-    EXTRA_KEYWORDS = set(('join', 'split', 'reverse', 'transpose', 'width',
-                          'length', 'tail', '<<', '>>', '<<<', '>>>', 'const',
-                          'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error',
-                          'trace'))
+    EXTRA_KEYWORDS = {'join', 'split', 'reverse', 'transpose', 'width',
+                      'length', 'tail', '<<', '>>', '<<<', '>>>', 'const',
+                      'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error',
+                      'trace'}
 
     def get_tokens_unprocessed(self, text):
         stack = ['root']
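
The Agda hunk lets ``Set``/``Prop`` carry universe levels written with subscript digits and corrects the lambda to lowercase U+03BB. For instance::

    # -*- coding: utf-8 -*-
    from pygments import lex
    from pygments.lexers import AgdaLexer

    src = u'postulate A : Set₁\nid = λ x → x\n'
    for token, value in lex(src, AgdaLexer()):
        print(token, repr(value))
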
index 38578e5b88ddf285ade00dea505ddaedf3966fb1..b3575080d3916e2815dd1e1afecfa7b15db3a886 100644 (file)
@@ -79,7 +79,7 @@ class HaxeLexer(ExtendedRegexLexer):
         if proc in ['error']:
             ctx.stack.append('preproc-error')
 
-        yield match.start(), Comment.Preproc, '#' + proc
+        yield match.start(), Comment.Preproc, u'#' + proc
         ctx.pos = match.end()
 
     tokens = {
index e7c7617b0b240b3c0b0fe5237f393706edc0b73b..b45654ebc81101858bae453bd97e4058c142256f 100644 (file)
@@ -131,15 +131,6 @@ class VerilogLexer(RegexLexer):
         ]
     }
 
-    def get_tokens_unprocessed(self, text):
-        for index, token, value in \
-                RegexLexer.get_tokens_unprocessed(self, text):
-            # Convention: mark all upper case names as constants
-            if token is Name:
-                if value.isupper():
-                    token = Name.Constant
-            yield index, token, value
-
 
 class SystemVerilogLexer(RegexLexer):
     """
@@ -184,63 +175,75 @@ class SystemVerilogLexer(RegexLexer):
             (r'`[a-zA-Z_]\w*', Name.Constant),
 
             (words((
-                'accept_on', 'alias', 'always', 'always_comb', 'always_ff', 'always_latch',
-                'and', 'assert', 'assign', 'assume', 'automatic', 'before', 'begin', 'bind', 'bins',
-                'binsof', 'bit', 'break', 'buf', 'bufif0', 'bufif1', 'byte', 'case', 'casex', 'casez',
-                'cell', 'chandle', 'checker', 'class', 'clocking', 'cmos', 'config', 'const', 'constraint',
-                'context', 'continue', 'cover', 'covergroup', 'coverpoint', 'cross', 'deassign',
-                'default', 'defparam', 'design', 'disable', 'dist', 'do', 'edge', 'else', 'end', 'endcase',
-                'endchecker', 'endclass', 'endclocking', 'endconfig', 'endfunction', 'endgenerate',
-                'endgroup', 'endinterface', 'endmodule', 'endpackage', 'endprimitive',
-                'endprogram', 'endproperty', 'endsequence', 'endspecify', 'endtable',
-                'endtask', 'enum', 'event', 'eventually', 'expect', 'export', 'extends', 'extern',
-                'final', 'first_match', 'for', 'force', 'foreach', 'forever', 'fork', 'forkjoin',
-                'function', 'generate', 'genvar', 'global', 'highz0', 'highz1', 'if', 'iff', 'ifnone',
-                'ignore_bins', 'illegal_bins', 'implies', 'import', 'incdir', 'include',
-                'initial', 'inout', 'input', 'inside', 'instance', 'int', 'integer', 'interface',
-                'intersect', 'join', 'join_any', 'join_none', 'large', 'let', 'liblist', 'library',
-                'local', 'localparam', 'logic', 'longint', 'macromodule', 'matches', 'medium',
-                'modport', 'module', 'nand', 'negedge', 'new', 'nexttime', 'nmos', 'nor', 'noshowcancelled',
-                'not', 'notif0', 'notif1', 'null', 'or', 'output', 'package', 'packed', 'parameter',
-                'pmos', 'posedge', 'primitive', 'priority', 'program', 'property', 'protected',
-                'pull0', 'pull1', 'pulldown', 'pullup', 'pulsestyle_ondetect', 'pulsestyle_onevent',
-                'pure', 'rand', 'randc', 'randcase', 'randsequence', 'rcmos', 'real', 'realtime',
-                'ref', 'reg', 'reject_on', 'release', 'repeat', 'restrict', 'return', 'rnmos',
-                'rpmos', 'rtran', 'rtranif0', 'rtranif1', 's_always', 's_eventually', 's_nexttime',
-                's_until', 's_until_with', 'scalared', 'sequence', 'shortint', 'shortreal',
-                'showcancelled', 'signed', 'small', 'solve', 'specify', 'specparam', 'static',
-                'string', 'strong', 'strong0', 'strong1', 'struct', 'super', 'supply0', 'supply1',
-                'sync_accept_on', 'sync_reject_on', 'table', 'tagged', 'task', 'this', 'throughout',
-                'time', 'timeprecision', 'timeunit', 'tran', 'tranif0', 'tranif1', 'tri', 'tri0',
-                'tri1', 'triand', 'trior', 'trireg', 'type', 'typedef', 'union', 'unique', 'unique0',
-                'unsigned', 'until', 'until_with', 'untyped', 'use', 'uwire', 'var', 'vectored',
-                'virtual', 'void', 'wait', 'wait_order', 'wand', 'weak', 'weak0', 'weak1', 'while',
-                'wildcard', 'wire', 'with', 'within', 'wor', 'xnor', 'xor'), suffix=r'\b'),
+                'accept_on', 'alias', 'always', 'always_comb', 'always_ff',
+                'always_latch', 'and', 'assert', 'assign', 'assume', 'automatic',
+                'before', 'begin', 'bind', 'bins', 'binsof', 'bit', 'break', 'buf',
+                'bufif0', 'bufif1', 'byte', 'case', 'casex', 'casez', 'cell',
+                'chandle', 'checker', 'class', 'clocking', 'cmos', 'config',
+                'const', 'constraint', 'context', 'continue', 'cover', 'covergroup',
+                'coverpoint', 'cross', 'deassign', 'default', 'defparam', 'design',
+                'disable', 'dist', 'do', 'edge', 'else', 'end', 'endcase',
+                'endchecker', 'endclass', 'endclocking', 'endconfig', 'endfunction',
+                'endgenerate', 'endgroup', 'endinterface', 'endmodule', 'endpackage',
+                'endprimitive', 'endprogram', 'endproperty', 'endsequence',
+                'endspecify', 'endtable', 'endtask', 'enum', 'event', 'eventually',
+                'expect', 'export', 'extends', 'extern', 'final', 'first_match',
+                'for', 'force', 'foreach', 'forever', 'fork', 'forkjoin', 'function',
+                'generate', 'genvar', 'global', 'highz0', 'highz1', 'if', 'iff',
+                'ifnone', 'ignore_bins', 'illegal_bins', 'implies', 'import',
+                'incdir', 'include', 'initial', 'inout', 'input', 'inside',
+                'instance', 'int', 'integer', 'interface', 'intersect', 'join',
+                'join_any', 'join_none', 'large', 'let', 'liblist', 'library',
+                'local', 'localparam', 'logic', 'longint', 'macromodule', 'matches',
+                'medium', 'modport', 'module', 'nand', 'negedge', 'new', 'nexttime',
+                'nmos', 'nor', 'noshowcancelled', 'not', 'notif0', 'notif1', 'null',
+                'or', 'output', 'package', 'packed', 'parameter', 'pmos', 'posedge',
+                'primitive', 'priority', 'program', 'property', 'protected', 'pull0',
+                'pull1', 'pulldown', 'pullup', 'pulsestyle_ondetect',
+                'pulsestyle_onevent', 'pure', 'rand', 'randc', 'randcase',
+                'randsequence', 'rcmos', 'real', 'realtime', 'ref', 'reg',
+                'reject_on', 'release', 'repeat', 'restrict', 'return', 'rnmos',
+                'rpmos', 'rtran', 'rtranif0', 'rtranif1', 's_always', 's_eventually',
+                's_nexttime', 's_until', 's_until_with', 'scalared', 'sequence',
+                'shortint', 'shortreal', 'showcancelled', 'signed', 'small', 'solve',
+                'specify', 'specparam', 'static', 'string', 'strong', 'strong0',
+                'strong1', 'struct', 'super', 'supply0', 'supply1', 'sync_accept_on',
+                'sync_reject_on', 'table', 'tagged', 'task', 'this', 'throughout',
+                'time', 'timeprecision', 'timeunit', 'tran', 'tranif0', 'tranif1',
+                'tri', 'tri0', 'tri1', 'triand', 'trior', 'trireg', 'type',
+                'typedef', 'union', 'unique', 'unique0', 'unsigned', 'until',
+                'until_with', 'untyped', 'use', 'uwire', 'var', 'vectored',
+                'virtual', 'void', 'wait', 'wait_order', 'wand', 'weak', 'weak0',
+                'weak1', 'while', 'wildcard', 'wire', 'with', 'within', 'wor',
+                'xnor', 'xor'), suffix=r'\b'),
              Keyword),
 
             (words((
-                '`__FILE__', '`__LINE__', '`begin_keywords', '`celldefine', '`default_nettype',
-                '`define', '`else', '`elsif', '`end_keywords', '`endcelldefine', '`endif',
-                '`ifdef', '`ifndef', '`include', '`line', '`nounconnected_drive', '`pragma',
-                '`resetall', '`timescale', '`unconnected_drive', '`undef', '`undefineall'),
+                '`__FILE__', '`__LINE__', '`begin_keywords', '`celldefine',
+                '`default_nettype', '`define', '`else', '`elsif', '`end_keywords',
+                '`endcelldefine', '`endif', '`ifdef', '`ifndef', '`include',
+                '`line', '`nounconnected_drive', '`pragma', '`resetall',
+                '`timescale', '`unconnected_drive', '`undef', '`undefineall'),
                 suffix=r'\b'),
              Comment.Preproc),
 
             (words((
-                '$display', '$displayb', '$displayh', '$displayo', '$dumpall', '$dumpfile',
-                '$dumpflush', '$dumplimit', '$dumpoff', '$dumpon', '$dumpports',
-                '$dumpportsall', '$dumpportsflush', '$dumpportslimit', '$dumpportsoff',
-                '$dumpportson', '$dumpvars', '$fclose', '$fdisplay', '$fdisplayb',
-                '$fdisplayh', '$fdisplayo', '$feof', '$ferror', '$fflush', '$fgetc',
-                '$fgets', '$finish', '$fmonitor', '$fmonitorb', '$fmonitorh', '$fmonitoro',
-                '$fopen', '$fread', '$fscanf', '$fseek', '$fstrobe', '$fstrobeb', '$fstrobeh',
+                '$display', '$displayb', '$displayh', '$displayo', '$dumpall',
+                '$dumpfile', '$dumpflush', '$dumplimit', '$dumpoff', '$dumpon',
+                '$dumpports', '$dumpportsall', '$dumpportsflush', '$dumpportslimit',
+                '$dumpportsoff', '$dumpportson', '$dumpvars', '$fclose',
+                '$fdisplay', '$fdisplayb', '$fdisplayh', '$fdisplayo', '$feof',
+                '$ferror', '$fflush', '$fgetc', '$fgets', '$finish', '$fmonitor',
+                '$fmonitorb', '$fmonitorh', '$fmonitoro', '$fopen', '$fread',
+                '$fscanf', '$fseek', '$fstrobe', '$fstrobeb', '$fstrobeh',
                 '$fstrobeo', '$ftell', '$fwrite', '$fwriteb', '$fwriteh', '$fwriteo',
                 '$monitor', '$monitorb', '$monitorh', '$monitoro', '$monitoroff',
-                '$monitoron', '$plusargs', '$random', '$readmemb', '$readmemh', '$rewind',
-                '$sformat', '$sformatf', '$sscanf', '$strobe', '$strobeb', '$strobeh', '$strobeo',
-                '$swrite', '$swriteb', '$swriteh', '$swriteo', '$test', '$ungetc',
-                '$value$plusargs', '$write', '$writeb', '$writeh', '$writememb',
-                '$writememh', '$writeo'), suffix=r'\b'),
+                '$monitoron', '$plusargs', '$random', '$readmemb', '$readmemh',
+                '$rewind', '$sformat', '$sformatf', '$sscanf', '$strobe',
+                '$strobeb', '$strobeh', '$strobeo', '$swrite', '$swriteb',
+                '$swriteh', '$swriteo', '$test', '$ungetc', '$value$plusargs',
+                '$write', '$writeb', '$writeh', '$writememb', '$writememh',
+                '$writeo'), suffix=r'\b'),
              Name.Builtin),
 
             (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
@@ -276,15 +279,6 @@ class SystemVerilogLexer(RegexLexer):
         ]
     }
 
-    def get_tokens_unprocessed(self, text):
-        for index, token, value in \
-                RegexLexer.get_tokens_unprocessed(self, text):
-            # Convention: mark all upper case names as constants
-            if token is Name:
-                if value.isupper():
-                    token = Name.Constant
-            yield index, token, value
-
 
 class VhdlLexer(RegexLexer):
     """
index ae38167c04e52778278e9da80b4a1bbc3b7fdbd0..cbef4f7e987c3888c06bb5a1acc3a70d9e80e5e6 100644 (file)
@@ -244,7 +244,7 @@ class XsltLexer(XmlLexer):
     filenames = ['*.xsl', '*.xslt', '*.xpl']  # xpl is XProc
     mimetypes = ['application/xsl+xml', 'application/xslt+xml']
 
-    EXTRA_KEYWORDS = set((
+    EXTRA_KEYWORDS = {
         'apply-imports', 'apply-templates', 'attribute',
         'attribute-set', 'call-template', 'choose', 'comment',
         'copy', 'copy-of', 'decimal-format', 'element', 'fallback',
@@ -253,7 +253,7 @@ class XsltLexer(XmlLexer):
         'preserve-space', 'processing-instruction', 'sort',
         'strip-space', 'stylesheet', 'template', 'text', 'transform',
         'value-of', 'variable', 'when', 'with-param'
-    ))
+    }
 
     def get_tokens_unprocessed(self, text):
         for index, token, value in XmlLexer.get_tokens_unprocessed(self, text):
index e61c451ef17a8d9b15207d6161cfa2780b4becaa..e9cf672278cf21eb91e3f406725d97cf91f850dd 100644 (file)
@@ -372,6 +372,7 @@ class DartLexer(RegexLexer):
             (r'\b(bool|double|dynamic|int|num|Object|String|void)\b', Keyword.Type),
             (r'\b(false|null|true)\b', Keyword.Constant),
             (r'[~!%^&*+=|?:<>/-]|as\b', Operator),
+            (r'@[a-zA-Z_$]\w*', Name.Decorator),
             (r'[a-zA-Z_$]\w*:', Name.Label),
             (r'[a-zA-Z_$]\w*', Name),
             (r'[(){}\[\],.;]', Punctuation),
@@ -1033,7 +1034,6 @@ class CoffeeScriptLexer(RegexLexer):
     filenames = ['*.coffee']
     mimetypes = ['text/coffeescript']
 
-
     _operator_re = (
         r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
         r'\|\||\\(?=\n)|'
@@ -1457,17 +1457,20 @@ class EarlGreyLexer(RegexLexer):
             (r'8r[0-7]+', Number.Oct),
             (r'2r[01]+', Number.Bin),
             (r'16r[a-fA-F0-9]+', Number.Hex),
-            (r'([3-79]|[12][0-9]|3[0-6])r[a-zA-Z\d]+(\.[a-zA-Z\d]+)?', Number.Radix),
+            (r'([3-79]|[12][0-9]|3[0-6])r[a-zA-Z\d]+(\.[a-zA-Z\d]+)?',
+             Number.Radix),
             (r'\d+', Number.Integer)
         ],
     }
 
+
 class JuttleLexer(RegexLexer):
     """
     For `Juttle`_ source code.
 
     .. _Juttle: https://github.com/juttle/juttle
 
+    .. versionadded:: 2.2
     """
 
     name = 'Juttle'
@@ -1502,19 +1505,24 @@ class JuttleLexer(RegexLexer):
              r'(\d+(\.\d*)?|\.\d+)(ms|[smhdwMy])?):', String.Moment),
             (r':\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(\.\d*)?)?'
              r'(Z|[+-]\d{2}:\d{2}|[+-]\d{4})?:', String.Moment),
-            (r':((\d+(\.\d*)?|\.\d+)[ ]+)?(millisecond|second|minute|hour|day|week|month|year)[s]?'
-             r'(([ ]+and[ ]+(\d+[ ]+)?(millisecond|second|minute|hour|day|week|month|year)[s]?)'
+            (r':((\d+(\.\d*)?|\.\d+)[ ]+)?(millisecond|second|minute|hour|'
+             r'day|week|month|year)[s]?'
+             r'(([ ]+and[ ]+(\d+[ ]+)?(millisecond|second|minute|hour|'
+             r'day|week|month|year)[s]?)'
              r'|[ ]+(ago|from[ ]+now))*:', String.Moment),
             (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
              r'(==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
             (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
             (r'[})\].]', Punctuation),
             (r'(import|return|continue|if|else)\b', Keyword, 'slashstartsregex'),
-            (r'(var|const|function|reducer|sub|input)\b', Keyword.Declaration, 'slashstartsregex'),
+            (r'(var|const|function|reducer|sub|input)\b', Keyword.Declaration,
+             'slashstartsregex'),
             (r'(batch|emit|filter|head|join|keep|pace|pass|put|read|reduce|remove|'
-             r'sequence|skip|sort|split|tail|unbatch|uniq|view|write)\b', Keyword.Reserved),
+             r'sequence|skip|sort|split|tail|unbatch|uniq|view|write)\b',
+             Keyword.Reserved),
             (r'(true|false|null|Infinity)\b', Keyword.Constant),
-            (r'(Array|Date|Juttle|Math|Number|Object|RegExp|String)\b', Name.Builtin),
+            (r'(Array|Date|Juttle|Math|Number|Object|RegExp|String)\b',
+             Name.Builtin),
             (JS_IDENT, Name.Other),
             (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
             (r'[0-9]+', Number.Integer),
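
The Dart change tags metadata annotations as ``Name.Decorator``; for example::

    from pygments import lex
    from pygments.lexers import DartLexer

    for token, value in lex('@override\nvoid main() {}\n', DartLexer()):
        print(token, repr(value))
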
index d01b96fca21bf870f0562eff931a70e557856306..5728e7c5f4f299b8682d955955fde91446f65fa9 100644 (file)
@@ -26,7 +26,7 @@ __all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
 
 class JavaLexer(RegexLexer):
     """
-    For `Java <http://www.sun.com/java/>`_ source code.
+    For `Java <https://www.oracle.com/technetwork/java/>`_ source code.
     """
 
     name = 'Java'
@@ -50,7 +50,7 @@ class JavaLexer(RegexLexer):
             (r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)'  # return arguments
              r'((?:[^\W\d]|\$)[\w$]*)'                  # method name
              r'(\s*)(\()',                              # signature start
-             bygroups(using(this), Name.Function, Text, Operator)),
+             bygroups(using(this), Name.Function, Text, Punctuation)),
             (r'@[^\W\d][\w.]*', Name.Decorator),
             (r'(abstract|const|enum|extends|final|implements|native|private|'
              r'protected|public|static|strictfp|super|synchronized|throws|'
@@ -61,11 +61,14 @@ class JavaLexer(RegexLexer):
             (r'(true|false|null)\b', Keyword.Constant),
             (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
              'class'),
+            (r'(var)(\s+)', bygroups(Keyword.Declaration, Text),
+             'var'),
             (r'(import(?:\s+static)?)(\s+)', bygroups(Keyword.Namespace, Text),
              'import'),
             (r'"(\\\\|\\"|[^"])*"', String),
             (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
-            (r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Operator, Name.Attribute)),
+            (r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Punctuation,
+                                                     Name.Attribute)),
             (r'^\s*([^\W\d]|\$)[\w$]*:', Name.Label),
             (r'([^\W\d]|\$)[\w$]*', Name),
             (r'([0-9][0-9_]*\.([0-9][0-9_]*)?|'
@@ -80,12 +83,16 @@ class JavaLexer(RegexLexer):
             (r'0[bB][01][01_]*[lL]?', Number.Bin),
             (r'0[0-7_]+[lL]?', Number.Oct),
             (r'0|[1-9][0-9_]*[lL]?', Number.Integer),
-            (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
+            (r'[~^*!%&\[\]<>|+=/?-]', Operator),
+            (r'[{}();:.,]', Punctuation),
             (r'\n', Text)
         ],
         'class': [
             (r'([^\W\d]|\$)[\w$]*', Name.Class, '#pop')
         ],
+        'var': [
+            (r'([^\W\d]|\$)[\w$]*', Name, '#pop')
+        ],
         'import': [
             (r'[\w.]+\*?', Name.Namespace, '#pop')
         ],
@@ -104,7 +111,7 @@ class AspectJLexer(JavaLexer):
     filenames = ['*.aj']
     mimetypes = ['text/x-aspectj']
 
-    aj_keywords = set((
+    aj_keywords = {
         'aspect', 'pointcut', 'privileged', 'call', 'execution',
         'initialization', 'preinitialization', 'handler', 'get', 'set',
         'staticinitialization', 'target', 'args', 'within', 'withincode',
@@ -114,9 +121,9 @@ class AspectJLexer(JavaLexer):
         'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart',
         'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow',
         'pertypewithin', 'lock', 'unlock', 'thisAspectInstance'
-    ))
-    aj_inter_type = set(('parents:', 'warning:', 'error:', 'soft:', 'precedence:'))
-    aj_inter_type_annotation = set(('@type', '@method', '@constructor', '@field'))
+    }
+    aj_inter_type = {'parents:', 'warning:', 'error:', 'soft:', 'precedence:'}
+    aj_inter_type_annotation = {'@type', '@method', '@constructor', '@field'}
 
     def get_tokens_unprocessed(self, text):
         for index, token, value in JavaLexer.get_tokens_unprocessed(self, text):
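
With the new rule pair, Java 10 ``var`` declarations come out as ``Keyword.Declaration`` followed by a plain ``Name``. For example::

    from pygments import highlight
    from pygments.lexers import JavaLexer
    from pygments.formatters import HtmlFormatter

    code = 'var names = new ArrayList<String>();'
    print(highlight(code, JavaLexer(), HtmlFormatter(nowrap=True)))
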
index 169d7a9e973f74387ec93da85c364921a72fe7b3..601d5a5f27f45eabfb57ef0f2df3bce975afbdde 100644 (file)
@@ -1554,7 +1554,7 @@ class EmacsLispLexer(RegexLexer):
     # Take a deep breath...
     symbol = r'((?:%s)(?:%s)*)' % (nonmacro, constituent)
 
-    macros = set((
+    macros = {
         'atomic-change-group', 'case', 'block', 'cl-block', 'cl-callf', 'cl-callf2',
         'cl-case', 'cl-decf', 'cl-declaim', 'cl-declare',
         'cl-define-compiler-macro', 'cl-defmacro', 'cl-defstruct',
@@ -1601,17 +1601,17 @@ class EmacsLispLexer(RegexLexer):
         'with-tramp-file-property', 'with-tramp-progress-reporter',
         'with-wrapper-hook', 'load-time-value', 'locally', 'macrolet', 'progv',
         'return-from',
-    ))
+    }
 
-    special_forms = set((
+    special_forms = {
         'and', 'catch', 'cond', 'condition-case', 'defconst', 'defvar',
         'function', 'if', 'interactive', 'let', 'let*', 'or', 'prog1',
         'prog2', 'progn', 'quote', 'save-current-buffer', 'save-excursion',
         'save-restriction', 'setq', 'setq-default', 'subr-arity',
         'unwind-protect', 'while',
-    ))
+    }
 
-    builtin_function = set((
+    builtin_function = {
         '%', '*', '+', '-', '/', '/=', '1+', '1-', '<', '<=', '=', '>', '>=',
         'Snarf-documentation', 'abort-recursive-edit', 'abs',
         'accept-process-output', 'access-file', 'accessible-keymaps', 'acos',
@@ -1937,8 +1937,9 @@ class EmacsLispLexer(RegexLexer):
         'split-window-internal', 'sqrt', 'standard-case-table',
         'standard-category-table', 'standard-syntax-table', 'start-kbd-macro',
         'start-process', 'stop-process', 'store-kbd-macro-event', 'string',
-        'string-as-multibyte', 'string-as-unibyte', 'string-bytes',
-        'string-collate-equalp', 'string-collate-lessp', 'string-equal',
+        'string=', 'string<', 'string>', 'string-as-multibyte',
+        'string-as-unibyte', 'string-bytes', 'string-collate-equalp',
+        'string-collate-lessp', 'string-equal', 'string-greaterp',
         'string-lessp', 'string-make-multibyte', 'string-make-unibyte',
         'string-match', 'string-to-char', 'string-to-multibyte',
         'string-to-number', 'string-to-syntax', 'string-to-unibyte',
@@ -2050,23 +2051,23 @@ class EmacsLispLexer(RegexLexer):
         'xw-color-values', 'xw-display-color-p', 'xw-display-color-p',
         'yes-or-no-p', 'zlib-available-p', 'zlib-decompress-region',
         'forward-point',
-    ))
+    }
 
-    builtin_function_highlighted = set((
+    builtin_function_highlighted = {
         'defvaralias', 'provide', 'require',
         'with-no-warnings', 'define-widget', 'with-electric-help',
         'throw', 'defalias', 'featurep'
-    ))
+    }
 
-    lambda_list_keywords = set((
+    lambda_list_keywords = {
         '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
         '&rest', '&whole',
-    ))
+    }
 
-    error_keywords = set((
+    error_keywords = {
         'cl-assert', 'cl-check-type', 'error', 'signal',
         'user-error', 'warn',
-    ))
+    }
 
     def get_tokens_unprocessed(self, text):
         stack = ['root']
@@ -2225,7 +2226,7 @@ class ShenLexer(RegexLexer):
 
     BUILTINS_ANYWHERE = ('where', 'skip', '>>', '_', '!', '<e>', '<!>')
 
-    MAPPINGS = dict((s, Keyword) for s in DECLARATIONS)
+    MAPPINGS = {s: Keyword for s in DECLARATIONS}
     MAPPINGS.update((s, Name.Builtin) for s in BUILTINS)
     MAPPINGS.update((s, Keyword) for s in SPECIAL_FORMS)
 
index fd3897ad2ce228aaf9cb96fed945a1c531823db4..ee85d0888ab8b7057adeeedb8da083d075bf777f 100644 (file)
@@ -72,6 +72,8 @@ class MatlabLexer(RegexLexer):
              "hilb", "invhilb", "magic", "pascal", "rosser", "toeplitz", "vander",
              "wilkinson")
 
+    _operators = r'-|==|~=|<=|>=|<|>|&&|&|~|\|\|?|\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\'
+
     tokens = {
         'root': [
             # line starting with '!' is sent as a system command.  not sure what
@@ -79,7 +81,7 @@ class MatlabLexer(RegexLexer):
             (r'^!.*', String.Other),
             (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
             (r'%.*$', Comment),
-            (r'^\s*function', Keyword, 'deffunc'),
+            (r'^\s*function\b', Keyword, 'deffunc'),
 
             # from 'iskeyword' on version 7.11 (R2010):
             (words((
@@ -94,10 +96,22 @@ class MatlabLexer(RegexLexer):
             # line continuation with following comment:
             (r'\.\.\..*$', Comment),
 
+            # command form:
+            # "How MATLAB Recognizes Command Syntax" specifies that an operator
+            # is recognized if it is either surrounded by spaces or by no
+            # spaces on both sides; only the former case matters for us.  (This
+            # allows distinguishing `cd ./foo` from `cd ./ foo`.)
+            (r'(?:^|(?<=;))\s*\w+\s+(?!=|\(|(%s)\s+)' % _operators, Name,
+             'commandargs'),
+
             # operators:
-            (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
-            # operators requiring escape for re:
-            (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
+            (_operators, Operator),
+
+            # numbers (must come before punctuation to handle `.5`; cannot use
+            # `\b` due to e.g. `5. + .5`).
+            (r'(?<!\w)((\d+\.\d*)|(\d*\.\d+))([eEf][+-]?\d+)?(?!\w)', Number.Float),
+            (r'\b\d+[eEf][+-]?[0-9]+\b', Number.Float),
+            (r'\b\d+\b', Number.Integer),
 
             # punctuation:
             (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
@@ -107,17 +121,10 @@ class MatlabLexer(RegexLexer):
             # (not great, but handles common cases...)
             (r'(?<=[\w)\].])\'+', Operator),
 
-            (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
-            (r'\d+[eEf][+-]?[0-9]+', Number.Float),
-            (r'\d+', Number.Integer),
-
             (r'(?<![\w)\].])\'', String, 'string'),
             (r'[a-zA-Z_]\w*', Name),
             (r'.', Text),
         ],
-        'string': [
-            (r'[^\']*\'', String, '#pop')
-        ],
         'blockcomment': [
             (r'^\s*%\}', Comment.Multiline, '#pop'),
             (r'^.*\n', Comment.Multiline),
@@ -131,12 +138,28 @@ class MatlabLexer(RegexLexer):
             # function with no args
             (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
         ],
+        'string': [
+            (r"[^']*'", String, '#pop'),
+        ],
+        'commandargs': [
+            ("'[^']*'", String),
+            ("[^';\n]+", String),
+            (";?\n?", Punctuation, '#pop'),
+        ]
     }
 
     def analyse_text(text):
-        if re.match(r'^\s*%', text, re.M):  # comment
+        # function declaration.
+        first_non_comment = next((line for line in text.splitlines()
+                                  if not re.match(r'^\s*%', line)), '').strip()
+        if (first_non_comment.startswith('function')
+                and '{' not in first_non_comment):
+            return 1.
+        # comment
+        elif re.match(r'^\s*%', text, re.M):
             return 0.2
-        elif re.match(r'^!\w+', text, re.M):  # system cmd
+        # system cmd
+        elif re.match(r'^!\w+', text, re.M):
             return 0.2
 
 
@@ -536,7 +559,7 @@ class OctaveLexer(RegexLexer):
         'root': [
             # We should look into multiline comments
             (r'[%#].*$', Comment),
-            (r'^\s*function', Keyword, 'deffunc'),
+            (r'^\s*function\b', Keyword, 'deffunc'),
 
             # from 'iskeyword' on hg changeset 8cc154f45e37
             (words((
@@ -609,7 +632,7 @@ class ScilabLexer(RegexLexer):
     tokens = {
         'root': [
             (r'//.*?$', Comment.Single),
-            (r'^\s*function', Keyword, 'deffunc'),
+            (r'^\s*function\b', Keyword, 'deffunc'),
 
             (words((
                 '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
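
A minimal sketch of how the new MATLAB command-form rule above can be inspected through the public Pygments API; the two sample inputs are hypothetical::

    from pygments.lexers import MatlabLexer

    # `cd ./foo` should take the new command-form branch (the argument is
    # lexed in the 'commandargs' state), while `cd ./ foo` falls through to
    # the ordinary operator rules.
    for sample in ("cd ./foo\n", "cd ./ foo\n"):
        print(list(MatlabLexer().get_tokens(sample)))
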
diff --git a/pygments/lexers/mime.py b/pygments/lexers/mime.py
new file mode 100644 (file)
index 0000000..95979f3
--- /dev/null
@@ -0,0 +1,226 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.lexers.mime
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for Multipurpose Internet Mail Extensions (MIME) data.
+
+    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.lexers import get_lexer_for_mimetype
+from pygments.token import Text, Name, String, Operator, Comment, Other
+from pygments.util import get_int_opt, ClassNotFound
+
+__all__ = ["MIMELexer"]
+
+
+class MIMELexer(RegexLexer):
+    """
+    Lexer for Multipurpose Internet Mail Extensions (MIME) data. This lexer is
+    designed to process nested multipart data.
+
+    It assumes that the given data contains both a header and a body (separated
+    by an empty line). If no valid header is found, the entire data is treated
+    as the body.
+
+    Additional options accepted:
+
+    `MIME-max-level`
+        Maximum recursion level for nested MIME structures. Any negative
+        number is treated as unlimited. (default: -1)
+
+    `Content-Type`
+        Treat the data as the given content type. Useful when the header is
+        missing; otherwise this lexer tries to parse the content type from
+        the header. (default: `text/plain`)
+
+    `Multipart-Boundary`
+        Set the default multipart boundary delimiter. This option is only used
+        when `Content-Type` is `multipart` and the header is missing; by
+        default the boundary is parsed from the header. (default: None)
+
+    `Content-Transfer-Encoding`
+        Treat the data as having the given transfer encoding; by default this
+        lexer tries to parse the encoding from the header. (default: None)
+
+    .. versionadded:: 2.5
+    """
+
+    name = "MIME"
+    aliases = ["mime"]
+    mimetypes = ["multipart/mixed",
+                 "multipart/related",
+                 "multipart/alternative"]
+
+    def __init__(self, **options):
+        super(MIMELexer, self).__init__(**options)
+        self.boundary = options.get("Multipart-Boundary")
+        self.content_transfer_encoding = options.get("Content_Transfer_Encoding")
+        self.content_type = options.get("Content_Type", "text/plain")
+        self.max_nested_level = get_int_opt(options, "MIME-max-level", -1)
+
+    def analyse_text(text):
+        try:
+            header, body = text.strip().split("\n\n", 1)
+            if not body.strip():
+                return 0.1
+
+            invalid_headers = MIMELexer.tokens["header"].sub("", header)
+            if invalid_headers.strip():
+                return 0.1
+            else:
+                return 1
+
+        except ValueError:
+            return 0.1
+
+    def get_header_tokens(self, match):
+        field = match.group(1)
+
+        if field.lower() in self.attention_headers:
+            yield match.start(1), Name.Tag, field + ":"
+            yield match.start(2), Text.Whitespace, match.group(2)
+
+            pos = match.end(2)
+            body = match.group(3)
+            for i, t, v in self.get_tokens_unprocessed(body, ("root", field.lower())):
+                yield pos + i, t, v
+
+        else:
+            yield match.start(), Comment, match.group()
+
+    def get_body_tokens(self, match):
+        pos_body_start = match.start()
+        entire_body = match.group()
+
+        # skip first newline
+        if entire_body[0] == '\n':
+            yield pos_body_start, Text.Whitespace, u'\n'
+            pos_body_start = pos_body_start + 1
+            entire_body = entire_body[1:]
+
+        # if it is not a multipart message
+        if not self.content_type.startswith("multipart") or not self.boundary:
+            for i, t, v in self.get_bodypart_tokens(entire_body):
+                yield pos_body_start + i, t, v
+            return
+
+        # find boundary
+        bdry_pattern = r"^--%s(--)?\n" % re.escape(self.boundary)
+        bdry_matcher = re.compile(bdry_pattern, re.MULTILINE)
+
+        # some data has prefix text before first boundary
+        m = bdry_matcher.search(entire_body)
+        if m:
+            pos_part_start = pos_body_start + m.end()
+            pos_iter_start = lpos_end = m.end()
+            yield pos_body_start, Text, entire_body[:m.start()]
+            yield pos_body_start + lpos_end, String.Delimiter, m.group()
+        else:
+            pos_part_start = pos_body_start
+            pos_iter_start = 0
+
+        # process tokens of each body part
+        for m in bdry_matcher.finditer(entire_body, pos_iter_start):
+            # bodypart
+            lpos_start = pos_part_start - pos_body_start
+            lpos_end = m.start()
+            part = entire_body[lpos_start:lpos_end]
+            for i, t, v in self.get_bodypart_tokens(part):
+                yield pos_part_start + i, t, v
+
+            # boundary
+            yield pos_body_start + lpos_end, String.Delimiter, m.group()
+            pos_part_start = pos_body_start + m.end()
+
+        # some data has suffix text after last boundary
+        lpos_start = pos_part_start - pos_body_start
+        if lpos_start != len(entire_body):
+            yield pos_part_start, Text, entire_body[lpos_start:]
+
+    def get_bodypart_tokens(self, text):
+        # return the text unprocessed if:
+        #  * there is no content
+        #  * no content type is specified
+        #  * the content encoding is not readable
+        #  * the maximum recursion level is exceeded
+        if not text.strip() or not self.content_type:
+            return [(0, Other, text)]
+
+        cte = self.content_transfer_encoding
+        if cte and cte not in {"8bit", "7bit", "quoted-printable"}:
+            return [(0, Other, text)]
+
+        if self.max_nested_level == 0:
+            return [(0, Other, text)]
+
+        # get lexer
+        try:
+            lexer = get_lexer_for_mimetype(self.content_type)
+        except ClassNotFound:
+            return [(0, Other, text)]
+
+        if isinstance(lexer, type(self)):
+            lexer.max_nested_level = self.max_nested_level - 1
+
+        return lexer.get_tokens_unprocessed(text)
+
+    def store_content_type(self, match):
+        self.content_type = match.group(1)
+
+        prefix_len = match.start(1) - match.start(0)
+        yield match.start(0), Text.Whitespace, match.group(0)[:prefix_len]
+        yield match.start(1), Name.Label, match.group(2)
+        yield match.end(2), String.Delimiter, u"/"
+        yield match.start(3), Name.Label, match.group(3)
+
+    def get_content_type_subtokens(self, match):
+        yield match.start(1), Text, match.group(1)
+        yield match.start(2), Text.Whitespace, match.group(2)
+        yield match.start(3), Name.Attribute, match.group(3)
+        yield match.start(4), Operator, match.group(4)
+        yield match.start(5), String, match.group(5)
+
+        if match.group(3).lower() == "boundary":
+            boundary = match.group(5).strip()
+            if boundary[0] == '"' and boundary[-1] == '"':
+                boundary = boundary[1:-1]
+            self.boundary = boundary
+
+    def store_content_transfer_encoding(self, match):
+        self.content_transfer_encoding = match.group(0).lower()
+        yield match.start(0), Name.Constant, match.group(0)
+
+    attention_headers = {"content-type", "content-transfer-encoding"}
+
+    tokens = {
+        "root": [
+            (r"^([\w-]+):( *)([\s\S]*?\n)(?![ \t])", get_header_tokens),
+            (r"^$[\s\S]+", get_body_tokens),
+        ],
+        "header": [
+            # folding
+            (r"\n[ \t]", Text.Whitespace),
+            (r"\n(?![ \t])", Text.Whitespace, "#pop"),
+        ],
+        "content-type": [
+            include("header"),
+            (
+                r"^\s*((multipart|application|audio|font|image|model|text|video"
+                r"|message)/([\w-]+))",
+                store_content_type,
+            ),
+            (r'(;)((?:[ \t]|\n[ \t])*)([\w:-]+)(=)([\s\S]*?)(?=;|\n(?![ \t]))',
+             get_content_type_subtokens),
+            (r';[ \t]*\n(?![ \t])', Text, '#pop'),
+        ],
+        "content-transfer-encoding": [
+            include("header"),
+            (r"([\w-]+)", store_content_transfer_encoding),
+        ],
+    }
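
A usage sketch for the new ``MIMELexer``, assuming a small hand-written multipart message; note that ``__init__`` as written above reads the underscored option keys ``Content_Type`` and ``Content_Transfer_Encoding``, while ``Multipart-Boundary`` and ``MIME-max-level`` keep their hyphens::

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.mime import MIMELexer

    data = (
        "Content-Type: multipart/mixed; boundary=frontier\n"
        "\n"
        "--frontier\n"
        "Content-Type: text/plain\n"
        "\n"
        "hello\n"
        "--frontier--\n"
    )

    # Options are only needed when the header is missing or incomplete.
    lexer = MIMELexer(**{"Multipart-Boundary": "frontier", "MIME-max-level": 2})
    print(highlight(data, lexer, TerminalFormatter()))
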
index aff8160d3ce6b4bb910a38932c76db8a30b80cbb..461af88134263d612a47e3d92b8e440a6947c518 100644 (file)
@@ -30,7 +30,7 @@ class SMLLexer(RegexLexer):
     filenames = ['*.sml', '*.sig', '*.fun']
     mimetypes = ['text/x-standardml', 'application/x-standardml']
 
-    alphanumid_reserved = set((
+    alphanumid_reserved = {
         # Core
         'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else',
         'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix',
@@ -39,16 +39,16 @@ class SMLLexer(RegexLexer):
         # Modules
         'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature',
         'struct', 'structure', 'where',
-    ))
+    }
 
-    symbolicid_reserved = set((
+    symbolicid_reserved = {
         # Core
         ':', r'\|', '=', '=>', '->', '#',
         # Modules
         ':>',
-    ))
+    }
 
-    nonid_reserved = set(('(', ')', '[', ']', '{', '}', ',', ';', '...', '_'))
+    nonid_reserved = {'(', ')', '[', ']', '{', '}', ',', ';', '...', '_'}
 
     alphanumid_re = r"[a-zA-Z][\w']*"
     symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+"
index 251bca2a01a57772d28d6d8d581c0ffdb2ba3b1b..0a8dd7df4db633f5081377f65d718d86c97cf56e 100644 (file)
@@ -68,29 +68,29 @@ class DelphiLexer(Lexer):
         'dispose', 'exit', 'false', 'new', 'true'
     )
 
-    BLOCK_KEYWORDS = set((
+    BLOCK_KEYWORDS = {
         'begin', 'class', 'const', 'constructor', 'destructor', 'end',
         'finalization', 'function', 'implementation', 'initialization',
         'label', 'library', 'operator', 'procedure', 'program', 'property',
         'record', 'threadvar', 'type', 'unit', 'uses', 'var'
-    ))
+    }
 
-    FUNCTION_MODIFIERS = set((
+    FUNCTION_MODIFIERS = {
         'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
         'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
         'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
         'override', 'assembler'
-    ))
+    }
 
     # XXX: those aren't global. but currently we know no way for defining
     #      them just for the type context.
-    DIRECTIVES = set((
+    DIRECTIVES = {
         'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
         'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
         'published', 'public'
-    ))
+    }
 
-    BUILTIN_TYPES = set((
+    BUILTIN_TYPES = {
         'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
         'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
         'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
@@ -104,7 +104,7 @@ class DelphiLexer(Lexer):
         'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
         'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
         'widechar', 'widestring', 'word', 'wordbool'
-    ))
+    }
 
     BUILTIN_UNITS = {
         'System': (
@@ -246,7 +246,7 @@ class DelphiLexer(Lexer):
         )
     }
 
-    ASM_REGISTERS = set((
+    ASM_REGISTERS = {
         'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
         'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
         'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
@@ -255,9 +255,9 @@ class DelphiLexer(Lexer):
         'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
         'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
         'xmm6', 'xmm7'
-    ))
+    }
 
-    ASM_INSTRUCTIONS = set((
+    ASM_INSTRUCTIONS = {
         'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
         'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
         'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
@@ -296,7 +296,7 @@ class DelphiLexer(Lexer):
         'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
         'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
         'xlatb', 'xor'
-    ))
+    }
 
     def __init__(self, **options):
         Lexer.__init__(self, **options)
index 576df424734fdb6b67ca33c6a6af5062878c68f2..3cdfbd03e8ca93486dba1b0bdcf8f5a3053c92d6 100644 (file)
@@ -86,25 +86,25 @@ class SourcePawnLexer(RegexLexer):
         ]
     }
 
-    SM_TYPES = set(('Action', 'bool', 'Float', 'Plugin', 'String', 'any',
-                    'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType',
-                    'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart',
-                    'CookieAccess', 'CookieMenu', 'CookieMenuAction', 'NetFlow',
-                    'ConVarBounds', 'QueryCookie', 'ReplySource',
-                    'ConVarQueryResult', 'ConVarQueryFinished', 'Function',
-                    'Action', 'Identity', 'PluginStatus', 'PluginInfo', 'DBResult',
-                    'DBBindType', 'DBPriority', 'PropType', 'PropFieldType',
-                    'MoveType', 'RenderMode', 'RenderFx', 'EventHookMode',
-                    'EventHook', 'FileType', 'FileTimeMode', 'PathType',
-                    'ParamType', 'ExecType', 'DialogType', 'Handle', 'KvDataTypes',
-                    'NominateResult', 'MapChange', 'MenuStyle', 'MenuAction',
-                    'MenuSource', 'RegexError', 'SDKCallType', 'SDKLibrary',
-                    'SDKFuncConfSource', 'SDKType', 'SDKPassMethod', 'RayType',
-                    'TraceEntityFilter', 'ListenOverride', 'SortOrder', 'SortType',
-                    'SortFunc2D', 'APLRes', 'FeatureType', 'FeatureStatus',
-                    'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond',
-                    'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType',
-                    'TopMenuPosition', 'TopMenuObject', 'UserMsg'))
+    SM_TYPES = {'Action', 'bool', 'Float', 'Plugin', 'String', 'any',
+                'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType',
+                'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart',
+                'CookieAccess', 'CookieMenu', 'CookieMenuAction', 'NetFlow',
+                'ConVarBounds', 'QueryCookie', 'ReplySource',
+                'ConVarQueryResult', 'ConVarQueryFinished', 'Function',
+                'Action', 'Identity', 'PluginStatus', 'PluginInfo', 'DBResult',
+                'DBBindType', 'DBPriority', 'PropType', 'PropFieldType',
+                'MoveType', 'RenderMode', 'RenderFx', 'EventHookMode',
+                'EventHook', 'FileType', 'FileTimeMode', 'PathType',
+                'ParamType', 'ExecType', 'DialogType', 'Handle', 'KvDataTypes',
+                'NominateResult', 'MapChange', 'MenuStyle', 'MenuAction',
+                'MenuSource', 'RegexError', 'SDKCallType', 'SDKLibrary',
+                'SDKFuncConfSource', 'SDKType', 'SDKPassMethod', 'RayType',
+                'TraceEntityFilter', 'ListenOverride', 'SortOrder', 'SortType',
+                'SortFunc2D', 'APLRes', 'FeatureType', 'FeatureStatus',
+                'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond',
+                'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType',
+                'TopMenuPosition', 'TopMenuObject', 'UserMsg'}
 
     def __init__(self, **options):
         self.smhighlighting = get_bool_opt(options,
index fa91880fd9dce4788c4d2492d44aefcbbc37739f..4a6a14f0eac3edb03e2d01accdf14d5c1ec1ab37 100644 (file)
@@ -55,7 +55,7 @@ class PraatLexer(RegexLexer):
         'exitScript', 'exp', 'extractNumber', 'fileReadable', 'fisherP', 'fisherQ',
         'floor', 'gaussP', 'gaussQ', 'hertzToBark', 'hertzToErb', 'hertzToMel',
         'hertzToSemitones', 'imax', 'imin', 'incompleteBeta', 'incompleteGammaP', 'index',
-        'index_regex', 'invBinomialP', 'invBinomialQ', 'invChiSquareQ', 'invFisherQ',
+        'index_regex', 'integer', 'invBinomialP', 'invBinomialQ', 'invChiSquareQ', 'invFisherQ',
         'invGaussQ', 'invSigmoid', 'invStudentQ', 'length', 'ln', 'lnBeta', 'lnGamma',
         'log10', 'log2', 'max', 'melToHertz', 'min', 'minusObject', 'natural', 'number',
         'numberOfColumns', 'numberOfRows', 'numberOfSelected', 'objectsAreIdentical',
@@ -63,9 +63,9 @@ class PraatLexer(RegexLexer):
         'positive', 'randomBinomial', 'randomGauss', 'randomInteger', 'randomPoisson',
         'randomUniform', 'real', 'readFile', 'removeObject', 'rindex', 'rindex_regex',
         'round', 'runScript', 'runSystem', 'runSystem_nocheck', 'selectObject',
-        'selected', 'semitonesToHertz', 'sentencetext', 'sigmoid', 'sin', 'sinc',
+        'selected', 'semitonesToHertz', 'sentence', 'sentencetext', 'sigmoid', 'sin', 'sinc',
         'sincpi', 'sinh', 'soundPressureToPhon', 'sqrt', 'startsWith', 'studentP',
-        'studentQ', 'tan', 'tanh', 'variableExists', 'word', 'writeFile', 'writeFileLine',
+        'studentQ', 'tan', 'tanh', 'text', 'variableExists', 'word', 'writeFile', 'writeFileLine',
         'writeInfo', 'writeInfoLine',
     )
 
@@ -90,9 +90,9 @@ class PraatLexer(RegexLexer):
         'KNN', 'KlattGrid', 'KlattTable', 'LFCC', 'LPC', 'Label', 'LegendreSeries',
         'LinearRegression', 'LogisticRegression', 'LongSound', 'Ltas', 'MFCC', 'MSpline',
         'ManPages', 'Manipulation', 'Matrix', 'MelFilter', 'MelSpectrogram',
-        'MixingMatrix', 'Movie', 'Network', 'OTGrammar', 'OTHistory', 'OTMulti', 'PCA',
-        'PairDistribution', 'ParamCurve', 'Pattern', 'Permutation', 'Photo', 'Pitch',
-        'PitchModeler', 'PitchTier', 'PointProcess', 'Polygon', 'Polynomial',
+        'MixingMatrix', 'Movie', 'Network', 'Object', 'OTGrammar', 'OTHistory', 'OTMulti',
+        'PCA', 'PairDistribution', 'ParamCurve', 'Pattern', 'Permutation', 'Photo',
+        'Pitch', 'PitchModeler', 'PitchTier', 'PointProcess', 'Polygon', 'Polynomial',
         'PowerCepstrogram', 'PowerCepstrum', 'Procrustes', 'RealPoint', 'RealTier',
         'ResultsMFC', 'Roots', 'SPINET', 'SSCP', 'SVD', 'Salience', 'ScalarProduct',
         'Similarity', 'SimpleString', 'SortedSetOfString', 'Sound', 'Speaker',
@@ -112,6 +112,10 @@ class PraatLexer(RegexLexer):
         'defaultDirectory',
     )
 
+    object_attributes = (
+        'ncol', 'nrow', 'xmin', 'ymin', 'xmax', 'ymax', 'nx', 'ny', 'dx', 'dy',
+    )
+
     tokens = {
         'root': [
             (r'(\s+)(#.*?$)',  bygroups(Text, Comment.Single)),
@@ -148,7 +152,9 @@ class PraatLexer(RegexLexer):
         ],
         'command': [
             (r'( ?[\w()-]+ ?)', Keyword),
-            (r"'(?=.*')", String.Interpol, 'string_interpolated'),
+
+            include('string_interpolated'),
+
             (r'\.{3}', Keyword, ('#pop', 'old_arguments')),
             (r':', Keyword, ('#pop', 'comma_list')),
             (r'\s', Text, '#pop'),
@@ -207,50 +213,49 @@ class PraatLexer(RegexLexer):
             (r'\n', Text, '#pop'),
             (r'\b\d+(\.\d*)?([eE][-+]?\d+)?%?', Number),
         ],
-        'object_attributes': [
-            (r'\.?(n(col|row)|[xy]min|[xy]max|[nd][xy])\b', Name.Builtin, '#pop'),
-            (r'(\.?(?:col|row)\$)(\[)',
-             bygroups(Name.Builtin, Text), 'variable_name'),
-            (r'(\$?)(\[)',
-             bygroups(Name.Builtin, Text), ('#pop', 'comma_list')),
+        'object_reference': [
+          include('string_interpolated'),
+          (r'([a-z][a-zA-Z0-9_]*|\d+)', Name.Builtin),
+
+          (words(object_attributes, prefix=r'\.'), Name.Builtin, '#pop'),
+
+          (r'\$', Name.Builtin),
+          (r'\[', Text, '#pop'),
         ],
         'variable_name': [
             include('operator'),
             include('number'),
 
             (words(variables_string,  suffix=r'\$'), Name.Variable.Global),
-            (words(variables_numeric, suffix=r'\b'), Name.Variable.Global),
-
-            (r'\bObject_\w+', Name.Builtin, 'object_attributes'),
-            (words(objects, prefix=r'\b', suffix=r'_\w+'),
-             Name.Builtin, 'object_attributes'),
+            (words(variables_numeric,
+             suffix=r'(?=[^a-zA-Z0-9\._"\'\$#\[:\(]|\s|^|$)'),
+             Name.Variable.Global),
 
-            (r"\b(Object_)(')",
-             bygroups(Name.Builtin, String.Interpol),
-             ('object_attributes', 'string_interpolated')),
-            (words(objects, prefix=r'\b', suffix=r"(_)(')"),
-             bygroups(Name.Builtin, Name.Builtin, String.Interpol),
-             ('object_attributes', 'string_interpolated')),
+            (words(objects, prefix=r'\b', suffix=r"(_)"),
+             bygroups(Name.Builtin, Name.Builtin),
+             'object_reference'),
 
             (r'\.?_?[a-z][\w.]*(\$|#)?', Text),
             (r'[\[\]]', Punctuation, 'comma_list'),
-            (r"'(?=.*')", String.Interpol, 'string_interpolated'),
+
+            include('string_interpolated'),
         ],
         'operator': [
             (r'([+\/*<>=!-]=?|[&*|][&*|]?|\^|<>)',       Operator),
             (r'(?<![\w.])(and|or|not|div|mod)(?![\w.])', Operator.Word),
         ],
         'string_interpolated': [
-            (r'\.?[_a-z][\w.]*[$#]?(?:\[[a-zA-Z0-9,]+\])?(:[0-9]+)?',
+            (r'\'[_a-z][^\[\]\'":]*(\[([\d,]+|"[\w\d,]+")\])?(:[0-9]+)?\'',
              String.Interpol),
-            (r"'",          String.Interpol, '#pop'),
         ],
         'string_unquoted': [
             (r'(\n\s*)(\.{3})', bygroups(Text, Punctuation)),
 
             (r'\n',       Text,            '#pop'),
             (r'\s',       Text),
-            (r"'(?=.*')", String.Interpol, 'string_interpolated'),
+
+            include('string_interpolated'),
+
             (r"'",        String),
             (r"[^'\n]+",  String),
         ],
@@ -258,11 +263,14 @@ class PraatLexer(RegexLexer):
             (r'(\n\s*)(\.{3})', bygroups(Text, Punctuation)),
 
             (r'"',          String,          '#pop'),
-            (r"'(?=.*')",   String.Interpol, 'string_interpolated'),
+
+            include('string_interpolated'),
+
             (r"'",          String),
             (r'[^\'"\n]+',  String),
         ],
         'old_form': [
+            (r'(\s+)(#.*?$)',  bygroups(Text, Comment.Single)),
             (r'\s+', Text),
 
             (r'(optionmenu|choice)([ \t]+\S+:[ \t]+)',
index 8dbbc6d4eb8b52fb5d20b22af266af40fd94aceb..70783625e0bc66fd4bf69c5f6314e913251eab03 100644 (file)
@@ -107,19 +107,19 @@ class LogtalkLexer(RegexLexer):
             (r'\n', Text),
             (r'\s+', Text),
             # Numbers
-            (r"0'.", Number),
+            (r"0'[\\]?.", Number),
             (r'0b[01]+', Number.Bin),
             (r'0o[0-7]+', Number.Oct),
             (r'0x[0-9a-fA-F]+', Number.Hex),
             (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
             # Variables
-            (r'([A-Z_]\w*)', Name.Variable),
+            (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
             # Event handlers
             (r'(after|before)(?=[(])', Keyword),
             # Message forwarding handler
             (r'forward(?=[(])', Keyword),
             # Execution-context methods
-            (r'(parameter|this|se(lf|nder))(?=[(])', Keyword),
+            (r'(context|parameter|this|se(lf|nder))(?=[(])', Keyword),
             # Reflection
             (r'(current_predicate|predicate_property)(?=[(])', Keyword),
             # DCGs and term expansion
@@ -135,20 +135,23 @@ class LogtalkLexer(RegexLexer):
             # Events
             (r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
             # Flags
-            (r'(current|set)_logtalk_flag(?=[(])', Keyword),
+            (r'(create|current|set)_logtalk_flag(?=[(])', Keyword),
             # Compiling, loading, and library paths
-            (r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make)(?=[(])', Keyword),
+            (r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make(_target_action)?)(?=[(])', Keyword),
             (r'\blogtalk_make\b', Keyword),
             # Database
             (r'(clause|retract(all)?)(?=[(])', Keyword),
             (r'a(bolish|ssert(a|z))(?=[(])', Keyword),
             # Control constructs
             (r'(ca(ll|tch)|throw)(?=[(])', Keyword),
-            (r'(fa(il|lse)|true)\b', Keyword),
+            (r'(fa(il|lse)|true|(instantiation|system)_error)\b', Keyword),
+            (r'(type|domain|existence|permission|representation|evaluation|resource|syntax)_error(?=[(])', Keyword),
             # All solutions
             (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
-            # Multi-threading meta-predicates
-            (r'threaded(_(call|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
+            # Multi-threading predicates
+            (r'threaded(_(ca(ll|ncel)|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
+            # Engine predicates
+            (r'threaded_engine(_(create|destroy|self|next|next_reified|yield|post|fetch))?(?=[(])', Keyword),
             # Term unification
             (r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword),
             # Term creation and decomposition
@@ -160,8 +163,7 @@ class LogtalkLexer(RegexLexer):
             # Other arithmetic functors
             (r'(cos|a(cos|sin|tan|tan2)|exp|log|s(in|qrt)|xor)(?=[(])', Keyword),
             # Term testing
-            (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|'
-             r'ground|acyclic_term)(?=[(])', Keyword),
+            (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|ground|acyclic_term)(?=[(])', Keyword),
             # Term comparison
             (r'compare(?=[(])', Keyword),
             # Stream selection and control
@@ -226,10 +228,10 @@ class LogtalkLexer(RegexLexer):
             (r'\^', Operator),
             # Strings
             (r'"(\\\\|\\"|[^"])*"', String),
-            # Ponctuation
+            # Punctuation
             (r'[()\[\],.|]', Text),
             # Atoms
-            (r"[a-z]\w*", Text),
+            (r"[a-z][a-zA-Z0-9_]*", Text),
             (r"'", String, 'quoted_atom'),
         ],
 
@@ -244,36 +246,35 @@ class LogtalkLexer(RegexLexer):
         'directive': [
             # Conditional compilation directives
             (r'(el)?if(?=[(])', Keyword, 'root'),
-            (r'(e(lse|ndif))[.]', Keyword, 'root'),
+            (r'(e(lse|ndif))(?=[.])', Keyword, 'root'),
             # Entity directives
             (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
-            (r'(end_(category|object|protocol))[.]', Keyword, 'root'),
+            (r'(end_(category|object|protocol))(?=[.])', Keyword, 'root'),
             # Predicate scope directives
             (r'(public|protected|private)(?=[(])', Keyword, 'root'),
             # Other directives
             (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
             (r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'),
-            (r'(built_in|dynamic|synchronized|threaded)[.]', Keyword, 'root'),
-            (r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|'
-             r's(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'),
+            (r'(built_in|dynamic|synchronized|threaded)(?=[.])', Keyword, 'root'),
+            (r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|s(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'),
             (r'op(?=[(])', Keyword, 'root'),
             (r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'),
-            (r'[a-z]\w*(?=[(])', Text, 'root'),
-            (r'[a-z]\w*[.]', Text, 'root'),
+            (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
+            (r'[a-z][a-zA-Z0-9_]*(?=[.])', Text, 'root'),
         ],
 
         'entityrelations': [
             (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword),
             # Numbers
-            (r"0'.", Number),
+            (r"0'[\\]?.", Number),
             (r'0b[01]+', Number.Bin),
             (r'0o[0-7]+', Number.Oct),
             (r'0x[0-9a-fA-F]+', Number.Hex),
             (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
             # Variables
-            (r'([A-Z_]\w*)', Name.Variable),
+            (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
             # Atoms
-            (r"[a-z]\w*", Text),
+            (r"[a-z][a-zA-Z0-9_]*", Text),
             (r"'", String, 'quoted_atom'),
             # Strings
             (r'"(\\\\|\\"|[^"])*"', String),
@@ -281,7 +282,7 @@ class LogtalkLexer(RegexLexer):
             (r'([)]\.)', Text, 'root'),
             # Scope operator
             (r'(::)', Operator),
-            # Ponctuation
+            # Punctuation
             (r'[()\[\],.|]', Text),
             # Comments
             (r'%.*?\n', Comment),
index 0f9c4d415973e6de4ef48a91385c8064f0f62215..5f700e7f5d7b73a964cd93ace373da631a4966f2 100644 (file)
@@ -19,21 +19,288 @@ from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
 from pygments import unistring as uni
 
 __all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
-           'Python3Lexer', 'Python3TracebackLexer', 'CythonLexer',
-           'DgLexer', 'NumPyLexer']
+           'Python2Lexer', 'Python2TracebackLexer',
+           'CythonLexer', 'DgLexer', 'NumPyLexer']
 
 line_re = re.compile('.*?\n')
 
 
 class PythonLexer(RegexLexer):
     """
-    For `Python <http://www.python.org>`_ source code.
+    For `Python <http://www.python.org>`_ source code (version 3.x).
+
+    .. versionadded:: 0.10
+
+    .. versionchanged:: 2.5
+       This is now the default ``PythonLexer``.  It is still available as the
+       alias ``Python3Lexer``.
     """
 
     name = 'Python'
-    aliases = ['python', 'py', 'sage']
-    filenames = ['*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage']
-    mimetypes = ['text/x-python', 'application/x-python']
+    aliases = ['python', 'py', 'sage', 'python3', 'py3']
+    filenames = [
+        '*.py',
+        '*.pyw',
+        # Jython
+        '*.jy',
+        # Sage
+        '*.sage',
+        # SCons
+        '*.sc',
+        'SConstruct',
+        'SConscript',
+        # Skylark/Starlark (used by Bazel, Buck, and Pants)
+        '*.bzl',
+        'BUCK',
+        'BUILD',
+        'BUILD.bazel',
+        'WORKSPACE',
+        # Twisted Application infrastructure
+        '*.tac',
+    ]
+    mimetypes = ['text/x-python', 'application/x-python',
+                 'text/x-python3', 'application/x-python3']
+
+    flags = re.MULTILINE | re.UNICODE
+
+    uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
+
+    def innerstring_rules(ttype):
+        return [
+            # the old style '%s' % (...) string formatting (still valid in Py3)
+            (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+             '[hlL]?[E-GXc-giorsaux%]', String.Interpol),
+            # the new style '{}'.format(...) string formatting
+            (r'\{'
+             r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?'  # field name
+             r'(\![sra])?'                       # conversion
+             r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
+             r'\}', String.Interpol),
+
+            # backslashes, quotes and formatting signs must be parsed one at a time
+            (r'[^\\\'"%{\n]+', ttype),
+            (r'[\'"\\]', ttype),
+            # unhandled string formatting sign
+            (r'%|(\{{1,2})', ttype)
+            # newlines are an error (use "nl" state)
+        ]
+
+    tokens = {
+        'root': [
+            (r'\n', Text),
+            (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
+             bygroups(Text, String.Affix, String.Doc)),
+            (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
+             bygroups(Text, String.Affix, String.Doc)),
+            (r'[^\S\n]+', Text),
+            (r'\A#!.+$', Comment.Hashbang),
+            (r'#.*$', Comment.Single),
+            (r'[]{}:(),;[]', Punctuation),
+            (r'\\\n', Text),
+            (r'\\', Text),
+            (r'(in|is|and|or|not)\b', Operator.Word),
+            (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
+            include('keywords'),
+            (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
+            (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
+            (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+             'fromimport'),
+            (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+             'import'),
+            include('builtins'),
+            include('magicfuncs'),
+            include('magicvars'),
+            # raw strings
+            ('(?i)(rb|br|fr|rf|r)(""")',
+             bygroups(String.Affix, String.Double), 'tdqs'),
+            ("(?i)(rb|br|fr|rf|r)(''')",
+             bygroups(String.Affix, String.Single), 'tsqs'),
+            ('(?i)(rb|br|fr|rf|r)(")',
+             bygroups(String.Affix, String.Double), 'dqs'),
+            ("(?i)(rb|br|fr|rf|r)(')",
+             bygroups(String.Affix, String.Single), 'sqs'),
+            # non-raw strings
+            ('([uUbBfF]?)(""")', bygroups(String.Affix, String.Double),
+             combined('stringescape', 'tdqs')),
+            ("([uUbBfF]?)(''')", bygroups(String.Affix, String.Single),
+             combined('stringescape', 'tsqs')),
+            ('([uUbBfF]?)(")', bygroups(String.Affix, String.Double),
+             combined('stringescape', 'dqs')),
+            ("([uUbBfF]?)(')", bygroups(String.Affix, String.Single),
+             combined('stringescape', 'sqs')),
+            include('name'),
+            include('numbers'),
+        ],
+        'keywords': [
+            (words((
+                'assert', 'async', 'await', 'break', 'continue', 'del', 'elif',
+                'else', 'except', 'finally', 'for', 'global', 'if', 'lambda',
+                'pass', 'raise', 'nonlocal', 'return', 'try', 'while', 'yield',
+                'yield from', 'as', 'with'), suffix=r'\b'),
+             Keyword),
+            (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
+        ],
+        'builtins': [
+            (words((
+                '__import__', 'abs', 'all', 'any', 'bin', 'bool', 'bytearray',
+                'bytes', 'chr', 'classmethod', 'cmp', 'compile', 'complex',
+                'delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'filter',
+                'float', 'format', 'frozenset', 'getattr', 'globals', 'hasattr',
+                'hash', 'hex', 'id', 'input', 'int', 'isinstance', 'issubclass',
+                'iter', 'len', 'list', 'locals', 'map', 'max', 'memoryview',
+                'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'print',
+                'property', 'range', 'repr', 'reversed', 'round', 'set', 'setattr',
+                'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple',
+                'type', 'vars', 'zip'), prefix=r'(?<!\.)', suffix=r'\b'),
+             Name.Builtin),
+            (r'(?<!\.)(self|Ellipsis|NotImplemented|cls)\b', Name.Builtin.Pseudo),
+            (words((
+                'ArithmeticError', 'AssertionError', 'AttributeError',
+                'BaseException', 'BufferError', 'BytesWarning', 'DeprecationWarning',
+                'EOFError', 'EnvironmentError', 'Exception', 'FloatingPointError',
+                'FutureWarning', 'GeneratorExit', 'IOError', 'ImportError',
+                'ImportWarning', 'IndentationError', 'IndexError', 'KeyError',
+                'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError',
+                'NotImplementedError', 'OSError', 'OverflowError',
+                'PendingDeprecationWarning', 'ReferenceError', 'ResourceWarning',
+                'RuntimeError', 'RuntimeWarning', 'StopIteration',
+                'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit',
+                'TabError', 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
+                'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
+                'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError',
+                'Warning', 'WindowsError', 'ZeroDivisionError',
+                # new builtin exceptions from PEP 3151
+                'BlockingIOError', 'ChildProcessError', 'ConnectionError',
+                'BrokenPipeError', 'ConnectionAbortedError', 'ConnectionRefusedError',
+                'ConnectionResetError', 'FileExistsError', 'FileNotFoundError',
+                'InterruptedError', 'IsADirectoryError', 'NotADirectoryError',
+                'PermissionError', 'ProcessLookupError', 'TimeoutError',
+                # others new in Python 3
+                'StopAsyncIteration'),
+                prefix=r'(?<!\.)', suffix=r'\b'),
+             Name.Exception),
+        ],
+        'magicfuncs': [
+            (words((
+                '__abs__', '__add__', '__aenter__', '__aexit__', '__aiter__',
+                '__and__', '__anext__', '__await__', '__bool__', '__bytes__',
+                '__call__', '__complex__', '__contains__', '__del__', '__delattr__',
+                '__delete__', '__delitem__', '__dir__', '__divmod__', '__enter__',
+                '__eq__', '__exit__', '__float__', '__floordiv__', '__format__',
+                '__ge__', '__get__', '__getattr__', '__getattribute__',
+                '__getitem__', '__gt__', '__hash__', '__iadd__', '__iand__',
+                '__ifloordiv__', '__ilshift__', '__imatmul__', '__imod__',
+                '__imul__', '__index__', '__init__', '__instancecheck__',
+                '__int__', '__invert__', '__ior__', '__ipow__', '__irshift__',
+                '__isub__', '__iter__', '__itruediv__', '__ixor__', '__le__',
+                '__len__', '__length_hint__', '__lshift__', '__lt__', '__matmul__',
+                '__missing__', '__mod__', '__mul__', '__ne__', '__neg__',
+                '__new__', '__next__', '__or__', '__pos__', '__pow__',
+                '__prepare__', '__radd__', '__rand__', '__rdivmod__', '__repr__',
+                '__reversed__', '__rfloordiv__', '__rlshift__', '__rmatmul__',
+                '__rmod__', '__rmul__', '__ror__', '__round__', '__rpow__',
+                '__rrshift__', '__rshift__', '__rsub__', '__rtruediv__',
+                '__rxor__', '__set__', '__setattr__', '__setitem__', '__str__',
+                '__sub__', '__subclasscheck__', '__truediv__',
+                '__xor__'), suffix=r'\b'),
+             Name.Function.Magic),
+        ],
+        'magicvars': [
+            (words((
+                '__annotations__', '__bases__', '__class__', '__closure__',
+                '__code__', '__defaults__', '__dict__', '__doc__', '__file__',
+                '__func__', '__globals__', '__kwdefaults__', '__module__',
+                '__mro__', '__name__', '__objclass__', '__qualname__',
+                '__self__', '__slots__', '__weakref__'), suffix=r'\b'),
+             Name.Variable.Magic),
+        ],
+        'numbers': [
+            (r'(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)'
+             r'([eE][+-]?\d(?:_?\d)*)?', Number.Float),
+            (r'\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?', Number.Float),
+            (r'0[oO](?:_?[0-7])+', Number.Oct),
+            (r'0[bB](?:_?[01])+', Number.Bin),
+            (r'0[xX](?:_?[a-fA-F0-9])+', Number.Hex),
+            (r'\d(?:_?\d)*', Number.Integer),
+        ],
+        'name': [
+            (r'@' + uni_name, Name.Decorator),
+            (r'@', Operator),  # new matrix multiplication operator
+            (uni_name, Name),
+        ],
+        'funcname': [
+            include('magicfuncs'),
+            (uni_name, Name.Function, '#pop'),
+            default('#pop'),
+        ],
+        'classname': [
+            (uni_name, Name.Class, '#pop'),
+        ],
+        'import': [
+            (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
+            (r'\.', Name.Namespace),
+            (uni_name, Name.Namespace),
+            (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
+            default('#pop')  # all else: go back
+        ],
+        'fromimport': [
+            (r'(\s+)(import)\b', bygroups(Text, Keyword.Namespace), '#pop'),
+            (r'\.', Name.Namespace),
+            # if None occurs here, it's "raise x from None", since None can
+            # never be a module name
+            (r'None\b', Name.Builtin.Pseudo, '#pop'),
+            (uni_name, Name.Namespace),
+            default('#pop'),
+        ],
+        'stringescape': [
+            (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+             r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+        ],
+        'strings-single': innerstring_rules(String.Single),
+        'strings-double': innerstring_rules(String.Double),
+        'dqs': [
+            (r'"', String.Double, '#pop'),
+            (r'\\\\|\\"|\\\n', String.Escape),  # included here for raw strings
+            include('strings-double')
+        ],
+        'sqs': [
+            (r"'", String.Single, '#pop'),
+            (r"\\\\|\\'|\\\n", String.Escape),  # included here for raw strings
+            include('strings-single')
+        ],
+        'tdqs': [
+            (r'"""', String.Double, '#pop'),
+            include('strings-double'),
+            (r'\n', String.Double)
+        ],
+        'tsqs': [
+            (r"'''", String.Single, '#pop'),
+            include('strings-single'),
+            (r'\n', String.Single)
+        ],
+    }
+
+    def analyse_text(text):
+        return shebang_matches(text, r'pythonw?(3(\.\d)?)?')
+
+
+Python3Lexer = PythonLexer
+
+
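
A quick sketch, grounded only in the aliases declared above and assuming a regenerated lexer mapping, of how the renamed classes resolve at runtime::

    from pygments.lexers import get_lexer_by_name

    print(type(get_lexer_by_name('python')).__name__)    # PythonLexer (now 3.x)
    print(type(get_lexer_by_name('python3')).__name__)   # PythonLexer, via the alias
    print(type(get_lexer_by_name('python2')).__name__)   # Python2Lexer
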
+class Python2Lexer(RegexLexer):
+    """
+    For `Python 2.x <http://www.python.org>`_ source code.
+
+    .. versionchanged:: 2.5
+       This class has been renamed from ``PythonLexer``.  ``PythonLexer`` now
+       refers to the Python 3 variant.  File name patterns like ``*.py`` have
+       been moved to Python 3 as well.
+    """
+
+    name = 'Python 2.x'
+    aliases = ['python2', 'py2']
+    filenames = []  # now taken over by PythonLexer (3.x)
+    mimetypes = ['text/x-python2', 'application/x-python2']
 
     def innerstring_rules(ttype):
         return [
@@ -124,15 +391,15 @@ class PythonLexer(RegexLexer):
                 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
                 'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
                 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
-                'MemoryError', 'ModuleNotFoundError', 'NameError', 'NotImplemented', 'NotImplementedError',
-                'OSError', 'OverflowError', 'OverflowWarning', 'PendingDeprecationWarning',
-                'RecursionError', 'ReferenceError', 'RuntimeError', 'RuntimeWarning', 'StandardError',
-                'StopIteration', 'StopAsyncIteration', 'SyntaxError', 'SyntaxWarning', 'SystemError',
-                'SystemExit', 'TabError', 'TypeError', 'UnboundLocalError',
-                'UnicodeDecodeError', 'UnicodeEncodeError', 'UnicodeError',
-                'UnicodeTranslateError', 'UnicodeWarning', 'UserWarning',
-                'ValueError', 'VMSError', 'Warning', 'WindowsError',
-                'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
+                'MemoryError', 'ModuleNotFoundError', 'NameError',
+                'NotImplementedError', 'OSError', 'OverflowError', 'OverflowWarning',
+                'PendingDeprecationWarning', 'RecursionError', 'ReferenceError',
+                'RuntimeError', 'RuntimeWarning', 'StandardError', 'StopIteration',
+                'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit',
+                'TabError', 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
+                'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
+                'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError', 'Warning',
+                'WindowsError', 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
              Name.Exception),
         ],
         'magicfuncs': [
@@ -238,170 +505,10 @@ class PythonLexer(RegexLexer):
     }
 
     def analyse_text(text):
-        return shebang_matches(text, r'pythonw?(2(\.\d)?)?') or \
+        return shebang_matches(text, r'pythonw?2(\.\d)?') or \
             'import ' in text[:1000]
 
 
-class Python3Lexer(RegexLexer):
-    """
-    For `Python <http://www.python.org>`_ source code (version 3.0).
-
-    .. versionadded:: 0.10
-    """
-
-    name = 'Python 3'
-    aliases = ['python3', 'py3']
-    filenames = []  # Nothing until Python 3 gets widespread
-    mimetypes = ['text/x-python3', 'application/x-python3']
-
-    flags = re.MULTILINE | re.UNICODE
-
-    uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
-
-    def innerstring_rules(ttype):
-        return [
-            # the old style '%s' % (...) string formatting (still valid in Py3)
-            (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
-             '[hlL]?[E-GXc-giorsaux%]', String.Interpol),
-            # the new style '{}'.format(...) string formatting
-            (r'\{'
-             r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?'  # field name
-             r'(\![sra])?'                       # conversion
-             r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
-             r'\}', String.Interpol),
-
-            # backslashes, quotes and formatting signs must be parsed one at a time
-            (r'[^\\\'"%{\n]+', ttype),
-            (r'[\'"\\]', ttype),
-            # unhandled string formatting sign
-            (r'%|(\{{1,2})', ttype)
-            # newlines are an error (use "nl" state)
-        ]
-
-    tokens = PythonLexer.tokens.copy()
-    tokens['keywords'] = [
-        (words((
-            'assert', 'async', 'await', 'break', 'continue', 'del', 'elif',
-            'else', 'except', 'finally', 'for', 'global', 'if', 'lambda', 'pass',
-            'raise', 'nonlocal', 'return', 'try', 'while', 'yield', 'yield from',
-            'as', 'with'), suffix=r'\b'),
-         Keyword),
-        (words((
-            'True', 'False', 'None'), suffix=r'\b'),
-         Keyword.Constant),
-    ]
-    tokens['builtins'] = [
-        (words((
-            '__import__', 'abs', 'all', 'any', 'bin', 'bool', 'bytearray', 'bytes',
-            'chr', 'classmethod', 'cmp', 'compile', 'complex', 'delattr', 'dict',
-            'dir', 'divmod', 'enumerate', 'eval', 'filter', 'float', 'format',
-            'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id',
-            'input', 'int', 'isinstance', 'issubclass', 'iter', 'len', 'list',
-            'locals', 'map', 'max', 'memoryview', 'min', 'next', 'object', 'oct',
-            'open', 'ord', 'pow', 'print', 'property', 'range', 'repr', 'reversed',
-            'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod', 'str',
-            'sum', 'super', 'tuple', 'type', 'vars', 'zip'), prefix=r'(?<!\.)',
-            suffix=r'\b'),
-         Name.Builtin),
-        (r'(?<!\.)(self|Ellipsis|NotImplemented|cls)\b', Name.Builtin.Pseudo),
-        (words((
-            'ArithmeticError', 'AssertionError', 'AttributeError',
-            'BaseException', 'BufferError', 'BytesWarning', 'DeprecationWarning',
-            'EOFError', 'EnvironmentError', 'Exception', 'FloatingPointError',
-            'FutureWarning', 'GeneratorExit', 'IOError', 'ImportError',
-            'ImportWarning', 'IndentationError', 'IndexError', 'KeyError',
-            'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError',
-            'NotImplementedError', 'OSError', 'OverflowError',
-            'PendingDeprecationWarning', 'ReferenceError', 'ResourceWarning',
-            'RuntimeError', 'RuntimeWarning', 'StopIteration',
-            'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit', 'TabError',
-            'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
-            'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
-            'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError', 'Warning',
-            'WindowsError', 'ZeroDivisionError',
-            # new builtin exceptions from PEP 3151
-            'BlockingIOError', 'ChildProcessError', 'ConnectionError',
-            'BrokenPipeError', 'ConnectionAbortedError', 'ConnectionRefusedError',
-            'ConnectionResetError', 'FileExistsError', 'FileNotFoundError',
-            'InterruptedError', 'IsADirectoryError', 'NotADirectoryError',
-            'PermissionError', 'ProcessLookupError', 'TimeoutError'),
-            prefix=r'(?<!\.)', suffix=r'\b'),
-         Name.Exception),
-    ]
-    tokens['magicfuncs'] = [
-        (words((
-            '__abs__', '__add__', '__aenter__', '__aexit__', '__aiter__', '__and__',
-            '__anext__', '__await__', '__bool__', '__bytes__', '__call__',
-            '__complex__', '__contains__', '__del__', '__delattr__', '__delete__',
-            '__delitem__', '__dir__', '__divmod__', '__enter__', '__eq__', '__exit__',
-            '__float__', '__floordiv__', '__format__', '__ge__', '__get__',
-            '__getattr__', '__getattribute__', '__getitem__', '__gt__', '__hash__',
-            '__iadd__', '__iand__', '__ifloordiv__', '__ilshift__', '__imatmul__',
-            '__imod__', '__import__', '__imul__', '__index__', '__init__',
-            '__instancecheck__', '__int__', '__invert__', '__ior__', '__ipow__',
-            '__irshift__', '__isub__', '__iter__', '__itruediv__', '__ixor__',
-            '__le__', '__len__', '__length_hint__', '__lshift__', '__lt__',
-            '__matmul__', '__missing__', '__mod__', '__mul__', '__ne__', '__neg__',
-            '__new__', '__next__', '__or__', '__pos__', '__pow__', '__prepare__',
-            '__radd__', '__rand__', '__rdivmod__', '__repr__', '__reversed__',
-            '__rfloordiv__', '__rlshift__', '__rmatmul__', '__rmod__', '__rmul__',
-            '__ror__', '__round__', '__rpow__', '__rrshift__', '__rshift__',
-            '__rsub__', '__rtruediv__', '__rxor__', '__set__', '__setattr__',
-            '__setitem__', '__str__', '__sub__', '__subclasscheck__', '__truediv__',
-            '__xor__'), suffix=r'\b'),
-         Name.Function.Magic),
-    ]
-    tokens['magicvars'] = [
-        (words((
-            '__annotations__', '__bases__', '__class__', '__closure__', '__code__',
-            '__defaults__', '__dict__', '__doc__', '__file__', '__func__',
-            '__globals__', '__kwdefaults__', '__module__', '__mro__', '__name__',
-            '__objclass__', '__qualname__', '__self__', '__slots__', '__weakref__'),
-            suffix=r'\b'),
-         Name.Variable.Magic),
-    ]
-    tokens['numbers'] = [
-        (r'(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)'
-         r'([eE][+-]?\d(?:_?\d)*)?', Number.Float),
-        (r'\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?', Number.Float),
-        (r'0[oO](?:_?[0-7])+', Number.Oct),
-        (r'0[bB](?:_?[01])+', Number.Bin),
-        (r'0[xX](?:_?[a-fA-F0-9])+', Number.Hex),
-        (r'\d(?:_?\d)*', Number.Integer)
-    ]
-    tokens['backtick'] = []
-    tokens['name'] = [
-        (r'@\w+', Name.Decorator),
-        (r'@', Operator),  # new matrix multiplication operator
-        (uni_name, Name),
-    ]
-    tokens['funcname'] = [
-        (uni_name, Name.Function, '#pop')
-    ]
-    tokens['classname'] = [
-        (uni_name, Name.Class, '#pop')
-    ]
-    tokens['import'] = [
-        (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
-        (r'\.', Name.Namespace),
-        (uni_name, Name.Namespace),
-        (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
-        default('#pop')  # all else: go back
-    ]
-    tokens['fromimport'] = [
-        (r'(\s+)(import)\b', bygroups(Text, Keyword), '#pop'),
-        (r'\.', Name.Namespace),
-        (uni_name, Name.Namespace),
-        default('#pop'),
-    ]
-    tokens['strings-single'] = innerstring_rules(String.Single)
-    tokens['strings-double'] = innerstring_rules(String.Double)
-
-
-    def analyse_text(text):
-        return shebang_matches(text, r'pythonw?3(\.\d)?')
-
-
 class PythonConsoleLexer(Lexer):
     """
     For Python console output or doctests, such as:
@@ -419,25 +526,27 @@ class PythonConsoleLexer(Lexer):
     Additional options:
 
     `python3`
-        Use Python 3 lexer for code.  Default is ``False``.
+        Use Python 3 lexer for code.  Default is ``True``.
 
         .. versionadded:: 1.0
+        .. versionchanged:: 2.5
+           Now defaults to ``True``.
     """
     name = 'Python console session'
     aliases = ['pycon']
     mimetypes = ['text/x-python-doctest']
 
     def __init__(self, **options):
-        self.python3 = get_bool_opt(options, 'python3', False)
+        self.python3 = get_bool_opt(options, 'python3', True)
         Lexer.__init__(self, **options)
 
     def get_tokens_unprocessed(self, text):
         if self.python3:
-            pylexer = Python3Lexer(**self.options)
-            tblexer = Python3TracebackLexer(**self.options)
-        else:
             pylexer = PythonLexer(**self.options)
             tblexer = PythonTracebackLexer(**self.options)
+        else:
+            pylexer = Python2Lexer(**self.options)
+            tblexer = Python2TracebackLexer(**self.options)
 
         curcode = ''
         insertions = []
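
With the default flipped, a plain ``pycon`` lexer now highlights sessions with Python 3 rules; a minimal sketch of both code paths (the session text below is invented)::

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import PythonConsoleLexer

    session = '>>> print("hi")\nhi\n'

    # Python 3 rules are now the default for console sessions ...
    print(highlight(session, PythonConsoleLexer(), TerminalFormatter()))

    # ... and the previous behaviour remains available as an option.
    print(highlight(session, PythonConsoleLexer(python3=False), TerminalFormatter()))
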
@@ -492,23 +601,28 @@ class PythonConsoleLexer(Lexer):
 
 class PythonTracebackLexer(RegexLexer):
     """
-    For Python tracebacks.
+    For Python 3.x tracebacks, with support for chained exceptions.
 
-    .. versionadded:: 0.7
+    .. versionadded:: 1.0
+
+    .. versionchanged:: 2.5
+       This is now the default ``PythonTracebackLexer``.  It is still available
+       as the alias ``Python3TracebackLexer``.
     """
 
     name = 'Python Traceback'
-    aliases = ['pytb']
-    filenames = ['*.pytb']
-    mimetypes = ['text/x-python-traceback']
+    aliases = ['pytb', 'py3tb']
+    filenames = ['*.pytb', '*.py3tb']
+    mimetypes = ['text/x-python-traceback', 'text/x-python3-traceback']
 
     tokens = {
         'root': [
-            # Cover both (most recent call last) and (innermost last)
-            # The optional ^C allows us to catch keyboard interrupt signals.
-            (r'^(\^C)?(Traceback.*\n)',
-             bygroups(Text, Generic.Traceback), 'intb'),
-            # SyntaxError starts with this.
+            (r'\n', Text),
+            (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
+            (r'^During handling of the above exception, another '
+             r'exception occurred:\n\n', Generic.Traceback),
+            (r'^The above exception was the direct cause of the '
+             r'following exception:\n\n', Generic.Traceback),
             (r'^(?=  File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
             (r'^.*\n', Other),
         ],
@@ -529,27 +643,34 @@ class PythonTracebackLexer(RegexLexer):
     }
 
 
-class Python3TracebackLexer(RegexLexer):
+Python3TracebackLexer = PythonTracebackLexer
+
+
+class Python2TracebackLexer(RegexLexer):
     """
-    For Python 3.0 tracebacks, with support for chained exceptions.
+    For Python tracebacks.
 
-    .. versionadded:: 1.0
+    .. versionadded:: 0.7
+
+    .. versionchanged:: 2.5
+       This class has been renamed from ``PythonTracebackLexer``.
+       ``PythonTracebackLexer`` now refers to the Python 3 variant.
     """
 
-    name = 'Python 3.0 Traceback'
-    aliases = ['py3tb']
-    filenames = ['*.py3tb']
-    mimetypes = ['text/x-python3-traceback']
+    name = 'Python 2.x Traceback'
+    aliases = ['py2tb']
+    filenames = ['*.py2tb']
+    mimetypes = ['text/x-python2-traceback']
 
     tokens = {
         'root': [
-            (r'\n', Text),
-            (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
-            (r'^During handling of the above exception, another '
-             r'exception occurred:\n\n', Generic.Traceback),
-            (r'^The above exception was the direct cause of the '
-             r'following exception:\n\n', Generic.Traceback),
+            # Cover both (most recent call last) and (innermost last)
+            # The optional ^C allows us to catch keyboard interrupt signals.
+            (r'^(\^C)?(Traceback.*\n)',
+             bygroups(Text, Generic.Traceback), 'intb'),
+            # SyntaxError starts with this.
             (r'^(?=  File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
+            (r'^.*\n', Other),
         ],
         'intb': [
             (r'^(  File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
@@ -557,7 +678,7 @@ class Python3TracebackLexer(RegexLexer):
             (r'^(  File )("[^"]+")(, line )(\d+)(\n)',
              bygroups(Text, Name.Builtin, Text, Number, Text)),
             (r'^(    )(.+)(\n)',
-             bygroups(Text, using(Python3Lexer), Text)),
+             bygroups(Text, using(Python2Lexer), Text)),
             (r'^([ \t]*)(\.\.\.)(\n)',
              bygroups(Text, Comment, Text)),  # for doctests...
             (r'^([^:]+)(: )(.+)(\n)',
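
Since ``Python3TracebackLexer`` is now only an alias for ``PythonTracebackLexer``, both registered names are expected to resolve to the same class; a small lookup sketch (assuming the standard ``get_lexer_by_name`` API)::

    from pygments.lexers import get_lexer_by_name

    # 'pytb' and 'py3tb' both resolve to the Python 3 traceback lexer ...
    print(type(get_lexer_by_name('pytb')).__name__)   # PythonTracebackLexer
    print(type(get_lexer_by_name('py3tb')).__name__)  # PythonTracebackLexer

    # ... while the old behaviour stays reachable under the new 'py2tb' alias.
    print(get_lexer_by_name('py2tb').name)            # Python 2.x Traceback
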
@@ -860,7 +981,7 @@ class NumPyLexer(PythonLexer):
     mimetypes = []
     filenames = []
 
-    EXTRA_KEYWORDS = set((
+    EXTRA_KEYWORDS = {
         'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
         'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
         'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh',
@@ -925,7 +1046,7 @@ class NumPyLexer(PythonLexer):
         'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index',
         'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises',
         'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like'
-    ))
+    }
 
     def get_tokens_unprocessed(self, text):
         for index, token, value in \
@@ -936,6 +1057,6 @@ class NumPyLexer(PythonLexer):
                 yield index, token, value
 
     def analyse_text(text):
-        return (shebang_matches(text, r'pythonw?(2(\.\d)?)?') or
+        return (shebang_matches(text, r'pythonw?(3(\.\d)?)?') or
                 'import ' in text[:1000]) \
             and ('import numpy' in text or 'from numpy import' in text)
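
The flipped shebang pattern does not change how ``NumPyLexer`` is picked by content sniffing; a hedged sketch of that path (the snippet is invented, and the guessed name can vary with other installed lexers)::

    from pygments.lexers import guess_lexer

    code = "import numpy as np\nx = np.arange(10)\n"

    # analyse_text() rewards files that import numpy near the top,
    # so guess_lexer() is expected to prefer NumPyLexer here.
    print(guess_lexer(code).name)   # typically 'NumPy'
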
index 8682387e16adde706a75f3ba59085203c93c3299..5927a686d4d3e4b43e0aeb0fc12ac7b40dc9987e 100644 (file)
@@ -15,7 +15,7 @@ from pygments.lexer import RegexLexer, bygroups, default
 from pygments.token import Keyword, Punctuation, String, Number, Operator, Generic, \
     Whitespace, Name, Literal, Comment, Text
 
-__all__ = ['SparqlLexer', 'TurtleLexer']
+__all__ = ['SparqlLexer', 'TurtleLexer', 'ShExCLexer']
 
 
 class SparqlLexer(RegexLexer):
@@ -275,3 +275,149 @@ class TurtleLexer(RegexLexer):
         for t in ('@base ', 'BASE ', '@prefix ', 'PREFIX '):
             if re.search(r'^\s*%s' % t, text):
                 return 0.80
+
+
+class ShExCLexer(RegexLexer):
+    """
+    Lexer for `ShExC <https://shex.io/shex-semantics/#shexc>`_, the compact
+    syntax for ShEx shape expressions.
+
+    .. versionadded:: 2.5
+    """
+    name = 'ShExC'
+    aliases = ['shexc', 'shex']
+    filenames = ['*.shex']
+    mimetypes = ['text/shex']
+
+    # character group definitions ::
+
+    PN_CHARS_BASE_GRP = (u'a-zA-Z'
+                         u'\u00c0-\u00d6'
+                         u'\u00d8-\u00f6'
+                         u'\u00f8-\u02ff'
+                         u'\u0370-\u037d'
+                         u'\u037f-\u1fff'
+                         u'\u200c-\u200d'
+                         u'\u2070-\u218f'
+                         u'\u2c00-\u2fef'
+                         u'\u3001-\ud7ff'
+                         u'\uf900-\ufdcf'
+                         u'\ufdf0-\ufffd')
+
+    PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
+
+    PN_CHARS_GRP = (PN_CHARS_U_GRP +
+                    r'\-' +
+                    r'0-9' +
+                    u'\u00b7' +
+                    u'\u0300-\u036f' +
+                    u'\u203f-\u2040')
+
+    HEX_GRP = '0-9A-Fa-f'
+
+    PN_LOCAL_ESC_CHARS_GRP = r"_~.\-!$&'()*+,;=/?#@%"
+
+    # terminal productions ::
+
+    PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
+
+    PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
+
+    PN_CHARS = '[' + PN_CHARS_GRP + ']'
+
+    HEX = '[' + HEX_GRP + ']'
+
+    PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
+
+    UCHAR_NO_BACKSLASH = '(?:u' + HEX + '{4}|U' + HEX + '{8})'
+
+    UCHAR = r'\\' + UCHAR_NO_BACKSLASH
+
+    IRIREF = r'<(?:[^\x00-\x20<>"{}|^`\\]|' + UCHAR + ')*>'
+
+    BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
+                       '.]*' + PN_CHARS + ')?'
+
+    PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
+
+    PERCENT = '%' + HEX + HEX
+
+    PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
+
+    PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
+
+    PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
+                '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
+                PN_CHARS_GRP + ':]|' + PLX + '))?')
+
+    EXPONENT = r'[eE][+-]?\d+'
+
+    # Lexer token definitions ::
+
+    tokens = {
+        'root': [
+            (r'\s+', Text),
+            # keywords ::
+            (r'(?i)(base|prefix|start|external|'
+             r'literal|iri|bnode|nonliteral|length|minlength|maxlength|'
+             r'mininclusive|minexclusive|maxinclusive|maxexclusive|'
+             r'totaldigits|fractiondigits|'
+             r'closed|extra)\b', Keyword),
+            (r'(a)\b', Keyword),
+            # IRIs ::
+            ('(' + IRIREF + ')', Name.Label),
+            # blank nodes ::
+            ('(' + BLANK_NODE_LABEL + ')', Name.Label),
+            # prefixed names ::
+            (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + ')?',
+             bygroups(Name.Namespace, Punctuation, Name.Tag)),
+            # boolean literals ::
+            (r'(true|false)', Keyword.Constant),
+            # double literals ::
+            (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
+            # decimal literals ::
+            (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
+            # integer literals ::
+            (r'[+\-]?\d+', Number.Integer),
+            # operators ::
+            (r'[@|$&=*+?^\-~]', Operator),
+            # operator keywords ::
+            (r'(?i)(and|or|not)\b', Operator.Word),
+            # punctuation characters ::
+            (r'[(){}.;,:^\[\]]', Punctuation),
+            # line comments ::
+            (r'#[^\n]*', Comment),
+            # strings ::
+            (r'"""', String, 'triple-double-quoted-string'),
+            (r'"', String, 'single-double-quoted-string'),
+            (r"'''", String, 'triple-single-quoted-string'),
+            (r"'", String, 'single-single-quoted-string'),
+        ],
+        'triple-double-quoted-string': [
+            (r'"""', String, 'end-of-string'),
+            (r'[^\\]+', String),
+            (r'\\', String, 'string-escape'),
+        ],
+        'single-double-quoted-string': [
+            (r'"', String, 'end-of-string'),
+            (r'[^"\\\n]+', String),
+            (r'\\', String, 'string-escape'),
+        ],
+        'triple-single-quoted-string': [
+            (r"'''", String, 'end-of-string'),
+            (r'[^\\]+', String),
+            (r'\\', String.Escape, 'string-escape'),
+        ],
+        'single-single-quoted-string': [
+            (r"'", String, 'end-of-string'),
+            (r"[^'\\\n]+", String),
+            (r'\\', String, 'string-escape'),
+        ],
+        'string-escape': [
+            (UCHAR_NO_BACKSLASH, String.Escape, '#pop'),
+            (r'.', String.Escape, '#pop'),
+        ],
+        'end-of-string': [
+            (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
+             bygroups(Operator, Name.Function), '#pop:2'),
+            (r'\^\^', Operator, '#pop:2'),
+            default('#pop:2'),
+        ],
+    }
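
A minimal usage sketch for the new lexer (the shape expression below is invented)::

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import ShExCLexer

    shex = '''PREFIX ex: <http://pygments.example/#>

    ex:Lexer {
      ex:name xsd:string ;       # exactly one name
      ex:aliases xsd:string*     # any number of aliases
    }
    '''

    print(highlight(shex, ShExCLexer(), TerminalFormatter()))
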
index 6cc88b953a9d0ad956cc5bc46c4167e247087ff8..ccd4e5f6ccf80cc2d0cec51cc1cc1e16a5cb59a8 100644 (file)
@@ -26,7 +26,7 @@ class ResourceLexer(RegexLexer):
     """
     name = 'ResourceBundle'
     aliases = ['resource', 'resourcebundle']
-    filenames = ['*.txt']
+    filenames = []
 
     _types = (':table', ':array', ':string', ':bin', ':import', ':intvector',
               ':int', ':alias')
index 1288da8db0b2b04ed2401dc7c0ad03da6e0b1388..642c90c5c1f290e70527dcb152a535cb0cbdf918 100644 (file)
@@ -64,7 +64,7 @@ class RobotFrameworkLexer(Lexer):
     """
     name = 'RobotFramework'
     aliases = ['robotframework']
-    filenames = ['*.txt', '*.robot']
+    filenames = ['*.robot']
     mimetypes = ['text/x-robotframework']
 
     def __init__(self, **options):
index 723895d19775d9fa117ce675d6aa01fd54b47cd2..8bcbde6714be3570d277a47368fe2dcb57a4b65b 100644 (file)
@@ -43,17 +43,17 @@ class RubyLexer(ExtendedRegexLexer):
 
     def heredoc_callback(self, match, ctx):
         # okay, this is the hardest part of parsing Ruby...
-        # match: 1 = <<-?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
+        # match: 1 = <<[-~]?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
 
         start = match.start(1)
-        yield start, Operator, match.group(1)        # <<-?
+        yield start, Operator, match.group(1)        # <<[-~]?
         yield match.start(2), String.Heredoc, match.group(2)   # quote ", ', `
         yield match.start(3), String.Delimiter, match.group(3) # heredoc name
         yield match.start(4), String.Heredoc, match.group(4)   # quote again
 
         heredocstack = ctx.__dict__.setdefault('heredocstack', [])
         outermost = not bool(heredocstack)
-        heredocstack.append((match.group(1) == '<<-', match.group(3)))
+        heredocstack.append((match.group(1) in ('<<-', '<<~'), match.group(3)))
 
         ctx.pos = match.start(5)
         ctx.end = match.end(5)
@@ -247,10 +247,10 @@ class RubyLexer(ExtendedRegexLexer):
              Name.Builtin),
             (r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
             # normal heredocs
-            (r'(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
+            (r'(?<!\w)(<<[-~]?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
              heredoc_callback),
             # empty string heredocs
-            (r'(<<-?)("|\')()(\2)(.*?\n)', heredoc_callback),
+            (r'(<<[-~]?)("|\')()(\2)(.*?\n)', heredoc_callback),
             (r'__END__', Comment.Preproc, 'end-part'),
             # multiline regex (after keywords or assignments)
             (r'(?:^|(?<=[=<>~!:])|'
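
The effect of the new ``<<~`` branch is easiest to see by running the lexer over a squiggly heredoc; a small sketch (the Ruby snippet is invented)::

    from pygments.lexers import RubyLexer

    ruby = 'greeting = <<~EOS\n  Hello from a squiggly heredoc\nEOS\n'

    # With <<~ recognised, the delimiter is tokenised like the <<- form:
    # 'EOS' comes out as String.Delimiter and the body as String.Heredoc.
    for token, value in RubyLexer().get_tokens(ruby):
        print(token, repr(value))
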
index 2b9e3129bbdc4a96c9cf84a31be8c07d48082e7a..f731785fe68dc453314df05fd73e25b0eaf40c95 100644 (file)
@@ -59,7 +59,7 @@ class RustLexer(RegexLexer):
     tokens = {
         'root': [
             # rust allows a file to start with a shebang, but if the first line
-            # starts with #![ then its not a shebang but a crate attribute.
+            # starts with #![ then it's not a shebang but a crate attribute.
             (r'#![^[\r\n].*$', Comment.Preproc),
             default('base'),
         ],
@@ -78,10 +78,10 @@ class RustLexer(RegexLexer):
             (r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
             # Keywords
             (words((
-                'as', 'box', 'const', 'crate', 'else', 'extern',
-                'for', 'if', 'impl', 'in', 'loop', 'match', 'move',
-                'mut', 'pub', 'ref', 'return', 'static', 'super',
-                'trait', 'unsafe', 'use', 'where', 'while'), suffix=r'\b'),
+                'as', 'async', 'await', 'box', 'const', 'crate', 'else',
+                'extern', 'for', 'if', 'impl', 'in', 'loop', 'match', 'move',
+                'mut', 'pub', 'ref', 'return', 'static', 'super', 'trait',
+                'try', 'unsafe', 'use', 'where', 'while'), suffix=r'\b'),
              Keyword),
             (words(('abstract', 'alignof', 'become', 'do', 'final', 'macro',
                     'offsetof', 'override', 'priv', 'proc', 'pure', 'sizeof',
@@ -95,7 +95,7 @@ class RustLexer(RegexLexer):
             (r'(default)(\s+)(type|fn)\b', bygroups(Keyword, Text, Keyword)),
             keyword_types,
             (r'self\b', Name.Builtin.Pseudo),
-            # Prelude (taken from Rusts src/libstd/prelude.rs)
+            # Prelude (taken from Rust's src/libstd/prelude.rs)
             builtin_types,
             # Path separators, so types don't catch them.
             (r'::\b', Text),
diff --git a/pygments/lexers/scdoc.py b/pygments/lexers/scdoc.py
new file mode 100644 (file)
index 0000000..4916393
--- /dev/null
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.lexers.scdoc
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer for scdoc, a simple man page generator.
+
+    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, \
+    using, this
+from pygments.token import Text, Comment, Keyword, String, \
+    Generic
+
+
+__all__ = ['ScdocLexer']
+
+
+class ScdocLexer(RegexLexer):
+    """
+    `scdoc` is a simple man page generator for POSIX systems written in C99.
+    https://git.sr.ht/~sircmpwn/scdoc
+
+    .. versionadded:: 2.5
+    """
+    name = 'scdoc'
+    aliases = ['scdoc', 'scd']
+    filenames = ['*.scd', '*.scdoc']
+    flags = re.MULTILINE
+
+    tokens = {
+        'root': [
+            # comment
+            (r'^(;.+\n)', bygroups(Comment)),
+
+            # heading with pound prefix
+            (r'^(#)([^#].+\n)', bygroups(Generic.Heading, Text)),
+            (r'^(#{2})(.+\n)', bygroups(Generic.Subheading, Text)),
+            # bulleted lists
+            (r'^(\s*)([*-])(\s)(.+\n)',
+            bygroups(Text, Keyword, Text, using(this, state='inline'))),
+            # numbered lists
+            (r'^(\s*)(\.+\.)( .+\n)',
+            bygroups(Text, Keyword, using(this, state='inline'))),
+            # quote
+            (r'^(\s*>\s)(.+\n)', bygroups(Keyword, Generic.Emph)),
+            # text block
+            (r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)),
+
+            include('inline'),
+        ],
+        'inline': [
+            # escape
+            (r'\\.', Text),
+            # underlines
+            (r'(\s)(_[^_]+_)(\W|\n)', bygroups(Text, Generic.Emph, Text)),
+            # bold
+            (r'(\s)(\*[^\*]+\*)(\W|\n)', bygroups(Text, Generic.Strong, Text)),
+            # inline code
+            (r'`[^`]+`', String.Backtick),
+
+            # general text, must come last!
+            (r'[^\\\s]+', Text),
+            (r'.', Text),
+        ],
+    }
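
A short usage sketch for the new lexer (the page below is an invented scdoc fragment)::

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import ScdocLexer

    page = '''example(1)

    # NAME

    *example* - demonstrate _scdoc_ highlighting

    - first item
    - second item
    '''

    print(highlight(page, ScdocLexer(), TerminalFormatter()))
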
index 972c4004b35f74277e0562777e20dd1044c58a46..c12cb3f137fa118462e6db3cf8181d4c3864a118 100644 (file)
@@ -154,6 +154,9 @@ class ShellSessionBaseLexer(Lexer):
 
     .. versionadded:: 2.1
     """
+
+    _venv = re.compile(r'^(\([^)]*\))(\s*)')
+
     def get_tokens_unprocessed(self, text):
         innerlexer = self._innerLexerCls(**self.options)
 
@@ -164,11 +167,24 @@ class ShellSessionBaseLexer(Lexer):
 
         for match in line_re.finditer(text):
             line = match.group()
-            m = re.match(self._ps1rgx, line)
             if backslash_continuation:
                 curcode += line
                 backslash_continuation = curcode.endswith('\\\n')
-            elif m:
+                continue
+
+            venv_match = self._venv.match(line)
+            if venv_match:
+                venv = venv_match.group(1)
+                venv_whitespace = venv_match.group(2)
+                insertions.append((len(curcode),
+                    [(0, Generic.Prompt.VirtualEnv, venv)]))
+                if venv_whitespace:
+                    insertions.append((len(curcode),
+                        [(0, Text, venv_whitespace)]))
+                line = line[venv_match.end():]
+
+            m = self._ps1rgx.match(line)
+            if m:
                 # To support output lexers (say diff output), the output
                 # needs to be broken by prompts whenever the output lexer
                 # changes.
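
A sketch of what the new branch is intended to catch, namely a leading ``(venv)`` marker ahead of the shell prompt (the session text is invented)::

    from pygments.lexers import BashSessionLexer
    from pygments.token import Generic

    session = '(venv) user@host:~$ pip install pygments\nCollecting pygments\n'

    # The '(venv)' prefix should now surface as Generic.Prompt.VirtualEnv
    # instead of being folded into the prompt or the command.
    for token, value in BashSessionLexer().get_tokens(session):
        if token is Generic.Prompt.VirtualEnv:
            print(repr(value))   # '(venv)'
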
@@ -211,9 +227,9 @@ class BashSessionLexer(ShellSessionBaseLexer):
     mimetypes = ['application/x-shell-session', 'application/x-sh-session']
 
     _innerLexerCls = BashLexer
-    _ps1rgx = \
+    _ps1rgx = re.compile(
         r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \
-        r'?|\[\S+[@:][^\n]+\].+))\s*[$#%])(.*\n?)'
+        r'?|\[\S+[@:][^\n]+\].+))\s*[$#%])(.*\n?)')
     _ps2 = '>'
 
 
@@ -540,7 +556,7 @@ class MSDOSSessionLexer(ShellSessionBaseLexer):
     mimetypes = []
 
     _innerLexerCls = BatchLexer
-    _ps1rgx = r'^([^>]*>)(.*\n?)'
+    _ps1rgx = re.compile(r'^([^>]*>)(.*\n?)')
     _ps2 = 'More? '
 
 
@@ -625,7 +641,7 @@ class TcshSessionLexer(ShellSessionBaseLexer):
     mimetypes = []
 
     _innerLexerCls = TcshLexer
-    _ps1rgx = r'^([^>]+>)(.*\n?)'
+    _ps1rgx = re.compile(r'^([^>]+>)(.*\n?)')
     _ps2 = '? '
 
 
@@ -756,7 +772,7 @@ class PowerShellSessionLexer(ShellSessionBaseLexer):
     mimetypes = []
 
     _innerLexerCls = PowerShellLexer
-    _ps1rgx = r'^(PS [^>]+> )(.*\n?)'
+    _ps1rgx = re.compile(r'^(PS [^>]+> )(.*\n?)')
     _ps2 = '>> '
 
 
index b631410531b550bdf29b2a64d7e8d0f55ae07574..76e5929d38a7eecf0d0acf3a1e19a0acb3f3ace1 100644 (file)
@@ -26,7 +26,7 @@ class SlashLanguageLexer(ExtendedRegexLexer):
     def right_angle_bracket(lexer, match, ctx):
         if len(ctx.stack) > 1 and ctx.stack[-2] == "string":
             ctx.stack.pop()
-        yield match.start(), String.Interpol, "}"
+        yield match.start(), String.Interpol, u"}"
         ctx.pos = match.end()
         pass
 
diff --git a/pygments/lexers/solidity.py b/pygments/lexers/solidity.py
new file mode 100644 (file)
index 0000000..9966837
--- /dev/null
@@ -0,0 +1,93 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.lexers.solidity
+    ~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Solidity.
+
+    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation
+
+__all__ = ['SolidityLexer']
+
+
+class SolidityLexer(RegexLexer):
+    """
+    For Solidity source code.
+
+    .. versionadded:: 2.5
+    """
+
+    name = 'Solidity'
+    aliases = ['solidity']
+    filenames = ['*.sol']
+    mimetypes = []
+
+    flags = re.MULTILINE | re.UNICODE
+
+    datatype = (
+        r'\b(address|bool|((bytes|hash|int|string|uint)(8|16|24|32|40|48|56|64'
+        r'|72|80|88|96|104|112|120|128|136|144|152|160|168|176|184|192|200|208'
+        r'|216|224|232|240|248|256)?))\b'
+    )
+
+    tokens = {
+        'root': [
+            include('whitespace'),
+            include('comments'),
+            (r'\bpragma\s+solidity\b', Keyword, 'pragma'),
+            (r'\b(contract)(\s+)([a-zA-Z_]\w*)',
+             bygroups(Keyword, Text.WhiteSpace, Name.Entity)),
+            (datatype + r'(\s+)((external|public|internal|private)\s+)?' +
+             r'([a-zA-Z_]\w*)',
+             bygroups(Keyword.Type, None, None, None, Text.WhiteSpace, Keyword,
+                      None, Name.Variable)),
+            (r'\b(enum|event|function|struct)(\s+)([a-zA-Z_]\w*)',
+             bygroups(Keyword.Type, Text.WhiteSpace, Name.Variable)),
+            (r'\b(msg|block|tx)\.([A-Za-z_][A-Za-z0-9_]*)\b', Keyword),
+            (words((
+                'block', 'break', 'constant', 'constructor', 'continue',
+                'contract', 'do', 'else', 'external', 'false', 'for',
+                'function', 'if', 'import', 'inherited', 'internal', 'is',
+                'library', 'mapping', 'memory', 'modifier', 'msg', 'new',
+                'payable', 'private', 'public', 'require', 'return',
+                'returns', 'struct', 'suicide', 'throw', 'this', 'true',
+                'tx', 'var', 'while'), prefix=r'\b', suffix=r'\b'),
+             Keyword.Type),
+            (words(('keccak256',), prefix=r'\b', suffix=r'\b'), Name.Builtin),
+            (datatype, Keyword.Type),
+            include('constants'),
+            (r'[a-zA-Z_]\w*', Text),
+            (r'[!<=>+*/-]', Operator),
+            (r'[.;:{}(),\[\]]', Punctuation)
+        ],
+        'comments': [
+            (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
+            (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
+            (r'/(\\\n)?[*][\w\W]*', Comment.Multiline)
+        ],
+        'constants': [
+            (r'("([\\]"|.)*?")', String.Double),
+            (r"('([\\]'|.)*?')", String.Single),
+            (r'\b0[xX][0-9a-fA-F]+\b', Number.Hex),
+            (r'\b\d+\b', Number.Decimal),
+        ],
+        'pragma': [
+            include('whitespace'),
+            include('comments'),
+            (r'(\^|>=|<)(\s*)(\d+\.\d+\.\d+)',
+             bygroups(Operator, Text.WhiteSpace, Keyword)),
+            (r';', Punctuation, '#pop')
+        ],
+        'whitespace': [
+            (r'\s+', Text.WhiteSpace),
+            (r'\n', Text.WhiteSpace)
+        ]
+    }
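
A hedged usage sketch for the new lexer (the contract below is a made-up fragment)::

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import SolidityLexer

    source = '''pragma solidity ^0.5.0;

    contract Counter {
        uint256 private count;

        function increment() public {
            count += 1;
        }
    }
    '''

    print(highlight(source, SolidityLexer(), TerminalFormatter()))
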
index 1b3e9724ee654aa3c9f8b6a5d914ad25853cf13e..4016c5949b052c87766ff940d32aa383203fb25e 100644 (file)
@@ -35,6 +35,7 @@ class TextLexer(Lexer):
     def analyse_text(text):
         return TextLexer.priority
 
+
 _ttype_cache = {}
 
 line_re = re.compile(b'.*?\n')
index d789052d6f58afe655a362278cc0cab3729ce828..0788cd90404cb963db46705f9c63fefcbdd01fcb 100644 (file)
@@ -212,7 +212,7 @@ class PlPgsqlLexer(PostgresBase, RegexLexer):
     mimetypes = ['text/x-plpgsql']
 
     flags = re.IGNORECASE
-    tokens = dict((k, l[:]) for (k, l) in iteritems(PostgresLexer.tokens))
+    tokens = {k: l[:] for (k, l) in iteritems(PostgresLexer.tokens)}
 
     # extend the keywords list
     for i, pattern in enumerate(tokens['root']):
@@ -246,7 +246,7 @@ class PsqlRegexLexer(PostgresBase, RegexLexer):
     aliases = []    # not public
 
     flags = re.IGNORECASE
-    tokens = dict((k, l[:]) for (k, l) in iteritems(PostgresLexer.tokens))
+    tokens = {k: l[:] for (k, l) in iteritems(PostgresLexer.tokens)}
 
     tokens['root'].append(
         (r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
@@ -547,7 +547,7 @@ class TransactSqlLexer(RegexLexer):
             rating = 1.0
         else:
             name_between_backtick_count = len(
-                name_between_backtick_re.findall((text)))
+                name_between_backtick_re.findall(text))
             name_between_bracket_count = len(
                 name_between_bracket_re.findall(text))
             # We need to check if there are any names using
@@ -643,7 +643,7 @@ class MySqlLexer(RegexLexer):
     def analyse_text(text):
         rating = 0
         name_between_backtick_count = len(
-            name_between_backtick_re.findall((text)))
+            name_between_backtick_re.findall(text))
         name_between_bracket_count = len(
             name_between_bracket_re.findall(text))
         # Same logic as above in the TSQL analysis
index 8c42363769c0f044aefec27f637d00b493b6d554..f891242cb81991d4f897113135015a16c65a54ca 100644 (file)
@@ -226,7 +226,7 @@ class VelocityLexer(RegexLexer):
              'directiveparams'),
             (r'(#\{?)(' + identifier + r')(\}|\b)',
              bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
-            (r'\$\{?', Punctuation, 'variable')
+            (r'\$!?\{?', Punctuation, 'variable')
         ],
         'variable': [
             (identifier, Name.Variable),
@@ -249,7 +249,7 @@ class VelocityLexer(RegexLexer):
             (r'\]', Operator, '#pop')
         ],
         'funcparams': [
-            (r'\$\{?', Punctuation, 'variable'),
+            (r'\$!?\{?', Punctuation, 'variable'),
             (r'\s+', Text),
             (r'[,:]', Punctuation),
             (r'"(\\\\|\\"|[^"])*"', String.Double),
@@ -274,7 +274,7 @@ class VelocityLexer(RegexLexer):
             rv += 0.15
         if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text):
             rv += 0.15
-        if re.search(r'\$\{?[a-zA-Z_]\w*(\([^)]*\))?'
+        if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
                      r'(\.\w+(\([^)]*\))?)*\}?', text):
             rv += 0.01
         return rv
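
The widened ``$!?`` pattern also covers Velocity's quiet-reference notation; a small sketch (the template text is invented)::

    from pygments.lexers import VelocityLexer
    from pygments.token import Name

    template = 'Hello $!{user.name}, you have $count new messages.\n'

    # Both the quiet reference $!{...} and the plain reference $count should
    # now enter the 'variable' state and come out as Name.Variable tokens.
    for token, value in VelocityLexer().get_tokens(template):
        if token in Name.Variable:
            print(repr(value))
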
@@ -1802,27 +1802,26 @@ class HandlebarsLexer(RegexLexer):
         'root': [
             (r'[^{]+', Other),
 
+            # Comment start: {{! ... }} or {{!-- ... --}}
             (r'\{\{!.*\}\}', Comment),
 
+            # HTML Escaping open {{{expression
             (r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),
+
+            # {{blockOpen, {{#blockOpen, {{/blockClose, with optional tilde ~
+            (r'(\{\{)([#~/]+)([^\s}]*)',
+             bygroups(Comment.Preproc, Number.Attribute, Number.Attribute), 'tag'),
             (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
         ],
 
         'tag': [
             (r'\s+', Text),
+            # HTML Escaping close }}}
             (r'\}\}\}', Comment.Special, '#pop'),
-            (r'\}\}', Comment.Preproc, '#pop'),
-
-            # Handlebars
-            (r'([#/]*)(each|if|unless|else|with|log|in(line)?)', bygroups(Keyword,
-             Keyword)),
-            (r'#\*inline', Keyword),
-
-            # General {{#block}}
-            (r'([#/])([\w-]+)', bygroups(Name.Function, Name.Function)),
+            # blockClose }}, includes optional tilde ~
+            (r'(~?)(\}\})', bygroups(Number, Comment.Preproc), '#pop'),
 
             # {{opt=something}}
-            (r'([\w-]+)(=)', bygroups(Name.Attribute, Operator)),
+            (r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)),
 
             # Partials {{> ...}}
             (r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
@@ -1845,7 +1844,7 @@ class HandlebarsLexer(RegexLexer):
             include('generic'),
         ],
         'variable': [
-            (r'[a-zA-Z][\w-]*', Name.Variable),
+            (r'[()/@a-zA-Z][\w-]*', Name.Variable),
             (r'\.[\w-]+', Name.Variable),
             (r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable),
         ],
index 0fd1778a2c18dd3bb594ed94bbb86fe91fd39f8d..1d7483da2475d4b62d0f61676c5a4e9fb124b30f 100644 (file)
@@ -154,5 +154,5 @@ class TeraTermLexer(RegexLexer):
     def analyse_text(text):
         result = 0.0
         if re.search(TeraTermLexer.tokens['commands'][0][0], text):
-            result += 0.60
+            result += 0.01
         return result
index a3aed0c0005e9603a7f44d4d10e3418bc86e3be3..d3a191b08f9c2befadf7f86a9d593bb198e1e011 100644 (file)
 
 import re
 
-from pygments.lexer import RegexLexer, bygroups
+from pygments.lexers import guess_lexer, get_lexer_by_name
+from pygments.lexer import RegexLexer, bygroups, default, do_insertions
 from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Generic, Literal
+    Number, Generic, Literal, Punctuation
 from pygments.util import ClassNotFound
 
-__all__ = ['IrcLogsLexer', 'TodotxtLexer', 'HttpLexer', 'GettextLexer']
+__all__ = ['IrcLogsLexer', 'TodotxtLexer', 'HttpLexer', 'GettextLexer',
+           'NotmuchLexer']
 
 
 class IrcLogsLexer(RegexLexer):
@@ -295,3 +297,86 @@ class TodotxtLexer(RegexLexer):
             (r'\s+', IncompleteTaskText),
         ],
     }
+
+
+class NotmuchLexer(RegexLexer):
+    """
+    For `Notmuch <https://notmuchmail.org/>`_ email text format.
+
+    .. versionadded:: 2.5
+
+    Additional options accepted:
+
+    `body_lexer`
+        If given, highlight the contents of the message body with the specified
+        lexer, else guess it according to the body content (default: ``None``).
+    """
+
+    name = 'Notmuch'
+    aliases = ['notmuch']
+
+    def _highlight_code(self, match):
+        code = match.group(1)
+
+        try:
+            if self.body_lexer:
+                lexer = get_lexer_by_name(self.body_lexer)
+            else:
+                lexer = guess_lexer(code.strip())
+        except ClassNotFound:
+            lexer = get_lexer_by_name('text')
+
+        for item in lexer.get_tokens_unprocessed(code):
+            yield item
+
+    tokens = {
+        'root': [
+            (r'\fmessage{\s*', Keyword, ('message', 'message-attr')),
+        ],
+        'message-attr': [
+            (r'(\s*id:\s*)([^\s]+)', bygroups(Name.Attribute, String)),
+            (r'(\s*(?:depth|match|excluded):\s*)(\d+)',
+             bygroups(Name.Attribute, Number.Integer)),
+            (r'(\s*filename:\s*)(.+\n)',
+             bygroups(Name.Attribute, String)),
+            default('#pop'),
+        ],
+        'message': [
+            (r'\fmessage}\n', Keyword, '#pop'),
+            (r'\fheader{\n', Keyword, 'header'),
+            (r'\fbody{\n', Keyword, 'body'),
+        ],
+        'header': [
+            (r'\fheader}\n', Keyword, '#pop'),
+            (r'((?:Subject|From|To|Cc|Date):\s*)(.*\n)',
+             bygroups(Name.Attribute, String)),
+            (r'(.*)(\s*\(.*\))(\s*\(.*\)\n)',
+             bygroups(Generic.Strong, Literal, Name.Tag)),
+        ],
+        'body': [
+            (r'\fpart{\n', Keyword, 'part'),
+            (r'\f(part|attachment){\s*', Keyword, ('part', 'part-attr')),
+            (r'\fbody}\n', Keyword, '#pop'),
+        ],
+        'part-attr': [
+            (r'(ID:\s*)(\d+)', bygroups(Name.Attribute, Number.Integer)),
+            (r'(,\s*)((?:Filename|Content-id):\s*)([^,]+)',
+             bygroups(Punctuation, Name.Attribute, String)),
+            (r'(,\s*)(Content-type:\s*)(.+\n)',
+             bygroups(Punctuation, Name.Attribute, String)),
+            default('#pop'),
+        ],
+        'part': [
+            (r'\f(?:part|attachment)}\n', Keyword, '#pop'),
+            (r'\f(?:part|attachment){\s*', Keyword, ('#push', 'part-attr')),
+            (r'^Non-text part: .*\n', Comment),
+            (r'(?s)(.*?(?=\f(?:part|attachment)}\n))', _highlight_code),
+        ],
+    }
+
+    def analyse_text(text):
+        return 1.0 if text.startswith('\fmessage{') else 0.0
+
+    def __init__(self, **options):
+        self.body_lexer = options.get('body_lexer', None)
+        RegexLexer.__init__(self, **options)
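
A hedged sketch of the ``body_lexer`` option; the dump below is a fabricated, heavily trimmed ``notmuch show`` style message::

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import NotmuchLexer

    dump = ('\fmessage{ id:msg-1 depth:0 match:1 excluded:0 filename:/tmp/mail\n'
            '\fheader{\n'
            'Subject: demo\n'
            '\fheader}\n'
            '\fbody{\n'
            '\fpart{ ID: 1, Content-type: text/plain\n'
            'print("hello from the body")\n'
            '\fpart}\n'
            '\fbody}\n'
            '\fmessage}\n')

    # Without body_lexer the body is guessed; with it, the named lexer is forced.
    print(highlight(dump, NotmuchLexer(body_lexer='python'), TerminalFormatter()))
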
index 3d08cefc6198f72a12b9f47e0256d1f5beacc5cb..745292bd11ccbbb47241c2e673f886937eaa1d8f 100644 (file)
@@ -113,9 +113,6 @@ class TypoScriptLexer(RegexLexer):
 
     flags = re.DOTALL | re.MULTILINE
 
-    # Slightly higher than TypeScript (which is 0).
-    priority = 0.0
-
     tokens = {
         'root': [
             include('comment'),
diff --git a/pygments/lexers/zig.py b/pygments/lexers/zig.py
new file mode 100644 (file)
index 0000000..7850fdf
--- /dev/null
@@ -0,0 +1,126 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.lexers.zig
+    ~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for Zig.
+
+    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+from pygments.lexer import RegexLexer, bygroups, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Error, Whitespace
+
+__all__ = ['ZigLexer']
+
+class ZigLexer(RegexLexer):
+    """
+    For `Zig <http://www.ziglang.org>`_ source code.
+
+    grammar: https://ziglang.org/documentation/master/#Grammar
+    """
+    name = 'Zig'
+    aliases = ['zig']
+    filenames = ['*.zig']
+    mimetypes = ['text/zig']
+
+    type_keywords = (
+        words(('bool', 'f16', 'f32', 'f64', 'f128', 'void', 'noreturn', 'type',
+               'anyerror', 'promise', 'i0', 'u0', 'isize', 'usize',
+               'comptime_int', 'comptime_float', 'c_short', 'c_ushort', 'c_int',
+               'c_uint', 'c_long', 'c_ulong', 'c_longlong', 'c_ulonglong',
+               'c_longdouble', 'c_void', 'i8', 'u8', 'i16', 'u16', 'i32', 'u32',
+               'i64', 'u64', 'i128', 'u128'), suffix=r'\b'),
+        Keyword.Type)
+    
+    storage_keywords = (
+        words(('const', 'var', 'extern', 'packed', 'export', 'pub', 'noalias',
+               'inline', 'comptime', 'nakedcc', 'stdcallcc', 'volatile', 'allowzero',
+               'align', 'linksection', 'threadlocal'), suffix=r'\b'),
+        Keyword.Reserved)
+
+    structure_keywords = (
+        words(('struct', 'enum', 'union', 'error'), suffix=r'\b'),
+        Keyword)
+
+    statement_keywords = (
+        words(('break', 'return', 'continue', 'asm', 'defer', 'errdefer', 
+               'unreachable', 'try', 'catch', 'async', 'await', 'suspend', 
+               'resume', 'cancel'), suffix=r'\b'),
+        Keyword)
+
+    conditional_keywords = (
+        words(('if', 'else', 'switch', 'and', 'or', 'orelse'), suffix=r'\b'),
+        Keyword)
+
+    repeat_keywords = (
+        words(('while', 'for'), suffix=r'\b'),
+        Keyword)
+
+    other_keywords = (
+        words(('fn', 'usingnamespace', 'test'), suffix=r'\b'),
+        Keyword)
+
+    constant_keywords = (
+        words(('true', 'false', 'null', 'undefined'), suffix=r'\b'),
+        Keyword.Constant)
+
+    tokens = {
+        'root': [
+            (r'\n', Whitespace),
+            (r'\s+', Whitespace),
+            (r'//.*?\n', Comment.Single),
+
+            # Keywords
+            statement_keywords, 
+            storage_keywords,
+            structure_keywords,
+            repeat_keywords,
+            type_keywords,
+            constant_keywords,
+            conditional_keywords,
+            other_keywords,
+
+            # Floats
+            (r'0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?', Number.Float),
+            (r'0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+', Number.Float),
+            (r'[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?', Number.Float),
+            (r'[0-9]+\.?[eE][-+]?[0-9]+', Number.Float),
+
+            # Integers
+            (r'0b[01]+', Number.Bin),
+            (r'0o[0-7]+', Number.Oct),
+            (r'0x[0-9a-fA-F]+', Number.Hex),
+            (r'[0-9]+', Number.Integer),
+
+            # Identifier
+            (r'@[a-zA-Z_]\w*', Name.Builtin),
+            (r'[a-zA-Z_]\w*', Name),
+
+            # Characters
+            (r'\'\\\'\'', String.Escape),
+            (r'\'\\(|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'', String.Escape),
+            (r'\'[^\\\']\'', String),
+
+            # Strings
+            (r'\\\\[^\n]*', String.Heredoc),
+            (r'c\\\\[^\n]*', String.Heredoc),
+            (r'c?"',String, 'string'),
+
+            # Operators, Punctuation
+            (r'[+%=><|^!?/\-*&~:]', Operator),
+            (r'[{}()\[\],.;]', Punctuation)
+        ],
+        'string': [
+            (r'\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])', String.Escape),
+            (r'[^\\"\n]+', String),
+            (r'"', String, '#pop')
+        ]
+    }
+
+    def get_tokens_unprocessed(self, text):
+        for index, token, value in \
+                RegexLexer.get_tokens_unprocessed(self, text):
+            yield index, token, value
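
A short usage sketch for the new lexer (the Zig program below is an invented fragment)::

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import ZigLexer

    source = '''const std = @import("std");

    pub fn main() void {
        const answer: u32 = 42;
        std.debug.warn("answer = {}\\n", answer);
    }
    '''

    print(highlight(source, ZigLexer(), TerminalFormatter()))
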
index 8de9e43bef587ceb4643b19103fab9fcffe423d0..c0614718a231bc8d4e630fd7909f4c91f2790505 100644 (file)
@@ -50,6 +50,7 @@ STYLE_MAP = {
     'stata':       'stata_light::StataLightStyle',
     'stata-light': 'stata_light::StataLightStyle',
     'stata-dark':  'stata_dark::StataDarkStyle',
+    'inkpot':      'inkpot::InkPotStyle',
 }
 
 
diff --git a/pygments/styles/inkpot.py b/pygments/styles/inkpot.py
new file mode 100644 (file)
index 0000000..a030b8b
--- /dev/null
@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.styles.inkpot
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    A highlighting style for Pygments, inspired by the Inkpot theme for VIM.
+
+    :copyright: Copyright 2018 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Text, Other, \
+     Keyword, Name, Comment, String, Error, \
+     Number, Operator, Generic, Whitespace, Punctuation
+
+
+class InkPotStyle(Style):
+    background_color = "#1e1e27"
+    default_style = ""
+    styles = {
+        Text:                      "#cfbfad",
+        Other:                     "#cfbfad",
+        Whitespace:                "#434357",
+        Comment:                   "#cd8b00",
+        Comment.Preproc:           "#409090",
+        Comment.PreprocFile:       "bg:#404040 #ffcd8b",
+        Comment.Special:           "#808bed",
+
+        Keyword:                   "#808bed",
+        Keyword.Pseudo:            "nobold",
+        Keyword.Type:              "#ff8bff",
+
+        Operator:                  "#666666",
+
+        Punctuation:               "#cfbfad",
+
+        Name:                      "#cfbfad",
+        Name.Attribute:            "#cfbfad",
+        Name.Builtin.Pseudo:       '#ffff00',
+        Name.Builtin:              "#808bed",
+        Name.Class:                "#ff8bff",
+        Name.Constant:             "#409090",
+        Name.Decorator:            "#409090",
+        Name.Exception:            "#ff0000",
+        Name.Function:             "#c080d0",
+        Name.Label:                "#808bed",
+        Name.Namespace:            "#ff0000",
+        Name.Variable:             "#cfbfad",
+
+        String:                    "bg:#404040 #ffcd8b",
+        String.Doc:                "#808bed",
+
+        Number:                    "#f0ad6d",
+
+        Generic.Heading:           "bold #000080",
+        Generic.Subheading:        "bold #800080",
+        Generic.Deleted:           "#A00000",
+        Generic.Inserted:          "#00A000",
+        Generic.Error:             "#FF0000",
+        Generic.Emph:              "italic",
+        Generic.Strong:            "bold",
+        Generic.Prompt:            "bold #000080",
+        Generic.Output:            "#888",
+        Generic.Traceback:         "#04D",
+
+        Error:                     "bg:#6e2e2e #ffffff"
+    }
index 4580df2be65807d090183dc8a2bbcb854892b552..c9db9f22188860498797281753e1d8d9f5ba3ee9 100644 (file)
@@ -92,14 +92,15 @@ class MonokaiStyle(Style):
         String.Single:             "",        # class: 's1'
         String.Symbol:             "",        # class: 'ss'
 
+
         Generic:                   "",        # class: 'g'
         Generic.Deleted:           "#f92672", # class: 'gd',
         Generic.Emph:              "italic",  # class: 'ge'
         Generic.Error:             "",        # class: 'gr'
         Generic.Heading:           "",        # class: 'gh'
         Generic.Inserted:          "#a6e22e", # class: 'gi'
-        Generic.Output:            "",        # class: 'go'
-        Generic.Prompt:            "",        # class: 'gp'
+        Generic.Output:            "#66d9ef", # class: 'go'
+        Generic.Prompt:            "bold #f92672", # class: 'gp'
         Generic.Strong:            "bold",    # class: 'gs'
         Generic.Subheading:        "#75715e", # class: 'gu'
         Generic.Traceback:         "",        # class: 'gt'
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644 (file)
index 4754a9d..0000000
+++ /dev/null
@@ -1,5 +0,0 @@
-coverage
-nose
-pyflakes
-pylint
-tox
diff --git a/scripts/.release-checklist.swp b/scripts/.release-checklist.swp
deleted file mode 100644 (file)
index 2ec1a56..0000000
Binary files a/scripts/.release-checklist.swp and /dev/null differ
index efc1e1e8b9ebc65117ba3b8d7c3ec273f11152f5..f1bd0f38db1ff589cd8f934661f4245f5c018537 100644 (file)
@@ -1,24 +1,24 @@
 Release checklist
 =================
 
-* Check ``hg status``
+* Check ``git status``
 * ``make check``
 * LATER when configured properly: ``make pylint``
 * ``tox``
 * Update version info in ``setup.py/__init__.py``
 * Check setup.py metadata: long description, trove classifiers
 * Update release date/code name in ``CHANGES``
-* ``hg commit``
+* ``git commit``
 * ``make clean``
 * ``python2 setup.py release bdist_wheel``
 * ``python3 setup.py release bdist_wheel sdist``
 * ``twine upload dist/Pygments-$NEWVER*``
 * Check PyPI release page for obvious errors
-* ``hg tag``
+* ``git tag -a``
 * Merge default into stable if this was a ``x.y.0``
 * Update homepage (release info), regenerate docs (+printable!)
 * Add new version/milestone to tracker categories
 * Write announcement and send to mailing list/python-announce
 * Update version info, add new ``CHANGES`` entry for next version
-* ``hg commit``
-* ``hg push``
+* ``git commit``
+* ``git push``
index e3d43fc007a8604ef61032f897a7230d8ad78097..61f1c7f2e03d928e007fa527d2808d2eb4c14500 100755 (executable)
--- a/setup.py
+++ b/setup.py
@@ -21,35 +21,11 @@ are:
 :license: BSD, see LICENSE for details.
 """
 
-try:
-    from setuptools import setup, find_packages
-    have_setuptools = True
-except ImportError:
-    from distutils.core import setup
-    def find_packages(*args, **kwargs):
-        return [
-            'pygments',
-            'pygments.lexers',
-            'pygments.formatters',
-            'pygments.styles',
-            'pygments.filters',
-        ]
-    have_setuptools = False
-
-if have_setuptools:
-    add_keywords = dict(
-        entry_points = {
-            'console_scripts': ['pygmentize = pygments.cmdline:main'],
-        },
-    )
-else:
-    add_keywords = dict(
-        scripts = ['pygmentize'],
-    )
+from setuptools import setup, find_packages
 
 setup(
     name = 'Pygments',
-    version = '2.4.2',
+    version = '2.5.1',
     url = 'http://pygments.org/',
     license = 'BSD License',
     author = 'Georg Brandl',
@@ -57,7 +33,10 @@ setup(
     description = 'Pygments is a syntax highlighting package written in Python.',
     long_description = __doc__,
     keywords = 'syntax highlighting',
-    packages = find_packages(),
+    packages = find_packages(include=['pygments', 'pygments.*']),
+    entry_points = {
+        'console_scripts': ['pygmentize = pygments.cmdline:main'],
+    },
     platforms = 'any',
     zip_safe = False,
     include_package_data = True,
@@ -75,9 +54,11 @@ setup(
         'Programming Language :: Python :: 3.5',
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
+        'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: Implementation :: CPython',
+        'Programming Language :: Python :: Implementation :: PyPy',
         'Operating System :: OS Independent',
         'Topic :: Text Processing :: Filters',
         'Topic :: Utilities',
     ],
-    **add_keywords
 )
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/tests/examplefiles/MIME_example.eml b/tests/examplefiles/MIME_example.eml
new file mode 100644 (file)
index 0000000..e25ef5a
--- /dev/null
@@ -0,0 +1,34 @@
+From: Some One <someone@example.com>
+MIME-Version: 1.0
+Content-Type: multipart/mixed;
+        boundary="+Testboundary text"
+
+This is a multipart message in MIME format.
+
+--+Testboundary text
+Content-Type: multipart/alternative;
+        boundary="hello, boundary"
+
+--hello, boundary
+Content-Type: text/plain
+
+this is the body text
+
+--hello, boundary
+Content-Type: text/html;
+       charset="utf-8"
+Content-Transfer-Encoding: quoted-printable
+
+<font color=3D"#FF0000">This is the body text</font>
+
+--hello, boundary--
+--+Testboundary text
+Content-Type: text/plain;
+Content-Disposition: attachment;
+        filename="test.txt"
+Content-Transfer-Encoding: base64
+
+dGhpcyBpcyB0aGUgYXR0YWNobWVudCB0ZXh0
+
+--+Testboundary text--
+Some additional content here.
diff --git a/tests/examplefiles/example.eml b/tests/examplefiles/example.eml
new file mode 100644 (file)
index 0000000..e25427f
--- /dev/null
@@ -0,0 +1,92 @@
+Mime-Version: 1.0 (Apple Message framework v730)\r
+Content-Type: multipart/mixed; boundary=Apple-Mail-13-196941151\r
+Message-Id: <9169D984-4E0B-45EF-82D4-8F5E53AD7012@example.com>\r
+From: foo@example.com\r
+Subject: testing\r
+Date: Mon, 6 Jun 2005 22:21:22 +0200\r
+To: blah@example.com\r
+\r
+\r
+--Apple-Mail-13-196941151\r
+Content-Transfer-Encoding: quoted-printable\r
+Content-Type: text/plain;\r
+       charset=ISO-8859-1;\r
+       delsp=yes;\r
+       format=flowed\r
+\r
+This is the first part.\r
+\r
+--Apple-Mail-13-196941151\r
+Content-Type: message/rfc822;\r
+  name="ForwardedMessage.eml";\r
+\r
+Return-Path: <xxxx@xxxx.com>\r
+X-Original-To: xxxx@xxxx.com\r
+Delivered-To: xxxx@xxxx.com\r
+Received: from localhost (localhost [127.0.0.1])\r
+       by xxx.xxxxx.com (Postfix) with ESMTP id 50FD3A96F\r
+       for <xxxx@xxxx.com>; Tue, 10 May 2005 17:26:50 +0000 (GMT)\r
+Received: from xxx.xxxxx.com ([127.0.0.1])\r
+ by localhost (xxx.xxxxx.com [127.0.0.1]) (amavisd-new, port 10024)\r
+ with LMTP id 70060-03 for <xxxx@xxxx.com>;\r
+ Tue, 10 May 2005 17:26:49 +0000 (GMT)\r
+Received: from xxx.xxxxx.com (xxx.xxxxx.com [69.36.39.150])\r
+       by xxx.xxxxx.com (Postfix) with ESMTP id 8B957A94B\r
+       for <xxxx@xxxx.com>; Tue, 10 May 2005 17:26:48 +0000 (GMT)\r
+Received: from xxx.xxxxx.com (xxx.xxxxx.com [64.233.184.203])\r
+       by xxx.xxxxx.com (Postfix) with ESMTP id 9972514824C\r
+       for <xxxx@xxxx.com>; Tue, 10 May 2005 12:26:40 -0500 (CDT)\r
+Received: by xxx.xxxxx.com with SMTP id 68so1694448wri\r
+        for <xxxx@xxxx.com>; Tue, 10 May 2005 10:26:40 -0700 (PDT)\r
+DomainKey-Signature: a=rsa-sha1; q=dns; c=nofws;\r
+        s=beta; d=xxxxx.com;\r
+        h=received:message-id:date:from:reply-to:to:subject:mime-version:content-type;\r
+        b=g8ZO5ttS6GPEMAz9WxrRk9+9IXBUfQIYsZLL6T88+ECbsXqGIgfGtzJJFn6o9CE3/HMrrIGkN5AisxVFTGXWxWci5YA/7PTVWwPOhJff5BRYQDVNgRKqMl/SMttNrrRElsGJjnD1UyQ/5kQmcBxq2PuZI5Zc47u6CILcuoBcM+A=\r
+Received: by 10.54.96.19 with SMTP id t19mr621017wrb;\r
+        Tue, 10 May 2005 10:26:39 -0700 (PDT)\r
+Received: by 10.54.110.5 with HTTP; Tue, 10 May 2005 10:26:39 -0700 (PDT)\r
+Message-ID: <xxxx@xxxx.com>\r
+Date: Tue, 10 May 2005 11:26:39 -0600\r
+From: Test Tester <xxxx@xxxx.com>\r
+Reply-To: Test Tester <xxxx@xxxx.com>\r
+To: xxxx@xxxx.com, xxxx@xxxx.com\r
+Subject: Another PDF\r
+Mime-Version: 1.0\r
+Content-Type: multipart/mixed;\r
+       boundary="----=_Part_2192_32400445.1115745999735"\r
+X-Virus-Scanned: amavisd-new at textdrive.com\r
+\r
+------=_Part_2192_32400445.1115745999735\r
+Content-Type: text/plain; charset=ISO-8859-1\r
+Content-Transfer-Encoding: quoted-printable\r
+Content-Disposition: inline\r
+\r
+Just attaching another PDF, here, to see what the message looks like,\r
+and to see if I can figure out what is going wrong here.\r
+\r
+------=_Part_2192_32400445.1115745999735\r
+Content-Type: application/pdf; name="broken.pdf"\r
+Content-Transfer-Encoding: base64\r
+Content-Disposition: attachment; filename="broken.pdf"\r
+\r
+JVBERi0xLjQNCiXk9tzfDQoxIDAgb2JqDQo8PCAvTGVuZ3RoIDIgMCBSDQogICAvRmlsdGVyIC9G\r
+bGF0ZURlY29kZQ0KPj4NCnN0cmVhbQ0KeJy9Wt2KJbkNvm/od6jrhZxYln9hWEh2p+8HBvICySaE\r
+ycLuTV4/1ifJ9qnq09NpSBimu76yLUuy/qzqcPz7+em3Ixx/CDc6CsXxs3b5+fvfjr/8cPz6/BRu\r
+rbfAx/n3739/fuJylJ5u5fjX81OuDr4deK4Bz3z/aDP+8fz0yw8g0Ofq7ktr1Mn+u28rvhy/jVeD\r
+QSa+9YNKHP/pxjvDNfVAx/m3MFz54FhvTbaseaxiDoN2LeMVMw+yA7RbHSCDzxZuaYB2E1Yay7QU\r
+x89vz0+tyFDKMlAHK5yqLmnjF+c4RjEiQIUeKwblXMe+AsZjN1J5yGQL5DHpDHksurM81rF6PKab\r
+gK6zAarIDzIiUY23rJsN9iorAE816aIu6lsgAdQFsuhhkHOUFgVjp2GjMqSewITXNQ27jrMeamkg\r
+1rPI3iLWG2CIaSBB+V1245YVRICGbbpYKHc2USFDl6M09acQVQYhlwIrkBNLISvXhGlF1wi5FHCw\r
+wxZkoGNJlVeJCEsqKA+3YAV5AMb6KkeaqEJQmFKKQU8T1pRi2ihE1Y4CDrqoYFFXYjJJOatsyzuI\r
+8SIlykuxKTMibWK8H1PgEvqYgs4GmQSrEjJAalgGirIhik+p4ZQN9E3ETFPAHE1b8pp1l/0Rc1gl\r
+fQs0ABWvyoZZzU8VnPXwVVcO9BEsyjEJaO6eBoZRyKGlrKoYoOygA8BGIzgwN3RQ15ouigG5idZQ\r
+fx2U4Db2CqiLO0WHAZoylGiCAqhniNQjFjQPSkmjwfNTgQ6M1Ih+eWo36wFmjIxDJZiGUBiWsAyR\r
+xX3EekGOizkGI96Ol9zVZTAivikURhRsHh2E3JhWMpSTZCnnonrLhMCodgrNcgo4uyJUJc6qnVss\r
+nrGd1Ptr0YwisCOYyIbUwVjV4xBUNLbguSO2YHujonAMJkMdSI7bIw91Akq2AUlMUWGFTMAOamjU\r
+OvZQCxIkY2pCpMFo/IwLdVLHs6nddwTRrgoVbvLU9eB0G4EMndV0TNoxHbt3JBWwK6hhv3iHfDtF\r
+yokB302IpEBTnWICde4uYc/1khDbSIkQopO6lcqamGBu1OSE3N5IPSsZX00CkSHRiiyx6HQIShsS\r
+HSVNswdVsaOUSAWq9aYhDtGDaoG5a3lBGkYt/lFlBFt1UqrYnzVtUpUQnLiZeouKgf1KhRBViRRk\r
+ExepJCzTwEmFDalIRbLEGtw0gfpESOpIAF/NnpPzcVCG86s0g2DuSyd41uhNGbEgaSrWEXORErbw\r
+------=_Part_2192_32400445.1115745999735--\r
+\r
+--Apple-Mail-13-196941151--\r
index 8557391955b3eedddad5a1ba729226cb9453a340..2b782b8d2fde63e60a185a6970fccfd6b8348d51 100644 (file)
@@ -1,4 +1,5 @@
 form Highlighter test
+  # This is a regular comment
   sentence Blank
   sentence My_sentence This should all be a string
   text My_text This should also all be a string
@@ -7,9 +8,11 @@ form Highlighter test
   boolean Text no
   boolean Quoted "yes"
   comment This should be a string
-  optionmenu Choice: 1
+  optionmenu Drop-down: 1
+    option Foo
+    option 100
+  choice Radio: 1
     option Foo
-    option Bar
     option 100
   real left_Range -123.6
   positive right_Range_max 3.3
@@ -17,6 +20,25 @@ form Highlighter test
   natural Nat 4
 endform
 
+beginPause: "Highlighter test"
+  sentence: "Blank", ""
+  sentence: "My sentence", "This should all be a string"
+  text: "My text", "This should also all be a string"
+  word: "My word", "Only the first word is a string, the rest is discarded"
+  boolean: "Binary", 1
+  comment: "This should be a string"
+  optionMenu: "Drop-down", 1
+    option: "Foo"
+    option: "100"
+  choice: "Choice", 1
+    option: "Foo"
+    option: "100"
+  real: "left Range", -123.6
+  positive: "right Range max", 3.3
+  integer: "Int", 4
+  natural: "Nat", 4
+button = endPause("Cancel", "OK", 1, 2)
+
 # Periods do not establish boundaries for keywords
 form.var = 10
 # Or operators
@@ -30,8 +52,7 @@ execute /path/to/file
 
 # Predefined variables
 a  = praatVersion
-a  = e
-a  = pi
+a  = e + pi * ( all+right) / left mod average + (mono - stereo)
 a$ = homeDirectory$ + tab$ + newline$
 a$ = temporaryDirectory$
 a$ = praatVersion$
@@ -40,6 +61,9 @@ a$ = homeDirectory$
 a$ = preferencesDirectory$
 a$ = defaultDirectory$
 nocheck selectObject: undefined
+# Not predefined variables
+a$ = e$
+a$ = pi$
 
 # Arrays are not comments
 a# = zero# (5, 6)
@@ -59,9 +83,43 @@ else macintosh == 1
   exit We are on Mac
 endif
 
-string$ = "Strings can be 'interpolated'"
+# Interpolation with precision digits
+echo unquoted 'a:3'
+echo unquoted 'a.a:3'
+echo unquoted 'a[1]:3'
+echo unquoted 'a1:3'
+
+appendInfoLine: "quoted 'a:3'"
+appendInfoLine: "quoted 'a.a:3'"
+appendInfoLine: "quoted 'a[1]:3'"
+appendInfoLine: "quoted 'a1:3'"
+
+# Interpolations are not recursive
+echo unquoted 'a'1':3'
+appendInfoLine: "quoted 'a'1':3'"
+
+# Interpolation without precision digits
+echo unquoted 'var' numeric
+echo unquoted 'var$' string
+echo unquoted 'var["a"]' numeric hash
+echo unquoted 'var$["a"]' string hash
+echo unquoted 'var[1]' numeric indexed variable
+echo unquoted 'var$[1]' string indexed variable
+
+appendInfoLine: "quoted 'var' numeric"
+appendInfoLine: "quoted 'var$' string"
+appendInfoLine: "quoted 'var["a"]' numeric hash"
+appendInfoLine: "quoted 'var$["a"]' string hash"
+appendInfoLine: "quoted 'var[1]' numeric indexed variable"
+appendInfoLine: "quoted 'var$[1]' string indexed variable"
+
+# Indices in interpolations must be literal
+echo 'var[a]'
+echo 'var[a$]'
+
 string$ = "But don't interpolate everything!"
-string$(10)
+string$ = "interpolatin' " + "across" + " strings ain't cool either"
+string$(10) ; This is a function
 
 repeat
   string$ = string$ - right$(string$)
@@ -77,6 +135,12 @@ value$ = Table_'table'$[25, "f0"]
 fixed  = Sound_10.xmin
 fixed  = Object_foo.xmin
 fixed  = Procrustes_foo.nx
+var["vaa"] = 1 ; Hash
+
+# Special two-word keyword
+select all
+# Keyword with a predefined variable
+select  all
 
 # old-style procedure call
 call oldStyle "quoted" 2 unquoted string
@@ -103,7 +167,7 @@ endfor
 
 i = 1
 while i < n
-  i++
+  i += 1
   # Different styles of object selection
   select sound'i'
   sound = selected()
@@ -153,7 +217,7 @@ while i < n
     ..."duration response"
 
   # Function call with trailing space
-  removeObject: pitch, table 
+  removeObject: pitch, table
 
   # Picture window commands
   selectObject: sound
@@ -251,7 +315,7 @@ procedure newStyle (.str1$, .num, .str2$)
   .local = Get total duration
   .local = Get 'some' duration
   .local = Get 'some[1]' value... hello 10 p[i]
-  .local = Get 'some[1,3]' value: "hello", 10, 'p[i]'
+  .local = Get 'some[1,3]' value: "hello", 10, p[i]
   .local = Get 'some$' duration
   .local = Get 'some$[1]' duration
 endproc
diff --git a/tests/examplefiles/example.shex b/tests/examplefiles/example.shex
new file mode 100644 (file)
index 0000000..8fab2c8
--- /dev/null
@@ -0,0 +1,20 @@
+PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+PREFIX schema: <http://schema.org/>
+PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+PREFIX ex: <http://pygments.example/#>
+
+ex:Lexer {
+  rdfs:label xsd:string;
+  skos:altLabel xsd:string*;
+  ex:filenames xsd:string+;
+  ex:mimetypes xsd:string+;
+  ex:priority xsd:decimal MinInclusive 0.0 MaxExclusive 1.0; # seems to be the de facto range of currently defined priorities
+  ex:lexes @ex:Language*;
+}
+
+ex:Language {
+  schema:description rdf:langString*;
+  schema:url IRI?;
+}
diff --git a/tests/examplefiles/example.zig b/tests/examplefiles/example.zig
new file mode 100644 (file)
index 0000000..32e7284
--- /dev/null
@@ -0,0 +1,263 @@
+const std = @import("std");
+const Allocator = mem.Allocator;
+const mem = std.mem;
+const ast = std.zig.ast;
+const Visib = @import("visib.zig").Visib;
+const event = std.event;
+const Value = @import("value.zig").Value;
+const Token = std.zig.Token;
+const errmsg = @import("errmsg.zig");
+const Scope = @import("scope.zig").Scope;
+const Compilation = @import("compilation.zig").Compilation;
+
+pub const Decl = struct {
+    id: Id,
+    name: []const u8,
+    visib: Visib,
+    resolution: event.Future(Compilation.BuildError!void),
+    parent_scope: *Scope,
+
+    // TODO when we destroy the decl, deref the tree scope
+    tree_scope: *Scope.AstTree,
+
+    pub const Table = std.HashMap([]const u8, *Decl, mem.hash_slice_u8, mem.eql_slice_u8);
+
+    pub fn cast(base: *Decl, comptime T: type) ?*T {
+        if (base.id != @field(Id, @typeName(T))) return null;
+        return @fieldParentPtr(T, "base", base);
+    }
+
+    pub fn isExported(base: *const Decl, tree: *ast.Tree) bool {
+        switch (base.id) {
+            Id.Fn => {
+                const fn_decl = @fieldParentPtr(Fn, "base", base);
+                return fn_decl.isExported(tree);
+            },
+            else => return false,
+        }
+    }
+
+    pub fn getSpan(base: *const Decl) errmsg.Span {
+        switch (base.id) {
+            Id.Fn => {
+                const fn_decl = @fieldParentPtr(Fn, "base", base);
+                const fn_proto = fn_decl.fn_proto;
+                const start = fn_proto.fn_token;
+                const end = fn_proto.name_token orelse start;
+                return errmsg.Span{
+                    .first = start,
+                    .last = end + 1,
+                };
+            },
+            else => @panic("TODO"),
+        }
+    }
+
+    pub fn findRootScope(base: *const Decl) *Scope.Root {
+        return base.parent_scope.findRoot();
+    }
+
+    pub const Id = enum {
+        Var,
+        Fn,
+        CompTime,
+    };
+
+    pub const Var = struct {
+        base: Decl,
+    };
+
+    pub const Fn = struct {
+        base: Decl,
+        value: Val,
+        fn_proto: *ast.Node.FnProto,
+
+        // TODO https://github.com/ziglang/zig/issues/683 and then make this anonymous
+        pub const Val = union(enum) {
+            Unresolved: void,
+            Fn: *Value.Fn,
+            FnProto: *Value.FnProto,
+        };
+
+        pub fn externLibName(self: Fn, tree: *ast.Tree) ?[]const u8 {
+            return if (self.fn_proto.extern_export_inline_token) |tok_index| x: {
+                const token = tree.tokens.at(tok_index);
+                break :x switch (token.id) {
+                    Token.Id.Extern => tree.tokenSlicePtr(token),
+                    else => null,
+                };
+            } else null;
+        }
+
+        pub fn isExported(self: Fn, tree: *ast.Tree) bool {
+            if (self.fn_proto.extern_export_inline_token) |tok_index| {
+                const token = tree.tokens.at(tok_index);
+                return token.id == Token.Id.Keyword_export;
+            } else {
+                return false;
+            }
+        }
+    };
+
+    pub const CompTime = struct {
+        base: Decl,
+    };
+};
+
+pub const info_zen =
+    \\
+    \\ * Communicate intent precisely.
+    \\ * Edge cases matter.
+    \\ * Favor reading code over writing code.
+    \\ * Only one obvious way to do things.
+    \\ * Runtime crashes are better than bugs.
+    \\ * Compile errors are better than runtime crashes.
+    \\ * Incremental improvements.
+    \\ * Avoid local maximums.
+    \\ * Reduce the amount one must remember.
+    \\ * Minimize energy spent on coding style.
+    \\ * Together we serve end users.
+    \\
+    \\
+;
+
+fn cmdZen(allocator: *Allocator, args: []const []const u8) !void {
+    try stdout.write(info_zen);
+}
+
+const usage_internal =
+    \\usage: zig internal [subcommand]
+    \\
+    \\Sub-Commands:
+    \\  build-info                   Print static compiler build-info
+    \\
+    \\
+;
+
+fn cmdInternal(allocator: *Allocator, args: []const []const u8) !void {
+    if (args.len == 0) {
+        try stderr.write(usage_internal);
+        os.exit(1);
+    }
+
+    const sub_commands = []Command{Command{
+        .name = "build-info",
+        .exec = cmdInternalBuildInfo,
+    }};
+
+    for (sub_commands) |sub_command| {
+        if (mem.eql(u8, sub_command.name, args[0])) {
+            try sub_command.exec(allocator, args[1..]);
+            return;
+        }
+    }
+
+    try stderr.print("unknown sub command: {}\n\n", args[0]);
+    try stderr.write(usage_internal);
+}
+
+fn cmdInternalBuildInfo(allocator: *Allocator, args: []const []const u8) !void {
+    try stdout.print(
+        \\ZIG_CMAKE_BINARY_DIR {}
+        \\ZIG_CXX_COMPILER     {}
+        \\ZIG_LLVM_CONFIG_EXE  {}
+        \\ZIG_LLD_INCLUDE_PATH {}
+        \\ZIG_LLD_LIBRARIES    {}
+        \\ZIG_STD_FILES        {}
+        \\ZIG_C_HEADER_FILES   {}
+        \\ZIG_DIA_GUIDS_LIB    {}
+        \\
+    ,
+        std.cstr.toSliceConst(c.ZIG_CMAKE_BINARY_DIR),
+        std.cstr.toSliceConst(c.ZIG_CXX_COMPILER),
+        std.cstr.toSliceConst(c.ZIG_LLVM_CONFIG_EXE),
+        std.cstr.toSliceConst(c.ZIG_LLD_INCLUDE_PATH),
+        std.cstr.toSliceConst(c.ZIG_LLD_LIBRARIES),
+        std.cstr.toSliceConst(c.ZIG_STD_FILES),
+        std.cstr.toSliceConst(c.ZIG_C_HEADER_FILES),
+        std.cstr.toSliceConst(c.ZIG_DIA_GUIDS_LIB),
+    );
+}
+
+fn test__floatuntisf(a: u128, expected: f32) void {
+    const x = __floatuntisf(a);
+    testing.expect(x == expected);
+}
+
+test "floatuntisf" {
+    test__floatuntisf(0, 0.0);
+
+    test__floatuntisf(1, 1.0);
+    test__floatuntisf(2, 2.0);
+    test__floatuntisf(20, 20.0);
+
+    test__floatuntisf(0x7FFFFF8000000000, 0x1.FFFFFEp+62);
+    test__floatuntisf(0x7FFFFF0000000000, 0x1.FFFFFCp+62);
+
+    test__floatuntisf(make_ti(0x8000008000000000, 0), 0x1.000001p+127);
+    test__floatuntisf(make_ti(0x8000000000000800, 0), 0x1.0p+127);
+    test__floatuntisf(make_ti(0x8000010000000000, 0), 0x1.000002p+127);
+
+    test__floatuntisf(make_ti(0x8000000000000000, 0), 0x1.000000p+127);
+
+    test__floatuntisf(0x0007FB72E8000000, 0x1.FEDCBAp+50);
+
+    test__floatuntisf(0x0007FB72EA000000, 0x1.FEDCBA8p+50);
+    test__floatuntisf(0x0007FB72EB000000, 0x1.FEDCBACp+50);
+
+    test__floatuntisf(0x0007FB72EC000000, 0x1.FEDCBBp+50);
+
+    test__floatuntisf(0x0007FB72E6000000, 0x1.FEDCB98p+50);
+    test__floatuntisf(0x0007FB72E7000000, 0x1.FEDCB9Cp+50);
+    test__floatuntisf(0x0007FB72E4000000, 0x1.FEDCB9p+50);
+
+    test__floatuntisf(0xFFFFFFFFFFFFFFFE, 0x1p+64);
+    test__floatuntisf(0xFFFFFFFFFFFFFFFF, 0x1p+64);
+
+    test__floatuntisf(0x0007FB72E8000000, 0x1.FEDCBAp+50);
+
+    test__floatuntisf(0x0007FB72EA000000, 0x1.FEDCBAp+50);
+    test__floatuntisf(0x0007FB72EB000000, 0x1.FEDCBAp+50);
+    test__floatuntisf(0x0007FB72EBFFFFFF, 0x1.FEDCBAp+50);
+    test__floatuntisf(0x0007FB72EC000000, 0x1.FEDCBCp+50);
+    test__floatuntisf(0x0007FB72E8000001, 0x1.FEDCBAp+50);
+
+    test__floatuntisf(0x0007FB72E6000000, 0x1.FEDCBAp+50);
+    test__floatuntisf(0x0007FB72E7000000, 0x1.FEDCBAp+50);
+    test__floatuntisf(0x0007FB72E7FFFFFF, 0x1.FEDCBAp+50);
+    test__floatuntisf(0x0007FB72E4000001, 0x1.FEDCBAp+50);
+    test__floatuntisf(0x0007FB72E4000000, 0x1.FEDCB8p+50);
+
+    test__floatuntisf(make_ti(0x0000000000001FED, 0xCB90000000000001), 0x1.FEDCBAp+76);
+    test__floatuntisf(make_ti(0x0000000000001FED, 0xCBA0000000000000), 0x1.FEDCBAp+76);
+    test__floatuntisf(make_ti(0x0000000000001FED, 0xCBAFFFFFFFFFFFFF), 0x1.FEDCBAp+76);
+    test__floatuntisf(make_ti(0x0000000000001FED, 0xCBB0000000000000), 0x1.FEDCBCp+76);
+    test__floatuntisf(make_ti(0x0000000000001FED, 0xCBB0000000000001), 0x1.FEDCBCp+76);
+    test__floatuntisf(make_ti(0x0000000000001FED, 0xCBBFFFFFFFFFFFFF), 0x1.FEDCBCp+76);
+    test__floatuntisf(make_ti(0x0000000000001FED, 0xCBC0000000000000), 0x1.FEDCBCp+76);
+    test__floatuntisf(make_ti(0x0000000000001FED, 0xCBC0000000000001), 0x1.FEDCBCp+76);
+    test__floatuntisf(make_ti(0x0000000000001FED, 0xCBD0000000000000), 0x1.FEDCBCp+76);
+    test__floatuntisf(make_ti(0x0000000000001FED, 0xCBD0000000000001), 0x1.FEDCBEp+76);
+    test__floatuntisf(make_ti(0x0000000000001FED, 0xCBDFFFFFFFFFFFFF), 0x1.FEDCBEp+76);
+    test__floatuntisf(make_ti(0x0000000000001FED, 0xCBE0000000000000), 0x1.FEDCBEp+76);
+}
+
+fn trimStart(slice: []const u8, ch: u8) []const u8 {
+    var i: usize = 0;
+    const test_string = "test\"string";
+    for (slice) |b| {
+        if (b == '\xa3') break;
+        if (b == '\ua3d3') break;
+        if (b == '\Ua3d3d3') break;
+        if (b == '\t') break;
+        if (b == '\n') break;
+        if (b == '\\') break;
+        if (b == '\'') break;
+        if (b == '"') break;
+        if (b != 'n') break;
+        if (b != '-') break;
+        i += 1;
+    }
+
+    return slice[i..];
+}
diff --git a/tests/examplefiles/notmuch_example b/tests/examplefiles/notmuch_example
new file mode 100644 (file)
index 0000000..61be8c6
--- /dev/null
@@ -0,0 +1,15 @@
+\fmessage{ id:5d0693e2.1c69fb81.d5fc9.1f6e@mx.google.com depth:0 match:1 excluded:0 filename:/home/user/mail/INBOX/new/1560712171_0.11014.blue,U=20254,FMD5=7e33429f656f1e6e9d79b29c3f82c57e:2,
+\fheader{
+John Doe <john.doe@example.com> (1 mins. ago) (inbox unread)
+Subject: Hello world!
+From: john.doe@example.com
+Date: Sun, 16 Jun 2019 16:00:00 -0300
+\fheader}
+\fbody{
+\fpart{ ID: 1, Content-type: text/plain
+#!/bin/sh
+
+echo 'Hello world!'
+\fpart}
+\fbody}
+\fmessage}
index 8deeeb3dd80d045d6114d1e0f26663622df13181..0a00fa54d69ded289240e38ee08c78dc7e504e6a 100644 (file)
Binary files a/tests/examplefiles/output/99_bottles_of_beer.chpl and b/tests/examplefiles/output/99_bottles_of_beer.chpl differ
index 82c613df9d031ef6702d3c27c326f9f2769b38c8..718f515e623e9b9f718114ab840f55617763bf72 100644 (file)
Binary files a/tests/examplefiles/output/AcidStateAdvanced.hs and b/tests/examplefiles/output/AcidStateAdvanced.hs differ
index 0dd4e11c5d1c77176f9f5ecab4a488bd796a553d..f52b19d18e996919d75d007a3848737e2a72d62a 100644 (file)
Binary files a/tests/examplefiles/output/AlternatingGroup.mu and b/tests/examplefiles/output/AlternatingGroup.mu differ
index 241487dab00f749d92fbd846c7d1817a45cedcfc..ba53683021cd45a7d62a09179bd3d0fb8a36a173 100644 (file)
Binary files a/tests/examplefiles/output/BOM.js and b/tests/examplefiles/output/BOM.js differ
index 3cc659858e94c3f06d76d5d5095294c2e0c16b30..39f9abad86c0c0043d632a5b6853b1576cdcc3a9 100644 (file)
Binary files a/tests/examplefiles/output/Blink.ino and b/tests/examplefiles/output/Blink.ino differ
index 6247fc913cc026c640df7921ae59ea4c85d7140c..f946e82a997f0e6d2780e82abdea03711ff30758 100644 (file)
Binary files a/tests/examplefiles/output/CPDictionary.j and b/tests/examplefiles/output/CPDictionary.j differ
index f822b1e9bd5fa67a1e77f0f20ad7d078707b904e..da25afcde0c416e6543c58257254dd4088a7389b 100644 (file)
Binary files a/tests/examplefiles/output/Charmci.ci and b/tests/examplefiles/output/Charmci.ci differ
index a17be09eaad5592d9a966ae0717dc15ee02e0022..db1794242f29576904c51b57094ed441789540a3 100644 (file)
Binary files a/tests/examplefiles/output/Config.in.cache and b/tests/examplefiles/output/Config.in.cache differ
index 80f21143cb520619bbae4106f7a2ad6cb617b88c..e632c6f0cd5a8670b1f3c8bc357e0ea5c3dedb03 100644 (file)
Binary files a/tests/examplefiles/output/Constants.mo and b/tests/examplefiles/output/Constants.mo differ
index 4fd78730c22dc011c683c360a653eb67c5af8f42..5c8f12e385ad024bc2b79e610936df91a97ad2a4 100644 (file)
Binary files a/tests/examplefiles/output/DancingSudoku.lhs and b/tests/examplefiles/output/DancingSudoku.lhs differ
index acf9a2b63582bc9919345753af2eedd6473339ac..0adebebf000ce43221a78335b03cbece3aeb5bca 100644 (file)
Binary files a/tests/examplefiles/output/Deflate.fs and b/tests/examplefiles/output/Deflate.fs differ
index 5f59f9011dc4cff39fede7ca7ab2d0f56b6dec58..435bed8140935b5f22b1072e9f08021b5085ecc9 100644 (file)
Binary files a/tests/examplefiles/output/Error.pmod and b/tests/examplefiles/output/Error.pmod differ
index 320cabc634c5e0367f9127e2ffe45631d53fe43e..847a7c4ce54457b247f83f15cb294775d24c5b33 100644 (file)
Binary files a/tests/examplefiles/output/Errors.scala and b/tests/examplefiles/output/Errors.scala differ
index ab14b559a499484cd40af99ae4aa80f52be7bdce..8ed0be5cd457994643e0c900cbe9c20c0797fb47 100644 (file)
Binary files a/tests/examplefiles/output/FakeFile.pike and b/tests/examplefiles/output/FakeFile.pike differ
index d68207599ec050d6ce47a25b63d55f4c86612dca..277c2142a5dbcf70a32410d503d9fc0b15014550 100644 (file)
Binary files a/tests/examplefiles/output/Get-CommandDefinitionHtml.ps1 and b/tests/examplefiles/output/Get-CommandDefinitionHtml.ps1 differ
index 69f8aadf71b0d6d0b267f4feecc3cfa4aee31ebd..9293b0cec5a50e932e004354e3c3da7975949037 100644 (file)
Binary files a/tests/examplefiles/output/IPDispatchC.nc and b/tests/examplefiles/output/IPDispatchC.nc differ
index c6bd0bf2d9d4f5eada739b7f9735bc6559e9f4d6..a777073afdcf40aae3af367584e5b43f9da8f59c 100644 (file)
Binary files a/tests/examplefiles/output/IPDispatchP.nc and b/tests/examplefiles/output/IPDispatchP.nc differ
index 37355e39bafc8d863846682e27f743fd41a40a04..2812fe39e01f2000ae9cd31b34e0ed08348596d5 100644 (file)
Binary files a/tests/examplefiles/output/Intro.java and b/tests/examplefiles/output/Intro.java differ
diff --git a/tests/examplefiles/output/MIME_example.eml b/tests/examplefiles/output/MIME_example.eml
new file mode 100644 (file)
index 0000000..071c3db
Binary files /dev/null and b/tests/examplefiles/output/MIME_example.eml differ
index 14ff256aa3b45918870e8f8ae3fc427679994687..d6e77d3adf94099f3bbcfd432d847788f8d9430d 100644 (file)
Binary files a/tests/examplefiles/output/Makefile and b/tests/examplefiles/output/Makefile differ
index 0aa13b4005dbf51c29a847ac2a1c0977fffd4977..d0faa8d1bc1e5a5d3d73d5d52dc97dac3010593f 100644 (file)
Binary files a/tests/examplefiles/output/Object.st and b/tests/examplefiles/output/Object.st differ
index 3338f6869c96dc040b2ee8400033f2509c08cd38..8902ebf1f057f1df87c04ddb8cd60baa29674b47 100644 (file)
Binary files a/tests/examplefiles/output/OrderedMap.hx and b/tests/examplefiles/output/OrderedMap.hx differ
index a63b5e27a67361645530a7468eab8dc3ee9056af..59708d2b331d80cfd31e1dcc204e1d1bc1c21425 100644 (file)
Binary files a/tests/examplefiles/output/RoleQ.pm6 and b/tests/examplefiles/output/RoleQ.pm6 differ
index b2fe309cc88447f3d6282c5da1ad0574fc1de60f..ff5778f4b34e40e07de26c6b0e335bcc6e530bff 100644 (file)
Binary files a/tests/examplefiles/output/SmallCheck.hs and b/tests/examplefiles/output/SmallCheck.hs differ
index d8443787b462c443513e061312847da6dcf022fd..b34d2dd2cdaf9f9f4d140f626a06a57bdd9ef8ae 100644 (file)
Binary files a/tests/examplefiles/output/Sorting.mod and b/tests/examplefiles/output/Sorting.mod differ
index aac1380abc0c5facf387b2a5ea654af4c184dfa4..0d6dc1c2f94725b9c801cf25c679d32bfb78a1fb 100644 (file)
Binary files a/tests/examplefiles/output/StdGeneric.icl and b/tests/examplefiles/output/StdGeneric.icl differ
index a6b70e7197f248dd5eb216c41b2a77a00ec439a0..eb2b1ead4c20b06b7db42c25c31b754579c478ed 100644 (file)
Binary files a/tests/examplefiles/output/Sudoku.lhs and b/tests/examplefiles/output/Sudoku.lhs differ
index b05ea25cdb7fdc2f4b8c5850a9cd01d6426794f0..ad2713699d4cc681774dc181d82fc1657c366f88 100644 (file)
Binary files a/tests/examplefiles/output/abnf_example1.abnf and b/tests/examplefiles/output/abnf_example1.abnf differ
index 0087baeee30382075b9483919e991c156b75dc4c..9ea5be64b91db0521f743ebfc9e813c8722b4767 100644 (file)
Binary files a/tests/examplefiles/output/abnf_example2.abnf and b/tests/examplefiles/output/abnf_example2.abnf differ
index 33b0071ac49aa436e6e04b3f5d17b0b7c33491ff..75198438d3476bb8aae73918eea440c1a930423c 100644 (file)
Binary files a/tests/examplefiles/output/addressbook.proto and b/tests/examplefiles/output/addressbook.proto differ
index 2ab2719deee679042604bc8f3404e7a8760ca89c..09bb186d8237d1a066ad12b29adec36505fe0020 100644 (file)
Binary files a/tests/examplefiles/output/ahcon.f and b/tests/examplefiles/output/ahcon.f differ
index f6db8a40cae1146762609fa13fe19012f13cb1ae..6ba8de57d31550b470c077a004230314c8b4be46 100644 (file)
Binary files a/tests/examplefiles/output/all.nit and b/tests/examplefiles/output/all.nit differ
index b9f77f02667f41efd8ecd007863150dd7a2df92a..db544a9a6a476a69b2d2656825bcdf8c6210d4d9 100644 (file)
Binary files a/tests/examplefiles/output/antlr_ANTLRv3.g and b/tests/examplefiles/output/antlr_ANTLRv3.g differ
index c849d70727f47c7c0a0499fd00638458c98f5ba0..a1d9bcd22ba58c91fab67a59e16e3b1318ce8b53 100644 (file)
Binary files a/tests/examplefiles/output/antlr_throws and b/tests/examplefiles/output/antlr_throws differ
index 93d4ebcae952356c82995196597c22f3c0aee9d3..d65d245af92510a0c694eb994fe7723aaf940524 100644 (file)
Binary files a/tests/examplefiles/output/apache2.conf and b/tests/examplefiles/output/apache2.conf differ
index d66c0da483e9282f9f90bf8abdd22b05c224280e..e3a653c5247f095c72519cb83b4170182ccaa22e 100644 (file)
Binary files a/tests/examplefiles/output/as3_test.as and b/tests/examplefiles/output/as3_test.as differ
index 572c8e2455250b2136084e4034de4313e9194a23..b0a17c3a35ca2736d52be8dfaa8a6567b7971cec 100644 (file)
Binary files a/tests/examplefiles/output/as3_test2.as and b/tests/examplefiles/output/as3_test2.as differ
index 20d26b673b5205b2366852e668b55bce3142f31d..a9d509da4a5ed1058792f5b4588502c56d5fdeb9 100644 (file)
Binary files a/tests/examplefiles/output/as3_test3.as and b/tests/examplefiles/output/as3_test3.as differ
index 92e1d558e32c7ce1f132bc6e945b8af299eed721..5c762ba9e7a2aaef3a5393c546cd915d04325510 100644 (file)
Binary files a/tests/examplefiles/output/aspx-cs_example and b/tests/examplefiles/output/aspx-cs_example differ
index e19515ab7c7ef9bdfa33ccee268cedbeb8ceddee..90f1fd18b255e0a6b4d4e02fe8233cf635f686f8 100644 (file)
Binary files a/tests/examplefiles/output/autoit_submit.au3 and b/tests/examplefiles/output/autoit_submit.au3 differ
index 27da9daeeeb92c6d11b5541153b5eec92ec7cc93..1a2896f3fbc5e13f81943b63b8f7b9beb345c412 100644 (file)
Binary files a/tests/examplefiles/output/automake.mk and b/tests/examplefiles/output/automake.mk differ
index d0d67774a5cba4d2bc853a9b07921a6c6d11633b..ce65b5d01b124f3cfb8b2ffe805a1815187951b0 100644 (file)
Binary files a/tests/examplefiles/output/badcase.java and b/tests/examplefiles/output/badcase.java differ
index dee342421ed12dfcc5f9f25f2df91bf32a932c6a..b168d18e3229c67f9b42964f1b4f5621c344dc63 100644 (file)
Binary files a/tests/examplefiles/output/bigtest.nsi and b/tests/examplefiles/output/bigtest.nsi differ
index b7e3ad4c45131fd72369bb5b8742e3f44f115ae1..6e5fb54d93c6db1d4c6bb23673eff486f6e17aee 100644 (file)
Binary files a/tests/examplefiles/output/bnf_example1.bnf and b/tests/examplefiles/output/bnf_example1.bnf differ
index 8b64170009dfb39fadaba6e0ca06bf63a165f8dd..0eb7700f62fb29f4764843400b1b38a9a260fae3 100644 (file)
Binary files a/tests/examplefiles/output/boot-9.scm and b/tests/examplefiles/output/boot-9.scm differ
index fd6c230170402bb6332c14bd7f8b60d8728b19f8..4cb5304e69ffb6506220d583c994e7941ae13669 100644 (file)
Binary files a/tests/examplefiles/output/ca65_example and b/tests/examplefiles/output/ca65_example differ
index 18d5d25619d4706e2be42c7c0bc0dc606d527c57..ce374397c176a37de647144790c436f502b5c1e0 100644 (file)
Binary files a/tests/examplefiles/output/capdl_example.cdl and b/tests/examplefiles/output/capdl_example.cdl differ
index c28016d127ab0149674b2c93faa6efb34d150bf8..e6251cac78c2c3e791112fd992eca3ec789cb25d 100644 (file)
Binary files a/tests/examplefiles/output/cbmbas_example and b/tests/examplefiles/output/cbmbas_example differ
index cbe4a81659d9ca47b2e73a6e9b908b504b0f771b..96355c84b2b231e55687c4b4fd8e374bfe66b01e 100644 (file)
Binary files a/tests/examplefiles/output/cells.ps and b/tests/examplefiles/output/cells.ps differ
index 06ab7b12b1f23d05d69db173ba9eebfb734ab821..e126d360c8e7993847099e1e4aef51d3fe9caa0a 100644 (file)
Binary files a/tests/examplefiles/output/ceval.c and b/tests/examplefiles/output/ceval.c differ
index f014059577ea8634c55137438dad4c56c789e359..de022ea9833cdefc9f6c86f1827edd8cbb6f7897 100644 (file)
Binary files a/tests/examplefiles/output/char.scala and b/tests/examplefiles/output/char.scala differ
index 5da8cc5f0f76e475cf8973e5a9fcfc81eebc9eb1..d931adcb00033a29d5fc72d245dd3372e0e8c0f1 100644 (file)
Binary files a/tests/examplefiles/output/cheetah_example.html and b/tests/examplefiles/output/cheetah_example.html differ
index 8f6a1feb2d9a8013ca656535ec234a55fa7b2077..3bfba1ea46767d19ca5d16288b534320c3340771 100644 (file)
Binary files a/tests/examplefiles/output/classes.dylan and b/tests/examplefiles/output/classes.dylan differ
index dac39c0c0ebad1ec42d1c2cf3e2e8b67d8225a5b..95b170e21daa9e40abb594502404a7f49ddd803f 100644 (file)
Binary files a/tests/examplefiles/output/clojure-weird-keywords.clj and b/tests/examplefiles/output/clojure-weird-keywords.clj differ
index a3d3b2ca38a5be845a4924a8e486f08f886c08d7..5d2b1708667b469b8f9600f68915641fd3d2adae 100644 (file)
Binary files a/tests/examplefiles/output/condensed_ruby.rb and b/tests/examplefiles/output/condensed_ruby.rb differ
index cc56a42badb6d3391bf09ddaa782dfdc702b4bb3..aee39da2767ad8c4a6ed2f16e17bc907876912e2 100644 (file)
Binary files a/tests/examplefiles/output/coq_RelationClasses and b/tests/examplefiles/output/coq_RelationClasses differ
index 76b86358634a26a886e7178578a3c3a20496650c..7a4ac56c9cafc42ed78d07090f9baf38c347f754 100644 (file)
Binary files a/tests/examplefiles/output/core.cljs and b/tests/examplefiles/output/core.cljs differ
index f89291b1e389bbabd327f42d15dba9baf3d9d6c0..903ff425b754b9886efb714fef98c387771f3e2f 100644 (file)
Binary files a/tests/examplefiles/output/database.pytb and b/tests/examplefiles/output/database.pytb differ
index 73e86888b338502d833c0d6f02c18b0a3c01e909..a24be372f6be7d3f3a7eb31320014a88fa32c09e 100644 (file)
Binary files a/tests/examplefiles/output/de.MoinMoin.po and b/tests/examplefiles/output/de.MoinMoin.po differ
index 13061da11b5216ea1cd14066fdd339cd08f70174..e22a7335df183fbbc088cc4d03d680359d9d4755 100644 (file)
Binary files a/tests/examplefiles/output/demo.ahk and b/tests/examplefiles/output/demo.ahk differ
index 7b72a46d32dc0da3f47aca89bdf1b5540c463bb8..be9988662524362c89e143d10553fc86476a2906 100644 (file)
Binary files a/tests/examplefiles/output/demo.cfm and b/tests/examplefiles/output/demo.cfm differ
index 706ac2cc51e8546cf2e21383b37c1ea44f88de48..d0bd69798aaa34b006989585ab7631505f29f5a4 100644 (file)
Binary files a/tests/examplefiles/output/demo.css.in and b/tests/examplefiles/output/demo.css.in differ
index 10d689457569a3a13c5716c360fb3e156a84250a..670f41a575025dc661d0b097ca91016abdefe29a 100644 (file)
Binary files a/tests/examplefiles/output/demo.frt and b/tests/examplefiles/output/demo.frt differ
index e9f20aa35c400690c22bf738e153f4f46b05d548..e0c1eb31121e5ece4dd29c1d27944027753657f4 100644 (file)
Binary files a/tests/examplefiles/output/demo.hbs and b/tests/examplefiles/output/demo.hbs differ
index a5fd72ed892c4c528018134d6e7b4eff73ce6ba3..9f4952101a731f28128b3853b29e2b23aa81393c 100644 (file)
Binary files a/tests/examplefiles/output/demo.js.in and b/tests/examplefiles/output/demo.js.in differ
index fbb79dc9c1bd83ad1a290f7bcfc549390849a7fb..73b93168139f6fb27f34315ce7dc9072ee6c34b5 100644 (file)
Binary files a/tests/examplefiles/output/demo.thrift and b/tests/examplefiles/output/demo.thrift differ
index c6b7fb8a92e0c86b30426736618f209c2cfc4425..4a7349c0255e94a2b3f929183b376875234de37e 100644 (file)
Binary files a/tests/examplefiles/output/demo.xul.in and b/tests/examplefiles/output/demo.xul.in differ
index 4a9e360f9f0b3fa728094ef6b9f53a075bf927b6..86b4615cfff39217f73c16b51e0b0c3863af93de 100644 (file)
Binary files a/tests/examplefiles/output/django_sample.html+django and b/tests/examplefiles/output/django_sample.html+django differ
index 69428d8a3e352164b9fd7402dbcf12ae867fcdf6..abead67f3e8dbac11ec6d86239eb1140b63969c3 100644 (file)
Binary files a/tests/examplefiles/output/docker.docker and b/tests/examplefiles/output/docker.docker differ
index d29fb58930b4df3fbbb7f501317001310803d99d..f6319ae65c285ce97b0855f549fa207d942df165 100644 (file)
Binary files a/tests/examplefiles/output/durexmania.aheui and b/tests/examplefiles/output/durexmania.aheui differ
index ca36a9f373fb99a8341c1f3fd0817d4fce7b6b4b..7d368d19dd44ceb3d2d02f07da2bd50a7fccb185 100644 (file)
Binary files a/tests/examplefiles/output/dwarf.cw and b/tests/examplefiles/output/dwarf.cw differ
index c0186450010fc97cfb7d859f8bc7c3362744f192..2d7f9f0fb2bd2807558bdd4f710611c7b4626838 100644 (file)
Binary files a/tests/examplefiles/output/eg_example1.eg and b/tests/examplefiles/output/eg_example1.eg differ
index 97c13e0fd0e95cf0718137f3a8fb7ad96b699405..7acd5cb083bf453757a632d70161b9d8b4c1abf4 100644 (file)
Binary files a/tests/examplefiles/output/ember.handlebars and b/tests/examplefiles/output/ember.handlebars differ
index b227c19a9d0ee801750f68a1c34b22c25d2e8db2..fe3e1bd5ba90a4ec18335893a79a4280e1d0def5 100644 (file)
Binary files a/tests/examplefiles/output/erl_session and b/tests/examplefiles/output/erl_session differ
index a8a103dfbe69e1054934afa4ba2023a2dd828cba..08420f21458ec14b39dca7d64cb78994230f7408 100644 (file)
Binary files a/tests/examplefiles/output/es6.js and b/tests/examplefiles/output/es6.js differ
index 1196f905add662e3e2624fcc72676ea73f97e3f0..9ba518a4137152912074740b3f08300202ece0a1 100644 (file)
Binary files a/tests/examplefiles/output/escape_semicolon.clj and b/tests/examplefiles/output/escape_semicolon.clj differ
index cb82763e4e8b457ce5854e33a48dad41cd5eaae5..bc8b2bd114dce4e51ba14266b294c55d5c4c29bf 100644 (file)
Binary files a/tests/examplefiles/output/eval.rs and b/tests/examplefiles/output/eval.rs differ
index e8e3f0541bee3c6aa1bff5c69c646840de166cc9..9e71ea163113446ec57e3038b47b174cffdd7e57 100644 (file)
Binary files a/tests/examplefiles/output/evil_regex.js and b/tests/examplefiles/output/evil_regex.js differ
index 4145e6ea8e3f4016261bacc39482ab113110ab2a..fd99a04e05237342609cb777d5bfd7b731e22888 100644 (file)
Binary files a/tests/examplefiles/output/example.Rd and b/tests/examplefiles/output/example.Rd differ
index fd90915117061a66031228fa10eb0a7209b63548..867e945fe053ec23627351490af1a0122c4c9100 100644 (file)
Binary files a/tests/examplefiles/output/example.als and b/tests/examplefiles/output/example.als differ
index 38e2d399bb02479a156b222e4edc1e2e02d2063b..0a70c95f2d79ea99c0ee230b75f9803fafc5ba28 100644 (file)
Binary files a/tests/examplefiles/output/example.bat and b/tests/examplefiles/output/example.bat differ
index fb48ae21e3ad9313f3e46f9b1080f15a6c0df419..b9d37f707fb5e17cfcac8d49cfc8780cfc8903c3 100644 (file)
Binary files a/tests/examplefiles/output/example.bbc and b/tests/examplefiles/output/example.bbc differ
index 7e8a71a38b6c90070835622214867cbc74fdde9d..955e11855b6a0984e15fadf7e13a8b3dcf2b5cb2 100644 (file)
Binary files a/tests/examplefiles/output/example.bc and b/tests/examplefiles/output/example.bc differ
index ba5469c48ecb7a56fb7feab3b4983b7af7c1482b..a2fdd9f6dbb42a3ea50bdd833105ce8de121e63b 100644 (file)
Binary files a/tests/examplefiles/output/example.boa and b/tests/examplefiles/output/example.boa differ
index 70e2aa20e09cd3c57a920555ca60fef12fc8c427..85ae01ae015f19e76850d77f8340e775ee0e0c44 100644 (file)
Binary files a/tests/examplefiles/output/example.bug and b/tests/examplefiles/output/example.bug differ
index ff9a9ecb6544693c720d8b57b0e546c3ff7ab1d3..d1db6df0357dd6337dbe1e8f4a98daf554a9db0a 100644 (file)
Binary files a/tests/examplefiles/output/example.c and b/tests/examplefiles/output/example.c differ
index 70e8afd732ce069767303893f793847e8a71c525..96f8b664a9a073ac43f7bee97768626783045a23 100644 (file)
Binary files a/tests/examplefiles/output/example.ceylon and b/tests/examplefiles/output/example.ceylon differ
index c91660edd582d33cf149f7ca8a2d44e2f38bf0de..cb355fcb67d2e548c547a824c7c1df3bdc483bbf 100644 (file)
Binary files a/tests/examplefiles/output/example.chai and b/tests/examplefiles/output/example.chai differ
index bfc9a3945660ca7bc3afd8e58aa65f886252ea7d..924c751bb2da51615a7f878618a6d2a6bc3b6a3e 100644 (file)
Binary files a/tests/examplefiles/output/example.clay and b/tests/examplefiles/output/example.clay differ
index daf6282182cd23279e4b23a29c7358f7ede79574..7cf0af75e6bce60574d1d6f27f3bd17c8b791602 100644 (file)
Binary files a/tests/examplefiles/output/example.cls and b/tests/examplefiles/output/example.cls differ
index 22cee263311d563c82a09051a585b262987eb3c2..b6fcdd8d27973e170f9ddddf71fe493ae15bead2 100644 (file)
Binary files a/tests/examplefiles/output/example.cob and b/tests/examplefiles/output/example.cob differ
index 3522d59b3570f30ec80e95c6b14bea8ed0d3b2e1..05290fbdeb262bb481458fb6a2c263a013262887 100644 (file)
Binary files a/tests/examplefiles/output/example.coffee and b/tests/examplefiles/output/example.coffee differ
index e4d0e792c75af90160b56c25a7a8e042fc171519..9967b298a62e2bc81084cda235812cc0a15055f9 100644 (file)
Binary files a/tests/examplefiles/output/example.cpp and b/tests/examplefiles/output/example.cpp differ
index 497bf95847ffeffa3112e9cad947facd5419634c..578b9e9992ae4aa7487e5ab2a43b06c19e5950b1 100644 (file)
Binary files a/tests/examplefiles/output/example.e and b/tests/examplefiles/output/example.e differ
index 9cc0fea1f32472f2b43c34bc057b4a3e93a4369e..a893369a0aa4b5488f1d8d6ace78abf17fa8eb65 100644 (file)
Binary files a/tests/examplefiles/output/example.elm and b/tests/examplefiles/output/example.elm differ
diff --git a/tests/examplefiles/output/example.eml b/tests/examplefiles/output/example.eml
new file mode 100644 (file)
index 0000000..7192256
Binary files /dev/null and b/tests/examplefiles/output/example.eml differ
index 0fccb5ae56437b3e2684a8639f6eda10291add8a..32f1a1151a7357765945097ebd9269a7093aedb0 100644 (file)
Binary files a/tests/examplefiles/output/example.ezt and b/tests/examplefiles/output/example.ezt differ
index 70ccbd53ff5522649973765421ee8b47af396fec..962f5331ff1eb15278414827ebe7e7cbd6a7e784 100644 (file)
Binary files a/tests/examplefiles/output/example.f90 and b/tests/examplefiles/output/example.f90 differ
index 0ece0b633fda15a39cc780ba8742d55f75406220..efa791cdac180ba35bf380e0b9c8775f4408e459 100644 (file)
Binary files a/tests/examplefiles/output/example.feature and b/tests/examplefiles/output/example.feature differ
index 7974b149dcd06a2db49390c4e377b97ec1ce9f40..de1c31cfb6e8dd80138dba591d7f37fbe1f64579 100644 (file)
Binary files a/tests/examplefiles/output/example.fish and b/tests/examplefiles/output/example.fish differ
index 2f6a97302dd4ca2eaae8a9a28b25b3774f98fb37..24fd634796bbbac564e3c493996d9b1de97816fe 100644 (file)
Binary files a/tests/examplefiles/output/example.flo and b/tests/examplefiles/output/example.flo differ
index 0e8a651bbbabb1783d4cb8c2fb6bffcadc75f32c..f9138aa5e6cb9f0363b4b10c20cdde0e4fe4ccbe 100644 (file)
Binary files a/tests/examplefiles/output/example.gd and b/tests/examplefiles/output/example.gd differ
index 86e92fefa5e99df1da5af8383cb238ba3b7c215f..67cbb530279acea16467091a7b6c80a00c034f6e 100644 (file)
Binary files a/tests/examplefiles/output/example.gi and b/tests/examplefiles/output/example.gi differ
index 4f9d0e00b0ab9c5d57192b25700454a051698e7e..c50100ecad3caba692ac639ed0a82b08fb0c551a 100644 (file)
Binary files a/tests/examplefiles/output/example.golo and b/tests/examplefiles/output/example.golo differ
index f1eb275191a7c16938de165c47547846fe1eacb1..d1f11ce12f89a229b549e3643fa3ff15ddd28753 100644 (file)
Binary files a/tests/examplefiles/output/example.groovy and b/tests/examplefiles/output/example.groovy differ
index 4997b23b434753cf0545073d68343e6a1d149214..88fd1d80ee692c4ca254888a98a657fec52c705c 100644 (file)
Binary files a/tests/examplefiles/output/example.gs and b/tests/examplefiles/output/example.gs differ
index b23d60ba763c5e95c17bbf8adc5db9bbfd4261ef..81729cccdaf3d17a0b58e2ece07e607842e234cf 100644 (file)
Binary files a/tests/examplefiles/output/example.gst and b/tests/examplefiles/output/example.gst differ
index 5d5bb6aa32b17dbdd1eae59f9d3cadf8374a4af1..67225f810e76d93e6cb16206834b20b37cdf496c 100644 (file)
Binary files a/tests/examplefiles/output/example.hlsl and b/tests/examplefiles/output/example.hlsl differ
index aba5835ae5b8c2295fde01dbf5c5ada290fad973..f77f6be4750fbe94ceb69281f3d6d95e21d01328 100644 (file)
Binary files a/tests/examplefiles/output/example.hs and b/tests/examplefiles/output/example.hs differ
index 2426286d7fe38d917e2ff49f55bf35e907655a96..766e877bcd8b73d856c3a1a28586d65453de60e4 100644 (file)
Binary files a/tests/examplefiles/output/example.hx and b/tests/examplefiles/output/example.hx differ
index e0647ae21eb7ac306b2789c88ca0daa645739733..0e31a7d460b0e6610efc5050537c281754006c65 100644 (file)
Binary files a/tests/examplefiles/output/example.i6t and b/tests/examplefiles/output/example.i6t differ
index bd7566458909651c5f554188b6dc94f2aa2f6e93..1f5e19584ae8214c4cb53e3a0abc5f22ee128563 100644 (file)
Binary files a/tests/examplefiles/output/example.i7x and b/tests/examplefiles/output/example.i7x differ
index 1f8f4cf3a55b44ee315265df7f32c371be880819..9b8854e0539fadc08ef637557a4b85f31de66f15 100644 (file)
Binary files a/tests/examplefiles/output/example.icn and b/tests/examplefiles/output/example.icn differ
index d2b240f9170e6f4d564a1c50c1528814ad61e4fc..84f58ed677fe167d2a0afe89e478a81becb0f549 100644 (file)
Binary files a/tests/examplefiles/output/example.icon and b/tests/examplefiles/output/example.icon differ
index c2db467620e7ee9b5b92dcd3e6bebe4819ca2792..73b6841f553eb317202c40d2a26972fbdb80d33a 100644 (file)
Binary files a/tests/examplefiles/output/example.j and b/tests/examplefiles/output/example.j differ
index 07ad4e5ec6f4eb48bba10c1967bb9c30170f5d0f..05d8489c9ef648a1d130bdd07ff3aaa6a1a99164 100644 (file)
Binary files a/tests/examplefiles/output/example.jag and b/tests/examplefiles/output/example.jag differ
index 2971ef3f5907731a87aab1a3c433b1de09f33b97..5a0fa02749d763b7ad1395c4abb9ca81de921a06 100644 (file)
Binary files a/tests/examplefiles/output/example.java and b/tests/examplefiles/output/example.java differ
index f7fce9ec7a980b84a9fcbd147adf204e52868e52..db26c96a74f6288b17ce8efa29fb72ddfebf7f30 100644 (file)
Binary files a/tests/examplefiles/output/example.jcl and b/tests/examplefiles/output/example.jcl differ
index 7fb69b233578b5c1b9c47a1ad55e86b44af7819b..0790842a0229b075563b7c29f3d3f188fe24d080 100644 (file)
Binary files a/tests/examplefiles/output/example.jsgf and b/tests/examplefiles/output/example.jsgf differ
index 1f213e43b2fca40aa5deba30440d55b0121f6ab0..1959c9c43942ee9e934fdc9ec88a9d8488bee078 100644 (file)
Binary files a/tests/examplefiles/output/example.jsonld and b/tests/examplefiles/output/example.jsonld differ
index b6aed68d66796c8e673c8c569537da1920333264..d98b1c70be913b17f504d11d2952aae79267519d 100644 (file)
Binary files a/tests/examplefiles/output/example.juttle and b/tests/examplefiles/output/example.juttle differ
index b5d89777df5fce3643e33634f2e97dfc03304136..478c78d5926142fbdd49fc2466d07a8ecaae1a0c 100644 (file)
Binary files a/tests/examplefiles/output/example.kal and b/tests/examplefiles/output/example.kal differ
index 6971411c05d02141e0a8c6a052b62b5f41c1dce0..a76633e782100527b5ab3c6ad22e955e083bb427 100644 (file)
Binary files a/tests/examplefiles/output/example.kt and b/tests/examplefiles/output/example.kt differ
index 7f104f4522f2dc9707efcb775486cacae9ce54ad..b7731d4883394cc820ae3a5a4ac61a660064ac30 100644 (file)
Binary files a/tests/examplefiles/output/example.lagda and b/tests/examplefiles/output/example.lagda differ
index 968cdb09a6c0c875514577ad5c4590ffeb33f59e..587a76baf3a9eb712c05b6c63867b285d54226f4 100644 (file)
Binary files a/tests/examplefiles/output/example.liquid and b/tests/examplefiles/output/example.liquid differ
index ab2eed8d9dfcc357534b48eeb95ddb0c2e2daf10..9eb1e741a11d2e530f86249dc154edb409b2879a 100644 (file)
Binary files a/tests/examplefiles/output/example.lua and b/tests/examplefiles/output/example.lua differ
index 1a30f51bf79b7b3d55568dedc238b2ec2e1718d8..f136137f0c7882b5a8d532254d3697aa8f5d3f64 100644 (file)
Binary files a/tests/examplefiles/output/example.ma and b/tests/examplefiles/output/example.ma differ
index 5c269a1e49a98ba18976ce48ee0bc5a69d31394d..dad207af36c4cea942b571f208effff762f0c057 100644 (file)
Binary files a/tests/examplefiles/output/example.mac and b/tests/examplefiles/output/example.mac differ
index fdd14199afb8223398fe793201312d14db460dae..970de2f0c32ce6d3506707aa43b5592ce1d58dc4 100644 (file)
Binary files a/tests/examplefiles/output/example.md and b/tests/examplefiles/output/example.md differ
index 54a7f8385e5866f1e8bb625678bd61ab3dba1d8c..e6e8a81bfb6b6ea92103cf77b9d3fdd9f171d8d5 100644 (file)
Binary files a/tests/examplefiles/output/example.monkey and b/tests/examplefiles/output/example.monkey differ
index 2d454f8edab5b41ae7f59fdc2df888da9832b471..9bf24705e63846c48d298d1173e8d96bbeb169c9 100644 (file)
Binary files a/tests/examplefiles/output/example.moo and b/tests/examplefiles/output/example.moo differ
index 1aaf693dc7d5c22d233d55c2c2e0dc43f9e4e5e3..cfbb482681d40c25f32e5743a18567b04193ff95 100644 (file)
Binary files a/tests/examplefiles/output/example.moon and b/tests/examplefiles/output/example.moon differ
index 915edf0516a3286a9feda25d1773426bc141809d..f61d20529e67fba322043661acc6d3d76c117f3d 100644 (file)
Binary files a/tests/examplefiles/output/example.mq4 and b/tests/examplefiles/output/example.mq4 differ
index d074816ec39b9c0c4f72c48ce57aabdd418e09de..2e22d5ea3f7609596e61bc3e006e917ab691f541 100644 (file)
Binary files a/tests/examplefiles/output/example.mqh and b/tests/examplefiles/output/example.mqh differ
index ad33edc463259a8a928fe9e49565250f544e5ea9..a3131591e37ff244279230f2ed8ab23b2a23c09b 100644 (file)
Binary files a/tests/examplefiles/output/example.msc and b/tests/examplefiles/output/example.msc differ
index 19ac1a619706cc0bce8207d0e0c0814de59083ef..19fce3d0a3107b18221cc9d3eaf856fce5414470 100644 (file)
Binary files a/tests/examplefiles/output/example.ng2 and b/tests/examplefiles/output/example.ng2 differ
index 367a6b63ee9623a44e60a50f9ddea0a7748539ba..4fedaae12682b6e7b18a8b7bcd9a8166fb07de01 100644 (file)
Binary files a/tests/examplefiles/output/example.ni and b/tests/examplefiles/output/example.ni differ
index a07e308c1dfd8092ffb90992054e74a1df61308e..26ab67da0a72ca6bd63a01f498f8e3f941ff0a28 100644 (file)
Binary files a/tests/examplefiles/output/example.nim and b/tests/examplefiles/output/example.nim differ
index 0f13ac170d9e9f2630d8506476290c3e1484054e..7597072f57226c244adae204eee3a17b90f5cbe6 100644 (file)
Binary files a/tests/examplefiles/output/example.nix and b/tests/examplefiles/output/example.nix differ
index 52495768ea0565a5da53adb4f8f20a0448f10c26..cd6cc9245f36f8ad7a759479746f43a5c13010d7 100644 (file)
Binary files a/tests/examplefiles/output/example.ns2 and b/tests/examplefiles/output/example.ns2 differ
index 0701b12f0812ca486db67deede3d57d8bc59000c..e1cba906c29e0396ec3d22be55a0bdd37b805561 100644 (file)
Binary files a/tests/examplefiles/output/example.pas and b/tests/examplefiles/output/example.pas differ
index ebfa861cfbe86e0da3dcbb67b582c60048545fa1..3742ede34b315d4fe1373c5cd21357e3b8763443 100644 (file)
Binary files a/tests/examplefiles/output/example.pcmk and b/tests/examplefiles/output/example.pcmk differ
index 02658b994e7890b2d99290a24fe8802246e864bc..214e6a6856d2998ea67dcd8d4c5f458d0697b1b0 100644 (file)
Binary files a/tests/examplefiles/output/example.pony and b/tests/examplefiles/output/example.pony differ
index 8cd2b1474e6024c686437297fc06084da2cdc3d7..02400001168e2cbeb6bd2e50d18a86678dff97c3 100644 (file)
Binary files a/tests/examplefiles/output/example.pp and b/tests/examplefiles/output/example.pp differ
index 172f3bd9cf0d2221e807117ae087677420fe149d..c6187c393a313e7c7a242c3321560d12c256ebf1 100644 (file)
Binary files a/tests/examplefiles/output/example.praat and b/tests/examplefiles/output/example.praat differ
index 5a842c4ba4409d1af1df89cb6db34ad33468e0cc..af89b9209c5f02c1595ad6f458f9058a4f270ab0 100644 (file)
Binary files a/tests/examplefiles/output/example.prg and b/tests/examplefiles/output/example.prg differ
index debd9accb6df5589f2e739b8d4812c1158105dc5..fb18af191958f92bb1e969c44d89da99ebdbcb9c 100644 (file)
Binary files a/tests/examplefiles/output/example.rb and b/tests/examplefiles/output/example.rb differ
index 3e4758c9174f02431ff6ee562870236e32bd1b72..27c99946077b17ffb27d55dfa376a0733f2e3d8c 100644 (file)
Binary files a/tests/examplefiles/output/example.red and b/tests/examplefiles/output/example.red differ
index f7209d62edea5a308130bd13d8b64da7878a5c2f..3e81a0a613fca94ee5204650284d89d688616e1b 100644 (file)
Binary files a/tests/examplefiles/output/example.reds and b/tests/examplefiles/output/example.reds differ
index 11538b221b68a9f2d749c303660db6932ca5d86f..679b5597907a59c88f15c0eac46302997bb1126d 100644 (file)
Binary files a/tests/examplefiles/output/example.reg and b/tests/examplefiles/output/example.reg differ
index a8737d84523b8cb745d9d63f758e890d12308774..4770fe300c22eba803976da8daaf012642b6f9c4 100644 (file)
Binary files a/tests/examplefiles/output/example.rexx and b/tests/examplefiles/output/example.rexx differ
index aacee11bc8717198cb52c64ee05fc8a37f131387..e990a45f3d78479556cf3e3d8efe6826014de6e6 100644 (file)
Binary files a/tests/examplefiles/output/example.rhtml and b/tests/examplefiles/output/example.rhtml differ
index 26f52533015089d620e8bff89c48911dd43eccb0..fd8c984ccd04fd367cb8abd6d4c1d2c500dbf81d 100644 (file)
Binary files a/tests/examplefiles/output/example.rkt and b/tests/examplefiles/output/example.rkt differ
index 22d06b13d07248357af50ba2d57a7aeccbeea7f8..672fcb6636f7c0b11f2a0a8097535aa8b9db6a7c 100644 (file)
Binary files a/tests/examplefiles/output/example.rpf and b/tests/examplefiles/output/example.rpf differ
index 37f2649e8375204b9287bf5f422fe797ec1b3392..ba78ed26ab3e990010d9f6bb043f98741c92031c 100644 (file)
Binary files a/tests/examplefiles/output/example.rts and b/tests/examplefiles/output/example.rts differ
index 004c4bb62fc00b7141b1287842ede671ce09e133..5c950bef916ee91ae799a027599abd42178b3185 100644 (file)
Binary files a/tests/examplefiles/output/example.sbl and b/tests/examplefiles/output/example.sbl differ
index 0362c7025c7baefc84043e04dbcb646023a96d76..8e6c17d2e5528d75b80caebe3ea5bae17d5e4d14 100644 (file)
Binary files a/tests/examplefiles/output/example.scd and b/tests/examplefiles/output/example.scd differ
index 2ca32823ab3b6d99566dc9d724c5cdb8350d63a4..9e5cdae37310550d0601b5974f541e63b86a46f8 100644 (file)
Binary files a/tests/examplefiles/output/example.sgf and b/tests/examplefiles/output/example.sgf differ
index 489bdac755d50f5756913c5ed464e39731da835f..9093c6385e54c0e8a4db49d715343a8c9205a546 100644 (file)
Binary files a/tests/examplefiles/output/example.sh and b/tests/examplefiles/output/example.sh differ
index a8146b08386311060b26948d11352a93c8d079f3..cebcbfddd2494f5214245795dc8c514a9dc6db0e 100644 (file)
Binary files a/tests/examplefiles/output/example.sh-session and b/tests/examplefiles/output/example.sh-session differ
index 55fc4bd60ea88f1dc7173d44d5ede7f8e096d402..93953b38b25dde5e2d032388ca708b84b43a5bac 100644 (file)
Binary files a/tests/examplefiles/output/example.shell-session and b/tests/examplefiles/output/example.shell-session differ
diff --git a/tests/examplefiles/output/example.shex b/tests/examplefiles/output/example.shex
new file mode 100644 (file)
index 0000000..3506d28
Binary files /dev/null and b/tests/examplefiles/output/example.shex differ
index 0e3e804cd488b33efbc9f293c09b82123e6a0377..c2e0b05e989da69c5a692fd7b53cbf96c6ba1a31 100644 (file)
Binary files a/tests/examplefiles/output/example.sl and b/tests/examplefiles/output/example.sl differ
index 7751e28d340566757d10a4168cd376327a93e5b9..f5ffd4c3c4211a597e9fa45a3559bd8c55bf8c89 100644 (file)
Binary files a/tests/examplefiles/output/example.slim and b/tests/examplefiles/output/example.slim differ
index 0d3aa3a0fc127fbdd576ca607cfff8a08b62a378..0f4ceb6420474207e1eccfca47e1d8d51663603f 100644 (file)
Binary files a/tests/examplefiles/output/example.sls and b/tests/examplefiles/output/example.sls differ
index 87a82c49396bb47d01c9a47860d3fc4b27571ea6..bdffd00ea9cb06197866df277bec1328bddf0d3e 100644 (file)
Binary files a/tests/examplefiles/output/example.sml and b/tests/examplefiles/output/example.sml differ
index f5d06dec035f73385fe1feabfcac2c17aece5ce9..39aecefa89498cad59ef0e82b0daaf05743c6232 100644 (file)
Binary files a/tests/examplefiles/output/example.snobol and b/tests/examplefiles/output/example.snobol differ
index f05262a283e2d6f244d59676d7d1d62d97e6eb5c..33c532b088c94142445cd1c8a4161bad15bdf699 100644 (file)
Binary files a/tests/examplefiles/output/example.stan and b/tests/examplefiles/output/example.stan differ
index 73e6e9c5087162324b24100028356a03114e815c..e4e229a1db2c38e685ecffb908f319f0f4a0c3b9 100644 (file)
Binary files a/tests/examplefiles/output/example.tap and b/tests/examplefiles/output/example.tap differ
index ee655db573a2dfc989dffcc8367eb9b489e6be71..6c45fad51a33d8ba528071316455db0b6df9c745 100644 (file)
Binary files a/tests/examplefiles/output/example.tasm and b/tests/examplefiles/output/example.tasm differ
index 70f3d03e8c3e75ee371bdb37fc3f872ff400f78f..2d05d657ea7910af5ce0dcb4863213c0795c2ab8 100644 (file)
Binary files a/tests/examplefiles/output/example.tea and b/tests/examplefiles/output/example.tea differ
index bc0c5326bc92b0de24a0671e5a59d3826250239b..712811e9fcef79718242c2a24d28e264984ff06a 100644 (file)
Binary files a/tests/examplefiles/output/example.tf and b/tests/examplefiles/output/example.tf differ
index 1aa969c8bb850226bd2fe6e4f43ed1afb9047235..ee42f55892d9b7732e1b49cab0c21ffc554a59af 100644 (file)
Binary files a/tests/examplefiles/output/example.thy and b/tests/examplefiles/output/example.thy differ
index 42865cd900c7f1e62798a863bbf3e21753389a25..11adda9b6d427ea23ecc10806f8ca346bc8f9414 100644 (file)
Binary files a/tests/examplefiles/output/example.todotxt and b/tests/examplefiles/output/example.todotxt differ
index 1b4695895e61224467e9e8f68244adcf45edc3ac..4a443e3d767ec6f726c619bf33614edffb5f42c4 100644 (file)
Binary files a/tests/examplefiles/output/example.toml and b/tests/examplefiles/output/example.toml differ
index eb209e32fcd139ca4086ce9913713593b887415b..df5cd2b8ef58c24f639b595245b19e4838be23ed 100644 (file)
Binary files a/tests/examplefiles/output/example.ttl and b/tests/examplefiles/output/example.ttl differ
index 5436718c327498a2743a2333f73b469ce54d019e..6c0852043e551b99ced790180fdd10c099c9b2e4 100644 (file)
Binary files a/tests/examplefiles/output/example.u and b/tests/examplefiles/output/example.u differ
index 6129506f2e305009704d52d717299575af1f3549..19c108bf5cf4cf7e5edb01b12d27fec95e33d3d5 100644 (file)
Binary files a/tests/examplefiles/output/example.u1 and b/tests/examplefiles/output/example.u1 differ
index c895f4e78ecaab187119845961f194f03bc0a3e3..bf02ffc949905c24903dcc3c37294f55adf4bd9e 100644 (file)
Binary files a/tests/examplefiles/output/example.vbs and b/tests/examplefiles/output/example.vbs differ
index a43963ee253815617cea6b3a57ebd5c3ee3a81ae..64988f81c4c8cadcedb08a8de20274c43ed53c0e 100644 (file)
Binary files a/tests/examplefiles/output/example.weechatlog and b/tests/examplefiles/output/example.weechatlog differ
index 30c9185ee4fb15ac0bf7aeaf209326b51806a39d..e9b7c015ae3ff79dc69bf0d8b635a556d7b08ac4 100644 (file)
Binary files a/tests/examplefiles/output/example.whiley and b/tests/examplefiles/output/example.whiley differ
index 14d9672905a3b42908a42dfe31335fb4dd114004..8dc925398839f2ff0c3f072275986a8973177c83 100644 (file)
Binary files a/tests/examplefiles/output/example.x10 and b/tests/examplefiles/output/example.x10 differ
index df40f437e81e4813d4def176385396b91f781847..540c5b1e8bd8dab730ce3436a6329f3be8dc91f1 100644 (file)
Binary files a/tests/examplefiles/output/example.xhtml and b/tests/examplefiles/output/example.xhtml differ
index 23fa72b531c0253aa551c61dab8af6d5449135f2..b67d256dd0eed95df5021380b5f43969af835d58 100644 (file)
Binary files a/tests/examplefiles/output/example.xtend and b/tests/examplefiles/output/example.xtend differ
index 145610c87cab8643bfecc500b36fbc36ecb0cf0c..16db5607324cbe14083be6a25f627275659f5663 100644 (file)
Binary files a/tests/examplefiles/output/example.xtm and b/tests/examplefiles/output/example.xtm differ
index 7c2d1d0827d415ceaba8a85386f48a75009cf0bd..05b8915a0f29fd1074b357e5a76c98e1e9106d83 100644 (file)
Binary files a/tests/examplefiles/output/example.yaml and b/tests/examplefiles/output/example.yaml differ
diff --git a/tests/examplefiles/output/example.zig b/tests/examplefiles/output/example.zig
new file mode 100644 (file)
index 0000000..9f22410
Binary files /dev/null and b/tests/examplefiles/output/example.zig differ
index 652181d2a07f7c765fe6b7286106a17fcf76da18..61f8e1ed75a99d758360aa65315e4568cbf88725 100644 (file)
Binary files a/tests/examplefiles/output/example1.cadl and b/tests/examplefiles/output/example1.cadl differ
index 135c1ab7009aa2a0c1ca6ce4dc4580851935aa70..21b988ecb5e046f3fef2dd8897ce38525ea4da38 100644 (file)
Binary files a/tests/examplefiles/output/example2.aspx and b/tests/examplefiles/output/example2.aspx differ
index 79d042a2583119419a540e29d044a379c90eba4e..196bd3248a29048dfbe5114d4d9622825c778390 100644 (file)
Binary files a/tests/examplefiles/output/example2.cpp and b/tests/examplefiles/output/example2.cpp differ
index bb55e2f823578713fdacfc6106b90eb54f5b7082..36dd70c3baee9ec532b2af99ff4d1e6410adc2a4 100644 (file)
Binary files a/tests/examplefiles/output/example2.msc and b/tests/examplefiles/output/example2.msc differ
index ddaa98d75723c8f8275309b3489c64c638f34ab7..27beb2038e8d2c0fdd1e7ca553292b6109ef166c 100644 (file)
Binary files a/tests/examplefiles/output/exampleScript.cfc and b/tests/examplefiles/output/exampleScript.cfc differ
index 4e0f9e97cf8272de1fd532334dfdf974513b2f0a..2e0c664210cef408d0ce88289db248ec46955bce 100644 (file)
Binary files a/tests/examplefiles/output/exampleTag.cfc and b/tests/examplefiles/output/exampleTag.cfc differ
index bf384356fe041fbe5a93d8c30451c118f3cb5ca5..62f0e125a735bc7d76711a85f556f5fe0b8208ef 100644 (file)
Binary files a/tests/examplefiles/output/example_coq.v and b/tests/examplefiles/output/example_coq.v differ
index 23dd79144c5f2d33c837b4e6e24c0b0664ee7feb..c9ba1c5fd4e31b8b6f0c4d4966002f79cd78fdd4 100644 (file)
Binary files a/tests/examplefiles/output/example_elixir.ex and b/tests/examplefiles/output/example_elixir.ex differ
index a2a825f3b3e03a69299f7231668ddc7e68bf9196..65cc0b3f1034d26b88c11b49d2e8d23f7c849376 100644 (file)
Binary files a/tests/examplefiles/output/example_file.fy and b/tests/examplefiles/output/example_file.fy differ
index 27d1ff116cd3305e7cbcc68e480b46f9f26bd35f..e1303ad893b87470ee3698493e08cbad41b30991 100644 (file)
Binary files a/tests/examplefiles/output/ezhil_primefactors.n and b/tests/examplefiles/output/ezhil_primefactors.n differ
index 2f9b5e5c7153ac6744f343963bf4a53d6c299b40..766db177fe7614cd96ec8949cbf8d4eb2cdde253 100644 (file)
Binary files a/tests/examplefiles/output/fennelview.fnl and b/tests/examplefiles/output/fennelview.fnl differ
index 9ca1585b6f40fffb3eced4adafa53d948e08883d..96a1d68e8d2170244ec7a68a728e123c4ef639c1 100644 (file)
Binary files a/tests/examplefiles/output/fibonacci.tokigun.aheui and b/tests/examplefiles/output/fibonacci.tokigun.aheui differ
index e8f92415fe90ede7ac41fb7d1cae773813050cad..3072199372a93b457cae0735c7203577e5d453de 100644 (file)
Binary files a/tests/examplefiles/output/firefox.mak and b/tests/examplefiles/output/firefox.mak differ
index 9821ef7885e8e4545eedc8c10b8a1dfe0d46859a..ac38064d31880873157d052eb3505344284cafa7 100644 (file)
Binary files a/tests/examplefiles/output/flatline_example and b/tests/examplefiles/output/flatline_example differ
index d3de72c6a1aab889e9dc73bc6fe82ce3e04bdcea..35a8d6d7fc00c37c5422060721be287e7bceed36 100644 (file)
Binary files a/tests/examplefiles/output/flipflop.sv and b/tests/examplefiles/output/flipflop.sv differ
index ab2ce68b8031c024057f58d414b360773d4929b5..48dfcbfbc334a5a22089d84f99522fb4921514c1 100644 (file)
Binary files a/tests/examplefiles/output/foo.sce and b/tests/examplefiles/output/foo.sce differ
index 54a9ef2c51777edd7e1ba5be5c4ce4d06129da45..bcbb0a6bef207493893ddb154ab5b4a65d5bb358 100644 (file)
Binary files a/tests/examplefiles/output/format.ml and b/tests/examplefiles/output/format.ml differ
index 381ae1c2ff9c77dfc4d3b8e09d4411f704e19942..8a3cceda1f0b4b8ec7b0c4b16bdbab048515803c 100644 (file)
Binary files a/tests/examplefiles/output/freefem.edp and b/tests/examplefiles/output/freefem.edp differ
index 00819512bf3cfb390ff1b3d313ea8d440d110705..724649e1efa5fb8bb7c73e205dc5d88942120892 100644 (file)
Binary files a/tests/examplefiles/output/fucked_up.rb and b/tests/examplefiles/output/fucked_up.rb differ
index 022332c3f5e0132374df8d8543c69b239e00120c..cf66b9fa18d6df02f136cfb565f9f87e08c149e9 100644 (file)
Binary files a/tests/examplefiles/output/function.mu and b/tests/examplefiles/output/function.mu differ
index ab73490b31edc498cf38fbe531edd9b6394f6df3..352839d7149c99a9016024539bc3aa2e680349af 100644 (file)
Binary files a/tests/examplefiles/output/functional.rst and b/tests/examplefiles/output/functional.rst differ
index 0d1f45b30c81546a80734e48579fc0386c39305a..00d08b489c706186692eb3e20a798b51600dff9d 100644 (file)
Binary files a/tests/examplefiles/output/garcia-wachs.kk and b/tests/examplefiles/output/garcia-wachs.kk differ
index 79eb6aea88e9ad7d71ed3e3ab326f4981249c45c..b09b6172e3236950903a0c43189cba77f7313030 100644 (file)
Binary files a/tests/examplefiles/output/genclass.clj and b/tests/examplefiles/output/genclass.clj differ
index 58e655c33b46af969684852bf6cf286cc035b088..b4d1d275a4bce2cce34ccca1f73d0232c6178599 100644 (file)
Binary files a/tests/examplefiles/output/genshi_example.xml+genshi and b/tests/examplefiles/output/genshi_example.xml+genshi differ
index e5a91a019b312ae465e780bf891c714e3460d95b..375862941bd6fe870432244ee10156a21503ba9f 100644 (file)
Binary files a/tests/examplefiles/output/genshitext_example.genshitext and b/tests/examplefiles/output/genshitext_example.genshitext differ
index 2410ae32773c22f01d548327ee21755047244c38..7a324f66332b0ef7ade07d2b9b39b25240278882 100644 (file)
Binary files a/tests/examplefiles/output/glsl.frag and b/tests/examplefiles/output/glsl.frag differ
index c2197171f55f7bf412e92863c78f31adc1aa5b04..e1cbe331545f4c60b23af7194f986b184003bcb2 100644 (file)
Binary files a/tests/examplefiles/output/glsl.vert and b/tests/examplefiles/output/glsl.vert differ
index 394cca6f1fc9ecad6a949352af18834a0772bbcb..6b9ea43c88ad6999bb2605b98982a91c013973b6 100644 (file)
Binary files a/tests/examplefiles/output/grammar-test.p6 and b/tests/examplefiles/output/grammar-test.p6 differ
index bc035e533f32706d5549121c71ac8fba7d8d7e04..80f115d183745f786d8dc761c51049b8861690fb 100644 (file)
Binary files a/tests/examplefiles/output/guidance.smv and b/tests/examplefiles/output/guidance.smv differ
index 1fdff2bbe612ac62af7cd118304818e37f91b07a..1ef6ff94eb7eb51bd6a07909b1f16b149edf14b2 100644 (file)
Binary files a/tests/examplefiles/output/hash_syntax.rb and b/tests/examplefiles/output/hash_syntax.rb differ
index 84ae062e3992ff02ce3d1644d2eaf6dcbcbce977..d28a9afa49fc7cad178b7c8d28a643d7e9e64907 100644 (file)
Binary files a/tests/examplefiles/output/hello-world.puzzlet.aheui and b/tests/examplefiles/output/hello-world.puzzlet.aheui differ
index b02f8c3da6794d7102ca5bcce86a84ad8b0cf799..0621fc27711d4e94af4b9c084a2b646b8487cef3 100644 (file)
Binary files a/tests/examplefiles/output/hello.at and b/tests/examplefiles/output/hello.at differ
index 5fc7f82fb692ddbec7fc1aee3335a46dd03c03f6..364f0aa436be6365a3303f5ed1101c2cceb8dae0 100644 (file)
Binary files a/tests/examplefiles/output/hello.golo and b/tests/examplefiles/output/hello.golo differ
index 6be07ef12343b22bdfc0a05822faee31b1eea074..bfd033815129e34f067ea4b5250b2a1985f24797 100644 (file)
Binary files a/tests/examplefiles/output/hello.lsl and b/tests/examplefiles/output/hello.lsl differ
index c2893fcce5312420404cd9ee80be6f9208c2c4f3..b5377c13c960e9f3d51a19eb6599a0c9bc545a5b 100644 (file)
Binary files a/tests/examplefiles/output/hello.smali and b/tests/examplefiles/output/hello.smali differ
index 0f237a9ca325bd47780803e83b5aa21ed30a4f1f..c15daf184515b19c599a478916dcc6372390f687 100644 (file)
Binary files a/tests/examplefiles/output/hello.sp and b/tests/examplefiles/output/hello.sp differ
index 3837dfd6f27e45794d19cba3d8d9d425f40a11c7..ce463d6557cfce415031fdd6e9260693e5143f15 100644 (file)
Binary files a/tests/examplefiles/output/hexdump_debugexe and b/tests/examplefiles/output/hexdump_debugexe differ
index 362a658593181d31c456293ff57894c7f57547a0..77a7d574d015b3da335a46a94ca2c572616806ca 100644 (file)
Binary files a/tests/examplefiles/output/hexdump_hd and b/tests/examplefiles/output/hexdump_hd differ
index 6c697d4a68f116a122f9d7f79a9055ddb0000f32..19a82120613479ebf6e325fa1137a83308cc90a7 100644 (file)
Binary files a/tests/examplefiles/output/hexdump_hexcat and b/tests/examplefiles/output/hexdump_hexcat differ
index 33866aa66fdbd2829d0b7b166deab2307616724c..1bf44bc1ce1fe651fb20f462a3814dea6190a6eb 100644 (file)
Binary files a/tests/examplefiles/output/hexdump_hexdump and b/tests/examplefiles/output/hexdump_hexdump differ
index 63cf7cfc0b50343e0677edbabb856c6bbf2ad878..eb3fc2460fe91e16e0d8cf44d5f69ed69596227b 100644 (file)
Binary files a/tests/examplefiles/output/hexdump_od and b/tests/examplefiles/output/hexdump_od differ
index caf1ea8a860b10eb65b901d16c960f1329202714..8ac0e59d660ce083341f1c4642903e92f45a0049 100644 (file)
Binary files a/tests/examplefiles/output/hexdump_xxd and b/tests/examplefiles/output/hexdump_xxd differ
index 81ff9707d547647013749012297b716f3f707502..ed09128f2e4c378ec930d8887f1405fe7555a54e 100644 (file)
Binary files a/tests/examplefiles/output/html+php_faulty.php and b/tests/examplefiles/output/html+php_faulty.php differ
index ab7924a2799f0b33abf1a0ea279d4c2a6e9d54cc..999c58aad5c6781741342d2e26edc57f8a253450 100644 (file)
Binary files a/tests/examplefiles/output/http_request_example and b/tests/examplefiles/output/http_request_example differ
index 516e8c2dd4533652423fe59b44a895b5054dab18..20c7d5fe5dbd9d90328457c747bd63256d1e616d 100644 (file)
Binary files a/tests/examplefiles/output/http_response_example and b/tests/examplefiles/output/http_response_example differ
index 18689de0dd1dceace5546db14963370824a4d130..ce2084e3db13e085b98ec3bcaf245b37f02c7415 100644 (file)
Binary files a/tests/examplefiles/output/hybris_File.hy and b/tests/examplefiles/output/hybris_File.hy differ
index fa62ff7c9bfb6fd0d7ddb51d9d0335e90dea11c3..9981b4492944227ddac941bc9a057eba4a7f14f1 100644 (file)
Binary files a/tests/examplefiles/output/idl_sample.pro and b/tests/examplefiles/output/idl_sample.pro differ
index 9c1290fe8fd83f9026508522a7790cbe4dd2e49b..441027388649c64f6a5063d7e270fc992b411121 100644 (file)
Binary files a/tests/examplefiles/output/iex_example and b/tests/examplefiles/output/iex_example differ
index 50b218d907c824e063613362ce907e176330167d..e3b07a132f006c2baa7745444e4e2e41a73d4503 100644 (file)
Binary files a/tests/examplefiles/output/inet_pton6.dg and b/tests/examplefiles/output/inet_pton6.dg differ
index cbfe4b9902c2ce6d457b878c8294d099d3606313..e05a432c6b2446dd89848e551f43d45c399659ae 100644 (file)
Binary files a/tests/examplefiles/output/inform6_example and b/tests/examplefiles/output/inform6_example differ
index 5329718c290a3d3a9c2184af33612d4ae88146de..5d3021bd114d7ed26033d468860d5cc461f52d21 100644 (file)
Binary files a/tests/examplefiles/output/interp.scala and b/tests/examplefiles/output/interp.scala differ
index 8c8dcd315a73b2a8bbf59678059b76e83ce2aa35..a8ac3e6568e2cc843e865035739f9a8a9b952c4d 100644 (file)
Binary files a/tests/examplefiles/output/intro.ik and b/tests/examplefiles/output/intro.ik differ
index 61732ea6e7278e65a201d4f2d823f4d6b86c7a79..e107f79aa36f935b614b2870043c5f42ad2e1189 100644 (file)
Binary files a/tests/examplefiles/output/ints.php and b/tests/examplefiles/output/ints.php differ
index cfdc539f8202973ac2476350a66ca26556e37b8b..eb03921b7604d9f1d73fa913e8f85d21d827095f 100644 (file)
Binary files a/tests/examplefiles/output/intsyn.fun and b/tests/examplefiles/output/intsyn.fun differ
index 16915b640d54225b11d9e5a4fc21e6b4cd162f00..586fc68fa3102a48fbfe160ff995f52e98c80be4 100644 (file)
Binary files a/tests/examplefiles/output/intsyn.sig and b/tests/examplefiles/output/intsyn.sig differ
index a6ecdd1098f85c3670c8c5b86bb731f7e9b30eef..51bd8bc2b973f9cf453cfb6be2f53bf261de06ae 100644 (file)
Binary files a/tests/examplefiles/output/irb_heredoc and b/tests/examplefiles/output/irb_heredoc differ
index c399034b81a96e321b02911d9cb9c2337cda2b6e..8458c78ee51fba1699da33ee6bea4d617dcb062c 100644 (file)
Binary files a/tests/examplefiles/output/irc.lsp and b/tests/examplefiles/output/irc.lsp differ
index 1eaaee4888b78be18b571f0eddee2c296b73f911..b84d4222cfae145385adb3609b59aa39046e17d1 100644 (file)
Binary files a/tests/examplefiles/output/java.properties and b/tests/examplefiles/output/java.properties differ
index 7b2777f26debc2e3310de87af287b7743c16184e..54d7efb6f11f9a8c6a4dd7f4acca2ad0b0118de3 100644 (file)
Binary files a/tests/examplefiles/output/jbst_example1.jbst and b/tests/examplefiles/output/jbst_example1.jbst differ
index 83f992aefb4cbca47334f8c74f6de1b637998bf0..1dc86308f9bc97363590ed5cba04bf24e67d6e13 100644 (file)
Binary files a/tests/examplefiles/output/jbst_example2.jbst and b/tests/examplefiles/output/jbst_example2.jbst differ
index 0fa440fb4fcd1ea54a669b1c147136f6b9809d01..007d48468ed0afe0b04a6fea8e394542cf0601e2 100644 (file)
Binary files a/tests/examplefiles/output/jinjadesignerdoc.rst and b/tests/examplefiles/output/jinjadesignerdoc.rst differ
index 1c0362d36961e2d0c3c96686bc45f1e9a078d46d..28ed79bd97fdb21f067b25ee18ecc1d12c2a4f9b 100644 (file)
Binary files a/tests/examplefiles/output/json.lasso and b/tests/examplefiles/output/json.lasso differ
index f2e8a4a4a2b1b19d3bf5c018ac7a5805c30b7452..7b11b1a2266a2acb42b24f863f4d451324847e7b 100644 (file)
Binary files a/tests/examplefiles/output/json.lasso9 and b/tests/examplefiles/output/json.lasso9 differ
index 7dc9bfe5f3a9b5f7280fc77a270fecf0e75e1bc4..4dc4da8c1c03d1bb1b629ec5fc9c94bee9a0a6af 100644 (file)
Binary files a/tests/examplefiles/output/language.hy and b/tests/examplefiles/output/language.hy differ
index 56cac7acdedd1274972e0f70256cf4793127707d..66a8cf916515be02269d274f754bfef5ead45ffd 100644 (file)
Binary files a/tests/examplefiles/output/lighttpd_config.conf and b/tests/examplefiles/output/lighttpd_config.conf differ
index 1af3da68aefd06280dd79a4a2acc8d8e55d71e01..6300d987e9b7d29c8cfd54fb7f698f36f88cb3da 100644 (file)
Binary files a/tests/examplefiles/output/limbo.b and b/tests/examplefiles/output/limbo.b differ
index d6adfedd4d726c409334692a8311b0773fa51063..a8f30ab3e457e5914da432420e3ef93d9ee2d4a1 100644 (file)
Binary files a/tests/examplefiles/output/linecontinuation.py and b/tests/examplefiles/output/linecontinuation.py differ
index f762b9e1cb88d5a7b56e1e1feba8e67b70ea2e1d..f8d49c89f1cd8328d83d9858a42675b77ae00cf0 100644 (file)
Binary files a/tests/examplefiles/output/livescript-demo.ls and b/tests/examplefiles/output/livescript-demo.ls differ
index 1822f82bd784a92c03cfa643b1bd1e34eaa66eeb..62fde5786e440c6a991a11cf19c0b01a66abd082 100644 (file)
Binary files a/tests/examplefiles/output/logos_example.xm and b/tests/examplefiles/output/logos_example.xm differ
index fad88803cada8bbd73e15c82b7d2acc68a071cc3..b5f4b043c7f6fd311f377701316ca7a1541c1f59 100644 (file)
Binary files a/tests/examplefiles/output/ltmain.sh and b/tests/examplefiles/output/ltmain.sh differ
index b6c156fe06dc6e433c1abc4caba134ed68436ff3..c75c8356a69253be5d5b7eefa3847a4024d323a9 100644 (file)
Binary files a/tests/examplefiles/output/main.cmake and b/tests/examplefiles/output/main.cmake differ
index 7476e4f7f5ee1074229ad63a8621471418671bb9..691508dce02a4463f832f758966cfe422ab37423 100644 (file)
Binary files a/tests/examplefiles/output/markdown.lsp and b/tests/examplefiles/output/markdown.lsp differ
index f5e60c6e6d015edd1d50ce297ec8994dd07df818..335cdc750cf87eb67bad1b2755a6f6de660fe00f 100644 (file)
Binary files a/tests/examplefiles/output/matlab_noreturn and b/tests/examplefiles/output/matlab_noreturn differ
index ef164b5e32ada64cc8bdd7ba80372f42c8ca7ddf..25f67efb2afcf68366803f283ffdee36f1df1446 100644 (file)
Binary files a/tests/examplefiles/output/matlab_sample and b/tests/examplefiles/output/matlab_sample differ
index 3000a93f4ad93fc23966056ccca987865e11ca45..363a5e2e768a1e36184376c0ec30ef7104c2f867 100644 (file)
Binary files a/tests/examplefiles/output/matlabsession_sample.txt and b/tests/examplefiles/output/matlabsession_sample.txt differ
index 036170ba6b60914a7d6fc48d4de7997b6e18375d..3bfee890aa5cb39f276360706d737f783ece6013 100644 (file)
Binary files a/tests/examplefiles/output/metagrammar.treetop and b/tests/examplefiles/output/metagrammar.treetop differ
index e33bb4844eeb178f16f1a5a93944065493316db9..78d0c7452561bff042c3de5dc6554d2f24fbbd96 100644 (file)
Binary files a/tests/examplefiles/output/minehunt.qml and b/tests/examplefiles/output/minehunt.qml differ
index 388ebae1e301c7d4d92de0f4b570bfab9661369b..e903271b0020d48d8a8860282554fcc70abe8deb 100644 (file)
Binary files a/tests/examplefiles/output/minimal.ns2 and b/tests/examplefiles/output/minimal.ns2 differ
index d2c677ee75b180afdac68f1f085504e4305d8a13..068fc305e19f5e43f632715789261dbd1dd9253b 100644 (file)
Binary files a/tests/examplefiles/output/modula2_test_cases.def and b/tests/examplefiles/output/modula2_test_cases.def differ
index a05647f39437518e13fe64c7162c093eb76b86c1..db0440efbe123a7fda38c5e937a48f4cdeab34ad 100644 (file)
Binary files a/tests/examplefiles/output/moin_SyntaxReference.txt and b/tests/examplefiles/output/moin_SyntaxReference.txt differ
index 3e8f95a1136297a151da8700a0ae5acf7eadc702..59fe086fbbaece3f2a60a420889a9e5b19359a3e 100644 (file)
Binary files a/tests/examplefiles/output/multiline_regexes.rb and b/tests/examplefiles/output/multiline_regexes.rb differ
index abf262bbbf45ad1d7fa872e7cd41714f0a777e35..cc5a0224b0c8ddbb4ce49a60208d56377a6dffe7 100644 (file)
Binary files a/tests/examplefiles/output/nanomsg.intr and b/tests/examplefiles/output/nanomsg.intr differ
index 1bde85144937b76fbb3a13ff38f44ceb8991d79f..00ac80b1e6374602c7289ce16ff129abd7b3f534 100644 (file)
Binary files a/tests/examplefiles/output/nasm_aoutso.asm and b/tests/examplefiles/output/nasm_aoutso.asm differ
index 6f41096e692270d7717c34cd7b6f75849e64823f..39383f9c104a9823f00a08c46bd3b012eece51d6 100644 (file)
Binary files a/tests/examplefiles/output/nasm_objexe.asm and b/tests/examplefiles/output/nasm_objexe.asm differ
index 45ec4b9dff018fd605eaefc902537e0b3e69b7fa..005119e308a4f38f1b8214a6d1be8eebcf9aef0d 100644 (file)
Binary files a/tests/examplefiles/output/nemerle_sample.n and b/tests/examplefiles/output/nemerle_sample.n differ
index 1e59c3082efde24070f9f907ab950be5b06879c7..4a9c50e123e2da6821f53edfb7e19619d8c71726 100644 (file)
Binary files a/tests/examplefiles/output/nginx_nginx.conf and b/tests/examplefiles/output/nginx_nginx.conf differ
index 46b5a01ea440eb38613eda226fea965d9b8dd613..8129edfce36ff01a3e3c96844b2fb82a4feeec1f 100644 (file)
Binary files a/tests/examplefiles/output/noexcept.cpp and b/tests/examplefiles/output/noexcept.cpp differ
index 3600ed89ff6f8dfe523de1f8870689805d561314..dc5732c368e9b4d65d597edbdf79b096375e9335 100644 (file)
Binary files a/tests/examplefiles/output/numbers.c and b/tests/examplefiles/output/numbers.c differ
index d2decc6f8879a92ec5e434c2b07e37f1cd615ba8..7f3e27beb8d51f7fa1e3b8f372b67a466d8d74eb 100644 (file)
Binary files a/tests/examplefiles/output/objc_example.m and b/tests/examplefiles/output/objc_example.m differ
index 33285f91d784f98c7b892b3d57fbfa93b3e24b39..db195c59ffb76c3e3797337239824edf4e0e9c5e 100644 (file)
Binary files a/tests/examplefiles/output/openedge_example and b/tests/examplefiles/output/openedge_example differ
index eaacbab99850f5bd0ede9fbc0b61f6ad9a2fb969..aba71e6a4e1f8d0ef16ee1eb0e6a1c498a6b24cc 100644 (file)
Binary files a/tests/examplefiles/output/pacman.conf and b/tests/examplefiles/output/pacman.conf differ
index 0ba63b91f4a0478ee395c84ae9153af8c3d30a1b..b05c97e27726da16ed5481c7739ea4bda816d217 100644 (file)
Binary files a/tests/examplefiles/output/pacman.ijs and b/tests/examplefiles/output/pacman.ijs differ
index 4283a0a0bc38bda797744d9b1bbe182458f36633..c46322546ae5f1be132a9539aacb9bfb84725119 100644 (file)
Binary files a/tests/examplefiles/output/pawn_example and b/tests/examplefiles/output/pawn_example differ
index f1e4754dd3836bd05807f943ab0204dd458ec5fc..b7b0f190ca75732ca2d7c2ba6fa999e0c4cbc6f7 100644 (file)
Binary files a/tests/examplefiles/output/perl_misc and b/tests/examplefiles/output/perl_misc differ
index b68bd7bf3fe9d9da36d8dc7fa00d0abc6e409b78..9fa2a8cd7c599c62ff1dea4732540b5975f715f7 100644 (file)
Binary files a/tests/examplefiles/output/perl_perl5db and b/tests/examplefiles/output/perl_perl5db differ
index 68b3690d5dded3cde41c8a4e71c7d6ceab0ecfc2..bc079d13056a358e8dcafb10e8752881f74242f3 100644 (file)
Binary files a/tests/examplefiles/output/perl_regex-delims and b/tests/examplefiles/output/perl_regex-delims differ
index a35d2e9fbf8e46ff9aebda851bffca4ecd50947a..fb06127c1e277af640571e669d7e2608a1cc4cab 100644 (file)
Binary files a/tests/examplefiles/output/perlfunc.1 and b/tests/examplefiles/output/perlfunc.1 differ
index 1c0974d92761b32746b9d4f117ae9144f90501ee..c84a98f07f43d0b96dfc8ca8c4fc01d62e8d5c26 100644 (file)
Binary files a/tests/examplefiles/output/phpMyAdmin.spec and b/tests/examplefiles/output/phpMyAdmin.spec differ
index 4b7b85616091110061724b0b166d7dfa3b385829..dc886d80b6f1b0e111c71c92ed23bd09e0ce8ebf 100644 (file)
Binary files a/tests/examplefiles/output/phpcomplete.vim and b/tests/examplefiles/output/phpcomplete.vim differ
index 53f85f91ddddf48a59eff175a16cabaabc9a8c08..a0fe96168d68e79a86594698cd54db72a447a81d 100644 (file)
Binary files a/tests/examplefiles/output/pkgconfig_example.pc and b/tests/examplefiles/output/pkgconfig_example.pc differ
index 06953995a8b48d77ba5952f0b330643c3a73d020..68a6a3915686233ebc535726345c8ed9373b476c 100644 (file)
Binary files a/tests/examplefiles/output/plain.bst and b/tests/examplefiles/output/plain.bst differ
index e2083ccfed7294ed79cd6bc608ebb47429384968..c2cb49417fdffe23f27352ef312d9e37eb30f71d 100644 (file)
Binary files a/tests/examplefiles/output/pleac.in.rb and b/tests/examplefiles/output/pleac.in.rb differ
index 47b20d6e91ef080908475e70d519840295d4736c..0a7c7b3eea065b83c4f6fa996e8bc0e4e8fc14fa 100644 (file)
Binary files a/tests/examplefiles/output/postgresql_test.txt and b/tests/examplefiles/output/postgresql_test.txt differ
index 763d060ac13e377d16100836533cef7f24fa4034..994cfc29d81285defe627fe4478b65a136595bcd 100644 (file)
Binary files a/tests/examplefiles/output/pppoe.applescript and b/tests/examplefiles/output/pppoe.applescript differ
index a7e7950f16ea6cda30531c3e469d089a37d36489..7d691ad51ef13c84e9906c305d306d62c13ca1ab 100644 (file)
Binary files a/tests/examplefiles/output/psql_session.txt and b/tests/examplefiles/output/psql_session.txt differ
index 1ce2dca4a862b798fa4f1345960ddb0a568a8e36..5305ec379a211256b881cf21c661e0fd82e969ee 100644 (file)
Binary files a/tests/examplefiles/output/py3_test.txt and b/tests/examplefiles/output/py3_test.txt differ
index c5749265224101da37bad6ab33703794fd133d06..446462c7d3893a1e62fa0841625fedcc7ac4a981 100644 (file)
Binary files a/tests/examplefiles/output/py3tb_test.py3tb and b/tests/examplefiles/output/py3tb_test.py3tb differ
index f259fb4215fadf4f9ad895df1406091cac65dca1..82c04ebb81228929ebe68503b33635a0c2543f54 100644 (file)
Binary files a/tests/examplefiles/output/pycon_ctrlc_traceback and b/tests/examplefiles/output/pycon_ctrlc_traceback differ
index 879bc0624640e9fb818cc0035c8081f02d114e8c..ba015f2b798c34689054635778aed66bfd511c5c 100644 (file)
Binary files a/tests/examplefiles/output/pycon_test.pycon and b/tests/examplefiles/output/pycon_test.pycon differ
index c29ddeacb503cc94f49c4a4f60d59f0ab0b06a34..86480a84ceb1a54017725fd791006fc7c1990a44 100644 (file)
Binary files a/tests/examplefiles/output/pytb_test2.pytb and b/tests/examplefiles/output/pytb_test2.pytb differ
index 1bab41a869f478321fe4707070d11c4aa0dd1325..230bb432e79eacac983b7fcc1edc3c151528fc58 100644 (file)
Binary files a/tests/examplefiles/output/pytb_test3.pytb and b/tests/examplefiles/output/pytb_test3.pytb differ
index 22c516e0e580eb719fa89547a67c69ddeb89761c..dcb66f58c51fd9f67ebddaf0f876b782f418ce2a 100644 (file)
Binary files a/tests/examplefiles/output/python25-bsd.mak and b/tests/examplefiles/output/python25-bsd.mak differ
index 4ba4d5b5d638818d13be90bb82a4f065111af2c8..89b75e401f3cb0f130a2134ab1f9e2329b5574f5 100644 (file)
Binary files a/tests/examplefiles/output/qbasic_example and b/tests/examplefiles/output/qbasic_example differ
index 9d431e1c0df6e7e5db0a4a4f6bc8f2b18e3efdcf..ab43f8accb10d4d760f8818ef6917c3a211aa473 100644 (file)
Binary files a/tests/examplefiles/output/qsort.prolog and b/tests/examplefiles/output/qsort.prolog differ
index 0bdd4db22a7a9c2f4913f43b35315dfef6d0c67b..8326a997f90c7a011b7a0507601129bdbee879e0 100644 (file)
Binary files a/tests/examplefiles/output/r-console-transcript.Rout and b/tests/examplefiles/output/r-console-transcript.Rout differ
index c241f5f01258cafcef0cb5ceafbda66fd70728e5..cc8b738530771e3b914925643d33ac138568fc9c 100644 (file)
Binary files a/tests/examplefiles/output/r6rs-comments.scm and b/tests/examplefiles/output/r6rs-comments.scm differ
index 3f1ada9eefc8f4a3d648e7a1d353cc8c92b84764..41089eeaf2a442b8b3054b91e9a5fa6b3330bd6e 100644 (file)
Binary files a/tests/examplefiles/output/ragel-cpp_rlscan and b/tests/examplefiles/output/ragel-cpp_rlscan differ
index c4bd3837894151112cd9786cd3594057776c7cc3..a53cff9f5b365d3b91158c2f31da59197ca4b2d2 100644 (file)
Binary files a/tests/examplefiles/output/ragel-cpp_snippet and b/tests/examplefiles/output/ragel-cpp_snippet differ
index 58c23691c3c42e1631fece08c6de574f20377492..b762e04bdf7a5e4c7e2247c0b0ab3acabbc2d6ce 100644 (file)
Binary files a/tests/examplefiles/output/regex.js and b/tests/examplefiles/output/regex.js differ
index 13645601a492e1657296dd5024ac7af8d1cb4ea1..48d65dead25bcc90ba23f0384e752836b07e84b9 100644 (file)
Binary files a/tests/examplefiles/output/resourcebundle_demo and b/tests/examplefiles/output/resourcebundle_demo differ
index 63b448d4c8dea94fbb59c5d4cfc485a8cd841255..4d164294c1b3b7215600cef5533ab3c270af0701 100644 (file)
Binary files a/tests/examplefiles/output/reversi.lsp and b/tests/examplefiles/output/reversi.lsp differ
index 8dc1f9c40eee0c47ed01097a1df0fd34fe37252c..4fd2e832e86e1f6dca9ed2a8dc6abb743bf76567 100644 (file)
Binary files a/tests/examplefiles/output/rnc_example.rnc and b/tests/examplefiles/output/rnc_example.rnc differ
index 74e372c70fbb376b9785d1bd373ffab2e35e4152..f73d5f6623a8909b9fdf37f58c8f0261e222fbf0 100644 (file)
Binary files a/tests/examplefiles/output/roboconf.graph and b/tests/examplefiles/output/roboconf.graph differ
index 751254338bd730b066c9d6903938849893454c8e..5006ad67408880854c15a207801ecd3be92c7e0c 100644 (file)
Binary files a/tests/examplefiles/output/roboconf.instances and b/tests/examplefiles/output/roboconf.instances differ
index 08cd5883128d0a7aa3c0230b6a31fcdfc8af1f12..7f91d80e6c5092ff83cf23ba560333d1827638ce 100644 (file)
Binary files a/tests/examplefiles/output/robotframework_test.txt and b/tests/examplefiles/output/robotframework_test.txt differ
index b553c976d0d2a854b7257b4b51eb5da70049f7f6..0875040270f741a0122cd7724d2f3d8ae6bac383 100644 (file)
Binary files a/tests/examplefiles/output/rql-queries.rql and b/tests/examplefiles/output/rql-queries.rql differ
index 8f91a033a7a0688534e4aeb22f7bf96cde143bac..15aef915af0d027ac985de036e7284b1a76258e6 100644 (file)
Binary files a/tests/examplefiles/output/ruby_func_def.rb and b/tests/examplefiles/output/ruby_func_def.rb differ
index 9d038634d242b40395c1a75ae860b63b44993655..ba2fed92b8ecc2d4982bfa43efa276d673f8c1a9 100644 (file)
Binary files a/tests/examplefiles/output/sample.qvto and b/tests/examplefiles/output/sample.qvto differ
index bc6ea56c196da6f4e6e5190045e39d868be9156c..49e031f57df62b857adf2d92abc99aeb68b3d42d 100644 (file)
Binary files a/tests/examplefiles/output/scilab.sci and b/tests/examplefiles/output/scilab.sci differ
index 1963e7f27416e9dfb983cba653dbe7706f564546..3a21997d8ff83df3f30c41b066f95e35462bd58c 100644 (file)
Binary files a/tests/examplefiles/output/scope.cirru and b/tests/examplefiles/output/scope.cirru differ
index a6a873dde26aa9f583c6d7c112e8c2add05a9814..6f3dc3fd6cf590476bf9cd144e10c8947cf680d9 100644 (file)
Binary files a/tests/examplefiles/output/session.dylan-console and b/tests/examplefiles/output/session.dylan-console differ
index f5819ace82d7040fac7921669be6270ea9351441..3dbf7cd24d68e652b2a483cc970c445f392153c0 100644 (file)
Binary files a/tests/examplefiles/output/sibling.prolog and b/tests/examplefiles/output/sibling.prolog differ
index fc06cec8043cc70545f7ccf217e7cae2d9213721..36529ddbffbd98b88e2d51d5f9569a10eef0ea77 100644 (file)
Binary files a/tests/examplefiles/output/simple.camkes and b/tests/examplefiles/output/simple.camkes differ
index 76f7da8bd40ae9ce08b2a0b2eb26fa43ad0e3c1f..fbe15686312829ea97fba7e53903935720c86db0 100644 (file)
Binary files a/tests/examplefiles/output/simple.croc and b/tests/examplefiles/output/simple.croc differ
index dbe030c07eda8c1306b01a093818af08eb4bd4a1..6c00cf0a77d08257447108954cefe59efb771665 100644 (file)
Binary files a/tests/examplefiles/output/smarty_example.html and b/tests/examplefiles/output/smarty_example.html differ
index c80d90d2604639c5dd5ea60ef737ae08e515ce4a..ca88dba655fc56cfa184c90a24b4007149416e62 100644 (file)
Binary files a/tests/examplefiles/output/source.lgt and b/tests/examplefiles/output/source.lgt differ
index 2659a319706da666261219af86eef9910578ab12..5b6837f655355506ae62befb227e9f4238054357 100644 (file)
Binary files a/tests/examplefiles/output/sources.list and b/tests/examplefiles/output/sources.list differ
index 3b708cc157dc6f235eeb57d6c44fae25989bbab5..1deea7cbfc73e5c2ad57f9a27d700135fd038ee2 100644 (file)
Binary files a/tests/examplefiles/output/sparql.rq and b/tests/examplefiles/output/sparql.rq differ
index 64339f01d18664c13dddd6088fad0126ec1a249f..3ea403b223640ccf3c4fc231c47753aa7668dcdc 100644 (file)
Binary files a/tests/examplefiles/output/sphere.pov and b/tests/examplefiles/output/sphere.pov differ
index 45bc178d273aaf4dc2f1ea92a8b7bd357a0753f6..590788aec31a1a345dcaf4a47b41532aeb640441 100644 (file)
Binary files a/tests/examplefiles/output/sqlite3.sqlite3-console and b/tests/examplefiles/output/sqlite3.sqlite3-console differ
index 931ddc08cc9dcb734d3ce8c7512915e315e3d63f..875617f61b9b8018a60862665e4e4ba8d95ba05d 100644 (file)
Binary files a/tests/examplefiles/output/squid.conf and b/tests/examplefiles/output/squid.conf differ
index a63660365a693852d5dc96822ee78d92934ba2de..ea000cd1362144fa0eb1348c4138898ce2df4b22 100644 (file)
Binary files a/tests/examplefiles/output/string.jl and b/tests/examplefiles/output/string.jl differ
index 52a71d8feec19bdb91fe631a3f95b2e7c444fc7d..d1abc369b53df50d6b79621aeaca3ea860b39ac2 100644 (file)
Binary files a/tests/examplefiles/output/string_delimiters.d and b/tests/examplefiles/output/string_delimiters.d differ
index 30b92cf0ad37728a140b459c4fdece2652fcdfa2..f5e399bdc6cb0b07caac50abbc902282dd6ba413 100644 (file)
Binary files a/tests/examplefiles/output/stripheredoc.sh and b/tests/examplefiles/output/stripheredoc.sh differ
index 14e49a093be4c5aadeeff7e5c7dc0b25a58c9abe..0214f0e54fe5eefade3cd47108e66e9ad5a17873 100644 (file)
Binary files a/tests/examplefiles/output/subr.el and b/tests/examplefiles/output/subr.el differ
index 09548b62c9f82b4aa6f63049dd430c7f2b0ec296..61526dd850f86dd4969a5eba4e1fa9fa9e21be80 100644 (file)
Binary files a/tests/examplefiles/output/swig_java.swg and b/tests/examplefiles/output/swig_java.swg differ
index 7d6c57d4f5aaf207d7eaf4fa79967caed6325bb2..6c209b823a4ac1c65d3660c0a1bf325e6afb94aa 100644 (file)
Binary files a/tests/examplefiles/output/swig_std_vector.i and b/tests/examplefiles/output/swig_std_vector.i differ
index 4bf61cb35b56d843c4db8d2423622bf432fb9e63..4ef0e006cfa4357e7307e96b41865477e55d8b0e 100644 (file)
Binary files a/tests/examplefiles/output/tads3_example.t and b/tests/examplefiles/output/tads3_example.t differ
index 9706ff8c5cec499dba82f6738cca504dfb5feab8..bbbb1e1e95e86170dc55f12a4ad49c4222be30f8 100644 (file)
Binary files a/tests/examplefiles/output/teraterm.ttl and b/tests/examplefiles/output/teraterm.ttl differ
index ca2a273fdabb18f18c05392ba5bc51c91b888e49..dea0ad0daf04d0d1248b1bdc23c192b784b18383 100644 (file)
Binary files a/tests/examplefiles/output/termcap and b/tests/examplefiles/output/termcap differ
index c8f042833576e77784f7cdfd4b2432ffb8a696c8..c3258c91c1dba44477b99d01aa34667240a8502d 100644 (file)
Binary files a/tests/examplefiles/output/terminfo and b/tests/examplefiles/output/terminfo differ
index be79c94447b3fb07085223114bc71e2756ffd2a9..0aa7909d8c3431591ddf36941da2586a9fa9d1d1 100644 (file)
Binary files a/tests/examplefiles/output/test-3.0.xq and b/tests/examplefiles/output/test-3.0.xq differ
index 35b1202d96c7eb62145002763f51b46d8ae5c609..9c4acbbfc80bb0b6e54f303488e8057e7ce02ea2 100644 (file)
Binary files a/tests/examplefiles/output/test-exist-update.xq and b/tests/examplefiles/output/test-exist-update.xq differ
index d81aa232eba9b17d852c6050735cb3f034b3c4d3..ed29937a2055b6016af1d1f423cdf2ec268e0f7e 100644 (file)
Binary files a/tests/examplefiles/output/test.R and b/tests/examplefiles/output/test.R differ
index d3299c0ecc0cba444fb70ddbccccf3d36f9ec3b2..523bd954d751e1d098b822c17444d2985535dce2 100644 (file)
Binary files a/tests/examplefiles/output/test.adb and b/tests/examplefiles/output/test.adb differ
index bf37577bc3bd8eca97ae4deffe9edae6e5dbc95c..fdb7d48a9142a25dd64c7fd2d621fcf2a8229087 100644 (file)
Binary files a/tests/examplefiles/output/test.adls and b/tests/examplefiles/output/test.adls differ
index e579d10237ba011868bee71b68aaca6b273f2e94..5039ea11c1d3acbaacceaac44f0d04d0d59438c6 100644 (file)
Binary files a/tests/examplefiles/output/test.agda and b/tests/examplefiles/output/test.agda differ
index 5ab5c49d9c8bcab5544ca47b031c5c074d4cf76a..9ab981b262a5dbb6be3ab7f7f7499c1f1f123e50 100644 (file)
Binary files a/tests/examplefiles/output/test.apl and b/tests/examplefiles/output/test.apl differ
index 06b36c2281a86c4a437c9b8aff521c37a27fd392..47e7e967d25cb8567f92241daf9d9dc23ea7e89c 100644 (file)
Binary files a/tests/examplefiles/output/test.asy and b/tests/examplefiles/output/test.asy differ
index 4a377ea3ee824a78e0d17ec06b3716579e818ad1..1dbd6ecffdbe0bb0171d3fdefadc2c0cedfa18ec 100644 (file)
Binary files a/tests/examplefiles/output/test.awk and b/tests/examplefiles/output/test.awk differ
index 5d704d46605680350e46da6f769609e34a317d7a..9ab3d1ff3c9aaccdcd437379bf2215127a624ca7 100644 (file)
Binary files a/tests/examplefiles/output/test.bb and b/tests/examplefiles/output/test.bb differ
index 980bea0dec2afee8de24deb706060fa41f126d0b..97131ee4fe4c937c879d38e8d206d302e6251bc3 100644 (file)
Binary files a/tests/examplefiles/output/test.bib and b/tests/examplefiles/output/test.bib differ
index 422b4e18b011263414289c2242d7c07af80d243c..4d44322d3d42124e9c590b81f7f80aaf5b9cbdc4 100644 (file)
Binary files a/tests/examplefiles/output/test.bmx and b/tests/examplefiles/output/test.bmx differ
index 36a5473ef2fa53f4232cac73d61ae6e7adda66fc..aaf2f6ebbcb3ac7635b0c826df1bee7127322821 100644 (file)
Binary files a/tests/examplefiles/output/test.boo and b/tests/examplefiles/output/test.boo differ
index 78b3d475eb55fd0b03ca5a34c3249ed6110689fa..38dcd68aceb1c60c5a8e772c533ba28cb060633f 100644 (file)
Binary files a/tests/examplefiles/output/test.bpl and b/tests/examplefiles/output/test.bpl differ
index 928eeb649e774083fd1db4120d51e8ca058d1cb9..6f3b10d2762e5eab0a47f8fc9ec0d317c3f4e11e 100644 (file)
Binary files a/tests/examplefiles/output/test.bro and b/tests/examplefiles/output/test.bro differ
index e4bd92a2e0bc98afbafed265d4e4745d99d61cd1..12f6ec6f5615b141ccdf1dcf6f1af0f8fc8ffa5d 100644 (file)
Binary files a/tests/examplefiles/output/test.cadl and b/tests/examplefiles/output/test.cadl differ
index ef5a3be5008b155ea940256b468a90e87315d7e6..c7d2769a8aad094320133f13d4449b7aba0c13db 100644 (file)
Binary files a/tests/examplefiles/output/test.cr and b/tests/examplefiles/output/test.cr differ
index 280d69e38aec9479e1052e42aa5301276f4e95e2..6ec1134adce9f66a0f3a09ebf15fcd144e4d650e 100644 (file)
Binary files a/tests/examplefiles/output/test.cs and b/tests/examplefiles/output/test.cs differ
index c9e0355c5807418ebea08b25fa28894eaa2ffdbc..bfae43484da4c17f50fb1ecee59e4da290cdac7d 100644 (file)
Binary files a/tests/examplefiles/output/test.csd and b/tests/examplefiles/output/test.csd differ
index 440934b8d9150ce36d3b41cb38fe47c6cd9beea9..e2f949e594b83985000863a00b1860b677915192 100644 (file)
Binary files a/tests/examplefiles/output/test.css and b/tests/examplefiles/output/test.css differ
index 225c18a9c8458cf267df14bb1d6889df6c37549a..584088ca9144559a329c1091ba0e2afe9169fa59 100644 (file)
Binary files a/tests/examplefiles/output/test.cu and b/tests/examplefiles/output/test.cu differ
index 1662e4cec00e968c25b10e72138881e6f1a8949d..80f875162ab775e07345f60f0d4babe6af02a236 100644 (file)
Binary files a/tests/examplefiles/output/test.cyp and b/tests/examplefiles/output/test.cyp differ
index 7f70a38aa4106be33534a436412f974b68cb28f1..be1ca84bd01a3668843cd7f1b2781626039de996 100644 (file)
Binary files a/tests/examplefiles/output/test.d and b/tests/examplefiles/output/test.d differ
index 73a279e492e1d65b75faf9f934ca75bab1318032..37db41e0419d8d6c9d826d9ddfc16bfa993d3c81 100644 (file)
Binary files a/tests/examplefiles/output/test.dart and b/tests/examplefiles/output/test.dart differ
index 5677e21751f123fb7ce84835d442ed402eb71fca..da481e495f5a0dc4a3cca765a195e84f83c2f660 100644 (file)
Binary files a/tests/examplefiles/output/test.dtd and b/tests/examplefiles/output/test.dtd differ
index a3e57e4ddb75d19ffc073e12944948db848d1b55..1b3eb3dae94f4ce7da4ed169d634960499435496 100644 (file)
Binary files a/tests/examplefiles/output/test.ebnf and b/tests/examplefiles/output/test.ebnf differ
index 51672e3c053cf0c0252a3b7453db0b35584f699d..aa1768c1cbcd8d0a72715013b6f658ecb32d2303 100644 (file)
Binary files a/tests/examplefiles/output/test.ec and b/tests/examplefiles/output/test.ec differ
index 4e9270a7eda253829f8a610a5159818e00ee6f5d..b0042d0ffc1c6b7c04cbc3c74a4bc2f7bc7bba48 100644 (file)
Binary files a/tests/examplefiles/output/test.eh and b/tests/examplefiles/output/test.eh differ
index 1d9582a01d43f3d8909f1173123256c6d87e5af8..bfe0cac9575f38422d7c191fe2950085f3d6d54d 100644 (file)
Binary files a/tests/examplefiles/output/test.erl and b/tests/examplefiles/output/test.erl differ
index 541ecb7a2c96ae47d091d6fc638b5a3da031b3cd..735d11257f32cf6107c025a69c8a6b5f15dc3487 100644 (file)
Binary files a/tests/examplefiles/output/test.escript and b/tests/examplefiles/output/test.escript differ
index c934db11f08b49097135cde2d24d29dd3fed07fd..118dd6fb59a60d88b48ba709c0fe0f0bc52a0dbc 100644 (file)
Binary files a/tests/examplefiles/output/test.evoque and b/tests/examplefiles/output/test.evoque differ
index fc8f955f88cef5ae9fbd3d6977fd0277dffee08b..1f66daa66402a0a3dd23d6de8a3ee2bed4aa95ca 100644 (file)
Binary files a/tests/examplefiles/output/test.fan and b/tests/examplefiles/output/test.fan differ
index b2f2459901ddeaf3c7e9f2173c05c977dbeb2d57..ac942fbb862abf0e5ca4f6f4619d07d8588105b3 100644 (file)
Binary files a/tests/examplefiles/output/test.flx and b/tests/examplefiles/output/test.flx differ
index 72f825de34a2035cc12aeaca55d1348375d79a47..4d36036600a8482ff0ef66f2d48644dc3813dfab 100644 (file)
Binary files a/tests/examplefiles/output/test.gdc and b/tests/examplefiles/output/test.gdc differ
index 15081ac2e1bc9873eb8a2fb6ddd121d4ff0dd158..c49530437fb2e4bc5096f6cc7df3608e2b2a6cfa 100644 (file)
Binary files a/tests/examplefiles/output/test.gradle and b/tests/examplefiles/output/test.gradle differ
index 5f373174db5f283216d69d7be1aa15e613c8517b..6c8581deca06f6a83396a7c3c2d8871fc636476d 100644 (file)
Binary files a/tests/examplefiles/output/test.groovy and b/tests/examplefiles/output/test.groovy differ
index b864987368209e248641f172469898e430f130b1..6fb22826d9b33f1d522fcdbe060024068b5b2592 100644 (file)
Binary files a/tests/examplefiles/output/test.hsail and b/tests/examplefiles/output/test.hsail differ
index 3ca49c7ed630445fc1a834677a8d8a95bd9bf326..811383abb2f3517b1ec58e78960ef3b71e9131d2 100644 (file)
Binary files a/tests/examplefiles/output/test.html and b/tests/examplefiles/output/test.html differ
index 6868b81e62456720d8cac27927a0b05362c09834..590e976ffe2a21660d8e127958cc162122c6164c 100644 (file)
Binary files a/tests/examplefiles/output/test.idr and b/tests/examplefiles/output/test.idr differ
index 1c1b69dde0b16722309e91682790e802312bd792..27f3d4a8b99f5121f4012023f6774ef4766d18b8 100644 (file)
Binary files a/tests/examplefiles/output/test.ini and b/tests/examplefiles/output/test.ini differ
index 2165f585a42dfc7d4d122f833cc1a0075379ded6..758717e4aff0b5f4f749d0b7ced9dd0c3e3975da 100644 (file)
Binary files a/tests/examplefiles/output/test.java and b/tests/examplefiles/output/test.java differ
index 7591b242a4d62fad19050850e183f8ce1afc7643..29564449850d9b1cdefd6f6322ce987bd0dc3b89 100644 (file)
Binary files a/tests/examplefiles/output/test.jsp and b/tests/examplefiles/output/test.jsp differ
index ed923399161b3e7db5d64d7425e96e936424e4dd..5fdafbc7261f62f4f4969badb760770f811a8687 100644 (file)
Binary files a/tests/examplefiles/output/test.lean and b/tests/examplefiles/output/test.lean differ
index 02d0782fef5315edfaa42a98ca076ab874905fe5..286579538ab1815108c4e8ac0a96ac2c2afa9269 100644 (file)
Binary files a/tests/examplefiles/output/test.maql and b/tests/examplefiles/output/test.maql differ
index 2d01e3a42585c7f9f7346b33b7070f3e91452ba7..feff18bb83319117c984c25a4a6d7927891bc577 100644 (file)
Binary files a/tests/examplefiles/output/test.mask and b/tests/examplefiles/output/test.mask differ
index 88abc6e633b4137a45c3f739ab4468fcbd5ffbc3..2cb31479bf09b93a133c7020c0e358ff175c359a 100644 (file)
Binary files a/tests/examplefiles/output/test.mod and b/tests/examplefiles/output/test.mod differ
index db439a6d5af4979ef4fd3c8efc37a36e5278538b..f6c5148fb8a35312eb2aeb9f4cfb0a6edaf9df93 100644 (file)
Binary files a/tests/examplefiles/output/test.moo and b/tests/examplefiles/output/test.moo differ
index fef50b0132dc36712d7103f8a63abe6ba364c3a4..c62ee87f182c329c0135e4f6c2cd304a7601695b 100644 (file)
Binary files a/tests/examplefiles/output/test.mt and b/tests/examplefiles/output/test.mt differ
index 41a10bc1440a531ccead5a2da32022ed0e308662..778b932c381b7214abed43f101e2418fc61e4e0c 100644 (file)
Binary files a/tests/examplefiles/output/test.myt and b/tests/examplefiles/output/test.myt differ
index 13ac2f0da623a31e5a4b16fdbdfd1c1f6855979b..291af18b7d3d5a7ab544f5442933659da600a521 100644 (file)
Binary files a/tests/examplefiles/output/test.ncl and b/tests/examplefiles/output/test.ncl differ
index 0c4fab3a08ac2129a9c58ad446380b45374004cd..345892bf8dd7915a0bb7bef7e023522934560495 100644 (file)
Binary files a/tests/examplefiles/output/test.nim and b/tests/examplefiles/output/test.nim differ
index 6787aa82886ec4431ee594c074a4d9ef3eeb9e17..4b3ceb371223b17c12591a7d3a95649cb3a1e67a 100644 (file)
Binary files a/tests/examplefiles/output/test.odin and b/tests/examplefiles/output/test.odin differ
index 25557b0b5355c214668656082107128d4b0ba7c1..c943f39641d1968b3f7efb64463af1f51a7dd71e 100644 (file)
Binary files a/tests/examplefiles/output/test.opa and b/tests/examplefiles/output/test.opa differ
index ad4e973cb55d2f1a4feb4fdb82f9492bc474237d..a59242fa23afd68849a908da2f887427718fbc39 100644 (file)
Binary files a/tests/examplefiles/output/test.orc and b/tests/examplefiles/output/test.orc differ
index da5b6288f155e0551fe3a6accc954ad4903e17d0..b30d0ef61a2fd614b2b17e8b1eeab841f44e23c6 100644 (file)
Binary files a/tests/examplefiles/output/test.p6 and b/tests/examplefiles/output/test.p6 differ
index 8e036b98b9556f4751dfd4b441f634286243c7d9..5a459bd09d33d17d58b96c71d527e51ea984b5aa 100644 (file)
Binary files a/tests/examplefiles/output/test.pan and b/tests/examplefiles/output/test.pan differ
index 8ce6070f92ac6804370d578868062654f327a046..a737de175f88401e0bfdb8bd05355f707e941a5e 100644 (file)
Binary files a/tests/examplefiles/output/test.pas and b/tests/examplefiles/output/test.pas differ
index 8e3ad6d3c3263b08f2f700c2d94d14933e66e3bf..94b28d4640b6b06bc44cd69483ec682283e3a3cb 100644 (file)
Binary files a/tests/examplefiles/output/test.php and b/tests/examplefiles/output/test.php differ
index 284aaee61c92a48927f289167cc0827fd3e95944..5628c613b912b61700b08e7424561d84d6801909 100644 (file)
Binary files a/tests/examplefiles/output/test.pig and b/tests/examplefiles/output/test.pig differ
index 389f2753ca0e968e4660f5bbe7831aef551490bf..e64cd2d897c71be9c8f659e8617e0042102d11b5 100644 (file)
Binary files a/tests/examplefiles/output/test.plot and b/tests/examplefiles/output/test.plot differ
index f89fde2a3c794e4ccbec45e3f75f4fed034ed1a3..02dffcc4bbe94c5ed87fc6f31855008ee93a0a59 100644 (file)
Binary files a/tests/examplefiles/output/test.ps1 and b/tests/examplefiles/output/test.ps1 differ
index 4946c6c59896b6bc677706e00509d2ce772ad9e6..99f67531fb124f12d560710e1314cd9eefc5a2b9 100644 (file)
Binary files a/tests/examplefiles/output/test.psl and b/tests/examplefiles/output/test.psl differ
index a3abfe52f35ff0082d2c4f0e65a7a0a686dd47df..4f21640efd1c091c43a986b8379d76491e2616df 100644 (file)
Binary files a/tests/examplefiles/output/test.pwn and b/tests/examplefiles/output/test.pwn differ
index 93a57b31c69080b101df9fcbc55533674aeac8d3..66f4e18e63848c1d578943f0b99bdf07edc9b8fa 100644 (file)
Binary files a/tests/examplefiles/output/test.pypylog and b/tests/examplefiles/output/test.pypylog differ
index 2178b950ba3e300eee0d73d7ad3ad0efd758aa54..6ec9a034596f152b7e3d9f2c7c27a629f58ab168 100644 (file)
Binary files a/tests/examplefiles/output/test.r3 and b/tests/examplefiles/output/test.r3 differ
index d4c195dadfc4f73bd96c757dfeac03d50863141a..dc588b1e1c389beab569bd1981358f2417e395db 100644 (file)
Binary files a/tests/examplefiles/output/test.rb and b/tests/examplefiles/output/test.rb differ
index eae0eed207c113a9ab0465229bdeba0d7498e034..e0a103b6660bff8bf7aa5d42a44605405ef91832 100644 (file)
Binary files a/tests/examplefiles/output/test.rhtml and b/tests/examplefiles/output/test.rhtml differ
index d84ad39e271aad62a749315a5bcac554898756bd..445a99fff2766eb7513a74fb50176186bd4257c3 100644 (file)
Binary files a/tests/examplefiles/output/test.rsl and b/tests/examplefiles/output/test.rsl differ
index a7378f9e7eb9b3ba1aaab9b1b010dea916d7b4bb..2f7e8a89a85fdd639b4e29e0a093de8864b408f8 100644 (file)
Binary files a/tests/examplefiles/output/test.scaml and b/tests/examplefiles/output/test.scaml differ
index e4d7331af3a53d2f4a3f61b9fdbf5162e11ff4ad..e0fe92bbd4eb0d88adf259f7648e666c2aac8797 100644 (file)
Binary files a/tests/examplefiles/output/test.sco and b/tests/examplefiles/output/test.sco differ
index b1f41b5621050ac015f301b52dcfb9dd647549d6..2b827c61b922b5ce48903a99d4b151da4ae87a7a 100644 (file)
Binary files a/tests/examplefiles/output/test.shen and b/tests/examplefiles/output/test.shen differ
index 0c9cd804252ba872e2895162524b962c7388c4cb..32327f92838fcdbb46e5ef3ad7431931b1907617 100644 (file)
Binary files a/tests/examplefiles/output/test.sil and b/tests/examplefiles/output/test.sil differ
index 02dca53fc630398f6b475ebd64462b8964ad0a88..cf632f7f4ea31d26f2024e6b4d3a362cbf918aa6 100644 (file)
Binary files a/tests/examplefiles/output/test.ssp and b/tests/examplefiles/output/test.ssp differ
index a17dbaa4a1a877d20f498164ebf357232010f654..d48d7f984cefee800b2c377c81f4b28ddded914c 100644 (file)
Binary files a/tests/examplefiles/output/test.swift and b/tests/examplefiles/output/test.swift differ
index 12b302195344ce37d0e2269abeb3b84d121abde4..ac7b4f63de360aa05234fea51cee588ec57ff258 100644 (file)
Binary files a/tests/examplefiles/output/test.tcsh and b/tests/examplefiles/output/test.tcsh differ
index 986e65c241791a28bfeacd3f7fd8f461d5dff64c..aefce4e2d9efcd5a6f93ff715ec78930062dd23f 100644 (file)
Binary files a/tests/examplefiles/output/test.vb and b/tests/examplefiles/output/test.vb differ
index 50f5c2ca3d4eda3a424440d5d7852b8da97c1934..95d99c83c3859828e549007b66316f8b7e4641cf 100644 (file)
Binary files a/tests/examplefiles/output/test.vhdl and b/tests/examplefiles/output/test.vhdl differ
index 067c97077080299c2e97533b992442d1c5af7238..02cc0a49d2d25f5aeb565d712ebbce484601c232 100644 (file)
Binary files a/tests/examplefiles/output/test.xqy and b/tests/examplefiles/output/test.xqy differ
index 08a65ca9a70ea1bda93b9dd55e66f199284ff041..111b5e5be818e92200cbc267ff8b895abfc1d472 100644 (file)
Binary files a/tests/examplefiles/output/test.xsl and b/tests/examplefiles/output/test.xsl differ
index d6625208eceb7df7fc805f19fedfbc458fa20198..f610f324bc534f8e37a6010a9b33f6f500d07d28 100644 (file)
Binary files a/tests/examplefiles/output/test.zep and b/tests/examplefiles/output/test.zep differ
index 7fee853067e2fcbd82e689696649cc8f4097e3de..b282934f23761be320ef5bf966d95423b1f8297e 100644 (file)
Binary files a/tests/examplefiles/output/test2.odin and b/tests/examplefiles/output/test2.odin differ
index b54339943a8458e9ffc18d384333457b96593e4a..f117a4b00269e291fb234165210dbe40b2b52aad 100644 (file)
Binary files a/tests/examplefiles/output/test2.pypylog and b/tests/examplefiles/output/test2.pypylog differ
index fad2921a0a793e986fd3c2e487d7500b707a3968..9976b2b859af30b96fede7f44ba8b821ed0f4465 100644 (file)
Binary files a/tests/examplefiles/output/test_basic.adls and b/tests/examplefiles/output/test_basic.adls differ
index 92dd9a7b1c1eab9baadba9871f839a42ca2952fc..33a6bf97381d86866fab37f933009679a229f5a8 100644 (file)
Binary files a/tests/examplefiles/output/truncated.pytb and b/tests/examplefiles/output/truncated.pytb differ
index 200beb629da4e4769a945501818596e5a1c759f8..06c254ae69db9dfa2df227ba422b10acba9ff615 100644 (file)
Binary files a/tests/examplefiles/output/tsql_example.sql and b/tests/examplefiles/output/tsql_example.sql differ
index 49fe9178e666a1486f897eb6cc8d6b8263ab6787..9bc293d31e67d5b7c9b7068a00ded2fbf1bfc729 100644 (file)
Binary files a/tests/examplefiles/output/twig_test and b/tests/examplefiles/output/twig_test differ
index edccdb06ac960ada6753473531faff885df6ef30..9c70bc2e766d9cb54a53c174ea3f5367580e56dd 100644 (file)
Binary files a/tests/examplefiles/output/type.lisp and b/tests/examplefiles/output/type.lisp differ
index 6c3dcb7e39bb533dbb1a0fd39eda556459bbbeed..15674e4fad5c8f1b8fc532673064ceb0a1196b49 100644 (file)
Binary files a/tests/examplefiles/output/typescript_example and b/tests/examplefiles/output/typescript_example differ
index 8356faa3787acd66eaab8fead11dda7517aef9f1..4a1b4a40168f9c7d51be059566128a3614be65aa 100644 (file)
Binary files a/tests/examplefiles/output/typoscript_example and b/tests/examplefiles/output/typoscript_example differ
index f65166b715954114d0a605510b7923c86b1064ae..943c677ddf6436a01d5e53ae93f6fada1ca52929 100644 (file)
Binary files a/tests/examplefiles/output/underscore.coffee and b/tests/examplefiles/output/underscore.coffee differ
index e560c8cb5d8ece6945349b6a5348c250f5dcd900..8790c2b76b720222f35d06047b10abdeb9e5f442 100644 (file)
Binary files a/tests/examplefiles/output/unicode.applescript and b/tests/examplefiles/output/unicode.applescript differ
index c1693d91caa63e4151041a9d38e3ad15d8a51537..afae403604456e71e7c96727f91fa23bdf4263d4 100644 (file)
Binary files a/tests/examplefiles/output/unicode.go and b/tests/examplefiles/output/unicode.go differ
index 2e883e5dd709cf932b71131a649fe9f498863989..3b9aae7edfda757cfd29e9576d48a2496e2bec83 100644 (file)
Binary files a/tests/examplefiles/output/unicode.js and b/tests/examplefiles/output/unicode.js differ
index 8553f564d4b3ea4d00b89390e9b4e4aaad700523..8a58ea51ad67cd1a9f278fb46ce3a6a798d95122 100644 (file)
Binary files a/tests/examplefiles/output/unicodedoc.py and b/tests/examplefiles/output/unicodedoc.py differ
index 506c8625e1f2262cc77b65b84087ef716aba5194..bbf39c28fa6556996daad4db50a2223942974a48 100644 (file)
Binary files a/tests/examplefiles/output/unix-io.lid and b/tests/examplefiles/output/unix-io.lid differ
index 0860d1c941ac32625d351f1a079e13533ee40c8d..13a0047e5a27571a8caa964c4db2c39f474783d3 100644 (file)
Binary files a/tests/examplefiles/output/varnish.vcl and b/tests/examplefiles/output/varnish.vcl differ
index dc5dfc41698dc81cc2dfe8ba9d330e23dd4655b2..312f868c06230e2714f6d9c49ffa8db3d814d844 100644 (file)
Binary files a/tests/examplefiles/output/vbnet_test.bas and b/tests/examplefiles/output/vbnet_test.bas differ
index f94f4f8ab2861b0e8be96b2ff9c95ac9b7ee3a07..c84a29155bb7e0f7b0549630714d4d95b77bfcce 100644 (file)
Binary files a/tests/examplefiles/output/vctreestatus_hg and b/tests/examplefiles/output/vctreestatus_hg differ
index fed517f4255b7ce75c310c9882b1a72cc40501e6..9cacb7dde57bde0802ee3b96344e703a58939cd0 100644 (file)
Binary files a/tests/examplefiles/output/vimrc and b/tests/examplefiles/output/vimrc differ
index 47ab6c1433efae657b0ffae1163c15d33ed5c418..a3cc8403a9171802a3bfa0d1b5615e71638cb027 100644 (file)
Binary files a/tests/examplefiles/output/vpath.mk and b/tests/examplefiles/output/vpath.mk differ
index add5a6164dcd2898b0291c990ee7788897fa42b9..449d8846c03839e16ff1fe61ae58e94a448f5f15 100644 (file)
Binary files a/tests/examplefiles/output/wdiff_example1.wdiff and b/tests/examplefiles/output/wdiff_example1.wdiff differ
index 75d0c196c69899748db991218dd9b4e6ff553eac..5398a3840460c767019a1c0a54f0446abca871ba 100644 (file)
Binary files a/tests/examplefiles/output/wdiff_example3.wdiff and b/tests/examplefiles/output/wdiff_example3.wdiff differ
index 21b21607744bf0a7139fa07cbdfc3ea4bd09f471..4effb47ee9f8c373a5d17ffc99d08ec50833b4b9 100644 (file)
Binary files a/tests/examplefiles/output/webkit-transition.css and b/tests/examplefiles/output/webkit-transition.css differ
index 1e4d6e65e3169e350e6a66228c77579ae5ce32ef..ac212a851f90667687290fa29066b426cd077646 100644 (file)
Binary files a/tests/examplefiles/output/while.pov and b/tests/examplefiles/output/while.pov differ
index d6024c87fb1d54a1fdeb2e6611f6b168f03d5fb8..734f27212f0b91dda915809fc3366be4aa10edf7 100644 (file)
Binary files a/tests/examplefiles/output/wiki.factor and b/tests/examplefiles/output/wiki.factor differ
index e54407e918fb2ac8e79f05f71745aaf2ea8803be..eea90218a51644fa7745e7ebd89779f9d983dfc8 100644 (file)
Binary files a/tests/examplefiles/output/xml_example and b/tests/examplefiles/output/xml_example differ
index a76bfdc1b393fcafad17352b1d7695bd87749b1b..012eeb20d12f658ccfcf6722420d8ec4107cd305 100644 (file)
Binary files a/tests/examplefiles/output/xorg.conf and b/tests/examplefiles/output/xorg.conf differ
index 99d5b290a3c1fc87da17aeb130e43a267bf1a291..66a32994b1fc88e8ad87b23376046c66eaec65b4 100644 (file)
Binary files a/tests/examplefiles/output/yahalom.cpsa and b/tests/examplefiles/output/yahalom.cpsa differ
index 2b3adf966e36607b79a1357652725b2426d9a6ec..1f621831baadc5b82ade04c12d85e8b84f95258b 100644 (file)
Binary files a/tests/examplefiles/output/zmlrpc.f90 and b/tests/examplefiles/output/zmlrpc.f90 differ
diff --git a/tests/examplefiles/scdoc_manual.scd b/tests/examplefiles/scdoc_manual.scd
new file mode 100644 (file)
index 0000000..65a2b36
--- /dev/null
@@ -0,0 +1,197 @@
+scdoc(5)
+
+# NAME
+
+scdoc - document format for writing manual pages
+
+# SYNTAX
+
+Input files must use the UTF-8 encoding.
+
+## PREAMBLE
+
+Each scdoc file must begin with the following preamble:
+
+       *name*(_section_) ["left\_footer" ["center\_header"]]
+
+*name* is the name of the man page you are writing, and _section_ is the section
+you're writing for (see *man*(1) for information on manual sections).
+
+_left\_footer_ and _center\_header_ are optional arguments which set the text
+positioned at those locations in the generated man page, and *must* be
+surrounded with double quotes.
+
+## SECTION HEADERS
+
+Each section of your man page should begin with something similar to the
+following:
+
+       # HEADER NAME
+
+Subsection headers are also understood - use two hashes. Each header must have
+an empty line on either side.
+
+## PARAGRAPHS
+
+Begin a new paragraph with an empty line.
+
+## LINE BREAKS
+
+Insert a line break by ending a line with \+\+.
+
+The result looks++
+like this.
+
+## FORMATTING
+
+Text can be made *bold* or _underlined_ with asterisks and underscores: \*bold\*
+or \_underlined\_. Underscores in the_middle_of_words will be disregarded.
+
+## INDENTATION
+
+You may indent lines with tab characters (*\\t*) to indent them by 4 spaces in
+the output. Indented lines may not contain headers.
+
+       The result looks something like this.
+
+       You may use multiple lines and most _formatting_.
+
+Deindent to return to normal, or indent again to increase your indentation
+depth.
+
+## LISTS
+
+You may start bulleted lists with dashes (-), like so:
+
+```
+- Item 1
+- Item 2
+       - Subitem 1
+       - Subitem 2
+- Item 3
+```
+
+The result looks like this:
+
+- Item 1
+- Item 2
+       - Subitem 1
+       - Subitem 2
+- Item 3
+
+You may also extend long entries onto another line by giving it the same indent
+level, plus two spaces. They will be rendered as a single list entry.
+
+```
+- Item 1 is pretty long so let's
+  break it up onto two lines
+- Item 2 is shorter
+       - But its children can go on
+         for a while
+```
+
+- Item 1 is pretty long so let's
+  break it up onto two lines
+- Item 2 is shorter
+       - But its children can go on
+         for a while
+
+## NUMBERED LISTS
+
+Numbered lists are similar to normal lists, but begin with periods (.) instead
+of dashes (-), like so:
+
+```
+. Item 1
+. Item 2
+. Item 3,
+  with multiple lines
+```
+
+. Item 1
+. Item 2
+. Item 3,
+  with multiple lines
+
+## TABLES
+
+To begin a table, add an empty line followed by any number of rows.
+
+Each line of a table should start with | or : to start a new row or column
+respectively (or space to continue the previous cell on multiple lines),
+followed by [ or - or ] to align the contents to the left, center, or right,
+followed by a space and the contents of that cell.  You may use a space instead
+of an alignment specifier to inherit the alignment of the same column in the
+previous row.
+
+The first character of the first row is not limited to | and has special
+meaning. [ will produce a table with borders around each cell. | will produce a
+table with no borders. ] will produce a table with one border around the whole
+table.
+
+To conclude your table, add an empty line after the last row.
+
+```
+[[ *Foo*
+:- _Bar_
+:-
+|  *Row 1*
+:  Hello
+:] world!
+|  *Row 2*
+:  こんにちは
+:  世界
+   !
+```
+
+[[ *Foo*
+:- _Bar_
+:-
+|  *Row 1*
+:  Hello
+:] world!
+|  *Row 2*
+:  こんにちは
+:  世界
+   !
+
+## LITERAL TEXT
+
+You may turn off scdoc formatting and output literal text with escape codes and
+literal blocks. Inserting a \\ into your source will cause the subsequent symbol
+to be treated as a literal and copied directly to the output. You may also make
+blocks of literal syntax like so:
+
+```
+\```
+_This formatting_ will *not* be interpreted by scdoc.
+\```
+```
+
+These blocks will be indented one level. Note that literal text is shown
+literally in the man viewer - that is, it's not a means for inserting your own
+roff macros into the output. Note that \\ is still interpreted within literal
+blocks, which for example can be useful to output \``` inside of a literal
+block.
+
+## COMMENTS
+
+Lines beginning with ; and a space are ignored.
+
+```
+; This is a comment
+```
+
+# CONVENTIONS
+
+By convention, all scdoc documents should be hard wrapped at 80 columns.
+
+# SEE ALSO
+
+*scdoc*(1)
+
+# AUTHORS
+
+Maintained by Drew DeVault <sir@cmpwn.com>. Up-to-date sources can be found at
+https://git.sr.ht/~sircmpwn/scdoc and bugs/patches can be submitted by email to
+~sircmpwn/public-inbox@lists.sr.ht.
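
The scdoc example file above documents the format itself; as a quick, hedged
illustration, the snippet below shows how such a file could be pushed through
Pygments' standard highlight API. The "scdoc" lexer alias is an assumption
inferred from the new example file, not something this diff states.

    # Minimal sketch: highlight the scdoc example with Pygments' public API.
    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    with open("tests/examplefiles/scdoc_manual.scd") as f:
        code = f.read()

    lexer = get_lexer_by_name("scdoc")  # alias assumed, see note above
    # highlight() runs the lexer over the text and renders it with the formatter.
    print(highlight(code, lexer, TerminalFormatter()))
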
index d113303e4865f2cd2c591f4f1072e90e360d7a16..1e9eaa8df3be50efc01dd7ca98b30cf10ae1c203 100644 (file)
@@ -56,6 +56,7 @@ lua_exec {{
 
 #include/**/"file.udo"
 #include/**/|file.udo|
+#includestr/**/"$MACRO..udo"
 
 #ifdef MACRO
 #else
index d997c1b3874ad65582e779fa5a2d6ba4d73138e6..cffcfded37c9ec6ac9ccfbe2ea6a5245757f6159 100644 (file)
@@ -20,3 +20,4 @@ n label
   }
 }
 #include "score.sco"
+#includestr/**/"$MACRO..sco"
diff --git a/tests/examplefiles/test.sol b/tests/examplefiles/test.sol
new file mode 100644 (file)
index 0000000..f7a6495
--- /dev/null
@@ -0,0 +1,74 @@
+pragma solidity ^0.4.20;
+
+pragma solidity >=0.4.0 <0.7.0;
+
+// one-line singleline comment
+
+/* one-line multiline comment */
+
+/*
+  multi-line multiline comment
+*/
+
+contract ContractName {
+
+    address public publicaddress;
+
+    uint varname1 = 1234;
+    int varname2 = 0x12abcdEF;
+
+    string astringsingle = 'test "string" value\' single';
+    string astringdouble = "test 'string' value\" double";
+
+    enum State {
+      NotStarted,
+      WorkInProgress,
+      Done
+    }
+    State public state;
+
+    struct AStruct {
+        string name;
+        uint8 type;
+    }
+
+    mapping(address => AStruct) registry;
+
+    event Paid(uint256 value);
+    event Received(uint256 time);
+    event Withdraw(uint256 value);
+
+    function addRegistry(string _name, uint8 _type) {
+        AStruct memory newItem = AStruct({
+            name: _name,
+            type: _type
+        });
+
+        registry[msg.sender] = newItem;
+    }
+
+    function getHash(AStruct item) returns(uint) {
+        return uint(keccak256(item.name, item.type));
+    }
+
+    function pay() public payable {
+      require(msg.sender == astronaut);
+      state = State.Paid;
+      Paid(msg.value);
+    }
+
+    function receive() public {
+      require(msg.sender == arbiter);
+      require(state == State.Paid);
+      state = State.Received;
+      Received(now);
+    }
+
+    function withdraw() public {
+      require(msg.sender == shipper);
+      require(state == State.Received);
+      state = State.Withdrawn;
+      Withdraw(this.balance);
+      shipper.transfer(this.balance);
+    }
+}
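
The Solidity file above is a lexer test fixture rather than a compilable
contract (identifiers such as astronaut, arbiter and shipper are never
declared). A hedged sketch of feeding it to Pygments, letting the library
guess the lexer from the .sol file name and the source text:

    # Minimal sketch: guess a lexer for the fixture and emit HTML markup.
    from pygments import highlight
    from pygments.lexers import guess_lexer_for_filename
    from pygments.formatters import HtmlFormatter

    with open("tests/examplefiles/test.sol") as f:
        code = f.read()

    # guess_lexer_for_filename() combines filename patterns with content
    # analysis; the .sol mapping to a Solidity lexer is assumed here.
    lexer = guess_lexer_for_filename("test.sol", code)
    html = highlight(code, lexer, HtmlFormatter(linenos=True))
    print(html[:200])  # peek at the start of the generated markup
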
diff --git a/tests/examplefiles/test.zeek b/tests/examplefiles/test.zeek
new file mode 100644 (file)
index 0000000..ee6bad8
--- /dev/null
@@ -0,0 +1,181 @@
+# An example of the Zeek scripting language.
+
+##! A Zeekygen-style summary comment.
+
+# TODO: just an example of a todo-indicator
+
+@load base/frameworks/notice
+
+@if ( F )
+@endif
+
+module Example;
+
+export {
+
+  type mycount: count;
+
+  type SimpleEnum: enum { ONE, TWO, THREE };
+
+  redef enum SimpleEnum += {
+
+    ## A Zeekygen-style comment.
+    FOUR,
+    FIVE, ##< A Zeekygen-style comment.
+  };
+
+  type SimpleRecord: record {
+    field1: count;
+    field2: bool;
+  } &redef;
+
+  redef record SimpleRecord += {
+
+    field3: string &optional;
+
+    field4: string &default="blah";
+  };
+
+  const init_option: bool = T;
+
+  option runtime_option: bool = F;
+
+  global test_opaque: opaque of md5;
+
+  global test_vector: vector of count;
+
+  global myfunction: function(msg: string, c: count &default=0): count;
+
+  global myhook: hook(tag: string);
+
+  global myevent: event(tag: string);
+}
+
+function myfunction(msg: string, c: count): count
+  {
+  print "in myfunction", msg, c;
+  return 0;
+  }
+
+event myevent(msg: string) &priority=1
+  {
+  print "in myevent";
+  }
+
+hook myhook(msg: string)
+  {
+  print "in myevent";
+  }
+
+event zeek_init()
+  {
+  local b = T;
+  local s = "\xff\xaf\"and more after the escaped quote";
+  local p = /foo|bar\xbe\/and more after the escaped slash/;
+  local c = 10;
+
+  local sr = SimpleRecord($field1 = 0, $field2 = T, $field3 = "hi");
+
+  print sr?$field3, sr$field1;
+
+  local myset: set[string] = set("one", "two", "three");
+
+  add myset["four"];
+  delete myset["one"];
+
+  for ( ms in myset )
+    {
+    print ms is string, s as string;
+
+    print s[1:3];
+
+    local tern: count = s == "two" ? 2 : 0;
+
+    if ( s !in myset )
+       print fmt("error %4.2f: %s", 3.14159, "wtf?");
+    }
+
+  switch ( c ) {
+  case 1:
+    break;
+  case 2:
+    fallthrough;
+  default:
+    break;
+  }
+
+  if ( ! b )
+    print "here";
+  else
+    print "there";
+
+  while ( c != 0 )
+    {
+    if ( c >= 5 )
+      c += 0;
+    else if ( c == 8 )
+      c -= 0;
+
+    c = c / 1;
+    c = c / 1;
+    c = c - 1;
+    }
+
+  print |myset|;
+  print ~5;
+  print 1 & 0xff;
+  print 2 ^ 5;
+
+  myfunction ("hello function");
+  hook myhook("hell hook");
+  event myevent("hello event");
+  schedule 1sec { myevent("hello scheduled event") };
+
+  print 0, 7;
+  print 0xff, 0xdeadbeef;
+
+  print 3.14159;
+  print 1234.0;
+  print 1234e0;
+  print .003E-23;
+  print .003E+23;
+
+  print 123/udp;
+  print 8000/tcp;
+  print 13/icmp;
+  print 42/unknown;
+
+  print google.com;
+  print 192.168.50.1;
+  print 255.255.255.255;
+  print 0.0.0.0;
+
+  print 10.0.0.0/16;
+
+  print [2001:0db8:85a3:0000:0000:8a2e:0370:7334];
+  # test for case insensitivity
+  print [2001:0DB8:85A3:0000:0000:8A2E:0370:7334];
+  # any case mixture is allowed
+  print [2001:0dB8:85a3:0000:0000:8A2E:0370:7334];
+  # leading zeroes of a 16-bit group may be omitted
+  print [2001:db8:85a3:0:0:8a2e:370:7334];
+  # a single occurrence of consecutive groups of zeroes may be replaced by ::
+  print [2001:db8:85a3::8a2e:370:7334];
+  # all zeroes should work
+  print [0:0:0:0:0:0:0:0];
+  # all zeroes condensed should work
+  print [::];
+  # hybrid ipv6-ipv4 address should work
+  print [2001:db8:0:0:0:FFFF:192.168.0.5];
+  # hybrid ipv6-ipv4 address with zero omission should work
+  print [2001:db8::FFFF:192.168.0.5];
+
+  print [2001:0db8:85a3:0000:0000:8a2e:0370:7334]/64;
+
+  print 1day, 1days, 1.0day, 1.0days;
+  print 1hr, 1hrs, 1.0hr, 1.0hrs;
+  print 1min, 1mins, 1.0min, 1.0mins;
+  print 1sec, 1secs, 1.0sec, 1.0secs;
+  print 1msec, 1msecs, 1.0msec, 1.0msecs;
+  print 1usec, 1usecs, 1.0usec, 1.0usecs;
+  }
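
Note: the script above is test data for the Zeek lexer; the round-trip property that test_basic_api.py checks for every lexer (further down in this patch) can be sketched for a single example file roughly as follows. This is not part of the patch, and it assumes the Zeek lexer registers the *.zeek filename pattern in this release.

    # Round-trip sketch for one example file (assumption: *.zeek is a
    # registered filename pattern for the Zeek lexer in this release).
    import io

    from pygments.lexers import get_lexer_for_filename

    with io.open('tests/examplefiles/test.zeek', encoding='utf-8') as f:
        source = f.read()

    lexer = get_lexer_for_filename('test.zeek')
    text = ''.join(value for _, value in lexer.get_tokens(source))
    # For this file (no leading/trailing blank lines) the concatenated
    # token values reproduce the input text.
    assert text == source
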
diff --git a/tests/run.py b/tests/run.py
deleted file mode 100644 (file)
index edebc7a..0000000
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    Pygments unit tests
-    ~~~~~~~~~~~~~~~~~~
-
-    Usage::
-
-        python run.py [testfile ...]
-
-
-    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from __future__ import print_function
-
-import os
-import sys
-import warnings
-
-# only find tests in this directory
-if os.path.dirname(__file__):
-    os.chdir(os.path.dirname(__file__))
-
-# make FutureWarnings (coming from Regex syntax most likely) and
-# DeprecationWarnings due to non-raw strings an error
-warnings.filterwarnings("error", module=r"pygments\..*",
-                        category=FutureWarning)
-warnings.filterwarnings("error", module=r".*pygments.*",
-                        category=DeprecationWarning)
-
-
-try:
-    import nose
-except ImportError:
-    print('nose is required to run the Pygments test suite')
-    sys.exit(1)
-
-# make sure the current source is first on sys.path
-sys.path.insert(0, '..')
-
-if '--with-coverage' not in sys.argv:
-    # if running with coverage, pygments should not be imported before coverage
-    # is started, otherwise it will count already executed lines as uncovered
-    try:
-        import pygments
-    except ImportError as err:
-        print('Cannot find Pygments to test: %s' % err)
-        sys.exit(1)
-    else:
-        print('Pygments %s test suite running (Python %s)...' %
-              (pygments.__version__, sys.version.split()[0]),
-              file=sys.stderr)
-else:
-    print('Pygments test suite running (Python %s)...' % sys.version.split()[0],
-          file=sys.stderr)
-
-nose.main()
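
Note: the nose-based runner above is removed in favour of pytest. The patch does not add a replacement script; a minimal programmatic stand-in, using pytest's real pytest.main() entry point with an illustrative argument list, would be roughly:

    # Rough stand-in for the removed runner (sketch, not part of the patch).
    import sys

    import pytest

    if __name__ == '__main__':
        # Forward any test file arguments, defaulting to the tests directory.
        sys.exit(pytest.main(sys.argv[1:] or ['tests']))
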
diff --git a/tests/string_asserts.py b/tests/string_asserts.py
deleted file mode 100644 (file)
index a02c52b..0000000
+++ /dev/null
@@ -1,22 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    Pygments string assert utility
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-class StringTests(object):
-
-    def assertStartsWith(self, haystack, needle, msg=None):
-        if msg is None:
-            msg = "'{0}' does not start with '{1}'".format(haystack, needle)
-        if not haystack.startswith(needle):
-            raise(AssertionError(msg))
-
-    def assertEndsWith(self, haystack, needle, msg=None):
-        if msg is None:
-            msg = "'{0}' does not end with '{1}'".format(haystack, needle)
-        if not haystack.endswith(needle):
-            raise(AssertionError(msg))
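
Note: the removed mixin only wrapped two string checks; under pytest its callers reduce to plain asserts, roughly (sample values are illustrative):

    # What the removed helpers become under pytest (sketch).
    haystack, needle = 'Pygments test suite', 'Pygments'
    assert haystack.startswith(needle), \
        '%r does not start with %r' % (haystack, needle)
    assert haystack.endswith('suite'), \
        '%r does not end with %r' % (haystack, 'suite')
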
diff --git a/tests/support.py b/tests/support.py
deleted file mode 100644 (file)
index c66ac66..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-# coding: utf-8
-"""
-Support for Pygments tests
-"""
-
-import os
-
-from nose import SkipTest
-
-
-def location(mod_name):
-    """
-    Return the file and directory that the code for *mod_name* is in.
-    """
-    source = mod_name.endswith("pyc") and mod_name[:-1] or mod_name
-    source = os.path.abspath(source)
-    return source, os.path.dirname(source)
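
Note: the removed location() helper is replaced in the converted tests by plain module-level path constants; the pattern, as used later in this patch by test_basic_api.py and test_cmdline.py, is:

    # Path constants replacing support.location(__file__) in the new tests.
    from os import path

    TESTDIR = path.dirname(path.abspath(__file__))
    TESTFILE = path.join(TESTDIR, 'test_basic_api.py')
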
diff --git a/tests/test_apache_conf.py b/tests/test_apache_conf.py
new file mode 100644 (file)
index 0000000..1e906c1
--- /dev/null
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+"""
+    Basic Apache Configuration Test
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import textwrap
+
+import pytest
+
+from pygments.token import Text, Number, Token
+from pygments.lexers import configs
+
+
+@pytest.fixture(scope='module')
+def lexer():
+    yield configs.ApacheConfLexer()
+
+
+def test_multiline_comment(lexer):
+    fragment = '#SecAction \\\n  "id:\'900004\', \\\n  phase:1, \\\n  t:none, \\\n  setvar:tx.anomaly_score_blocking=on, \\\n  nolog, \\\n  pass"\n  \n'
+    tokens = [
+        (Token.Comment, '#SecAction \\\n  "id:\'900004\', \\\n  phase:1, \\\n  t:none, \\\n  setvar:tx.anomaly_score_blocking=on, \\\n  nolog, \\\n  pass"'),
+        (Token.Text, '\n  \n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+def test_multiline_argument(lexer):
+    fragment = 'SecAction \\\n  "id:\'900001\', \\\n  phase:1, \\\n  t:none, \\\n  setvar:tx.critical_anomaly_score=5, \\\n  setvar:tx.error_anomaly_score=4, \\\n  setvar:tx.warning_anomaly_score=3, \\\n  setvar:tx.notice_anomaly_score=2, \\\n  nolog, \\\n  pass"\n'
+    tokens = [
+        (Token.Name.Builtin, 'SecAction'),
+        (Token.Text, ' '),
+        (Token.Text, '\\\n'),
+        (Token.Text, '  '),
+        (Token.Literal.String.Double, '"id:\'900001\', \\\n  phase:1, \\\n  t:none, \\\n  setvar:tx.critical_anomaly_score=5, \\\n  setvar:tx.error_anomaly_score=4, \\\n  setvar:tx.warning_anomaly_score=3, \\\n  setvar:tx.notice_anomaly_score=2, \\\n  nolog, \\\n  pass"'),
+        (Token.Text, ''),
+        (Token.Text, '\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
\ No newline at end of file
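
Note: this new module shows the conversion pattern applied to the rest of the suite: the unittest setUp method becomes a module-scoped pytest fixture that yields one shared lexer instance, and each test takes it as an argument. A condensed sketch of the recipe (the test body here is illustrative only):

    # setUp(self): self.lexer = ApacheConfLexer()  becomes:
    import pytest

    from pygments.lexers.configs import ApacheConfLexer

    @pytest.fixture(scope='module')
    def lexer():
        # One instance is created per module and shared by all its tests.
        yield ApacheConfLexer()

    def test_lexer_fixture(lexer):
        assert isinstance(lexer, ApacheConfLexer)
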
index 03d10cd255cac20e16711e5f089105d2a948a723..e2255f5bd491bab68dcdae34eb0c43341ce955a4 100644 (file)
@@ -3,72 +3,71 @@
     Pygments Basic lexers tests
     ~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-    :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
-import unittest
+
+import pytest
 
 from pygments.lexers.basic import VBScriptLexer
 from pygments.token import Error, Name, Number, Punctuation, String, Whitespace
 
 
-class VBScriptLexerTest(unittest.TestCase):
-
-    def setUp(self):
-        self.lexer = VBScriptLexer()
-
-    def _assert_are_tokens_of_type(self, examples, expected_token_type):
-        for test_number, example in enumerate(examples.split(), 1):
-            token_count = 0
-            for token_type, token_value in self.lexer.get_tokens(example):
-                if token_type != Whitespace:
-                    token_count += 1
-                    self.assertEqual(
-                        token_type, expected_token_type,
-                        'token_type #%d for %s is be %s but must be %s' %
-                        (test_number, token_value, token_type, expected_token_type))
-            self.assertEqual(
-                token_count, 1,
-                '%s must yield exactly 1 token instead of %d' %
-                (example, token_count))
-
-    def _assert_tokens_match(self, text, expected_tokens_without_trailing_newline):
-        actual_tokens = tuple(self.lexer.get_tokens(text))
-        if (len(actual_tokens) >= 1) and (actual_tokens[-1] == (Whitespace, '\n')):
-            actual_tokens = tuple(actual_tokens[:-1])
-        self.assertEqual(
-            expected_tokens_without_trailing_newline, actual_tokens,
-            'text must yield expected tokens: %s' % text)
-
-    def test_can_lex_float(self):
-        self._assert_are_tokens_of_type(
-            '1. 1.e1 .1 1.2 1.2e3 1.2e+3 1.2e-3 1e2', Number.Float)
-        self._assert_tokens_match(
-            '1e2.1e2',
-            ((Number.Float, '1e2'), (Number.Float, '.1e2'))
-        )
-
-    def test_can_reject_almost_float(self):
-        self._assert_tokens_match(
-            '.e1',
-            ((Punctuation, '.'), (Name, 'e1')))
-
-    def test_can_lex_integer(self):
-        self._assert_are_tokens_of_type(
-            '1 23 456', Number.Integer)
-
-    def test_can_lex_names(self):
-        self._assert_are_tokens_of_type(
-            u'thingy thingy123 _thingy _123', Name)
-
-    def test_can_recover_after_unterminated_string(self):
-        self._assert_tokens_match(
-            '"x\nx',
-            ((String.Double, '"'), (String.Double, 'x'), (Error, '\n'), (Name, 'x'))
-        )
-
-    def test_can_recover_from_invalid_character(self):
-        self._assert_tokens_match(
-            'a;bc\nd',
-            ((Name, 'a'), (Error, ';bc\n'), (Name, 'd'))
-        )
+@pytest.fixture(scope='module')
+def lexer():
+    yield VBScriptLexer()
+
+
+def assert_are_tokens_of_type(lexer, examples, expected_token_type):
+    for test_number, example in enumerate(examples.split(), 1):
+        token_count = 0
+        for token_type, token_value in lexer.get_tokens(example):
+            if token_type != Whitespace:
+                token_count += 1
+                assert token_type == expected_token_type, \
+                    'token_type #%d for %s is %s but must be %s' % \
+                    (test_number, token_value, token_type, expected_token_type)
+        assert token_count == 1, \
+            '%s must yield exactly 1 token instead of %d' % (example, token_count)
+
+
+def assert_tokens_match(lexer, text, expected_tokens_without_trailing_newline):
+    actual_tokens = tuple(lexer.get_tokens(text))
+    if (len(actual_tokens) >= 1) and (actual_tokens[-1] == (Whitespace, '\n')):
+        actual_tokens = tuple(actual_tokens[:-1])
+    assert expected_tokens_without_trailing_newline == actual_tokens, \
+        'text must yield expected tokens: %s' % text
+
+
+def test_can_lex_float(lexer):
+    assert_are_tokens_of_type(lexer,
+                              '1. 1.e1 .1 1.2 1.2e3 1.2e+3 1.2e-3 1e2',
+                              Number.Float)
+    assert_tokens_match(lexer,
+                        '1e2.1e2',
+                        ((Number.Float, '1e2'), (Number.Float, '.1e2')))
+
+
+def test_can_reject_almost_float(lexer):
+    assert_tokens_match(lexer, '.e1', ((Punctuation, '.'), (Name, 'e1')))
+
+
+def test_can_lex_integer(lexer):
+    assert_are_tokens_of_type(lexer, '1 23 456', Number.Integer)
+
+
+def test_can_lex_names(lexer):
+    assert_are_tokens_of_type(lexer, u'thingy thingy123 _thingy _123', Name)
+
+
+def test_can_recover_after_unterminated_string(lexer):
+    assert_tokens_match(lexer,
+                        '"x\nx',
+                        ((String.Double, '"'), (String.Double, 'x'),
+                         (Error, '\n'), (Name, 'x')))
+
+
+def test_can_recover_from_invalid_character(lexer):
+    assert_tokens_match(lexer,
+                        'a;bc\nd',
+                        ((Name, 'a'), (Error, ';bc\n'), (Name, 'd')))
index b1b69267647bb4355e680d37005dba09d488f0bb..056f106a12ba319c9d0387a9773ff7582916ad98 100644 (file)
@@ -10,7 +10,9 @@
 from __future__ import print_function
 
 import random
-import unittest
+from os import path
+
+import pytest
 
 from pygments import lexers, formatters, lex, format
 from pygments.token import _TokenType, Text
@@ -18,116 +20,105 @@ from pygments.lexer import RegexLexer
 from pygments.formatters.img import FontNotFound
 from pygments.util import text_type, StringIO, BytesIO, xrange, ClassNotFound
 
-import support
-
-TESTFILE, TESTDIR = support.location(__file__)
+TESTDIR = path.dirname(path.abspath(__file__))
+TESTFILE = path.join(TESTDIR, 'test_basic_api.py')
 
 test_content = [chr(i) for i in xrange(33, 128)] * 5
 random.shuffle(test_content)
 test_content = ''.join(test_content) + '\n'
 
 
-def test_lexer_instantiate_all():
+@pytest.mark.parametrize('name', lexers.LEXERS)
+def test_lexer_instantiate_all(name):
     # instantiate every lexer, to see if the token type defs are correct
-    def verify(name):
-        getattr(lexers, name)
-    for x in lexers.LEXERS:
-        yield verify, x
+    getattr(lexers, name)
 
 
-def test_lexer_classes():
+@pytest.mark.parametrize('cls', lexers._iter_lexerclasses(plugins=False))
+def test_lexer_classes(cls):
     # test that every lexer class has the correct public API
-    def verify(cls):
-        assert type(cls.name) is str
-        for attr in 'aliases', 'filenames', 'alias_filenames', 'mimetypes':
-            assert hasattr(cls, attr)
-            assert type(getattr(cls, attr)) is list, \
-                "%s: %s attribute wrong" % (cls, attr)
-        result = cls.analyse_text("abc")
-        assert isinstance(result, float) and 0.0 <= result <= 1.0
-        result = cls.analyse_text(".abc")
-        assert isinstance(result, float) and 0.0 <= result <= 1.0
-
-        assert all(al.lower() == al for al in cls.aliases)
-
-        inst = cls(opt1="val1", opt2="val2")
-        if issubclass(cls, RegexLexer):
-            if not hasattr(cls, '_tokens'):
-                # if there's no "_tokens", the lexer has to be one with
-                # multiple tokendef variants
-                assert cls.token_variants
-                for variant in cls.tokens:
-                    assert 'root' in cls.tokens[variant]
-            else:
-                assert 'root' in cls._tokens, \
-                       '%s has no root state' % cls
-
-        if cls.name in ['XQuery', 'Opa']:   # XXX temporary
-            return
-
-        try:
-            tokens = list(inst.get_tokens(test_content))
-        except KeyboardInterrupt:
-            raise KeyboardInterrupt(
-                'interrupted %s.get_tokens(): test_content=%r' %
-                (cls.__name__, test_content))
-        txt = ""
-        for token in tokens:
-            assert isinstance(token, tuple)
-            assert isinstance(token[0], _TokenType)
-            assert isinstance(token[1], text_type)
-            txt += token[1]
-        assert txt == test_content, "%s lexer roundtrip failed: %r != %r" % \
-            (cls.name, test_content, txt)
-
-    for lexer in lexers._iter_lexerclasses(plugins=False):
-        yield verify, lexer
-
-
-def test_lexer_options():
+    assert type(cls.name) is str
+    for attr in 'aliases', 'filenames', 'alias_filenames', 'mimetypes':
+        assert hasattr(cls, attr)
+        assert type(getattr(cls, attr)) is list, \
+            "%s: %s attribute wrong" % (cls, attr)
+    result = cls.analyse_text("abc")
+    assert isinstance(result, float) and 0.0 <= result <= 1.0
+    result = cls.analyse_text(".abc")
+    assert isinstance(result, float) and 0.0 <= result <= 1.0
+
+    assert all(al.lower() == al for al in cls.aliases)
+
+    inst = cls(opt1="val1", opt2="val2")
+    if issubclass(cls, RegexLexer):
+        if not hasattr(cls, '_tokens'):
+            # if there's no "_tokens", the lexer has to be one with
+            # multiple tokendef variants
+            assert cls.token_variants
+            for variant in cls.tokens:
+                assert 'root' in cls.tokens[variant]
+        else:
+            assert 'root' in cls._tokens, \
+                   '%s has no root state' % cls
+
+    if cls.name in ['XQuery', 'Opa']:   # XXX temporary
+        return
+
+    try:
+        tokens = list(inst.get_tokens(test_content))
+    except KeyboardInterrupt:
+        raise KeyboardInterrupt(
+            'interrupted %s.get_tokens(): test_content=%r' %
+            (cls.__name__, test_content))
+    txt = ""
+    for token in tokens:
+        assert isinstance(token, tuple)
+        assert isinstance(token[0], _TokenType)
+        assert isinstance(token[1], text_type)
+        txt += token[1]
+    assert txt == test_content, "%s lexer roundtrip failed: %r != %r" % \
+        (cls.name, test_content, txt)
+
+
+@pytest.mark.parametrize('cls', lexers._iter_lexerclasses(plugins=False))
+def test_lexer_options(cls):
+    if cls.__name__ == 'RawTokenLexer':
+        # this one is special
+        return
+
     # test that the basic options work
     def ensure(tokens, output):
         concatenated = ''.join(token[1] for token in tokens)
         assert concatenated == output, \
-            '%s: %r != %r' % (lexer, concatenated, output)
-
-    def verify(cls):
-        inst = cls(stripnl=False)
-        ensure(inst.get_tokens('a\nb'), 'a\nb\n')
-        ensure(inst.get_tokens('\n\n\n'), '\n\n\n')
-        inst = cls(stripall=True)
-        ensure(inst.get_tokens('   \n  b\n\n\n'), 'b\n')
-        # some lexers require full lines in input
-        if ('ConsoleLexer' not in cls.__name__ and
-            'SessionLexer' not in cls.__name__ and
-            not cls.__name__.startswith('Literate') and
-            cls.__name__ not in ('ErlangShellLexer', 'RobotFrameworkLexer')):
-            inst = cls(ensurenl=False)
-            ensure(inst.get_tokens('a\nb'), 'a\nb')
-            inst = cls(ensurenl=False, stripall=True)
-            ensure(inst.get_tokens('a\nb\n\n'), 'a\nb')
-
-    for lexer in lexers._iter_lexerclasses(plugins=False):
-        if lexer.__name__ == 'RawTokenLexer':
-            # this one is special
-            continue
-        yield verify, lexer
+            '%s: %r != %r' % (cls, concatenated, output)
+
+    inst = cls(stripnl=False)
+    ensure(inst.get_tokens('a\nb'), 'a\nb\n')
+    ensure(inst.get_tokens('\n\n\n'), '\n\n\n')
+    inst = cls(stripall=True)
+    ensure(inst.get_tokens('   \n  b\n\n\n'), 'b\n')
+    # some lexers require full lines in input
+    if ('ConsoleLexer' not in cls.__name__ and
+        'SessionLexer' not in cls.__name__ and
+        not cls.__name__.startswith('Literate') and
+        cls.__name__ not in ('ErlangShellLexer', 'RobotFrameworkLexer')):
+        inst = cls(ensurenl=False)
+        ensure(inst.get_tokens('a\nb'), 'a\nb')
+        inst = cls(ensurenl=False, stripall=True)
+        ensure(inst.get_tokens('a\nb\n\n'), 'a\nb')
 
 
 def test_get_lexers():
     # test that the lexers functions work
-    def verify(func, args):
-        x = func(opt='val', *args)
-        assert isinstance(x, lexers.PythonLexer)
-        assert x.options["opt"] == "val"
-
     for func, args in [(lexers.get_lexer_by_name, ("python",)),
                        (lexers.get_lexer_for_filename, ("test.py",)),
                        (lexers.get_lexer_for_mimetype, ("text/x-python",)),
-                       (lexers.guess_lexer, ("#!/usr/bin/python -O\nprint",)),
+                       (lexers.guess_lexer, ("#!/usr/bin/python3 -O\nprint",)),
                        (lexers.guess_lexer_for_filename, ("a.py", "<%= @foo %>"))
                        ]:
-        yield verify, func, args
+        x = func(opt='val', *args)
+        assert isinstance(x, lexers.PythonLexer)
+        assert x.options["opt"] == "val"
 
     for cls, (_, lname, aliases, _, mimetypes) in lexers.LEXERS.items():
         assert cls == lexers.find_lexer_class(lname).__name__
@@ -146,38 +137,35 @@ def test_get_lexers():
         raise Exception
 
 
-def test_formatter_public_api():
+@pytest.mark.parametrize('cls', [getattr(formatters, name)
+                                 for name in formatters.FORMATTERS])
+def test_formatter_public_api(cls):
     # test that every formatter class has the correct public API
     ts = list(lexers.PythonLexer().get_tokens("def f(): pass"))
     string_out = StringIO()
     bytes_out = BytesIO()
 
-    def verify(formatter):
-        info = formatters.FORMATTERS[formatter.__name__]
-        assert len(info) == 5
-        assert info[1], "missing formatter name"
-        assert info[2], "missing formatter aliases"
-        assert info[4], "missing formatter docstring"
-
-        try:
-            inst = formatter(opt1="val1")
-        except (ImportError, FontNotFound) as e:
-            raise support.SkipTest(e)
-
-        try:
-            inst.get_style_defs()
-        except NotImplementedError:
-            # may be raised by formatters for which it doesn't make sense
-            pass
-
-        if formatter.unicodeoutput:
-            inst.format(ts, string_out)
-        else:
-            inst.format(ts, bytes_out)
+    info = formatters.FORMATTERS[cls.__name__]
+    assert len(info) == 5
+    assert info[1], "missing formatter name"
+    assert info[2], "missing formatter aliases"
+    assert info[4], "missing formatter docstring"
+
+    try:
+        inst = cls(opt1="val1")
+    except (ImportError, FontNotFound) as e:
+        pytest.skip(str(e))
+
+    try:
+        inst.get_style_defs()
+    except NotImplementedError:
+        # may be raised by formatters for which it doesn't make sense
+        pass
 
-    for name in formatters.FORMATTERS:
-        formatter = getattr(formatters, name)
-        yield verify, formatter
+    if cls.unicodeoutput:
+        inst.format(ts, string_out)
+    else:
+        inst.format(ts, bytes_out)
 
 
 def test_formatter_encodings():
@@ -201,37 +189,33 @@ def test_formatter_encodings():
     assert u"ä".encode("utf8") in format(tokens, fmt)
 
 
-def test_formatter_unicode_handling():
+@pytest.mark.parametrize('cls', [getattr(formatters, name)
+                                 for name in formatters.FORMATTERS])
+def test_formatter_unicode_handling(cls):
     # test that the formatter supports encoding and Unicode
     tokens = list(lexers.PythonLexer(encoding='utf-8').
                   get_tokens("def f(): 'ä'"))
 
-    def verify(formatter):
-        try:
-            inst = formatter(encoding=None)
-        except (ImportError, FontNotFound) as e:
-            # some dependency or font not installed
-            raise support.SkipTest(e)
-
-        if formatter.name != 'Raw tokens':
-            out = format(tokens, inst)
-            if formatter.unicodeoutput:
-                assert type(out) is text_type, '%s: %r' % (formatter, out)
-
-            inst = formatter(encoding='utf-8')
-            out = format(tokens, inst)
-            assert type(out) is bytes, '%s: %r' % (formatter, out)
-            # Cannot test for encoding, since formatters may have to escape
-            # non-ASCII characters.
-        else:
-            inst = formatter()
-            out = format(tokens, inst)
-            assert type(out) is bytes, '%s: %r' % (formatter, out)
-
-    for formatter, info in formatters.FORMATTERS.items():
-        # this tests the automatic importing as well
-        fmter = getattr(formatters, formatter)
-        yield verify, fmter
+    try:
+        inst = cls(encoding=None)
+    except (ImportError, FontNotFound) as e:
+        # some dependency or font not installed
+        pytest.skip(str(e))
+
+    if cls.name != 'Raw tokens':
+        out = format(tokens, inst)
+        if cls.unicodeoutput:
+            assert type(out) is text_type, '%s: %r' % (cls, out)
+
+        inst = cls(encoding='utf-8')
+        out = format(tokens, inst)
+        assert type(out) is bytes, '%s: %r' % (cls, out)
+        # Cannot test for encoding, since formatters may have to escape
+        # non-ASCII characters.
+    else:
+        inst = cls()
+        out = format(tokens, inst)
+        assert type(out) is bytes, '%s: %r' % (cls, out)
 
 
 def test_get_formatters():
@@ -268,7 +252,7 @@ def test_bare_class_handler():
         assert False, 'nothing raised'
 
 
-class FiltersTest(unittest.TestCase):
+class TestFilters(object):
 
     def test_basic(self):
         filters_args = [
@@ -287,19 +271,18 @@ class FiltersTest(unittest.TestCase):
             with open(TESTFILE, 'rb') as fp:
                 text = fp.read().decode('utf-8')
             tokens = list(lx.get_tokens(text))
-            self.assertTrue(all(isinstance(t[1], text_type)
-                                for t in tokens),
-                            '%s filter did not return Unicode' % x)
+            assert all(isinstance(t[1], text_type) for t in tokens), \
+                '%s filter did not return Unicode' % x
             roundtext = ''.join([t[1] for t in tokens])
             if x not in ('whitespace', 'keywordcase', 'gobble'):
                 # these filters change the text
-                self.assertEqual(roundtext, text,
-                                 "lexer roundtrip with %s filter failed" % x)
+                assert roundtext == text, \
+                    "lexer roundtrip with %s filter failed" % x
 
     def test_raiseonerror(self):
         lx = lexers.PythonLexer()
         lx.add_filter('raiseonerror', excclass=RuntimeError)
-        self.assertRaises(RuntimeError, list, lx.get_tokens('$'))
+        assert pytest.raises(RuntimeError, list, lx.get_tokens('$'))
 
     def test_whitespace(self):
         lx = lexers.PythonLexer()
@@ -307,7 +290,7 @@ class FiltersTest(unittest.TestCase):
         with open(TESTFILE, 'rb') as fp:
             text = fp.read().decode('utf-8')
         lxtext = ''.join([t[1] for t in list(lx.get_tokens(text))])
-        self.assertFalse(' ' in lxtext)
+        assert ' ' not in lxtext
 
     def test_keywordcase(self):
         lx = lexers.PythonLexer()
@@ -315,15 +298,15 @@ class FiltersTest(unittest.TestCase):
         with open(TESTFILE, 'rb') as fp:
             text = fp.read().decode('utf-8')
         lxtext = ''.join([t[1] for t in list(lx.get_tokens(text))])
-        self.assertTrue('Def' in lxtext and 'Class' in lxtext)
+        assert 'Def' in lxtext and 'Class' in lxtext
 
     def test_codetag(self):
         lx = lexers.PythonLexer()
         lx.add_filter('codetagify')
         text = u'# BUG: text'
         tokens = list(lx.get_tokens(text))
-        self.assertEqual('# ', tokens[0][1])
-        self.assertEqual('BUG', tokens[1][1])
+        assert '# ' == tokens[0][1]
+        assert 'BUG' == tokens[1][1]
 
     def test_codetag_boundary(self):
         # ticket #368
@@ -331,4 +314,4 @@ class FiltersTest(unittest.TestCase):
         lx.add_filter('codetagify')
         text = u'# DEBUG: text'
         tokens = list(lx.get_tokens(text))
-        self.assertEqual('# DEBUG: text', tokens[0][1])
+        assert '# DEBUG: text' == tokens[0][1]
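
Note: one detail of the conversion above: test_raiseonerror keeps pytest.raises in its functional form, wrapped in an assert. The context-manager form is an equivalent spelling (sketch only; the patch keeps the functional call):

    # Equivalent context-manager form of the raiseonerror check (sketch).
    import pytest

    from pygments import lexers

    def test_raiseonerror_contextmanager():
        lx = lexers.PythonLexer()
        lx.add_filter('raiseonerror', excclass=RuntimeError)
        with pytest.raises(RuntimeError):
            list(lx.get_tokens('$'))
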
index 2f1c395a12367d67240039c3f605fae73022039b..756a65892e2e7245fddb93cabe1e6f32d4e11a11 100644 (file)
 """
 
 import textwrap
-import unittest
+
+import pytest
 
 from pygments.lexers import BibTeXLexer, BSTLexer
 from pygments.token import Token
 
 
-class BibTeXTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = BibTeXLexer()
-
-    def testPreamble(self):
-        data = u'@PREAMBLE{"% some LaTeX code here"}'
-        tokens = [
-            (Token.Name.Class, u'@PREAMBLE'),
-            (Token.Punctuation, u'{'),
-            (Token.String, u'"'),
-            (Token.String, u'% some LaTeX code here'),
-            (Token.String, u'"'),
-            (Token.Punctuation, u'}'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(list(self.lexer.get_tokens(data)), tokens)
-
-    def testString(self):
-        data = u'@STRING(SCI = "Science")'
-        tokens = [
-            (Token.Name.Class, u'@STRING'),
-            (Token.Punctuation, u'('),
-            (Token.Name.Attribute, u'SCI'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'='),
-            (Token.Text, u' '),
-            (Token.String, u'"'),
-            (Token.String, u'Science'),
-            (Token.String, u'"'),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(list(self.lexer.get_tokens(data)), tokens)
-
-    def testEntry(self):
-        data = u"""
-            This is a comment.
-
-            @ARTICLE{ruckenstein-diffusion,
-                author = "Liu, Hongquin" # and # "Ruckenstein, Eli",
-                year = 1997,
-                month = JAN,
-                pages = "888-895"
-            }
-        """
-
-        tokens = [
-            (Token.Comment, u'This is a comment.'),
-            (Token.Text, u'\n\n'),
-            (Token.Name.Class, u'@ARTICLE'),
-            (Token.Punctuation, u'{'),
-            (Token.Name.Label, u'ruckenstein-diffusion'),
-            (Token.Punctuation, u','),
-            (Token.Text, u'\n    '),
-            (Token.Name.Attribute, u'author'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'='),
-            (Token.Text, u' '),
-            (Token.String, u'"'),
-            (Token.String, u'Liu, Hongquin'),
-            (Token.String, u'"'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'#'),
-            (Token.Text, u' '),
-            (Token.Name.Variable, u'and'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'#'),
-            (Token.Text, u' '),
-            (Token.String, u'"'),
-            (Token.String, u'Ruckenstein, Eli'),
-            (Token.String, u'"'),
-            (Token.Punctuation, u','),
-            (Token.Text, u'\n    '),
-            (Token.Name.Attribute, u'year'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'='),
-            (Token.Text, u' '),
-            (Token.Number, u'1997'),
-            (Token.Punctuation, u','),
-            (Token.Text, u'\n    '),
-            (Token.Name.Attribute, u'month'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'='),
-            (Token.Text, u' '),
-            (Token.Name.Variable, u'JAN'),
-            (Token.Punctuation, u','),
-            (Token.Text, u'\n    '),
-            (Token.Name.Attribute, u'pages'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'='),
-            (Token.Text, u' '),
-            (Token.String, u'"'),
-            (Token.String, u'888-895'),
-            (Token.String, u'"'),
-            (Token.Text, u'\n'),
-            (Token.Punctuation, u'}'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(list(self.lexer.get_tokens(textwrap.dedent(data))), tokens)
-
-    def testComment(self):
-        data = '@COMMENT{test}'
-        tokens = [
-            (Token.Comment, u'@COMMENT'),
-            (Token.Comment, u'{test}'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(list(self.lexer.get_tokens(data)), tokens)
-
-    def testMissingBody(self):
-        data = '@ARTICLE xxx'
-        tokens = [
-            (Token.Name.Class, u'@ARTICLE'),
-            (Token.Text, u' '),
-            (Token.Error, u'x'),
-            (Token.Error, u'x'),
-            (Token.Error, u'x'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(list(self.lexer.get_tokens(data)), tokens)
-
-    def testMismatchedBrace(self):
-        data = '@PREAMBLE(""}'
-        tokens = [
-            (Token.Name.Class, u'@PREAMBLE'),
-            (Token.Punctuation, u'('),
-            (Token.String, u'"'),
-            (Token.String, u'"'),
-            (Token.Error, u'}'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(list(self.lexer.get_tokens(data)), tokens)
-
-
-class BSTTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = BSTLexer()
-
-    def testBasicBST(self):
-        data = """
-            % BibTeX standard bibliography style `plain'
-
-            INTEGERS { output.state before.all }
-
-            FUNCTION {sort.format.title}
-            { 't :=
-            "A " #2
-                "An " #3
-                "The " #4 t chop.word
-                chop.word
+@pytest.fixture(scope='module')
+def lexer():
+    yield BibTeXLexer()
+
+
+def test_preamble(lexer):
+    data = u'@PREAMBLE{"% some LaTeX code here"}'
+    tokens = [
+        (Token.Name.Class, u'@PREAMBLE'),
+        (Token.Punctuation, u'{'),
+        (Token.String, u'"'),
+        (Token.String, u'% some LaTeX code here'),
+        (Token.String, u'"'),
+        (Token.Punctuation, u'}'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(data)) == tokens
+
+
+def test_string(lexer):
+    data = u'@STRING(SCI = "Science")'
+    tokens = [
+        (Token.Name.Class, u'@STRING'),
+        (Token.Punctuation, u'('),
+        (Token.Name.Attribute, u'SCI'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'='),
+        (Token.Text, u' '),
+        (Token.String, u'"'),
+        (Token.String, u'Science'),
+        (Token.String, u'"'),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(data)) == tokens
+
+
+def test_entry(lexer):
+    data = u"""
+        This is a comment.
+
+        @ARTICLE{ruckenstein-diffusion,
+            author = "Liu, Hongquin" # and # "Ruckenstein, Eli",
+            year = 1997,
+            month = JAN,
+            pages = "888-895"
+        }
+    """
+
+    tokens = [
+        (Token.Comment, u'This is a comment.'),
+        (Token.Text, u'\n\n'),
+        (Token.Name.Class, u'@ARTICLE'),
+        (Token.Punctuation, u'{'),
+        (Token.Name.Label, u'ruckenstein-diffusion'),
+        (Token.Punctuation, u','),
+        (Token.Text, u'\n    '),
+        (Token.Name.Attribute, u'author'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'='),
+        (Token.Text, u' '),
+        (Token.String, u'"'),
+        (Token.String, u'Liu, Hongquin'),
+        (Token.String, u'"'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'#'),
+        (Token.Text, u' '),
+        (Token.Name.Variable, u'and'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'#'),
+        (Token.Text, u' '),
+        (Token.String, u'"'),
+        (Token.String, u'Ruckenstein, Eli'),
+        (Token.String, u'"'),
+        (Token.Punctuation, u','),
+        (Token.Text, u'\n    '),
+        (Token.Name.Attribute, u'year'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'='),
+        (Token.Text, u' '),
+        (Token.Number, u'1997'),
+        (Token.Punctuation, u','),
+        (Token.Text, u'\n    '),
+        (Token.Name.Attribute, u'month'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'='),
+        (Token.Text, u' '),
+        (Token.Name.Variable, u'JAN'),
+        (Token.Punctuation, u','),
+        (Token.Text, u'\n    '),
+        (Token.Name.Attribute, u'pages'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'='),
+        (Token.Text, u' '),
+        (Token.String, u'"'),
+        (Token.String, u'888-895'),
+        (Token.String, u'"'),
+        (Token.Text, u'\n'),
+        (Token.Punctuation, u'}'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(textwrap.dedent(data))) == tokens
+
+
+def test_comment(lexer):
+    data = '@COMMENT{test}'
+    tokens = [
+        (Token.Comment, u'@COMMENT'),
+        (Token.Comment, u'{test}'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(data)) == tokens
+
+
+def test_missing_body(lexer):
+    data = '@ARTICLE xxx'
+    tokens = [
+        (Token.Name.Class, u'@ARTICLE'),
+        (Token.Text, u' '),
+        (Token.Error, u'x'),
+        (Token.Error, u'x'),
+        (Token.Error, u'x'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(data)) == tokens
+
+
+def test_mismatched_brace(lexer):
+    data = '@PREAMBLE(""}'
+    tokens = [
+        (Token.Name.Class, u'@PREAMBLE'),
+        (Token.Punctuation, u'('),
+        (Token.String, u'"'),
+        (Token.String, u'"'),
+        (Token.Error, u'}'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(data)) == tokens
+
+
+def test_basic_bst():
+    lexer = BSTLexer()
+    data = """
+        % BibTeX standard bibliography style `plain'
+
+        INTEGERS { output.state before.all }
+
+        FUNCTION {sort.format.title}
+        { 't :=
+        "A " #2
+            "An " #3
+            "The " #4 t chop.word
             chop.word
-            sortify
-            #1 global.max$ substring$
-            }
-
-            ITERATE {call.type$}
-        """
-        tokens = [
-            (Token.Comment.SingleLine, "% BibTeX standard bibliography style `plain'"),
-            (Token.Text, u'\n\n'),
-            (Token.Keyword, u'INTEGERS'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'{'),
-            (Token.Text, u' '),
-            (Token.Name.Variable, u'output.state'),
-            (Token.Text, u' '),
-            (Token.Name.Variable, u'before.all'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'}'),
-            (Token.Text, u'\n\n'),
-            (Token.Keyword, u'FUNCTION'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'{'),
-            (Token.Name.Variable, u'sort.format.title'),
-            (Token.Punctuation, u'}'),
-            (Token.Text, u'\n'),
-            (Token.Punctuation, u'{'),
-            (Token.Text, u' '),
-            (Token.Name.Function, u"'t"),
-            (Token.Text, u' '),
-            (Token.Name.Variable, u':='),
-            (Token.Text, u'\n'),
-            (Token.Literal.String, u'"A "'),
-            (Token.Text, u' '),
-            (Token.Literal.Number, u'#2'),
-            (Token.Text, u'\n    '),
-            (Token.Literal.String, u'"An "'),
-            (Token.Text, u' '),
-            (Token.Literal.Number, u'#3'),
-            (Token.Text, u'\n    '),
-            (Token.Literal.String, u'"The "'),
-            (Token.Text, u' '),
-            (Token.Literal.Number, u'#4'),
-            (Token.Text, u' '),
-            (Token.Name.Variable, u't'),
-            (Token.Text, u' '),
-            (Token.Name.Variable, u'chop.word'),
-            (Token.Text, u'\n    '),
-            (Token.Name.Variable, u'chop.word'),
-            (Token.Text, u'\n'),
-            (Token.Name.Variable, u'chop.word'),
-            (Token.Text, u'\n'),
-            (Token.Name.Variable, u'sortify'),
-            (Token.Text, u'\n'),
-            (Token.Literal.Number, u'#1'),
-            (Token.Text, u' '),
-            (Token.Name.Builtin, u'global.max$'),
-            (Token.Text, u' '),
-            (Token.Name.Builtin, u'substring$'),
-            (Token.Text, u'\n'),
-            (Token.Punctuation, u'}'),
-            (Token.Text, u'\n\n'),
-            (Token.Keyword, u'ITERATE'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'{'),
-            (Token.Name.Builtin, u'call.type$'),
-            (Token.Punctuation, u'}'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(list(self.lexer.get_tokens(textwrap.dedent(data))), tokens)
+        chop.word
+        sortify
+        #1 global.max$ substring$
+        }
+
+        ITERATE {call.type$}
+    """
+    tokens = [
+        (Token.Comment.SingleLine, "% BibTeX standard bibliography style `plain'"),
+        (Token.Text, u'\n\n'),
+        (Token.Keyword, u'INTEGERS'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'{'),
+        (Token.Text, u' '),
+        (Token.Name.Variable, u'output.state'),
+        (Token.Text, u' '),
+        (Token.Name.Variable, u'before.all'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'}'),
+        (Token.Text, u'\n\n'),
+        (Token.Keyword, u'FUNCTION'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'{'),
+        (Token.Name.Variable, u'sort.format.title'),
+        (Token.Punctuation, u'}'),
+        (Token.Text, u'\n'),
+        (Token.Punctuation, u'{'),
+        (Token.Text, u' '),
+        (Token.Name.Function, u"'t"),
+        (Token.Text, u' '),
+        (Token.Name.Variable, u':='),
+        (Token.Text, u'\n'),
+        (Token.Literal.String, u'"A "'),
+        (Token.Text, u' '),
+        (Token.Literal.Number, u'#2'),
+        (Token.Text, u'\n    '),
+        (Token.Literal.String, u'"An "'),
+        (Token.Text, u' '),
+        (Token.Literal.Number, u'#3'),
+        (Token.Text, u'\n    '),
+        (Token.Literal.String, u'"The "'),
+        (Token.Text, u' '),
+        (Token.Literal.Number, u'#4'),
+        (Token.Text, u' '),
+        (Token.Name.Variable, u't'),
+        (Token.Text, u' '),
+        (Token.Name.Variable, u'chop.word'),
+        (Token.Text, u'\n    '),
+        (Token.Name.Variable, u'chop.word'),
+        (Token.Text, u'\n'),
+        (Token.Name.Variable, u'chop.word'),
+        (Token.Text, u'\n'),
+        (Token.Name.Variable, u'sortify'),
+        (Token.Text, u'\n'),
+        (Token.Literal.Number, u'#1'),
+        (Token.Text, u' '),
+        (Token.Name.Builtin, u'global.max$'),
+        (Token.Text, u' '),
+        (Token.Name.Builtin, u'substring$'),
+        (Token.Text, u'\n'),
+        (Token.Punctuation, u'}'),
+        (Token.Text, u'\n\n'),
+        (Token.Keyword, u'ITERATE'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'{'),
+        (Token.Name.Builtin, u'call.type$'),
+        (Token.Punctuation, u'}'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(textwrap.dedent(data))) == tokens
index e7147a6cf2fa31280eafc0cb1c081ab858bd2e67..e3175215a9579196c9c547ecec0bd85c7a9b15b4 100644 (file)
@@ -7,40 +7,40 @@
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
-import os
+import pytest
 
 from pygments.token import Token
 from pygments.lexers import ColdfusionHtmlLexer
 
 
-class ColdfusionHtmlLexerTest(unittest.TestCase):
-
-    def setUp(self):
-        self.lexer = ColdfusionHtmlLexer()
-
-    def testBasicComment(self):
-        fragment = u'<!--- cfcomment --->'
-        expected = [
-            (Token.Text, u''),
-            (Token.Comment.Multiline, u'<!---'),
-            (Token.Comment.Multiline, u' cfcomment '),
-            (Token.Comment.Multiline, u'--->'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
-
-    def testNestedComment(self):
-        fragment = u'<!--- nested <!--- cfcomment ---> --->'
-        expected = [
-            (Token.Text, u''),
-            (Token.Comment.Multiline, u'<!---'),
-            (Token.Comment.Multiline, u' nested '),
-            (Token.Comment.Multiline, u'<!---'),
-            (Token.Comment.Multiline, u' cfcomment '),
-            (Token.Comment.Multiline, u'--->'),
-            (Token.Comment.Multiline, u' '),
-            (Token.Comment.Multiline, u'--->'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
+@pytest.fixture(scope='module')
+def lexer():
+    yield ColdfusionHtmlLexer()
+
+
+def test_basic_comment(lexer):
+    fragment = u'<!--- cfcomment --->'
+    expected = [
+        (Token.Text, u''),
+        (Token.Comment.Multiline, u'<!---'),
+        (Token.Comment.Multiline, u' cfcomment '),
+        (Token.Comment.Multiline, u'--->'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == expected
+
+
+def test_nested_comment(lexer):
+    fragment = u'<!--- nested <!--- cfcomment ---> --->'
+    expected = [
+        (Token.Text, u''),
+        (Token.Comment.Multiline, u'<!---'),
+        (Token.Comment.Multiline, u' nested '),
+        (Token.Comment.Multiline, u'<!---'),
+        (Token.Comment.Multiline, u' cfcomment '),
+        (Token.Comment.Multiline, u'--->'),
+        (Token.Comment.Multiline, u' '),
+        (Token.Comment.Multiline, u'--->'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == expected
index 64b765ef689a0d38d1bdb6209c650895654008ef..69f39b249fe6c0184e0dae4198c924311dc82e81 100644 (file)
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
-import os
 import textwrap
 
+import pytest
+
 from pygments.token import Text, Number, Token
 from pygments.lexers import CLexer
 
 
-class CLexerTest(unittest.TestCase):
+@pytest.fixture(scope='module')
+def lexer():
+    yield CLexer()
 
-    def setUp(self):
-        self.lexer = CLexer()
 
-    def testNumbers(self):
-        code = '42 23.42 23. .42 023 0xdeadbeef 23e+42 42e-23'
-        wanted = []
-        for item in zip([Number.Integer, Number.Float, Number.Float,
-                         Number.Float, Number.Oct, Number.Hex,
-                         Number.Float, Number.Float], code.split()):
-            wanted.append(item)
-            wanted.append((Text, ' '))
-        wanted = wanted[:-1] + [(Text, '\n')]
-        self.assertEqual(list(self.lexer.get_tokens(code)), wanted)
+def test_numbers(lexer):
+    code = '42 23.42 23. .42 023 0xdeadbeef 23e+42 42e-23'
+    wanted = []
+    for item in zip([Number.Integer, Number.Float, Number.Float,
+                     Number.Float, Number.Oct, Number.Hex,
+                     Number.Float, Number.Float], code.split()):
+        wanted.append(item)
+        wanted.append((Text, ' '))
+    wanted = wanted[:-1] + [(Text, '\n')]
+    assert list(lexer.get_tokens(code)) == wanted
 
-    def testSwitch(self):
-        fragment = u'''\
-        int main()
-        {
-            switch (0)
-            {
-                case 0:
-                default:
-                    ;
-            }
-        }
-        '''
-        tokens = [
-            (Token.Keyword.Type, u'int'),
-            (Token.Text, u' '),
-            (Token.Name.Function, u'main'),
-            (Token.Punctuation, u'('),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-            (Token.Punctuation, u'{'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'    '),
-            (Token.Keyword, u'switch'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'('),
-            (Token.Literal.Number.Integer, u'0'),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'    '),
-            (Token.Punctuation, u'{'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'        '),
-            (Token.Keyword, u'case'),
-            (Token.Text, u' '),
-            (Token.Literal.Number.Integer, u'0'),
-            (Token.Operator, u':'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'        '),
-            (Token.Keyword, u'default'),
-            (Token.Operator, u':'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'            '),
-            (Token.Punctuation, u';'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'    '),
-            (Token.Punctuation, u'}'),
-            (Token.Text, u'\n'),
-            (Token.Punctuation, u'}'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))
 
-    def testSwitchSpaceBeforeColon(self):
-        fragment = u'''\
-        int main()
+def test_switch(lexer):
+    fragment = u'''\
+    int main()
+    {
+        switch (0)
         {
-            switch (0)
-            {
-                case 0 :
-                default :
-                    ;
-            }
+            case 0:
+            default:
+                ;
         }
-        '''
-        tokens = [
-            (Token.Keyword.Type, u'int'),
-            (Token.Text, u' '),
-            (Token.Name.Function, u'main'),
-            (Token.Punctuation, u'('),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-            (Token.Punctuation, u'{'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'    '),
-            (Token.Keyword, u'switch'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'('),
-            (Token.Literal.Number.Integer, u'0'),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'    '),
-            (Token.Punctuation, u'{'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'        '),
-            (Token.Keyword, u'case'),
-            (Token.Text, u' '),
-            (Token.Literal.Number.Integer, u'0'),
-            (Token.Text, u' '),
-            (Token.Operator, u':'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'        '),
-            (Token.Keyword, u'default'),
-            (Token.Text, u' '),
-            (Token.Operator, u':'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'            '),
-            (Token.Punctuation, u';'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'    '),
-            (Token.Punctuation, u'}'),
-            (Token.Text, u'\n'),
-            (Token.Punctuation, u'}'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))
+    }
+    '''
+    tokens = [
+        (Token.Keyword.Type, u'int'),
+        (Token.Text, u' '),
+        (Token.Name.Function, u'main'),
+        (Token.Punctuation, u'('),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+        (Token.Punctuation, u'{'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'    '),
+        (Token.Keyword, u'switch'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'('),
+        (Token.Literal.Number.Integer, u'0'),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'    '),
+        (Token.Punctuation, u'{'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'        '),
+        (Token.Keyword, u'case'),
+        (Token.Text, u' '),
+        (Token.Literal.Number.Integer, u'0'),
+        (Token.Operator, u':'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'        '),
+        (Token.Keyword, u'default'),
+        (Token.Operator, u':'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'            '),
+        (Token.Punctuation, u';'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'    '),
+        (Token.Punctuation, u'}'),
+        (Token.Text, u'\n'),
+        (Token.Punctuation, u'}'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens
 
-    def testLabel(self):
-        fragment = u'''\
-        int main()
-        {
-        foo:
-          goto foo;
-        }
-        '''
-        tokens = [
-            (Token.Keyword.Type, u'int'),
-            (Token.Text, u' '),
-            (Token.Name.Function, u'main'),
-            (Token.Punctuation, u'('),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-            (Token.Punctuation, u'{'),
-            (Token.Text, u'\n'),
-            (Token.Name.Label, u'foo'),
-            (Token.Punctuation, u':'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'  '),
-            (Token.Keyword, u'goto'),
-            (Token.Text, u' '),
-            (Token.Name, u'foo'),
-            (Token.Punctuation, u';'),
-            (Token.Text, u'\n'),
-            (Token.Punctuation, u'}'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))
 
-    def testLabelSpaceBeforeColon(self):
-        fragment = u'''\
-        int main()
+def test_switch_space_before_colon(lexer):
+    fragment = u'''\
+    int main()
+    {
+        switch (0)
         {
-        foo :
-          goto foo;
+            case 0 :
+            default :
+                ;
         }
-        '''
-        tokens = [
-            (Token.Keyword.Type, u'int'),
-            (Token.Text, u' '),
-            (Token.Name.Function, u'main'),
-            (Token.Punctuation, u'('),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-            (Token.Punctuation, u'{'),
-            (Token.Text, u'\n'),
-            (Token.Name.Label, u'foo'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u':'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'  '),
-            (Token.Keyword, u'goto'),
-            (Token.Text, u' '),
-            (Token.Name, u'foo'),
-            (Token.Punctuation, u';'),
-            (Token.Text, u'\n'),
-            (Token.Punctuation, u'}'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))
+    }
+    '''
+    tokens = [
+        (Token.Keyword.Type, u'int'),
+        (Token.Text, u' '),
+        (Token.Name.Function, u'main'),
+        (Token.Punctuation, u'('),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+        (Token.Punctuation, u'{'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'    '),
+        (Token.Keyword, u'switch'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'('),
+        (Token.Literal.Number.Integer, u'0'),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'    '),
+        (Token.Punctuation, u'{'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'        '),
+        (Token.Keyword, u'case'),
+        (Token.Text, u' '),
+        (Token.Literal.Number.Integer, u'0'),
+        (Token.Text, u' '),
+        (Token.Operator, u':'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'        '),
+        (Token.Keyword, u'default'),
+        (Token.Text, u' '),
+        (Token.Operator, u':'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'            '),
+        (Token.Punctuation, u';'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'    '),
+        (Token.Punctuation, u'}'),
+        (Token.Text, u'\n'),
+        (Token.Punctuation, u'}'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens
+
+
+def test_label(lexer):
+    fragment = u'''\
+    int main()
+    {
+    foo:
+      goto foo;
+    }
+    '''
+    tokens = [
+        (Token.Keyword.Type, u'int'),
+        (Token.Text, u' '),
+        (Token.Name.Function, u'main'),
+        (Token.Punctuation, u'('),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+        (Token.Punctuation, u'{'),
+        (Token.Text, u'\n'),
+        (Token.Name.Label, u'foo'),
+        (Token.Punctuation, u':'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'  '),
+        (Token.Keyword, u'goto'),
+        (Token.Text, u' '),
+        (Token.Name, u'foo'),
+        (Token.Punctuation, u';'),
+        (Token.Text, u'\n'),
+        (Token.Punctuation, u'}'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens
+
+
+def test_label_space_before_colon(lexer):
+    fragment = u'''\
+    int main()
+    {
+    foo :
+      goto foo;
+    }
+    '''
+    tokens = [
+        (Token.Keyword.Type, u'int'),
+        (Token.Text, u' '),
+        (Token.Name.Function, u'main'),
+        (Token.Punctuation, u'('),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+        (Token.Punctuation, u'{'),
+        (Token.Text, u'\n'),
+        (Token.Name.Label, u'foo'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u':'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'  '),
+        (Token.Keyword, u'goto'),
+        (Token.Text, u' '),
+        (Token.Name, u'foo'),
+        (Token.Punctuation, u';'),
+        (Token.Text, u'\n'),
+        (Token.Punctuation, u'}'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens
+
+
+def test_label_followed_by_statement(lexer):
+    fragment = u'''\
+    int main()
+    {
+    foo:return 0;
+      goto foo;
+    }
+    '''
+    tokens = [
+        (Token.Keyword.Type, u'int'),
+        (Token.Text, u' '),
+        (Token.Name.Function, u'main'),
+        (Token.Punctuation, u'('),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+        (Token.Punctuation, u'{'),
+        (Token.Text, u'\n'),
+        (Token.Name.Label, u'foo'),
+        (Token.Punctuation, u':'),
+        (Token.Keyword, u'return'),
+        (Token.Text, u' '),
+        (Token.Literal.Number.Integer, u'0'),
+        (Token.Punctuation, u';'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'  '),
+        (Token.Keyword, u'goto'),
+        (Token.Text, u' '),
+        (Token.Name, u'foo'),
+        (Token.Punctuation, u';'),
+        (Token.Text, u'\n'),
+        (Token.Punctuation, u'}'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens
 
-    def testLabelFollowedByStatement(self):
-        fragment = u'''\
-        int main()
-        {
-        foo:return 0;
-          goto foo;
-        }
-        '''
-        tokens = [
-            (Token.Keyword.Type, u'int'),
-            (Token.Text, u' '),
-            (Token.Name.Function, u'main'),
-            (Token.Punctuation, u'('),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-            (Token.Punctuation, u'{'),
-            (Token.Text, u'\n'),
-            (Token.Name.Label, u'foo'),
-            (Token.Punctuation, u':'),
-            (Token.Keyword, u'return'),
-            (Token.Text, u' '),
-            (Token.Literal.Number.Integer, u'0'),
-            (Token.Punctuation, u';'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'  '),
-            (Token.Keyword, u'goto'),
-            (Token.Text, u' '),
-            (Token.Name, u'foo'),
-            (Token.Punctuation, u';'),
-            (Token.Text, u'\n'),
-            (Token.Punctuation, u'}'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))
 
-    def testPreprocFile(self):
-        fragment = u'#include <foo>\n'
-        tokens = [
-            (Token.Comment.Preproc, u'#'),
-            (Token.Comment.Preproc, u'include'),
-            (Token.Text, u' '),
-            (Token.Comment.PreprocFile, u'<foo>'),
-            (Token.Comment.Preproc, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+def test_preproc_file(lexer):
+    fragment = u'#include <foo>\n'
+    tokens = [
+        (Token.Comment.Preproc, u'#'),
+        (Token.Comment.Preproc, u'include'),
+        (Token.Text, u' '),
+        (Token.Comment.PreprocFile, u'<foo>'),
+        (Token.Comment.Preproc, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
 
-    def testPreprocFile2(self):
-        fragment = u'#include "foo.h"\n'
-        tokens = [
-            (Token.Comment.Preproc, u'#'),
-            (Token.Comment.Preproc, u'include'),
-            (Token.Text, u' '),
-            (Token.Comment.PreprocFile, u'"foo.h"'),
-            (Token.Comment.Preproc, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
 
+def test_preproc_file2(lexer):
+    fragment = u'#include "foo.h"\n'
+    tokens = [
+        (Token.Comment.Preproc, u'#'),
+        (Token.Comment.Preproc, u'include'),
+        (Token.Text, u' '),
+        (Token.Comment.PreprocFile, u'"foo.h"'),
+        (Token.Comment.Preproc, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
index 169d690d7ebe260e467ec029413a05a50f662d9d..d56e2ae52fd87b7eccb54bb682f18beab00d2202 100644 (file)
@@ -14,14 +14,16 @@ import os
 import re
 import sys
 import tempfile
-import unittest
+from os import path
+
+from pytest import raises
 
-import support
 from pygments import cmdline, highlight
 from pygments.util import BytesIO, StringIO
 
+TESTDIR = path.dirname(path.abspath(__file__))
+TESTFILE = path.join(TESTDIR, 'test_cmdline.py')
 
-TESTFILE, TESTDIR = support.location(__file__)
 TESTCODE = '''\
 def func(args):
     pass
@@ -65,249 +67,253 @@ def run_cmdline(*args, **kwds):
     return (ret, _decode_output(out), _decode_output(err))
 
 
-class CmdLineTest(unittest.TestCase):
-
-    def check_success(self, *cmdline, **kwds):
-        code, out, err = run_cmdline(*cmdline, **kwds)
-        self.assertEqual(code, 0)
-        self.assertEqual(err, '')
-        return out
-
-    def check_failure(self, *cmdline, **kwds):
-        expected_code = kwds.pop('code', 1)
-        code, out, err = run_cmdline(*cmdline, **kwds)
-        self.assertEqual(code, expected_code)
-        self.assertEqual(out, '')
-        return err
-
-    def test_normal(self):
-        # test that cmdline gives the same output as library api
-        from pygments.lexers import PythonLexer
-        from pygments.formatters import HtmlFormatter
-        filename = TESTFILE
-        with open(filename, 'rb') as fp:
-            code = fp.read()
-
-        output = highlight(code, PythonLexer(), HtmlFormatter())
-
-        o = self.check_success('-lpython', '-fhtml', filename)
-        self.assertEqual(o, output)
-
-    def test_stdin(self):
-        o = self.check_success('-lpython', '-fhtml', stdin=TESTCODE)
-        o = re.sub('<[^>]*>', '', o)
-        # rstrip is necessary since HTML inserts a \n after the last </div>
-        self.assertEqual(o.rstrip(), TESTCODE.rstrip())
-
-        # guess if no lexer given
-        o = self.check_success('-fhtml', stdin=TESTCODE)
-        o = re.sub('<[^>]*>', '', o)
-        # rstrip is necessary since HTML inserts a \n after the last </div>
-        self.assertEqual(o.rstrip(), TESTCODE.rstrip())
-
-    def test_outfile(self):
-        # test that output file works with and without encoding
-        fd, name = tempfile.mkstemp()
-        os.close(fd)
-        for opts in [['-fhtml', '-o', name, TESTFILE],
-                     ['-flatex', '-o', name, TESTFILE],
-                     ['-fhtml', '-o', name, '-O', 'encoding=utf-8', TESTFILE]]:
-            try:
-                self.check_success(*opts)
-            finally:
-                os.unlink(name)
-
-    def test_load_from_file(self):
-        lexer_file = os.path.join(TESTDIR, 'support', 'python_lexer.py')
-        formatter_file = os.path.join(TESTDIR, 'support', 'html_formatter.py')
-
-        # By default, use CustomLexer
-        o = self.check_success('-l', lexer_file, '-f', 'html',
-                               '-x', stdin=TESTCODE)
-        o = re.sub('<[^>]*>', '', o)
-        # rstrip is necessary since HTML inserts a \n after the last </div>
-        self.assertEqual(o.rstrip(), TESTCODE.rstrip())
-
-        # If user specifies a name, use it
-        o = self.check_success('-f', 'html', '-x', '-l',
-                               lexer_file + ':LexerWrapper', stdin=TESTCODE)
-        o = re.sub('<[^>]*>', '', o)
-        # rstrip is necessary since HTML inserts a \n after the last </div>
-        self.assertEqual(o.rstrip(), TESTCODE.rstrip())
-
-        # Should also work for formatters
-        o = self.check_success('-lpython', '-f',
-                               formatter_file + ':HtmlFormatterWrapper',
-                               '-x', stdin=TESTCODE)
-        o = re.sub('<[^>]*>', '', o)
-        # rstrip is necessary since HTML inserts a \n after the last </div>
-        self.assertEqual(o.rstrip(), TESTCODE.rstrip())
-
-    def test_stream_opt(self):
-        o = self.check_success('-lpython', '-s', '-fterminal', stdin=TESTCODE)
-        o = re.sub(r'\x1b\[.*?m', '', o)
-        self.assertEqual(o.replace('\r\n', '\n'), TESTCODE)
-
-    def test_h_opt(self):
-        o = self.check_success('-h')
-        self.assertTrue('Usage:' in o)
-
-    def test_L_opt(self):
-        o = self.check_success('-L')
-        self.assertTrue('Lexers' in o and 'Formatters' in o and
-                        'Filters' in o and 'Styles' in o)
-        o = self.check_success('-L', 'lexer')
-        self.assertTrue('Lexers' in o and 'Formatters' not in o)
-        self.check_success('-L', 'lexers')
-
-    def test_O_opt(self):
-        filename = TESTFILE
-        o = self.check_success('-Ofull=1,linenos=true,foo=bar',
-                               '-fhtml', filename)
-        self.assertTrue('<html' in o)
-        self.assertTrue('class="linenos"' in o)
-
-        # "foobar" is invalid for a bool option
-        e = self.check_failure('-Ostripnl=foobar', TESTFILE)
-        self.assertTrue('Error: Invalid value' in e)
-        e = self.check_failure('-Ostripnl=foobar', '-lpy')
-        self.assertTrue('Error: Invalid value' in e)
-
-    def test_P_opt(self):
-        filename = TESTFILE
-        o = self.check_success('-Pfull', '-Ptitle=foo, bar=baz=,',
-                               '-fhtml', filename)
-        self.assertTrue('<title>foo, bar=baz=,</title>' in o)
-
-    def test_F_opt(self):
-        filename = TESTFILE
-        o = self.check_success('-Fhighlight:tokentype=Name.Blubb,'
-                               'names=TESTFILE filename',
-                               '-fhtml', filename)
-        self.assertTrue('<span class="n n-Blubb' in o)
-
-    def test_H_opt(self):
-        o = self.check_success('-H', 'formatter', 'html')
-        self.assertTrue('HTML' in o)
-        o = self.check_success('-H', 'lexer', 'python')
-        self.assertTrue('Python' in o)
-        o = self.check_success('-H', 'filter', 'raiseonerror')
-        self.assertTrue('raiseonerror', o)
-        e = self.check_failure('-H', 'lexer', 'foobar')
-        self.assertTrue('not found' in e)
-
-    def test_S_opt(self):
-        o = self.check_success('-S', 'default', '-f', 'html', '-O', 'linenos=1')
-        lines = o.splitlines()
-        for line in lines:
-            # every line is for a token class
-            parts = line.split()
-            self.assertTrue(parts[0].startswith('.'))
-            self.assertTrue(parts[1] == '{')
-            if parts[0] != '.hll':
-                self.assertTrue(parts[-4] == '}')
-                self.assertTrue(parts[-3] == '/*')
-                self.assertTrue(parts[-1] == '*/')
-        self.check_failure('-S', 'default', '-f', 'foobar')
-
-    def test_N_opt(self):
-        o = self.check_success('-N', 'test.py')
-        self.assertEqual('python', o.strip())
-        o = self.check_success('-N', 'test.unknown')
-        self.assertEqual('text', o.strip())
-
-    def test_invalid_opts(self):
-        for opts in [
-            ('-X',),
-            ('-L', '-lpy'),
-            ('-L', '-fhtml'),
-            ('-L', '-Ox'),
-            ('-S', 'default', '-l', 'py', '-f', 'html'),
-            ('-S', 'default'),
-            ('-a', 'arg'),
-            ('-H',),
-            (TESTFILE, TESTFILE),
-            ('-H', 'formatter'),
-            ('-H', 'foo', 'bar'),
-            ('-s',),
-            ('-s', TESTFILE),
-        ]:
-            self.check_failure(*opts, code=2)
-
-    def test_errors(self):
-        # input file not found
-        e = self.check_failure('-lpython', 'nonexistent.py')
-        self.assertTrue('Error: cannot read infile' in e)
-        self.assertTrue('nonexistent.py' in e)
-
-        # lexer not found
-        e = self.check_failure('-lfooo', TESTFILE)
-        self.assertTrue('Error: no lexer for alias' in e)
-
-        # cannot load .py file without load_from_file flag
-        e = self.check_failure('-l', 'nonexistent.py', TESTFILE)
-        self.assertTrue('Error: no lexer for alias' in e)
-
-        # lexer file is missing/unreadable
-        e = self.check_failure('-l', 'nonexistent.py',
-                               '-x', TESTFILE)
-        self.assertTrue('Error: cannot read' in e)
-
-        # lexer file is malformed
-        e = self.check_failure('-l', 'support/empty.py',
-                               '-x', TESTFILE)
-        self.assertTrue('Error: no valid CustomLexer class found' in e)
-
-        # formatter not found
-        e = self.check_failure('-lpython', '-ffoo', TESTFILE)
-        self.assertTrue('Error: no formatter found for name' in e)
-
-        # formatter for outfile not found
-        e = self.check_failure('-ofoo.foo', TESTFILE)
-        self.assertTrue('Error: no formatter found for file name' in e)
-
-        # cannot load .py file without load_from_file flag
-        e = self.check_failure('-f', 'nonexistent.py', TESTFILE)
-        self.assertTrue('Error: no formatter found for name' in e)
-
-        # formatter file is missing/unreadable
-        e = self.check_failure('-f', 'nonexistent.py',
-                               '-x', TESTFILE)
-        self.assertTrue('Error: cannot read' in e)
-
-        # formatter file is malformed
-        e = self.check_failure('-f', 'support/empty.py',
-                               '-x', TESTFILE)
-        self.assertTrue('Error: no valid CustomFormatter class found' in e)
-
-        # output file not writable
-        e = self.check_failure('-o', os.path.join('nonexistent', 'dir', 'out.html'),
-                               '-lpython', TESTFILE)
-        self.assertTrue('Error: cannot open outfile' in e)
-        self.assertTrue('out.html' in e)
-
-        # unknown filter
-        e = self.check_failure('-F', 'foo', TESTFILE)
-        self.assertTrue('Error: filter \'foo\' not found' in e)
-
-    def test_exception(self):
-        cmdline.highlight = None  # override callable to provoke TypeError
+def check_success(*cmdline, **kwds):
+    code, out, err = run_cmdline(*cmdline, **kwds)
+    assert code == 0
+    assert err == ''
+    return out
+
+
+def check_failure(*cmdline, **kwds):
+    expected_code = kwds.pop('code', 1)
+    code, out, err = run_cmdline(*cmdline, **kwds)
+    assert code == expected_code
+    assert out == ''
+    return err
+
+
+def test_normal():
+    # test that cmdline gives the same output as library api
+    from pygments.lexers import PythonLexer
+    from pygments.formatters import HtmlFormatter
+    filename = TESTFILE
+    with open(filename, 'rb') as fp:
+        code = fp.read()
+
+    output = highlight(code, PythonLexer(), HtmlFormatter())
+
+    o = check_success('-lpython', '-fhtml', filename)
+    assert o == output
+
+
+def test_stdin():
+    o = check_success('-lpython', '-fhtml', stdin=TESTCODE)
+    o = re.sub('<[^>]*>', '', o)
+    # rstrip is necessary since HTML inserts a \n after the last </div>
+    assert o.rstrip() == TESTCODE.rstrip()
+
+    # guess if no lexer given
+    o = check_success('-fhtml', stdin=TESTCODE)
+    o = re.sub('<[^>]*>', '', o)
+    # rstrip is necessary since HTML inserts a \n after the last </div>
+    assert o.rstrip() == TESTCODE.rstrip()
+
+
+def test_outfile():
+    # test that output file works with and without encoding
+    fd, name = tempfile.mkstemp()
+    os.close(fd)
+    for opts in [['-fhtml', '-o', name, TESTFILE],
+                 ['-flatex', '-o', name, TESTFILE],
+                 ['-fhtml', '-o', name, '-O', 'encoding=utf-8', TESTFILE]]:
         try:
-            # unexpected exception while highlighting
-            e = self.check_failure('-lpython', TESTFILE)
-            self.assertTrue('*** Error while highlighting:' in e)
-            self.assertTrue('TypeError' in e)
-
-            # same with -v: should reraise the exception
-            try:
-                self.check_failure('-lpython', '-v', TESTFILE)
-            except Exception:
-                pass
-            else:
-                self.fail('exception not reraised')
+            check_success(*opts)
         finally:
-            cmdline.highlight = highlight
+            os.unlink(name)
+
+
+def test_load_from_file():
+    lexer_file = os.path.join(TESTDIR, 'support', 'python_lexer.py')
+    formatter_file = os.path.join(TESTDIR, 'support', 'html_formatter.py')
+
+    # By default, use CustomLexer
+    o = check_success('-l', lexer_file, '-f', 'html', '-x', stdin=TESTCODE)
+    o = re.sub('<[^>]*>', '', o)
+    # rstrip is necessary since HTML inserts a \n after the last </div>
+    assert o.rstrip() == TESTCODE.rstrip()
+
+    # If user specifies a name, use it
+    o = check_success('-f', 'html', '-x', '-l',
+                      lexer_file + ':LexerWrapper', stdin=TESTCODE)
+    o = re.sub('<[^>]*>', '', o)
+    # rstrip is necessary since HTML inserts a \n after the last </div>
+    assert o.rstrip() == TESTCODE.rstrip()
+
+    # Should also work for formatters
+    o = check_success('-lpython', '-f',
+                      formatter_file + ':HtmlFormatterWrapper',
+                      '-x', stdin=TESTCODE)
+    o = re.sub('<[^>]*>', '', o)
+    # rstrip is necessary since HTML inserts a \n after the last </div>
+    assert o.rstrip() == TESTCODE.rstrip()
+
+
+def test_stream_opt():
+    o = check_success('-lpython', '-s', '-fterminal', stdin=TESTCODE)
+    o = re.sub(r'\x1b\[.*?m', '', o)
+    assert o.replace('\r\n', '\n') == TESTCODE
+
+
+def test_h_opt():
+    o = check_success('-h')
+    assert 'Usage:' in o
+
+
+def test_L_opt():
+    o = check_success('-L')
+    assert 'Lexers' in o and 'Formatters' in o and 'Filters' in o and 'Styles' in o
+    o = check_success('-L', 'lexer')
+    assert 'Lexers' in o and 'Formatters' not in o
+    check_success('-L', 'lexers')
+
+
+def test_O_opt():
+    filename = TESTFILE
+    o = check_success('-Ofull=1,linenos=true,foo=bar', '-fhtml', filename)
+    assert '<html' in o
+    assert 'class="linenos"' in o
+
+    # "foobar" is invalid for a bool option
+    e = check_failure('-Ostripnl=foobar', TESTFILE)
+    assert 'Error: Invalid value' in e
+    e = check_failure('-Ostripnl=foobar', '-lpy')
+    assert 'Error: Invalid value' in e
+
+
+def test_P_opt():
+    filename = TESTFILE
+    o = check_success('-Pfull', '-Ptitle=foo, bar=baz=,', '-fhtml', filename)
+    assert '<title>foo, bar=baz=,</title>' in o
+
+
+def test_F_opt():
+    filename = TESTFILE
+    o = check_success('-Fhighlight:tokentype=Name.Blubb,'
+                      'names=TESTFILE filename', '-fhtml', filename)
+    assert '<span class="n n-Blubb' in o
+
+
+def test_H_opt():
+    o = check_success('-H', 'formatter', 'html')
+    assert 'HTML' in o
+    o = check_success('-H', 'lexer', 'python')
+    assert 'Python' in o
+    o = check_success('-H', 'filter', 'raiseonerror')
+    assert 'raiseonerror' in o
+    e = check_failure('-H', 'lexer', 'foobar')
+    assert 'not found' in e
+
+
+def test_S_opt():
+    o = check_success('-S', 'default', '-f', 'html', '-O', 'linenos=1')
+    lines = o.splitlines()
+    for line in lines:
+        # every line is for a token class
+        parts = line.split()
+        assert parts[0].startswith('.')
+        assert parts[1] == '{'
+        if parts[0] != '.hll':
+            assert parts[-4] == '}'
+            assert parts[-3] == '/*'
+            assert parts[-1] == '*/'
+    check_failure('-S', 'default', '-f', 'foobar')
+
+
+def test_N_opt():
+    o = check_success('-N', 'test.py')
+    assert 'python' == o.strip()
+    o = check_success('-N', 'test.unknown')
+    assert 'text' == o.strip()
+
+
+def test_invalid_opts():
+    for opts in [
+        ('-X',),
+        ('-L', '-lpy'),
+        ('-L', '-fhtml'),
+        ('-L', '-Ox'),
+        ('-S', 'default', '-l', 'py', '-f', 'html'),
+        ('-S', 'default'),
+        ('-a', 'arg'),
+        ('-H',),
+        (TESTFILE, TESTFILE),
+        ('-H', 'formatter'),
+        ('-H', 'foo', 'bar'),
+        ('-s',),
+        ('-s', TESTFILE),
+    ]:
+        check_failure(*opts, code=2)
+
+
+def test_errors():
+    # input file not found
+    e = check_failure('-lpython', 'nonexistent.py')
+    assert 'Error: cannot read infile' in e
+    assert 'nonexistent.py' in e
+
+    # lexer not found
+    e = check_failure('-lfooo', TESTFILE)
+    assert 'Error: no lexer for alias' in e
+
+    # cannot load .py file without load_from_file flag
+    e = check_failure('-l', 'nonexistent.py', TESTFILE)
+    assert 'Error: no lexer for alias' in e
+
+    # lexer file is missing/unreadable
+    e = check_failure('-l', 'nonexistent.py', '-x', TESTFILE)
+    assert 'Error: cannot read' in e
+
+    # lexer file is malformed
+    e = check_failure('-l', path.join(TESTDIR, 'support', 'empty.py'),
+                      '-x', TESTFILE)
+    assert 'Error: no valid CustomLexer class found' in e
+
+    # formatter not found
+    e = check_failure('-lpython', '-ffoo', TESTFILE)
+    assert 'Error: no formatter found for name' in e
+
+    # formatter for outfile not found
+    e = check_failure('-ofoo.foo', TESTFILE)
+    assert 'Error: no formatter found for file name' in e
+
+    # cannot load .py file without load_from_file flag
+    e = check_failure('-f', 'nonexistent.py', TESTFILE)
+    assert 'Error: no formatter found for name' in e
+
+    # formatter file is missing/unreadable
+    e = check_failure('-f', 'nonexistent.py', '-x', TESTFILE)
+    assert 'Error: cannot read' in e
+
+    # formatter file is malformed
+    e = check_failure('-f', path.join(TESTDIR, 'support', 'empty.py'),
+                      '-x', TESTFILE)
+    assert 'Error: no valid CustomFormatter class found' in e
+
+    # output file not writable
+    e = check_failure('-o', os.path.join('nonexistent', 'dir', 'out.html'),
+                      '-lpython', TESTFILE)
+    assert 'Error: cannot open outfile' in e
+    assert 'out.html' in e
+
+    # unknown filter
+    e = check_failure('-F', 'foo', TESTFILE)
+    assert 'Error: filter \'foo\' not found' in e
+
+
+def test_exception():
+    cmdline.highlight = None  # override callable to provoke TypeError
+    try:
+        # unexpected exception while highlighting
+        e = check_failure('-lpython', TESTFILE)
+        assert '*** Error while highlighting:' in e
+        assert 'TypeError' in e
+
+        # same with -v: should reraise the exception
+        assert raises(Exception, check_failure, '-lpython', '-v', TESTFILE)
+    finally:
+        cmdline.highlight = highlight
+
 
-    def test_parse_opts(self):
-        self.assertEqual(cmdline._parse_options(['  ', 'keyonly,key = value ']),
-                         {'keyonly': True, 'key': 'value'})
+def test_parse_opts():
+    assert cmdline._parse_options(['  ', 'keyonly,key = value ']) == \
+        {'keyonly': True, 'key': 'value'}
index ef59965cef8b1d3118cd423cb0808037ecc21bf9..a3ef33a3626698f54c4aa1cfd6e9feb3f9e34eb0 100644 (file)
@@ -7,27 +7,49 @@
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
-from pygments.lexers import CppLexer
+from pygments.lexers import CppLexer, CLexer, guess_lexer
 from pygments.token import Token
 
 
-class CppTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = CppLexer()
-
-    def testGoodComment(self):
-        fragment = u'/* foo */\n'
-        tokens = [
-            (Token.Comment.Multiline, u'/* foo */'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testOpenComment(self):
-        fragment = u'/* foo\n'
-        tokens = [
-            (Token.Comment.Multiline, u'/* foo\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+@pytest.fixture(scope='module')
+def lexer():
+    yield CppLexer()
+
+
+def test_good_comment(lexer):
+    fragment = u'/* foo */\n'
+    tokens = [
+        (Token.Comment.Multiline, u'/* foo */'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_open_comment(lexer):
+    fragment = u'/* foo\n'
+    tokens = [
+        (Token.Comment.Multiline, u'/* foo\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_guess_c_lexer():
+    code = '''
+    #include <stdio.h>
+    #include <stdlib.h>
+
+    int main(void);
+
+    int main(void) {
+        uint8_t x = 42;
+        uint8_t y = x + 1;
+
+        /* exit 1 for success! */
+        return 1;
+    }
+    '''
+    lexer = guess_lexer(code)
+    assert isinstance(lexer, CLexer)
index 9a1588f2f10a41f3ba1cdf935a91dd43eb2ec772..f4909ac6aa6830e767070a3b4354711b45700bba 100644 (file)
 # -*- coding: utf-8 -*-
 """
     Basic CrystalLexer Test
-    ~~~~~~~~~~~~~~~~~~~~
+    ~~~~~~~~~~~~~~~~~~~~~~~
 
-    :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
 from __future__ import unicode_literals
-import unittest
 
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation, Error
+import pytest
+
+from pygments.token import Text, Operator, Keyword, Name, String, Number, \
+    Punctuation, Error
 from pygments.lexers import CrystalLexer
 
 
-class CrystalTest(unittest.TestCase):
+@pytest.fixture(scope='module')
+def lexer():
+    yield CrystalLexer()
+
+
+def test_range_syntax1(lexer):
+    fragment = '1...3\n'
+    tokens = [
+        (Number.Integer, '1'),
+        (Operator, '...'),
+        (Number.Integer, '3'),
+        (Text, '\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_range_syntax2(lexer):
+    fragment = '1 .. 3\n'
+    tokens = [
+        (Number.Integer, '1'),
+        (Text, ' '),
+        (Operator, '..'),
+        (Text, ' '),
+        (Number.Integer, '3'),
+        (Text, '\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_interpolation_nested_curly(lexer):
+    fragment = (
+        '"A#{ (3..5).group_by { |x| x/2}.map '
+        'do |k,v| "#{k}" end.join }" + "Z"\n')
+    tokens = [
+        (String.Double, '"'),
+        (String.Double, 'A'),
+        (String.Interpol, '#{'),
+        (Text, ' '),
+        (Punctuation, '('),
+        (Number.Integer, '3'),
+        (Operator, '..'),
+        (Number.Integer, '5'),
+        (Punctuation, ')'),
+        (Operator, '.'),
+        (Name, 'group_by'),
+        (Text, ' '),
+        (String.Interpol, '{'),
+        (Text, ' '),
+        (Operator, '|'),
+        (Name, 'x'),
+        (Operator, '|'),
+        (Text, ' '),
+        (Name, 'x'),
+        (Operator, '/'),
+        (Number.Integer, '2'),
+        (String.Interpol, '}'),
+        (Operator, '.'),
+        (Name, 'map'),
+        (Text, ' '),
+        (Keyword, 'do'),
+        (Text, ' '),
+        (Operator, '|'),
+        (Name, 'k'),
+        (Punctuation, ','),
+        (Name, 'v'),
+        (Operator, '|'),
+        (Text, ' '),
+        (String.Double, '"'),
+        (String.Interpol, '#{'),
+        (Name, 'k'),
+        (String.Interpol, '}'),
+        (String.Double, '"'),
+        (Text, ' '),
+        (Keyword, 'end'),
+        (Operator, '.'),
+        (Name, 'join'),
+        (Text, ' '),
+        (String.Interpol, '}'),
+        (String.Double, '"'),
+        (Text, ' '),
+        (Operator, '+'),
+        (Text, ' '),
+        (String.Double, '"'),
+        (String.Double, 'Z'),
+        (String.Double, '"'),
+        (Text, '\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_operator_methods(lexer):
+    fragment = '([] of Int32).[]?(5)\n'
+    tokens = [
+        (Punctuation, '('),
+        (Operator, '['),
+        (Operator, ']'),
+        (Text, ' '),
+        (Keyword, 'of'),
+        (Text, ' '),
+        (Name.Builtin, 'Int32'),
+        (Punctuation, ')'),
+        (Operator, '.'),
+        (Name.Operator, '[]?'),
+        (Punctuation, '('),
+        (Number.Integer, '5'),
+        (Punctuation, ')'),
+        (Text, '\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_array_access(lexer):
+    fragment = '[5][5]?\n'
+    tokens = [
+        (Operator, '['),
+        (Number.Integer, '5'),
+        (Operator, ']'),
+        (Operator, '['),
+        (Number.Integer, '5'),
+        (Operator, ']?'),
+        (Text, '\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
 
-    def setUp(self):
-        self.lexer = CrystalLexer()
-        self.maxDiff = None
 
-    def testRangeSyntax1(self):
-        fragment = '1...3\n'
-        tokens = [
-            (Number.Integer, '1'),
-            (Operator, '...'),
-            (Number.Integer, '3'),
-            (Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+def test_numbers(lexer):
+    for kind, testset in [
+        (Number.Integer, '0  1  1_000_000  1u8  11231231231121312i64'),
+        (Number.Float, '0.0  1.0_f32  1_f32  0f64  1e+4  1e111  1_234.567_890'),
+        (Number.Bin, '0b1001_0110  0b0u8'),
+        (Number.Oct, '0o17  0o7_i32'),
+        (Number.Hex, '0xdeadBEEF'),
+    ]:
+        for fragment in testset.split():
+            assert list(lexer.get_tokens(fragment + '\n')) == \
+                [(kind, fragment), (Text, '\n')]
 
-    def testRangeSyntax2(self):
-        fragment = '1 .. 3\n'
-        tokens = [
-            (Number.Integer, '1'),
-            (Text, ' '),
-            (Operator, '..'),
-            (Text, ' '),
-            (Number.Integer, '3'),
-            (Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+    for fragment in '01  0b2  0x129g2  0o12358'.split():
+        assert next(lexer.get_tokens(fragment + '\n'))[0] == Error
 
-    def testInterpolationNestedCurly(self):
-        fragment = (
-            '"A#{ (3..5).group_by { |x| x/2}.map '
-            'do |k,v| "#{k}" end.join }" + "Z"\n')
-        tokens = [
-            (String.Double, '"'),
-            (String.Double, 'A'),
-            (String.Interpol, '#{'),
-            (Text, ' '),
-            (Punctuation, '('),
-            (Number.Integer, '3'),
-            (Operator, '..'),
-            (Number.Integer, '5'),
-            (Punctuation, ')'),
-            (Operator, '.'),
-            (Name, 'group_by'),
-            (Text, ' '),
-            (String.Interpol, '{'),
-            (Text, ' '),
-            (Operator, '|'),
-            (Name, 'x'),
-            (Operator, '|'),
-            (Text, ' '),
-            (Name, 'x'),
-            (Operator, '/'),
-            (Number.Integer, '2'),
-            (String.Interpol, '}'),
-            (Operator, '.'),
-            (Name, 'map'),
-            (Text, ' '),
-            (Keyword, 'do'),
-            (Text, ' '),
-            (Operator, '|'),
-            (Name, 'k'),
-            (Punctuation, ','),
-            (Name, 'v'),
-            (Operator, '|'),
-            (Text, ' '),
-            (String.Double, '"'),
-            (String.Interpol, '#{'),
-            (Name, 'k'),
-            (String.Interpol, '}'),
-            (String.Double, '"'),
-            (Text, ' '),
-            (Keyword, 'end'),
-            (Operator, '.'),
-            (Name, 'join'),
-            (Text, ' '),
-            (String.Interpol, '}'),
-            (String.Double, '"'),
-            (Text, ' '),
-            (Operator, '+'),
-            (Text, ' '),
-            (String.Double, '"'),
-            (String.Double, 'Z'),
-            (String.Double, '"'),
-            (Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
 
-    def testOperatorMethods(self):
-        fragment = '([] of Int32).[]?(5)\n'
-        tokens = [
-            (Punctuation, '('),
-            (Operator, '['),
-            (Operator, ']'),
-            (Text, ' '),
-            (Keyword, 'of'),
-            (Text, ' '),
-            (Name.Builtin, 'Int32'),
-            (Punctuation, ')'),
-            (Operator, '.'),
-            (Name.Operator, '[]?'),
-            (Punctuation, '('),
-            (Number.Integer, '5'),
-            (Punctuation, ')'),
-            (Text, '\n')
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-    
-    def testArrayAccess(self):
-        fragment = '[5][5]?\n'
-        tokens = [
-            (Operator, '['),
-            (Number.Integer, '5'),
-            (Operator, ']'),
-            (Operator, '['),
-            (Number.Integer, '5'),
-            (Operator, ']?'),
-            (Text, '\n')
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+def test_chars(lexer):
+    for fragment in ["'a'", "'я'", "'\\u{1234}'", "'\n'"]:
+        assert list(lexer.get_tokens(fragment + '\n')) == \
+            [(String.Char, fragment), (Text, '\n')]
+    assert next(lexer.get_tokens("'abc'"))[0] == Error
 
-    def testNumbers(self):
-        for kind, testset in [
-            (Number.Integer, '0  1  1_000_000  1u8  11231231231121312i64'),
-            (Number.Float, '0.0  1.0_f32  1_f32  0f64  1e+4  1e111  1_234.567_890'),
-            (Number.Bin, '0b1001_0110  0b0u8'),
-            (Number.Oct, '0o17  0o7_i32'),
-            (Number.Hex, '0xdeadBEEF'),
-        ]:
-            for fragment in testset.split():
-                self.assertEqual([(kind, fragment), (Text, '\n')],
-                                 list(self.lexer.get_tokens(fragment + '\n')))
 
-        for fragment in '01  0b2  0x129g2  0o12358'.split():
-            self.assertEqual(next(self.lexer.get_tokens(fragment + '\n'))[0],
-                             Error)
+def test_macro(lexer):
+    fragment = (
+        'def<=>(other : self) : Int\n'
+        '{%for field in %w(first_name middle_name last_name)%}\n'
+        'cmp={{field.id}}<=>other.{{field.id}}\n'
+        'return cmp if cmp!=0\n'
+        '{%end%}\n'
+        '0\n'
+        'end\n')
+    tokens = [
+        (Keyword, 'def'),
+        (Name.Function, '<=>'),
+        (Punctuation, '('),
+        (Name, 'other'),
+        (Text, ' '),
+        (Punctuation, ':'),
+        (Text, ' '),
+        (Keyword.Pseudo, 'self'),
+        (Punctuation, ')'),
+        (Text, ' '),
+        (Punctuation, ':'),
+        (Text, ' '),
+        (Name.Builtin, 'Int'),
+        (Text, '\n'),
+        (String.Interpol, '{%'),
+        (Keyword, 'for'),
+        (Text, ' '),
+        (Name, 'field'),
+        (Text, ' '),
+        (Keyword, 'in'),
+        (Text, ' '),
+        (String.Other, '%w('),
+        (String.Other, 'first_name middle_name last_name'),
+        (String.Other, ')'),
+        (String.Interpol, '%}'),
+        (Text, '\n'),
+        (Name, 'cmp'),
+        (Operator, '='),
+        (String.Interpol, '{{'),
+        (Name, 'field'),
+        (Operator, '.'),
+        (Name, 'id'),
+        (String.Interpol, '}}'),
+        (Operator, '<=>'),
+        (Name, 'other'),
+        (Operator, '.'),
+        (String.Interpol, '{{'),
+        (Name, 'field'),
+        (Operator, '.'),
+        (Name, 'id'),
+        (String.Interpol, '}}'),
+        (Text, '\n'),
+        (Keyword, 'return'),
+        (Text, ' '),
+        (Name, 'cmp'),
+        (Text, ' '),
+        (Keyword, 'if'),
+        (Text, ' '),
+        (Name, 'cmp'),
+        (Operator, '!='),
+        (Number.Integer, '0'),
+        (Text, '\n'),
+        (String.Interpol, '{%'),
+        (Keyword, 'end'),
+        (String.Interpol, '%}'),
+        (Text, '\n'),
+        (Number.Integer, '0'),
+        (Text, '\n'),
+        (Keyword, 'end'),
+        (Text, '\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
 
-    def testChars(self):
-        for fragment in ["'a'", "'я'", "'\\u{1234}'", "'\n'"]:
-            self.assertEqual([(String.Char, fragment), (Text, '\n')],
-                             list(self.lexer.get_tokens(fragment + '\n')))
-        self.assertEqual(next(self.lexer.get_tokens("'abc'"))[0], Error)
 
-    def testMacro(self):
-        fragment = (
-            'def<=>(other : self) : Int\n'
-            '{%for field in %w(first_name middle_name last_name)%}\n'
-            'cmp={{field.id}}<=>other.{{field.id}}\n'
-            'return cmp if cmp!=0\n'
-            '{%end%}\n'
-            '0\n'
-            'end\n')
-        tokens = [
-            (Keyword, 'def'),
-            (Name.Function, '<=>'),
-            (Punctuation, '('),
-            (Name, 'other'),
-            (Text, ' '),
-            (Punctuation, ':'),
-            (Text, ' '),
-            (Keyword.Pseudo, 'self'),
-            (Punctuation, ')'),
-            (Text, ' '),
-            (Punctuation, ':'),
-            (Text, ' '),
-            (Name.Builtin, 'Int'),
-            (Text, '\n'),
-            (String.Interpol, '{%'),
-            (Keyword, 'for'),
-            (Text, ' '),
-            (Name, 'field'),
-            (Text, ' '),
-            (Keyword, 'in'),
-            (Text, ' '),
-            (String.Other, '%w('),
-            (String.Other, 'first_name middle_name last_name'),
-            (String.Other, ')'),
-            (String.Interpol, '%}'),
-            (Text, '\n'),
-            (Name, 'cmp'),
-            (Operator, '='),
-            (String.Interpol, '{{'),
-            (Name, 'field'),
-            (Operator, '.'),
-            (Name, 'id'),
-            (String.Interpol, '}}'),
-            (Operator, '<=>'),
-            (Name, 'other'),
-            (Operator, '.'),
-            (String.Interpol, '{{'),
-            (Name, 'field'),
-            (Operator, '.'),
-            (Name, 'id'),
-            (String.Interpol, '}}'),
-            (Text, '\n'),
-            (Keyword, 'return'),
-            (Text, ' '),
-            (Name, 'cmp'),
-            (Text, ' '),
-            (Keyword, 'if'),
-            (Text, ' '),
-            (Name, 'cmp'),
-            (Operator, '!='),
-            (Number.Integer, '0'),
-            (Text, '\n'),
-            (String.Interpol, '{%'),
-            (Keyword, 'end'),
-            (String.Interpol, '%}'),
-            (Text, '\n'),
-            (Number.Integer, '0'),
-            (Text, '\n'),
-            (Keyword, 'end'),
-            (Text, '\n')
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+def test_lib(lexer):
+    fragment = (
+        '@[Link("some")]\nlib LibSome\n'
+        '@[CallConvention("X86_StdCall")]\nfun foo="some.foo"(thing : Void*) : LibC::Int\n'
+        'end\n')
+    tokens = [
+        (Operator, '@['),
+        (Name.Decorator, 'Link'),
+        (Punctuation, '('),
+        (String.Double, '"'),
+        (String.Double, 'some'),
+        (String.Double, '"'),
+        (Punctuation, ')'),
+        (Operator, ']'),
+        (Text, '\n'),
+        (Keyword, 'lib'),
+        (Text, ' '),
+        (Name.Namespace, 'LibSome'),
+        (Text, '\n'),
+        (Operator, '@['),
+        (Name.Decorator, 'CallConvention'),
+        (Punctuation, '('),
+        (String.Double, '"'),
+        (String.Double, 'X86_StdCall'),
+        (String.Double, '"'),
+        (Punctuation, ')'),
+        (Operator, ']'),
+        (Text, '\n'),
+        (Keyword, 'fun'),
+        (Text, ' '),
+        (Name.Function, 'foo'),
+        (Operator, '='),
+        (String.Double, '"'),
+        (String.Double, 'some.foo'),
+        (String.Double, '"'),
+        (Punctuation, '('),
+        (Name, 'thing'),
+        (Text, ' '),
+        (Punctuation, ':'),
+        (Text, ' '),
+        (Name.Builtin, 'Void'),
+        (Operator, '*'),
+        (Punctuation, ')'),
+        (Text, ' '),
+        (Punctuation, ':'),
+        (Text, ' '),
+        (Name, 'LibC'),
+        (Operator, '::'),
+        (Name.Builtin, 'Int'),
+        (Text, '\n'),
+        (Keyword, 'end'),
+        (Text, '\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
 
-    def testLib(self):
-        fragment = (
-            '@[Link("some")]\nlib LibSome\n'
-            '@[CallConvention("X86_StdCall")]\nfun foo="some.foo"(thing : Void*) : LibC::Int\n'
-            'end\n')
-        tokens = [
-            (Operator, '@['),
-            (Name.Decorator, 'Link'),
-            (Punctuation, '('),
-            (String.Double, '"'),
-            (String.Double, 'some'),
-            (String.Double, '"'),
-            (Punctuation, ')'),
-            (Operator, ']'),
-            (Text, '\n'),
-            (Keyword, 'lib'),
-            (Text, ' '),
-            (Name.Namespace, 'LibSome'),
-            (Text, '\n'),
-            (Operator, '@['),
-            (Name.Decorator, 'CallConvention'),
-            (Punctuation, '('),
-            (String.Double, '"'),
-            (String.Double, 'X86_StdCall'),
-            (String.Double, '"'),
-            (Punctuation, ')'),
-            (Operator, ']'),
-            (Text, '\n'),
-            (Keyword, 'fun'),
-            (Text, ' '),
-            (Name.Function, 'foo'),
-            (Operator, '='),
-            (String.Double, '"'),
-            (String.Double, 'some.foo'),
-            (String.Double, '"'),
-            (Punctuation, '('),
-            (Name, 'thing'),
-            (Text, ' '),
-            (Punctuation, ':'),
-            (Text, ' '),
-            (Name.Builtin, 'Void'),
-            (Operator, '*'),
-            (Punctuation, ')'),
-            (Text, ' '),
-            (Punctuation, ':'),
-            (Text, ' '),
-            (Name, 'LibC'),
-            (Operator, '::'),
-            (Name.Builtin, 'Int'),
-            (Text, '\n'),
-            (Keyword, 'end'),
-            (Text, '\n')
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
 
-    def testEscapedBracestring(self):
-        fragment = 'str.gsub(%r{\\\\\\\\}, "/")\n'
-        tokens = [
-            (Name, 'str'),
-            (Operator, '.'),
-            (Name, 'gsub'),
-            (Punctuation, '('),
-            (String.Regex, '%r{'),
-            (String.Regex, '\\\\'),
-            (String.Regex, '\\\\'),
-            (String.Regex, '}'),
-            (Punctuation, ','),
-            (Text, ' '),
-            (String.Double, '"'),
-            (String.Double, '/'),
-            (String.Double, '"'),
-            (Punctuation, ')'),
-            (Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+def test_escaped_bracestring(lexer):
+    fragment = 'str.gsub(%r{\\\\\\\\}, "/")\n'
+    tokens = [
+        (Name, 'str'),
+        (Operator, '.'),
+        (Name, 'gsub'),
+        (Punctuation, '('),
+        (String.Regex, '%r{'),
+        (String.Regex, '\\\\'),
+        (String.Regex, '\\\\'),
+        (String.Regex, '}'),
+        (Punctuation, ','),
+        (Text, ' '),
+        (String.Double, '"'),
+        (String.Double, '/'),
+        (String.Double, '"'),
+        (Punctuation, ')'),
+        (Text, '\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
index 8a253916711b4464ed6d8d5a4a901896b6e3fec4..7259cf5b798cfbf5e3f691a8cf9c81e9a6685ecb 100644 (file)
 # -*- coding: utf-8 -*-
 """
     Csound lexer tests
-    ~~~~~~~~~~~~~~~~~~~~
+    ~~~~~~~~~~~~~~~~~~
 
     :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
 from textwrap import dedent
 
-from pygments.token import Comment, Error, Keyword, Name, Number, Operator, Punctuation, \
-    String, Text
+import pytest
+
+from pygments.token import Comment, Error, Keyword, Name, Number, Operator, \
+    Punctuation, String, Text
 from pygments.lexers import CsoundOrchestraLexer
 
 
-class CsoundOrchestraTest(unittest.TestCase):
+@pytest.fixture(scope='module')
+def lexer():
+    yield CsoundOrchestraLexer()
+
+
+def test_comments(lexer):
+    fragment = dedent('''\
+        /*
+         * comment
+         */
+        ; comment
+        // comment
+    ''')
+    tokens = [
+        (Comment.Multiline, u'/*\n * comment\n */'),
+        (Text, u'\n'),
+        (Comment.Single, u'; comment'),
+        (Text, u'\n'),
+        (Comment.Single, u'// comment'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_instrument_blocks(lexer):
+    fragment = dedent('''\
+        instr/**/1,/**/N_a_M_e_,/**/+Name/**///
+          iDuration = p3
+          outc:a(aSignal)
+        endin
+    ''')
+    tokens = [
+        (Keyword.Declaration, u'instr'),
+        (Comment.Multiline, u'/**/'),
+        (Name.Function, u'1'),
+        (Punctuation, u','),
+        (Comment.Multiline, u'/**/'),
+        (Name.Function, u'N_a_M_e_'),
+        (Punctuation, u','),
+        (Comment.Multiline, u'/**/'),
+        (Punctuation, u'+'),
+        (Name.Function, u'Name'),
+        (Comment.Multiline, u'/**/'),
+        (Comment.Single, u'//'),
+        (Text, u'\n'),
+        (Text, u'  '),
+        (Keyword.Type, u'i'),
+        (Name, u'Duration'),
+        (Text, u' '),
+        (Operator, u'='),
+        (Text, u' '),
+        (Name.Variable.Instance, u'p3'),
+        (Text, u'\n'),
+        (Text, u'  '),
+        (Name.Builtin, u'outc'),
+        (Punctuation, u':'),
+        (Keyword.Type, u'a'),
+        (Punctuation, u'('),
+        (Keyword.Type, u'a'),
+        (Name, u'Signal'),
+        (Punctuation, u')'),
+        (Text, u'\n'),
+        (Keyword.Declaration, u'endin'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
 
-    def setUp(self):
-        self.lexer = CsoundOrchestraLexer()
-        self.maxDiff = None
 
-    def testComments(self):
-        fragment = dedent('''\
-            /*
-             * comment
-             */
-            ; comment
-            // comment
-        ''')
+def test_user_defined_opcodes(lexer):
+    fragment = dedent('''\
+        opcode/**/aUDO,/**/i[],/**/aik//
+          aUDO
+        endop
+    ''')
+    tokens = [
+        (Keyword.Declaration, u'opcode'),
+        (Comment.Multiline, u'/**/'),
+        (Name.Function, u'aUDO'),
+        (Punctuation, u','),
+        (Comment.Multiline, u'/**/'),
+        (Keyword.Type, u'i[]'),
+        (Punctuation, u','),
+        (Comment.Multiline, u'/**/'),
+        (Keyword.Type, u'aik'),
+        (Comment.Single, u'//'),
+        (Text, u'\n'),
+        (Text, u'  '),
+        (Name.Function, u'aUDO'),
+        (Text, u'\n'),
+        (Keyword.Declaration, u'endop'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_numbers(lexer):
+    fragment = '123 0123456789'
+    tokens = [
+        (Number.Integer, u'123'),
+        (Text, u' '),
+        (Number.Integer, u'0123456789'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+    fragment = '0xabcdef0123456789 0XABCDEF'
+    tokens = [
+        (Keyword.Type, u'0x'),
+        (Number.Hex, u'abcdef0123456789'),
+        (Text, u' '),
+        (Keyword.Type, u'0X'),
+        (Number.Hex, u'ABCDEF'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+    fragments = ['1e2', '3e+4', '5e-6', '7E8', '9E+0', '1E-2', '3.', '4.56', '.789']
+    for fragment in fragments:
         tokens = [
-            (Comment.Multiline, u'/*\n * comment\n */'),
-            (Text, u'\n'),
-            (Comment.Single, u'; comment'),
-            (Text, u'\n'),
-            (Comment.Single, u'// comment'),
+            (Number.Float, fragment),
             (Text, u'\n')
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testInstrumentBlocks(self):
-        fragment = dedent('''\
-            instr/**/1,/**/N_a_M_e_,/**/+Name/**///
-              iDuration = p3
-              outc:a(aSignal)
-            endin
-        ''')
+        assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_quoted_strings(lexer):
+    fragment = '"characters$MACRO."'
+    tokens = [
+        (String, u'"'),
+        (String, u'characters'),
+        (Comment.Preproc, u'$MACRO.'),
+        (String, u'"'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_braced_strings(lexer):
+    fragment = dedent('''\
+        {{
+        characters$MACRO.
+        }}
+    ''')
+    tokens = [
+        (String, u'{{'),
+        (String, u'\ncharacters$MACRO.\n'),
+        (String, u'}}'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_escape_sequences(lexer):
+    for character in ['\\', 'a', 'b', 'n', 'r', 't', '"', '012', '345', '67']:
+        escapedCharacter = '\\' + character
+        fragment = '"' + escapedCharacter + '"'
         tokens = [
-            (Keyword.Declaration, u'instr'),
-            (Comment.Multiline, u'/**/'),
-            (Name.Function, u'1'),
-            (Punctuation, u','),
-            (Comment.Multiline, u'/**/'),
-            (Name.Function, u'N_a_M_e_'),
-            (Punctuation, u','),
-            (Comment.Multiline, u'/**/'),
-            (Punctuation, u'+'),
-            (Name.Function, u'Name'),
-            (Comment.Multiline, u'/**/'),
-            (Comment.Single, u'//'),
-            (Text, u'\n'),
-            (Text, u'  '),
-            (Keyword.Type, u'i'),
-            (Name, u'Duration'),
-            (Text, u' '),
-            (Operator, u'='),
-            (Text, u' '),
-            (Name.Variable.Instance, u'p3'),
-            (Text, u'\n'),
-            (Text, u'  '),
-            (Name.Builtin, u'outc'),
-            (Punctuation, u':'),
-            (Keyword.Type, u'a'),
-            (Punctuation, u'('),
-            (Keyword.Type, u'a'),
-            (Name, u'Signal'),
-            (Punctuation, u')'),
-            (Text, u'\n'),
-            (Keyword.Declaration, u'endin'),
+            (String, u'"'),
+            (String.Escape, escapedCharacter),
+            (String, u'"'),
             (Text, u'\n')
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testUserDefinedOpcodes(self):
-        fragment = dedent('''\
-            opcode/**/aUDO,/**/i[],/**/aik//
-              aUDO
-            endop
-        ''')
+        assert list(lexer.get_tokens(fragment)) == tokens
+        fragment = '{{' + escapedCharacter + '}}'
         tokens = [
-            (Keyword.Declaration, u'opcode'),
-            (Comment.Multiline, u'/**/'),
-            (Name.Function, u'aUDO'),
-            (Punctuation, u','),
-            (Comment.Multiline, u'/**/'),
-            (Keyword.Type, u'i[]'),
-            (Punctuation, u','),
-            (Comment.Multiline, u'/**/'),
-            (Keyword.Type, u'aik'),
-            (Comment.Single, u'//'),
-            (Text, u'\n'),
-            (Text, u'  '),
-            (Name.Function, u'aUDO'),
-            (Text, u'\n'),
-            (Keyword.Declaration, u'endop'),
+            (String, u'{{'),
+            (String.Escape, escapedCharacter),
+            (String, u'}}'),
             (Text, u'\n')
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+        assert list(lexer.get_tokens(fragment)) == tokens
 
-    def testNumbers(self):
-        fragment = '123 0123456789'
+
+def test_operators(lexer):
+    fragments = ['+', '-', '~', u'¬', '!', '*', '/', '^', '%', '<<', '>>', '<', '>',
+                 '<=', '>=', '==', '!=', '&', '#', '|', '&&', '||', '?', ':', '+=',
+                 '-=', '*=', '/=']
+    for fragment in fragments:
         tokens = [
-            (Number.Integer, u'123'),
-            (Text, u' '),
-            (Number.Integer, u'0123456789'),
+            (Operator, fragment),
             (Text, u'\n')
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-        fragment = '0xabcdef0123456789 0XABCDEF'
+        assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_global_value_identifiers(lexer):
+    for fragment in ['0dbfs', 'A4', 'kr', 'ksmps', 'nchnls', 'nchnls_i', 'sr']:
         tokens = [
-            (Keyword.Type, u'0x'),
-            (Number.Hex, u'abcdef0123456789'),
-            (Text, u' '),
-            (Keyword.Type, u'0X'),
-            (Number.Hex, u'ABCDEF'),
+            (Name.Variable.Global, fragment),
             (Text, u'\n')
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-        fragments = ['1e2', '3e+4', '5e-6', '7E8', '9E+0', '1E-2', '3.', '4.56', '.789']
-        for fragment in fragments:
-            tokens = [
-                (Number.Float, fragment),
-                (Text, u'\n')
-            ]
-            self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+        assert list(lexer.get_tokens(fragment)) == tokens
+
 
-    def testQuotedStrings(self):
-        fragment = '"characters$MACRO."'
+def test_keywords(lexer):
+    fragments = ['do', 'else', 'elseif', 'endif', 'enduntil', 'fi', 'if', 'ithen',
+                 'kthen', 'od', 'then', 'until', 'while']
+    for fragment in fragments:
         tokens = [
-            (String, u'"'),
-            (String, u'characters'),
-            (Comment.Preproc, u'$MACRO.'),
-            (String, u'"'),
+            (Keyword, fragment),
             (Text, u'\n')
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testBracedStrings(self):
-        fragment = dedent('''\
-            {{
-            characters$MACRO.
-            }}
-        ''')
+        assert list(lexer.get_tokens(fragment)) == tokens
+    for fragment in ['return', 'rireturn']:
         tokens = [
-            (String, u'{{'),
-            (String, u'\ncharacters$MACRO.\n'),
-            (String, u'}}'),
+            (Keyword.Pseudo, fragment),
             (Text, u'\n')
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+        assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_labels(lexer):
+    fragment = dedent('''\
+        aLabel:
+         label2:
+    ''')
+    tokens = [
+        (Name.Label, u'aLabel'),
+        (Punctuation, u':'),
+        (Text, u'\n'),
+        (Text, u' '),
+        (Name.Label, u'label2'),
+        (Punctuation, u':'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
 
-    def testEscapeSequences(self):
-        for character in ['\\', 'a', 'b', 'n', 'r', 't', '"', '012', '345', '67']:
-            escapedCharacter = '\\' + character
-            fragment = '"' + escapedCharacter + '"'
+
+def test_printks_and_prints_escape_sequences(lexer):
+    escapedCharacters = ['%!', '%%', '%n', '%N', '%r', '%R', '%t', '%T', '\\\\a',
+                         '\\\\A', '\\\\b', '\\\\B', '\\\\n', '\\\\N', '\\\\r',
+                         '\\\\R', '\\\\t', '\\\\T']
+    for opcode in ['printks', 'prints']:
+        for escapedCharacter in escapedCharacters:
+            fragment = opcode + ' "' + escapedCharacter + '"'
             tokens = [
+                (Name.Builtin, opcode),
+                (Text, u' '),
                 (String, u'"'),
                 (String.Escape, escapedCharacter),
                 (String, u'"'),
                 (Text, u'\n')
             ]
-            self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-            fragment = '{{' + escapedCharacter + '}}'
-            tokens = [
-                (String, u'{{'),
-                (String.Escape, escapedCharacter),
-                (String, u'}}'),
-                (Text, u'\n')
-            ]
-            self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+            assert list(lexer.get_tokens(fragment)) == tokens
 
-    def testOperators(self):
-        fragments = ['+', '-', '~', u'¬', '!', '*', '/', '^', '%', '<<', '>>', '<', '>',
-                     '<=', '>=', '==', '!=', '&', '#', '|', '&&', '||', '?', ':', '+=',
-                     '-=', '*=', '/=']
-        for fragment in fragments:
-            tokens = [
-                (Operator, fragment),
-                (Text, u'\n')
-            ]
-            self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
 
-    def testGlobalValueIdentifiers(self):
-        for fragment in ['0dbfs', 'A4', 'kr', 'ksmps', 'nchnls', 'nchnls_i', 'sr']:
-            tokens = [
-                (Name.Variable.Global, fragment),
-                (Text, u'\n')
-            ]
-            self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testKeywords(self):
-        fragments = ['do', 'else', 'elseif', 'endif', 'enduntil', 'fi', 'if', 'ithen',
-                     'kthen', 'od', 'then', 'until', 'while']
-        for fragment in fragments:
-            tokens = [
-                (Keyword, fragment),
-                (Text, u'\n')
-            ]
-            self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-        for fragment in ['return', 'rireturn']:
-            tokens = [
-                (Keyword.Pseudo, fragment),
-                (Text, u'\n')
-            ]
-            self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testLabels(self):
-        fragment = dedent('''\
-            aLabel:
-             label2:
-        ''')
+def test_goto_statements(lexer):
+    for keyword in ['goto', 'igoto', 'kgoto']:
+        fragment = keyword + ' aLabel'
         tokens = [
-            (Name.Label, u'aLabel'),
-            (Punctuation, u':'),
-            (Text, u'\n'),
+            (Keyword, keyword),
             (Text, u' '),
-            (Name.Label, u'label2'),
-            (Punctuation, u':'),
+            (Name.Label, u'aLabel'),
             (Text, u'\n')
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testPrintksAndPrintsEscapeSequences(self):
-        escapedCharacters = ['%!', '%%', '%n', '%N', '%r', '%R', '%t', '%T', '\\\\a',
-                             '\\\\A', '\\\\b', '\\\\B', '\\\\n', '\\\\N', '\\\\r',
-                             '\\\\R', '\\\\t', '\\\\T']
-        for opcode in ['printks', 'prints']:
-            for escapedCharacter in escapedCharacters:
-                fragment = opcode + ' "' + escapedCharacter + '"'
-                tokens = [
-                    (Name.Builtin, opcode),
-                    (Text, u' '),
-                    (String, u'"'),
-                    (String.Escape, escapedCharacter),
-                    (String, u'"'),
-                    (Text, u'\n')
-                ]
-                self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testGotoStatements(self):
-        for keyword in ['goto', 'igoto', 'kgoto']:
-            fragment = keyword + ' aLabel'
-            tokens = [
-                (Keyword, keyword),
-                (Text, u' '),
-                (Name.Label, u'aLabel'),
-                (Text, u'\n')
-            ]
-            self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-        for opcode in ['reinit', 'rigoto', 'tigoto']:
-            fragment = opcode + ' aLabel'
-            tokens = [
-                (Keyword.Pseudo, opcode),
-                (Text, u' '),
-                (Name.Label, u'aLabel'),
-                (Text, u'\n')
-            ]
-            self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-        for opcode in ['cggoto', 'cigoto', 'cingoto', 'ckgoto', 'cngoto', 'cnkgoto']:
-            fragment = opcode + ' 1==0, aLabel'
-            tokens = [
-                (Keyword.Pseudo, opcode),
-                (Text, u' '),
-                (Number.Integer, u'1'),
-                (Operator, u'=='),
-                (Number.Integer, u'0'),
-                (Punctuation, u','),
-                (Text, u' '),
-                (Name.Label, u'aLabel'),
-                (Text, u'\n')
-            ]
-            self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-        fragment = 'timout 0, 0, aLabel'
+        assert list(lexer.get_tokens(fragment)) == tokens
+    for opcode in ['reinit', 'rigoto', 'tigoto']:
+        fragment = opcode + ' aLabel'
         tokens = [
-            (Keyword.Pseudo, 'timout'),
+            (Keyword.Pseudo, opcode),
             (Text, u' '),
-            (Number.Integer, u'0'),
-            (Punctuation, u','),
+            (Name.Label, u'aLabel'),
+            (Text, u'\n')
+        ]
+        assert list(lexer.get_tokens(fragment)) == tokens
+    for opcode in ['cggoto', 'cigoto', 'cingoto', 'ckgoto', 'cngoto', 'cnkgoto']:
+        fragment = opcode + ' 1==0, aLabel'
+        tokens = [
+            (Keyword.Pseudo, opcode),
             (Text, u' '),
+            (Number.Integer, u'1'),
+            (Operator, u'=='),
             (Number.Integer, u'0'),
             (Punctuation, u','),
             (Text, u' '),
             (Name.Label, u'aLabel'),
             (Text, u'\n')
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-        for opcode in ['loop_ge', 'loop_gt', 'loop_le', 'loop_lt']:
-            fragment = opcode + ' 0, 0, 0, aLabel'
-            tokens = [
-                (Keyword.Pseudo, opcode),
-                (Text, u' '),
-                (Number.Integer, u'0'),
-                (Punctuation, u','),
-                (Text, u' '),
-                (Number.Integer, u'0'),
-                (Punctuation, u','),
-                (Text, u' '),
-                (Number.Integer, u'0'),
-                (Punctuation, u','),
-                (Text, u' '),
-                (Name.Label, u'aLabel'),
-                (Text, u'\n')
-            ]
-            self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testIncludeDirectives(self):
-        for character in ['"', '|']:
-            fragment = '#include/**/' + character + 'file.udo' + character
-            tokens = [
-                (Comment.Preproc, u'#include'),
-                (Comment.Multiline, u'/**/'),
-                (String, character + u'file.udo' + character),
-                (Text, u'\n')
-            ]
-            self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testObjectLikeMacroDefinitions(self):
-        fragment = dedent('''\
-            # \tdefine MACRO#macro_body#
-            #define/**/
-            MACRO/**/
-            #\\#macro
-            body\\##
-        ''')
+        assert list(lexer.get_tokens(fragment)) == tokens
+    fragment = 'timout 0, 0, aLabel'
+    tokens = [
+        (Keyword.Pseudo, 'timout'),
+        (Text, u' '),
+        (Number.Integer, u'0'),
+        (Punctuation, u','),
+        (Text, u' '),
+        (Number.Integer, u'0'),
+        (Punctuation, u','),
+        (Text, u' '),
+        (Name.Label, u'aLabel'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+    for opcode in ['loop_ge', 'loop_gt', 'loop_le', 'loop_lt']:
+        fragment = opcode + ' 0, 0, 0, aLabel'
         tokens = [
-            (Comment.Preproc, u'# \tdefine'),
+            (Keyword.Pseudo, opcode),
             (Text, u' '),
-            (Comment.Preproc, u'MACRO'),
-            (Punctuation, u'#'),
-            (Comment.Preproc, u'macro_body'),
-            (Punctuation, u'#'),
-            (Text, u'\n'),
-            (Comment.Preproc, u'#define'),
-            (Comment.Multiline, u'/**/'),
-            (Text, u'\n'),
-            (Comment.Preproc, u'MACRO'),
-            (Comment.Multiline, u'/**/'),
-            (Text, u'\n'),
-            (Punctuation, u'#'),
-            (Comment.Preproc, u'\\#'),
-            (Comment.Preproc, u'macro\nbody'),
-            (Comment.Preproc, u'\\#'),
-            (Punctuation, u'#'),
-            (Text, u'\n')
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testFunctionLikeMacroDefinitions(self):
-        fragment = dedent('''\
-            #define MACRO(ARG1#ARG2) #macro_body#
-            #define/**/
-            MACRO(ARG1'ARG2' ARG3)/**/
-            #\\#macro
-            body\\##
-        ''')
-        tokens = [
-            (Comment.Preproc, u'#define'),
+            (Number.Integer, u'0'),
+            (Punctuation, u','),
             (Text, u' '),
-            (Comment.Preproc, u'MACRO'),
-            (Punctuation, u'('),
-            (Comment.Preproc, u'ARG1'),
-            (Punctuation, u'#'),
-            (Comment.Preproc, u'ARG2'),
-            (Punctuation, u')'),
+            (Number.Integer, u'0'),
+            (Punctuation, u','),
             (Text, u' '),
-            (Punctuation, u'#'),
-            (Comment.Preproc, u'macro_body'),
-            (Punctuation, u'#'),
-            (Text, u'\n'),
-            (Comment.Preproc, u'#define'),
-            (Comment.Multiline, u'/**/'),
-            (Text, u'\n'),
-            (Comment.Preproc, u'MACRO'),
-            (Punctuation, u'('),
-            (Comment.Preproc, u'ARG1'),
-            (Punctuation, u"'"),
-            (Comment.Preproc, u'ARG2'),
-            (Punctuation, u"'"),
+            (Number.Integer, u'0'),
+            (Punctuation, u','),
             (Text, u' '),
-            (Comment.Preproc, u'ARG3'),
-            (Punctuation, u')'),
-            (Comment.Multiline, u'/**/'),
-            (Text, u'\n'),
-            (Punctuation, u'#'),
-            (Comment.Preproc, u'\\#'),
-            (Comment.Preproc, u'macro\nbody'),
-            (Comment.Preproc, u'\\#'),
-            (Punctuation, u'#'),
+            (Name.Label, u'aLabel'),
             (Text, u'\n')
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+        assert list(lexer.get_tokens(fragment)) == tokens
 
-    def testMacroPreprocessorDirectives(self):
-        for directive in ['#ifdef', '#ifndef', '#undef']:
-            fragment = directive + ' MACRO'
-            tokens = [
-                (Comment.Preproc, directive),
-                (Text, u' '),
-                (Comment.Preproc, u'MACRO'),
-                (Text, u'\n')
-            ]
-            self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testOtherPreprocessorDirectives(self):
-        fragment = dedent('''\
-            #else
-            #end
-            #endif
-            ###
-            @ \t12345
-            @@ \t67890
-        ''')
+
+def test_include_directives(lexer):
+    for character in ['"', '|']:
+        fragment = '#include/**/' + character + 'file.udo' + character
         tokens = [
-            (Comment.Preproc, u'#else'),
-            (Text, u'\n'),
-            (Comment.Preproc, u'#end'),
-            (Text, u'\n'),
-            (Comment.Preproc, u'#endif'),
-            (Text, u'\n'),
-            (Comment.Preproc, u'###'),
-            (Text, u'\n'),
-            (Comment.Preproc, u'@ \t12345'),
-            (Text, u'\n'),
-            (Comment.Preproc, u'@@ \t67890'),
+            (Comment.Preproc, u'#include'),
+            (Comment.Multiline, u'/**/'),
+            (String, character + u'file.udo' + character),
             (Text, u'\n')
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+        assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_includestr_directives(lexer):
+    fragment = '#includestr/**/"$MACRO..udo"'
+    tokens = [
+        (Comment.Preproc, u'#includestr'),
+        (Comment.Multiline, u'/**/'),
+        (String, u'"'),
+        (Comment.Preproc, u'$MACRO.'),
+        (String, u'.udo'),
+        (String, u'"'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
 
-    def testFunctionLikeMacros(self):
-        fragment = "$MACRO.(((x#y\\)))' \"(#'x)\\)x\\))\"# {{x\\))x)\\)(#'}});"
+def test_object_like_macro_definitions(lexer):
+    fragment = dedent('''\
+        # \tdefine MACRO#macro_body#
+        #define/**/
+        MACRO/**/
+        #\\#macro
+        body\\##
+    ''')
+    tokens = [
+        (Comment.Preproc, u'# \tdefine'),
+        (Text, u' '),
+        (Comment.Preproc, u'MACRO'),
+        (Punctuation, u'#'),
+        (Comment.Preproc, u'macro_body'),
+        (Punctuation, u'#'),
+        (Text, u'\n'),
+        (Comment.Preproc, u'#define'),
+        (Comment.Multiline, u'/**/'),
+        (Text, u'\n'),
+        (Comment.Preproc, u'MACRO'),
+        (Comment.Multiline, u'/**/'),
+        (Text, u'\n'),
+        (Punctuation, u'#'),
+        (Comment.Preproc, u'\\#'),
+        (Comment.Preproc, u'macro\nbody'),
+        (Comment.Preproc, u'\\#'),
+        (Punctuation, u'#'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_function_like_macro_definitions(lexer):
+    fragment = dedent('''\
+        #define MACRO(ARG1#ARG2) #macro_body#
+        #define/**/
+        MACRO(ARG1'ARG2' ARG3)/**/
+        #\\#macro
+        body\\##
+    ''')
+    tokens = [
+        (Comment.Preproc, u'#define'),
+        (Text, u' '),
+        (Comment.Preproc, u'MACRO'),
+        (Punctuation, u'('),
+        (Comment.Preproc, u'ARG1'),
+        (Punctuation, u'#'),
+        (Comment.Preproc, u'ARG2'),
+        (Punctuation, u')'),
+        (Text, u' '),
+        (Punctuation, u'#'),
+        (Comment.Preproc, u'macro_body'),
+        (Punctuation, u'#'),
+        (Text, u'\n'),
+        (Comment.Preproc, u'#define'),
+        (Comment.Multiline, u'/**/'),
+        (Text, u'\n'),
+        (Comment.Preproc, u'MACRO'),
+        (Punctuation, u'('),
+        (Comment.Preproc, u'ARG1'),
+        (Punctuation, u"'"),
+        (Comment.Preproc, u'ARG2'),
+        (Punctuation, u"'"),
+        (Text, u' '),
+        (Comment.Preproc, u'ARG3'),
+        (Punctuation, u')'),
+        (Comment.Multiline, u'/**/'),
+        (Text, u'\n'),
+        (Punctuation, u'#'),
+        (Comment.Preproc, u'\\#'),
+        (Comment.Preproc, u'macro\nbody'),
+        (Comment.Preproc, u'\\#'),
+        (Punctuation, u'#'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_macro_preprocessor_directives(lexer):
+    for directive in ['#ifdef', '#ifndef', '#undef']:
+        fragment = directive + ' MACRO'
         tokens = [
-            (Comment.Preproc, u'$MACRO.'),
-            (Punctuation, u'('),
-            (Comment.Preproc, u'('),
-            (Comment.Preproc, u'('),
-            (Comment.Preproc, u'x#y\\)'),
-            (Comment.Preproc, u')'),
-            (Comment.Preproc, u')'),
-            (Punctuation, u"'"),
-            (Comment.Preproc, u' '),
-            (String, u'"'),
-            (Error, u'('),
-            (Error, u'#'),
-            (Error, u"'"),
-            (String, u'x'),
-            (Error, u')'),
-            (Comment.Preproc, u'\\)'),
-            (String, u'x'),
-            (Comment.Preproc, u'\\)'),
-            (Error, u')'),
-            (String, u'"'),
-            (Punctuation, u'#'),
-            (Comment.Preproc, u' '),
-            (String, u'{{'),
-            (String, u'x'),
-            (Comment.Preproc, u'\\)'),
-            (Error, u')'),
-            (String, u'x'),
-            (Error, u')'),
-            (Comment.Preproc, u'\\)'),
-            (Error, u'('),
-            (Error, u'#'),
-            (Error, u"'"),
-            (String, u'}}'),
-            (Punctuation, u')'),
-            (Comment.Single, u';'),
+            (Comment.Preproc, directive),
+            (Text, u' '),
+            (Comment.Preproc, u'MACRO'),
             (Text, u'\n')
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+        assert list(lexer.get_tokens(fragment)) == tokens
 
-    def testName(self):
-        fragment = 'kG:V'
-        tokens = [
-            (Keyword.Type, 'k'),
-            (Name, 'G'),
-            (Punctuation, ':'),
-            (Name, 'V'),
-            (Text, '\n')
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+def test_other_preprocessor_directives(lexer):
+    fragment = dedent('''\
+        #else
+        #end
+        #endif
+        ###
+        @ \t12345
+        @@ \t67890
+    ''')
+    tokens = [
+        (Comment.Preproc, u'#else'),
+        (Text, u'\n'),
+        (Comment.Preproc, u'#end'),
+        (Text, u'\n'),
+        (Comment.Preproc, u'#endif'),
+        (Text, u'\n'),
+        (Comment.Preproc, u'###'),
+        (Text, u'\n'),
+        (Comment.Preproc, u'@ \t12345'),
+        (Text, u'\n'),
+        (Comment.Preproc, u'@@ \t67890'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_function_like_macros(lexer):
+    fragment = "$MACRO.(((x#y\\)))' \"(#'x)\\)x\\))\"# {{x\\))x)\\)(#'}});"
+    tokens = [
+        (Comment.Preproc, u'$MACRO.'),
+        (Punctuation, u'('),
+        (Comment.Preproc, u'('),
+        (Comment.Preproc, u'('),
+        (Comment.Preproc, u'x#y\\)'),
+        (Comment.Preproc, u')'),
+        (Comment.Preproc, u')'),
+        (Punctuation, u"'"),
+        (Comment.Preproc, u' '),
+        (String, u'"'),
+        (Error, u'('),
+        (Error, u'#'),
+        (Error, u"'"),
+        (String, u'x'),
+        (Error, u')'),
+        (Comment.Preproc, u'\\)'),
+        (String, u'x'),
+        (Comment.Preproc, u'\\)'),
+        (Error, u')'),
+        (String, u'"'),
+        (Punctuation, u'#'),
+        (Comment.Preproc, u' '),
+        (String, u'{{'),
+        (String, u'x'),
+        (Comment.Preproc, u'\\)'),
+        (Error, u')'),
+        (String, u'x'),
+        (Error, u')'),
+        (Comment.Preproc, u'\\)'),
+        (Error, u'('),
+        (Error, u'#'),
+        (Error, u"'"),
+        (String, u'}}'),
+        (Punctuation, u')'),
+        (Comment.Single, u';'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_name(lexer):
+    fragment = 'kG:V'
+    tokens = [
+        (Keyword.Type, 'k'),
+        (Name, 'G'),
+        (Punctuation, ':'),
+        (Name, 'V'),
+        (Text, '\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
index be371419ab2d726ed45e923ac4bee0f48f429867..23f1d4a0d64670325d56a49da769bbe8f1d26c97 100644 (file)
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
-from pygments.lexers import JsonLexer, JsonBareObjectLexer
+from pygments.lexers import JsonLexer, JsonBareObjectLexer, YamlLexer
 from pygments.token import Token
 
 
-class JsonTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = JsonLexer()
-
-    def testBasic(self):
-        fragment = u'{"foo": "bar", "foo2": [1, 2, 3]}\n'
-        tokens = [
-            (Token.Punctuation, u'{'),
-            (Token.Name.Tag, u'"foo"'),
-            (Token.Punctuation, u':'),
-            (Token.Text, u' '),
-            (Token.Literal.String.Double, u'"bar"'),
-            (Token.Punctuation, u','),
-            (Token.Text, u' '),
-            (Token.Name.Tag, u'"foo2"'),
-            (Token.Punctuation, u':'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'['),
-            (Token.Literal.Number.Integer, u'1'),
-            (Token.Punctuation, u','),
-            (Token.Text, u' '),
-            (Token.Literal.Number.Integer, u'2'),
-            (Token.Punctuation, u','),
-            (Token.Text, u' '),
-            (Token.Literal.Number.Integer, u'3'),
-            (Token.Punctuation, u']'),
-            (Token.Punctuation, u'}'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-class JsonBareObjectTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = JsonBareObjectLexer()
-
-    def testBasic(self):
-        # This is the same as testBasic for JsonLexer above, except the
-        # enclosing curly braces are removed.
-        fragment = u'"foo": "bar", "foo2": [1, 2, 3]\n'
-        tokens = [
-            (Token.Name.Tag, u'"foo"'),
-            (Token.Punctuation, u':'),
-            (Token.Text, u' '),
-            (Token.Literal.String.Double, u'"bar"'),
-            (Token.Punctuation, u','),
-            (Token.Text, u' '),
-            (Token.Name.Tag, u'"foo2"'),
-            (Token.Punctuation, u':'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'['),
-            (Token.Literal.Number.Integer, u'1'),
-            (Token.Punctuation, u','),
-            (Token.Text, u' '),
-            (Token.Literal.Number.Integer, u'2'),
-            (Token.Punctuation, u','),
-            (Token.Text, u' '),
-            (Token.Literal.Number.Integer, u'3'),
-            (Token.Punctuation, u']'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testClosingCurly(self):
-        # This can be an Error token, but should not be a can't-pop-from-stack
-        # exception.
-        fragment = '}"a"\n'
-        tokens = [
-            (Token.Error, '}'),
-            (Token.Name.Tag, '"a"'),
-            (Token.Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testClosingCurlyInValue(self):
-        fragment = '"": ""}\n'
-        tokens = [
-            (Token.Name.Tag, '""'),
-            (Token.Punctuation, ':'),
-            (Token.Text, ' '),
-            (Token.Literal.String.Double, '""'),
-            (Token.Error, '}'),
-            (Token.Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+@pytest.fixture(scope='module')
+def lexer_json():
+    yield JsonLexer()
 
+
+@pytest.fixture(scope='module')
+def lexer_bare():
+    yield JsonBareObjectLexer()
+
+
+@pytest.fixture(scope='module')
+def lexer_yaml():
+    yield YamlLexer()
+
+
+def test_basic_json(lexer_json):
+    fragment = u'{"foo": "bar", "foo2": [1, 2, 3]}\n'
+    tokens = [
+        (Token.Punctuation, u'{'),
+        (Token.Name.Tag, u'"foo"'),
+        (Token.Punctuation, u':'),
+        (Token.Text, u' '),
+        (Token.Literal.String.Double, u'"bar"'),
+        (Token.Punctuation, u','),
+        (Token.Text, u' '),
+        (Token.Name.Tag, u'"foo2"'),
+        (Token.Punctuation, u':'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'['),
+        (Token.Literal.Number.Integer, u'1'),
+        (Token.Punctuation, u','),
+        (Token.Text, u' '),
+        (Token.Literal.Number.Integer, u'2'),
+        (Token.Punctuation, u','),
+        (Token.Text, u' '),
+        (Token.Literal.Number.Integer, u'3'),
+        (Token.Punctuation, u']'),
+        (Token.Punctuation, u'}'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer_json.get_tokens(fragment)) == tokens
+
+
+def test_basic_bare(lexer_bare):
+    # This is the same as test_basic_json for JsonLexer above, except the
+    # enclosing curly braces are removed.
+    fragment = u'"foo": "bar", "foo2": [1, 2, 3]\n'
+    tokens = [
+        (Token.Name.Tag, u'"foo"'),
+        (Token.Punctuation, u':'),
+        (Token.Text, u' '),
+        (Token.Literal.String.Double, u'"bar"'),
+        (Token.Punctuation, u','),
+        (Token.Text, u' '),
+        (Token.Name.Tag, u'"foo2"'),
+        (Token.Punctuation, u':'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'['),
+        (Token.Literal.Number.Integer, u'1'),
+        (Token.Punctuation, u','),
+        (Token.Text, u' '),
+        (Token.Literal.Number.Integer, u'2'),
+        (Token.Punctuation, u','),
+        (Token.Text, u' '),
+        (Token.Literal.Number.Integer, u'3'),
+        (Token.Punctuation, u']'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer_bare.get_tokens(fragment)) == tokens
+
+
+def test_closing_curly(lexer_bare):
+    # This can be an Error token, but should not be a can't-pop-from-stack
+    # exception.
+    fragment = '}"a"\n'
+    tokens = [
+        (Token.Error, '}'),
+        (Token.Name.Tag, '"a"'),
+        (Token.Text, '\n'),
+    ]
+    assert list(lexer_bare.get_tokens(fragment)) == tokens
+
+
+def test_closing_curly_in_value(lexer_bare):
+    fragment = '"": ""}\n'
+    tokens = [
+        (Token.Name.Tag, '""'),
+        (Token.Punctuation, ':'),
+        (Token.Text, ' '),
+        (Token.Literal.String.Double, '""'),
+        (Token.Error, '}'),
+        (Token.Text, '\n'),
+    ]
+    assert list(lexer_bare.get_tokens(fragment)) == tokens
+
+
+def test_yaml(lexer_yaml):
+    # Bug #1528: This previously parsed 'token # innocent' as a tag
+    fragment = u'here: token # innocent: comment\n'
+    tokens = [
+        (Token.Name.Tag, u'here'),
+        (Token.Punctuation, u':'),
+        (Token.Text, u' '),
+        (Token.Literal.Scalar.Plain, u'token'),
+        (Token.Text, u' '),
+        (Token.Comment.Single, u'# innocent: comment'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer_yaml.get_tokens(fragment)) == tokens
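
A note on the fixture pattern used throughout these converted tests: a @pytest.fixture(scope='module') function runs once per test module, and every test that names it as a parameter receives the yielded object, so a single lexer instance is shared instead of being rebuilt in a setUp method. Below is a minimal, self-contained sketch of the same idiom; the WordLexer class is made up purely for illustration and is not part of Pygments.

import pytest

from pygments.lexer import RegexLexer
from pygments.token import Name, Text


class WordLexer(RegexLexer):
    """Toy lexer: words become Name tokens, whitespace becomes Text."""
    tokens = {
        'root': [
            (r'\w+', Name),
            (r'\s+', Text),
        ],
    }


@pytest.fixture(scope='module')
def lexer():
    # Built once for the whole module; teardown code could follow the yield.
    yield WordLexer()


def test_single_word(lexer):
    # get_tokens() appends a trailing newline token, as in the tests above.
    assert list(lexer.get_tokens(u'foo')) == [(Name, u'foo'), (Text, u'\n')]
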
index e208403b556172874258176a836dc40799915478..491c1e0da439aacbb22973a1b51775a2422b04ee 100644 (file)
@@ -14,12 +14,15 @@ import pprint
 import difflib
 import pickle
 
+import pytest
+
 from pygments.lexers import get_lexer_for_filename, get_lexer_by_name
 from pygments.token import Error
 from pygments.util import ClassNotFound
 
-import support
-
+# You can set this to True to store the exact token type output of example
+# files in tests/examplefiles/output; on subsequent runs the test will check
+# that the stored output stays the same.  In the repository, this should stay False.
 STORE_OUTPUT = False
 
 STATS = {}
@@ -32,9 +35,11 @@ TESTDIR = os.path.dirname(__file__)
 BAD_FILES_FOR_JYTHON = ('Object.st', 'all.nit', 'genclass.clj',
                         'ragel-cpp_rlscan')
 
-def test_example_files():
-    global STATS
-    STATS = {}
+
+def get_example_files():
+    # TODO: move stats to a fixture
+    # global STATS
+    # STATS = {}
     outdir = os.path.join(TESTDIR, 'examplefiles', 'output')
     if STORE_OUTPUT and not os.path.isdir(outdir):
         os.makedirs(outdir)
@@ -51,48 +56,49 @@ def test_example_files():
             continue
 
         print(absfn)
-        with open(absfn, 'rb') as f:
-            code = f.read()
+        yield fn
+
+    # N = 7
+    # stats = list(STATS.items())
+    # stats.sort(key=lambda x: x[1][1])
+    # print('\nExample files that took longest absolute time:')
+    # for fn, t in stats[-N:]:
+    #     print('%-30s  %6d chars  %8.2f ms  %7.3f ms/char' % ((fn,) + t))
+    # print()
+    # stats.sort(key=lambda x: x[1][2])
+    # print('\nExample files that took longest relative time:')
+    # for fn, t in stats[-N:]:
+    #     print('%-30s  %6d chars  %8.2f ms  %7.3f ms/char' % ((fn,) + t))
+
+
+@pytest.mark.parametrize('filename', get_example_files())
+def test_examplefile(filename):
+    if os.name == 'java' and filename in BAD_FILES_FOR_JYTHON:
+        pytest.skip('%s is a known bad file on Jython' % filename)
+
+    absfn = os.path.join(TESTDIR, 'examplefiles', filename)
+    with open(absfn, 'rb') as f:
+        text = f.read()
+    try:
+        utext = text.decode('utf-8')
+    except UnicodeError:
+        utext = text.decode('latin1')
+
+    lx = None
+    if '_' in filename:
         try:
-            code = code.decode('utf-8')
-        except UnicodeError:
-            code = code.decode('latin1')
-
-        lx = None
-        if '_' in fn:
-            try:
-                lx = get_lexer_by_name(fn.split('_')[0])
-            except ClassNotFound:
-                pass
-        if lx is None:
-            try:
-                lx = get_lexer_for_filename(absfn, code=code)
-            except ClassNotFound:
-                raise AssertionError('file %r has no registered extension, '
-                                     'nor is of the form <lexer>_filename '
-                                     'for overriding, thus no lexer found.'
-                                     % fn)
-        yield check_lexer, lx, fn
-
-    N = 7
-    stats = list(STATS.items())
-    stats.sort(key=lambda x: x[1][1])
-    print('\nExample files that took longest absolute time:')
-    for fn, t in stats[-N:]:
-        print('%-30s  %6d chars  %8.2f ms  %7.3f ms/char' % ((fn,) + t))
-    print()
-    stats.sort(key=lambda x: x[1][2])
-    print('\nExample files that took longest relative time:')
-    for fn, t in stats[-N:]:
-        print('%-30s  %6d chars  %8.2f ms  %7.3f ms/char' % ((fn,) + t))
-
-
-def check_lexer(lx, fn):
-    if os.name == 'java' and fn in BAD_FILES_FOR_JYTHON:
-        raise support.SkipTest('%s is a known bad file on Jython' % fn)
-    absfn = os.path.join(TESTDIR, 'examplefiles', fn)
-    with open(absfn, 'rb') as fp:
-        text = fp.read()
+            lx = get_lexer_by_name(filename.split('_')[0])
+        except ClassNotFound:
+            pass
+    if lx is None:
+        try:
+            lx = get_lexer_for_filename(absfn, code=utext)
+        except ClassNotFound:
+            raise AssertionError('file %r has no registered extension, '
+                                 'nor is of the form <lexer>_filename '
+                                 'for overriding, thus no lexer found.'
+                                 % filename)
+
     text = text.replace(b'\r\n', b'\n')
     text = text.strip(b'\n') + b'\n'
     try:
@@ -122,7 +128,7 @@ def check_lexer(lx, fn):
     # check output against previous run if enabled
     if STORE_OUTPUT:
         # no previous output -- store it
-        outfn = os.path.join(TESTDIR, 'examplefiles', 'output', fn)
+        outfn = os.path.join(TESTDIR, 'examplefiles', 'output', filename)
         if not os.path.isfile(outfn):
             with open(outfn, 'wb') as fp:
                 pickle.dump(tokens, fp)
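
The @pytest.mark.parametrize('filename', get_example_files()) decorator above is what replaces the old generator-style "yield check_lexer, lx, fn" tests: the helper is called once at collection time and each returned filename becomes an independent test item with its own pass/fail/skip status. Here is a stripped-down sketch of the same mechanism, independent of the Pygments suite; the directory layout and test body are assumptions for illustration only.

import os

import pytest

EXAMPLE_DIR = os.path.join(os.path.dirname(__file__), 'examplefiles')


def get_example_files():
    # Evaluated once, at collection time; each name turns into one test case.
    if not os.path.isdir(EXAMPLE_DIR):
        return []
    return sorted(fn for fn in os.listdir(EXAMPLE_DIR) if not fn.startswith('.'))


@pytest.mark.parametrize('filename', get_example_files())
def test_example_file_is_readable(filename):
    with open(os.path.join(EXAMPLE_DIR, filename), 'rb') as f:
        assert isinstance(f.read(), bytes)
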
index 15cc13b199e71009527da96214ac801d5d636439..8047a30a787e719d206df41740ea8deea3d83ed7 100644 (file)
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
 from pygments.token import Operator, Number, Text, Token
 from pygments.lexers import EzhilLexer
 
 
-class EzhilTest(unittest.TestCase):
+@pytest.fixture(scope='module')
+def lexer():
+    yield EzhilLexer()
 
-    def setUp(self):
-        self.lexer = EzhilLexer()
-        self.maxDiff = None
-    
-    def testSum(self):
-        fragment = u'1+3\n'
-        tokens = [
-            (Number.Integer, u'1'),
-            (Operator, u'+'),
-            (Number.Integer, u'3'),
-            (Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-        
-    def testGCDExpr(self):
-        fragment = u'1^3+(5-5)*gcd(a,b)\n'
-        tokens = [
-            (Token.Number.Integer,u'1'),
-            (Token.Operator,u'^'),
-            (Token.Literal.Number.Integer, u'3'),
-            (Token.Operator, u'+'),
-            (Token.Punctuation, u'('),
-            (Token.Literal.Number.Integer, u'5'),
-            (Token.Operator, u'-'),
-            (Token.Literal.Number.Integer, u'5'),
-            (Token.Punctuation, u')'),
-            (Token.Operator, u'*'),
-            (Token.Name, u'gcd'),
-            (Token.Punctuation, u'('),
-            (Token.Name, u'a'),
-            (Token.Operator, u','),
-            (Token.Name, u'b'),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n')
-            ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
 
-    def testIfStatement(self):
-        fragment = u"""@( 0 > 3 ) ஆனால்
-       பதிப்பி "wont print"      
+def test_sum(lexer):
+    fragment = u'1+3\n'
+    tokens = [
+        (Number.Integer, u'1'),
+        (Operator, u'+'),
+        (Number.Integer, u'3'),
+        (Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_gcd_expr(lexer):
+    fragment = u'1^3+(5-5)*gcd(a,b)\n'
+    tokens = [
+        (Token.Number.Integer, u'1'),
+        (Token.Operator, u'^'),
+        (Token.Literal.Number.Integer, u'3'),
+        (Token.Operator, u'+'),
+        (Token.Punctuation, u'('),
+        (Token.Literal.Number.Integer, u'5'),
+        (Token.Operator, u'-'),
+        (Token.Literal.Number.Integer, u'5'),
+        (Token.Punctuation, u')'),
+        (Token.Operator, u'*'),
+        (Token.Name, u'gcd'),
+        (Token.Punctuation, u'('),
+        (Token.Name, u'a'),
+        (Token.Operator, u','),
+        (Token.Name, u'b'),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_if_statement(lexer):
+    fragment = u"""@( 0 > 3 ) ஆனால்
+    பதிப்பி "wont print"
 முடி"""
-        tokens = [          
-            (Token.Operator, u'@'),
-            (Token.Punctuation, u'('),
-            (Token.Text, u' '),
-            (Token.Literal.Number.Integer,u'0'),
-            (Token.Text, u' '),
-            (Token.Operator,u'>'),
-            (Token.Text, u' '),
-            (Token.Literal.Number.Integer, u'3'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u')'),
-            (Token.Text, u' '),
-            (Token.Keyword, u'ஆனால்'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'\t'),
-            (Token.Keyword, u'பதிப்பி'),
-            (Token.Text, u' '),
-            (Token.Literal.String, u'"wont print"'),
-            (Token.Text, u'\t'),
-            (Token.Text, u'\n'),
-            (Token.Keyword, u'முடி'),
-            (Token.Text, u'\n')
-            ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+    tokens = [
+        (Token.Operator, u'@'),
+        (Token.Punctuation, u'('),
+        (Token.Text, u' '),
+        (Token.Literal.Number.Integer, u'0'),
+        (Token.Text, u' '),
+        (Token.Operator, u'>'),
+        (Token.Text, u' '),
+        (Token.Literal.Number.Integer, u'3'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u')'),
+        (Token.Text, u' '),
+        (Token.Keyword, u'ஆனால்'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'    '),
+        (Token.Keyword, u'பதிப்பி'),
+        (Token.Text, u' '),
+        (Token.Literal.String, u'"wont print"'),
+        (Token.Text, u'\n'),
+        (Token.Keyword, u'முடி'),
+        (Token.Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
 
-    def testFunction(self):
-        fragment = u"""# (C) முத்தையா அண்ணாமலை 2013, 2015
+def test_function(lexer):
+    fragment = u"""# (C) முத்தையா அண்ணாமலை 2013, 2015
 நிரல்பாகம்  gcd ( x, y )
-    மு = max(x,y)
    q = min(x,y)
+மு = max(x,y)
+ q = min(x,y)
 
-    @( q == 0 ) ஆனால்
-           பின்கொடு  மு
-    முடி
-    பின்கொடு  gcd( மு - q , q )
+@( q == 0 ) ஆனால்
+       பின்கொடு  மு
+முடி
+பின்கொடு  gcd( மு - q , q )
 முடி\n"""
-        tokens = [
-            (Token.Comment.Single,
-             u'# (C) \u0bae\u0bc1\u0ba4\u0bcd\u0ba4\u0bc8\u0baf\u0bbe \u0b85'
-             u'\u0ba3\u0bcd\u0ba3\u0bbe\u0bae\u0bb2\u0bc8 2013, 2015\n'),
-            (Token.Keyword,u'நிரல்பாகம்'),
-            (Token.Text, u'  '),
-            (Token.Name, u'gcd'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'('),
-            (Token.Text, u' '),
-            (Token.Name, u'x'),
-            (Token.Operator, u','),
-            (Token.Text, u' '),
-            (Token.Name, u'y'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'    '),
-            (Token.Name, u'\u0bae\u0bc1'),
-            (Token.Text, u' '),
-            (Token.Operator, u'='),
-            (Token.Text, u' '),
-            (Token.Name.Builtin, u'max'),
-            (Token.Punctuation, u'('),
-            (Token.Name, u'x'),
-            (Token.Operator, u','),
-            (Token.Name, u'y'),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'     '),
-            (Token.Name, u'q'),
-            (Token.Text, u' '),
-            (Token.Operator, u'='),
-            (Token.Text, u' '),
-            (Token.Name.Builtin, u'min'),
-            (Token.Punctuation, u'('),
-            (Token.Name, u'x'),
-            (Token.Operator, u','),
-            (Token.Name, u'y'),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'    '),
-            (Token.Operator, u'@'),
-            (Token.Punctuation, u'('),
-            (Token.Text, u' '),
-            (Token.Name, u'q'),
-            (Token.Text, u' '),
-            (Token.Operator, u'=='),
-            (Token.Text, u' '),
-            (Token.Literal.Number.Integer, u'0'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u')'),
-            (Token.Text, u' '),
-            (Token.Keyword, u'ஆனால்'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'           '),
-            (Token.Keyword, u'பின்கொடு'),
-            (Token.Text, u'  '),
-            (Token.Name, u'\u0bae\u0bc1'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'    '),
-            (Token.Keyword, u'முடி'),
-            (Token.Text, u'\n'),
-            (Token.Text, u'    '),
-            (Token.Keyword, u'\u0baa\u0bbf\u0ba9\u0bcd\u0b95\u0bca\u0b9f\u0bc1'),
-            (Token.Text, u'  '),
-            (Token.Name, u'gcd'),
-            (Token.Punctuation, u'('),
-            (Token.Text, u' '),
-            (Token.Name, u'\u0bae\u0bc1'),
-            (Token.Text, u' '),
-            (Token.Operator, u'-'),
-            (Token.Text, u' '),
-            (Token.Name, u'q'),
-            (Token.Text, u' '),
-            (Token.Operator, u','),
-            (Token.Text, u' '),
-            (Token.Name, u'q'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-            (Token.Keyword, u'முடி'), #u'\u0bae\u0bc1\u0b9f\u0bbf'),
-            (Token.Text, u'\n')
-            ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-        
-if __name__ == "__main__":
-    unittest.main()
+    tokens = [
+        (Token.Comment.Single,
+         u'# (C) \u0bae\u0bc1\u0ba4\u0bcd\u0ba4\u0bc8\u0baf\u0bbe \u0b85'
+         u'\u0ba3\u0bcd\u0ba3\u0bbe\u0bae\u0bb2\u0bc8 2013, 2015\n'),
+        (Token.Keyword, u'நிரல்பாகம்'),
+        (Token.Text, u'  '),
+        (Token.Name, u'gcd'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'('),
+        (Token.Text, u' '),
+        (Token.Name, u'x'),
+        (Token.Operator, u','),
+        (Token.Text, u' '),
+        (Token.Name, u'y'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+        (Token.Name, u'\u0bae\u0bc1'),
+        (Token.Text, u' '),
+        (Token.Operator, u'='),
+        (Token.Text, u' '),
+        (Token.Name.Builtin, u'max'),
+        (Token.Punctuation, u'('),
+        (Token.Name, u'x'),
+        (Token.Operator, u','),
+        (Token.Name, u'y'),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+        (Token.Text, u' '),
+        (Token.Name, u'q'),
+        (Token.Text, u' '),
+        (Token.Operator, u'='),
+        (Token.Text, u' '),
+        (Token.Name.Builtin, u'min'),
+        (Token.Punctuation, u'('),
+        (Token.Name, u'x'),
+        (Token.Operator, u','),
+        (Token.Name, u'y'),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'\n'),
+        (Token.Operator, u'@'),
+        (Token.Punctuation, u'('),
+        (Token.Text, u' '),
+        (Token.Name, u'q'),
+        (Token.Text, u' '),
+        (Token.Operator, u'=='),
+        (Token.Text, u' '),
+        (Token.Literal.Number.Integer, u'0'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u')'),
+        (Token.Text, u' '),
+        (Token.Keyword, u'ஆனால்'),
+        (Token.Text, u'\n'),
+        (Token.Text, u'       '),
+        (Token.Keyword, u'பின்கொடு'),
+        (Token.Text, u'  '),
+        (Token.Name, u'\u0bae\u0bc1'),
+        (Token.Text, u'\n'),
+        (Token.Keyword, u'முடி'),
+        (Token.Text, u'\n'),
+        (Token.Keyword, u'\u0baa\u0bbf\u0ba9\u0bcd\u0b95\u0bca\u0b9f\u0bc1'),
+        (Token.Text, u'  '),
+        (Token.Name, u'gcd'),
+        (Token.Punctuation, u'('),
+        (Token.Text, u' '),
+        (Token.Name, u'\u0bae\u0bc1'),
+        (Token.Text, u' '),
+        (Token.Operator, u'-'),
+        (Token.Text, u' '),
+        (Token.Name, u'q'),
+        (Token.Text, u' '),
+        (Token.Operator, u','),
+        (Token.Text, u' '),
+        (Token.Name, u'q'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+        (Token.Keyword, u'முடி'),  # u'\u0bae\u0bc1\u0b9f\u0bbf'),
+        (Token.Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
diff --git a/tests/test_guessing.py b/tests/test_guessing.py
new file mode 100644 (file)
index 0000000..e69de29
index 37efd6f0278a9cc1d5fa26fb9aba1df6e5de43a5..952473986df9753fdef463722a0ec05d823cced8 100644 (file)
@@ -12,189 +12,199 @@ from __future__ import print_function
 import io
 import os
 import re
-import unittest
 import tempfile
-from os.path import join, dirname, isfile
+from os import path
+
+from pytest import raises
 
 from pygments.util import StringIO
 from pygments.lexers import PythonLexer
 from pygments.formatters import HtmlFormatter, NullFormatter
 from pygments.formatters.html import escape_html
 
-import support
-
-TESTFILE, TESTDIR = support.location(__file__)
+TESTDIR = path.dirname(path.abspath(__file__))
+TESTFILE = path.join(TESTDIR, 'test_html_formatter.py')
 
 with io.open(TESTFILE, encoding='utf-8') as fp:
     tokensource = list(PythonLexer().get_tokens(fp.read()))
 
 
-class HtmlFormatterTest(unittest.TestCase):
-    def test_correct_output(self):
-        hfmt = HtmlFormatter(nowrap=True)
-        houtfile = StringIO()
-        hfmt.format(tokensource, houtfile)
-
-        nfmt = NullFormatter()
-        noutfile = StringIO()
-        nfmt.format(tokensource, noutfile)
-
-        stripped_html = re.sub('<.*?>', '', houtfile.getvalue())
-        escaped_text = escape_html(noutfile.getvalue())
-        self.assertEqual(stripped_html, escaped_text)
-
-    def test_external_css(self):
-        # test correct behavior
-        # CSS should be in /tmp directory
-        fmt1 = HtmlFormatter(full=True, cssfile='fmt1.css', outencoding='utf-8')
-        # CSS should be in TESTDIR (TESTDIR is absolute)
-        fmt2 = HtmlFormatter(full=True, cssfile=join(TESTDIR, 'fmt2.css'),
-                             outencoding='utf-8')
-        tfile = tempfile.NamedTemporaryFile(suffix='.html')
-        fmt1.format(tokensource, tfile)
-        try:
-            fmt2.format(tokensource, tfile)
-            self.assertTrue(isfile(join(TESTDIR, 'fmt2.css')))
-        except IOError:
-            # test directory not writable
-            pass
-        tfile.close()
-
-        self.assertTrue(isfile(join(dirname(tfile.name), 'fmt1.css')))
-        os.unlink(join(dirname(tfile.name), 'fmt1.css'))
-        try:
-            os.unlink(join(TESTDIR, 'fmt2.css'))
-        except OSError:
-            pass
-
-    def test_all_options(self):
-        def check(optdict):
-            outfile = StringIO()
-            fmt = HtmlFormatter(**optdict)
-            fmt.format(tokensource, outfile)
-
-        for optdict in [
-            dict(nowrap=True),
-            dict(linenos=True, full=True),
-            dict(linenos=True, linespans='L'),
-            dict(hl_lines=[1, 5, 10, 'xxx']),
-            dict(hl_lines=[1, 5, 10], noclasses=True),
-        ]:
-            check(optdict)
-
-        for linenos in [False, 'table', 'inline']:
-            for noclasses in [False, True]:
-                for linenospecial in [0, 5]:
-                    for anchorlinenos in [False, True]:
-                        optdict = dict(
-                            linenos=linenos,
-                            noclasses=noclasses,
-                            linenospecial=linenospecial,
-                            anchorlinenos=anchorlinenos,
-                        )
-                        check(optdict)
-
-    def test_linenos(self):
-        optdict = dict(linenos=True)
+def test_correct_output():
+    hfmt = HtmlFormatter(nowrap=True)
+    houtfile = StringIO()
+    hfmt.format(tokensource, houtfile)
+
+    nfmt = NullFormatter()
+    noutfile = StringIO()
+    nfmt.format(tokensource, noutfile)
+
+    stripped_html = re.sub('<.*?>', '', houtfile.getvalue())
+    escaped_text = escape_html(noutfile.getvalue())
+    assert stripped_html == escaped_text
+
+
+def test_external_css():
+    # test correct behavior
+    # CSS should be in /tmp directory
+    fmt1 = HtmlFormatter(full=True, cssfile='fmt1.css', outencoding='utf-8')
+    # CSS should be in TESTDIR (TESTDIR is absolute)
+    fmt2 = HtmlFormatter(full=True, cssfile=path.join(TESTDIR, 'fmt2.css'),
+                         outencoding='utf-8')
+    tfile = tempfile.NamedTemporaryFile(suffix='.html')
+    fmt1.format(tokensource, tfile)
+    try:
+        fmt2.format(tokensource, tfile)
+        assert path.isfile(path.join(TESTDIR, 'fmt2.css'))
+    except IOError:
+        # test directory not writable
+        pass
+    tfile.close()
+
+    assert path.isfile(path.join(path.dirname(tfile.name), 'fmt1.css'))
+    os.unlink(path.join(path.dirname(tfile.name), 'fmt1.css'))
+    try:
+        os.unlink(path.join(TESTDIR, 'fmt2.css'))
+    except OSError:
+        pass
+
+
+def test_all_options():
+    def check(optdict):
         outfile = StringIO()
         fmt = HtmlFormatter(**optdict)
         fmt.format(tokensource, outfile)
-        html = outfile.getvalue()
-        self.assertTrue(re.search(r"<pre>\s+1\s+2\s+3", html))
 
-    def test_linenos_with_startnum(self):
-        optdict = dict(linenos=True, linenostart=5)
+    for optdict in [
+        dict(nowrap=True),
+        dict(linenos=True, full=True),
+        dict(linenos=True, linespans='L'),
+        dict(hl_lines=[1, 5, 10, 'xxx']),
+        dict(hl_lines=[1, 5, 10], noclasses=True),
+    ]:
+        check(optdict)
+
+    for linenos in [False, 'table', 'inline']:
+        for noclasses in [False, True]:
+            for linenospecial in [0, 5]:
+                for anchorlinenos in [False, True]:
+                    optdict = dict(
+                        linenos=linenos,
+                        noclasses=noclasses,
+                        linenospecial=linenospecial,
+                        anchorlinenos=anchorlinenos,
+                    )
+                    check(optdict)
+
+
+def test_linenos():
+    optdict = dict(linenos=True)
+    outfile = StringIO()
+    fmt = HtmlFormatter(**optdict)
+    fmt.format(tokensource, outfile)
+    html = outfile.getvalue()
+    assert re.search(r"<pre>\s+1\s+2\s+3", html)
+
+
+def test_linenos_with_startnum():
+    optdict = dict(linenos=True, linenostart=5)
+    outfile = StringIO()
+    fmt = HtmlFormatter(**optdict)
+    fmt.format(tokensource, outfile)
+    html = outfile.getvalue()
+    assert re.search(r"<pre>\s+5\s+6\s+7", html)
+
+
+def test_lineanchors():
+    optdict = dict(lineanchors="foo")
+    outfile = StringIO()
+    fmt = HtmlFormatter(**optdict)
+    fmt.format(tokensource, outfile)
+    html = outfile.getvalue()
+    assert re.search("<pre><span></span><a name=\"foo-1\">", html)
+
+
+def test_lineanchors_with_startnum():
+    optdict = dict(lineanchors="foo", linenostart=5)
+    outfile = StringIO()
+    fmt = HtmlFormatter(**optdict)
+    fmt.format(tokensource, outfile)
+    html = outfile.getvalue()
+    assert re.search("<pre><span></span><a name=\"foo-5\">", html)
+
+
+def test_valid_output():
+    # test all available wrappers
+    fmt = HtmlFormatter(full=True, linenos=True, noclasses=True,
+                        outencoding='utf-8')
+
+    handle, pathname = tempfile.mkstemp('.html')
+    with os.fdopen(handle, 'w+b') as tfile:
+        fmt.format(tokensource, tfile)
+    catname = os.path.join(TESTDIR, 'dtds', 'HTML4.soc')
+    try:
+        import subprocess
+        po = subprocess.Popen(['nsgmls', '-s', '-c', catname, pathname],
+                              stdout=subprocess.PIPE)
+        ret = po.wait()
+        output = po.stdout.read()
+        po.stdout.close()
+    except OSError:
+        # nsgmls not available
+        pass
+    else:
+        if ret:
+            print(output)
+        assert not ret, 'nsgmls run reported errors'
+
+    os.unlink(pathname)
+
+
+def test_get_style_defs():
+    fmt = HtmlFormatter()
+    sd = fmt.get_style_defs()
+    assert sd.startswith('.')
+
+    fmt = HtmlFormatter(cssclass='foo')
+    sd = fmt.get_style_defs()
+    assert sd.startswith('.foo')
+    sd = fmt.get_style_defs('.bar')
+    assert sd.startswith('.bar')
+    sd = fmt.get_style_defs(['.bar', '.baz'])
+    fl = sd.splitlines()[0]
+    assert '.bar' in fl and '.baz' in fl
+
+
+def test_unicode_options():
+    fmt = HtmlFormatter(title=u'Föö',
+                        cssclass=u'bär',
+                        cssstyles=u'div:before { content: \'bäz\' }',
+                        encoding='utf-8')
+    handle, pathname = tempfile.mkstemp('.html')
+    with os.fdopen(handle, 'w+b') as tfile:
+        fmt.format(tokensource, tfile)
+
+
+def test_ctags():
+    try:
+        import ctags
+    except ImportError:
+        # we can't check without the ctags module, but at least check the exception
+        assert raises(RuntimeError, HtmlFormatter, tagsfile='support/tags')
+    else:
+        # this tagfile says that test_ctags() is on line 165, even if it isn't
+        # anymore in the actual source
+        fmt = HtmlFormatter(tagsfile='support/tags', lineanchors='L',
+                            tagurlformat='%(fname)s%(fext)s')
         outfile = StringIO()
-        fmt = HtmlFormatter(**optdict)
         fmt.format(tokensource, outfile)
-        html = outfile.getvalue()
-        self.assertTrue(re.search(r"<pre>\s+5\s+6\s+7", html))
+        assert '<a href="test_html_formatter.py#L-165">test_ctags</a>' \
+            in outfile.getvalue()
 
-    def test_lineanchors(self):
-        optdict = dict(lineanchors="foo")
-        outfile = StringIO()
-        fmt = HtmlFormatter(**optdict)
-        fmt.format(tokensource, outfile)
-        html = outfile.getvalue()
-        self.assertTrue(re.search("<pre><span></span><a name=\"foo-1\">", html))
 
-    def test_lineanchors_with_startnum(self):
-        optdict = dict(lineanchors="foo", linenostart=5)
-        outfile = StringIO()
-        fmt = HtmlFormatter(**optdict)
-        fmt.format(tokensource, outfile)
-        html = outfile.getvalue()
-        self.assertTrue(re.search("<pre><span></span><a name=\"foo-5\">", html))
-
-    def test_valid_output(self):
-        # test all available wrappers
-        fmt = HtmlFormatter(full=True, linenos=True, noclasses=True,
-                            outencoding='utf-8')
-
-        handle, pathname = tempfile.mkstemp('.html')
-        with os.fdopen(handle, 'w+b') as tfile:
-            fmt.format(tokensource, tfile)
-        catname = os.path.join(TESTDIR, 'dtds', 'HTML4.soc')
-        try:
-            import subprocess
-            po = subprocess.Popen(['nsgmls', '-s', '-c', catname, pathname],
-                                  stdout=subprocess.PIPE)
-            ret = po.wait()
-            output = po.stdout.read()
-            po.stdout.close()
-        except OSError:
-            # nsgmls not available
-            pass
-        else:
-            if ret:
-                print(output)
-            self.assertFalse(ret, 'nsgmls run reported errors')
-
-        os.unlink(pathname)
-
-    def test_get_style_defs(self):
-        fmt = HtmlFormatter()
-        sd = fmt.get_style_defs()
-        self.assertTrue(sd.startswith('.'))
-
-        fmt = HtmlFormatter(cssclass='foo')
-        sd = fmt.get_style_defs()
-        self.assertTrue(sd.startswith('.foo'))
-        sd = fmt.get_style_defs('.bar')
-        self.assertTrue(sd.startswith('.bar'))
-        sd = fmt.get_style_defs(['.bar', '.baz'])
-        fl = sd.splitlines()[0]
-        self.assertTrue('.bar' in fl and '.baz' in fl)
-
-    def test_unicode_options(self):
-        fmt = HtmlFormatter(title=u'Föö',
-                            cssclass=u'bär',
-                            cssstyles=u'div:before { content: \'bäz\' }',
-                            encoding='utf-8')
-        handle, pathname = tempfile.mkstemp('.html')
-        with os.fdopen(handle, 'w+b') as tfile:
-            fmt.format(tokensource, tfile)
-
-    def test_ctags(self):
-        try:
-            import ctags
-        except ImportError:
-            # we can't check without the ctags module, but at least check the exception
-            self.assertRaises(RuntimeError, HtmlFormatter, tagsfile='support/tags')
-        else:
-            # this tagfile says that test_ctags() is on line 165, even if it isn't
-            # anymore in the actual source
-            fmt = HtmlFormatter(tagsfile='support/tags', lineanchors='L',
-                                tagurlformat='%(fname)s%(fext)s')
-            outfile = StringIO()
-            fmt.format(tokensource, outfile)
-            self.assertTrue('<a href="test_html_formatter.py#L-165">test_ctags</a>'
-                            in outfile.getvalue())
-
-    def test_filename(self):
-        optdict = dict(filename="test.py")
-        outfile = StringIO()
-        fmt = HtmlFormatter(**optdict)
-        fmt.format(tokensource, outfile)
-        html = outfile.getvalue()
-        self.assertTrue(re.search("<span class=\"filename\">test.py</span><pre>", html))
+def test_filename():
+    optdict = dict(filename="test.py")
+    outfile = StringIO()
+    fmt = HtmlFormatter(**optdict)
+    fmt.format(tokensource, outfile)
+    html = outfile.getvalue()
+    assert re.search("<span class=\"filename\">test.py</span><pre>", html)
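
The "assert raises(RuntimeError, HtmlFormatter, tagsfile='support/tags')" line in test_ctags uses the callable form of pytest.raises; the context-manager form is equivalent and usually reads more clearly. Below is a sketch of that variant, under the assumption (taken from the test above) that HtmlFormatter raises RuntimeError when a tagsfile is requested but the ctags module cannot be imported.

import pytest

from pygments.formatters import HtmlFormatter


def test_tagsfile_without_ctags_module():
    # Meaningful only in an environment where `import ctags` fails;
    # HtmlFormatter then rejects the tagsfile option with a RuntimeError.
    with pytest.raises(RuntimeError):
        HtmlFormatter(tagsfile='support/tags')
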
index 38acf328cb5f768ab67a702ade2448eb93f40dab..035277243282fc882fceaee5b8aa502efb25c0b4 100644 (file)
@@ -7,37 +7,10 @@
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
-
 from pygments.lexer import RegexLexer, inherit
 from pygments.token import Text
 
 
-class InheritTest(unittest.TestCase):
-    def test_single_inheritance_position(self):
-        t = Two()
-        pats = [x[0].__self__.pattern for x in t._tokens['root']]
-        self.assertEqual(['x', 'a', 'b', 'y'], pats)
-    def test_multi_inheritance_beginning(self):
-        t = Beginning()
-        pats = [x[0].__self__.pattern for x in t._tokens['root']]
-        self.assertEqual(['x', 'a', 'b', 'y', 'm'], pats)
-    def test_multi_inheritance_end(self):
-        t = End()
-        pats = [x[0].__self__.pattern for x in t._tokens['root']]
-        self.assertEqual(['m', 'x', 'a', 'b', 'y'], pats)
-
-    def test_multi_inheritance_position(self):
-        t = Three()
-        pats = [x[0].__self__.pattern for x in t._tokens['root']]
-        self.assertEqual(['i', 'x', 'a', 'b', 'y', 'j'], pats)
-
-    def test_single_inheritance_with_skip(self):
-        t = Skipped()
-        pats = [x[0].__self__.pattern for x in t._tokens['root']]
-        self.assertEqual(['x', 'a', 'b', 'y'], pats)
-
-
 class One(RegexLexer):
     tokens = {
         'root': [
@@ -46,6 +19,7 @@ class One(RegexLexer):
         ],
     }
 
+
 class Two(One):
     tokens = {
         'root': [
@@ -55,6 +29,7 @@ class Two(One):
         ],
     }
 
+
 class Three(Two):
     tokens = {
         'root': [
@@ -64,6 +39,7 @@ class Three(Two):
         ],
     }
 
+
 class Beginning(Two):
     tokens = {
         'root': [
@@ -72,6 +48,7 @@ class Beginning(Two):
         ],
     }
 
+
 class End(Two):
     tokens = {
         'root': [
@@ -80,9 +57,11 @@ class End(Two):
         ],
     }
 
+
 class Empty(One):
     tokens = {}
 
+
 class Skipped(Empty):
     tokens = {
         'root': [
@@ -92,3 +71,32 @@ class Skipped(Empty):
         ],
     }
 
+
+def test_single_inheritance_position():
+    t = Two()
+    pats = [x[0].__self__.pattern for x in t._tokens['root']]
+    assert ['x', 'a', 'b', 'y'] == pats
+
+
+def test_multi_inheritance_beginning():
+    t = Beginning()
+    pats = [x[0].__self__.pattern for x in t._tokens['root']]
+    assert ['x', 'a', 'b', 'y', 'm'] == pats
+
+
+def test_multi_inheritance_end():
+    t = End()
+    pats = [x[0].__self__.pattern for x in t._tokens['root']]
+    assert ['m', 'x', 'a', 'b', 'y'] == pats
+
+
+def test_multi_inheritance_position():
+    t = Three()
+    pats = [x[0].__self__.pattern for x in t._tokens['root']]
+    assert ['i', 'x', 'a', 'b', 'y', 'j'] == pats
+
+
+def test_single_inheritance_with_skip():
+    t = Skipped()
+    pats = [x[0].__self__.pattern for x in t._tokens['root']]
+    assert ['x', 'a', 'b', 'y'] == pats
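
The pattern-order assertions above pin down the semantics of inherit: the parent lexer's rules for a state are spliced into the child's rule list at the exact position of the inherit marker, and an empty tokens dict in the middle of the hierarchy (the Empty/Skipped case) is simply skipped over. A small standalone sketch of the same behaviour follows; the lexer names are illustrative and not part of the test suite.

from pygments.lexer import RegexLexer, inherit
from pygments.token import Keyword, Name, Text


class Base(RegexLexer):
    tokens = {
        'root': [
            (r'\bdef\b', Keyword),
            (r'\s+', Text),
        ],
    }


class Extended(Base):
    tokens = {
        'root': [
            (r'\bclass\b', Keyword),  # tried before the inherited rules
            inherit,                  # Base's rules are spliced in right here
            (r'\w+', Name),           # fallback, tried after the inherited rules
        ],
    }


# Same introspection as the tests above: the processed rule order is
# 'class', then Base's 'def' and whitespace rules, then the identifier rule.
pats = [rule[0].__self__.pattern for rule in Extended()._tokens['root']]
assert pats == [r'\bclass\b', r'\bdef\b', r'\s+', r'\w+']
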
index 18bcd58b220a4cb7db8b9057211f1049948e46ce..046a0d19c74f2f462b41b94d53549d25102d380d 100644 (file)
@@ -9,22 +9,16 @@
 
 from __future__ import print_function
 
-import re
-import unittest
-
 from pygments.util import StringIO
 from pygments.lexers import PythonLexer
 from pygments.formatters import IRCFormatter
 
-import support
-
 tokensource = list(PythonLexer().get_tokens("lambda x: 123"))
 
-class IRCFormatterTest(unittest.TestCase):
-    def test_correct_output(self):
-        hfmt = IRCFormatter()
-        houtfile = StringIO()
-        hfmt.format(tokensource, houtfile)
 
-        self.assertEqual(u'\x0302lambda\x03 x: \x0302123\x03\n', houtfile.getvalue())
+def test_correct_output():
+    hfmt = IRCFormatter()
+    houtfile = StringIO()
+    hfmt.format(tokensource, houtfile)
 
+    assert u'\x0302lambda\x03 x: \x0302123\x03\n' == houtfile.getvalue()
index 5f520853ddb83a67bdb9778f2c436e40967ee67c..944632463b3af71c2371d416b70bcbc108f075f7 100644 (file)
@@ -7,72 +7,72 @@
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
-from pygments.token import Text, Name, Operator, Keyword, Number
+from pygments.token import Text, Name, Punctuation, Keyword, Number
 from pygments.lexers import JavaLexer
 
 
-class JavaTest(unittest.TestCase):
+@pytest.fixture(scope='module')
+def lexer():
+    yield JavaLexer()
 
-    def setUp(self):
-        self.lexer = JavaLexer()
-        self.maxDiff = None
 
-    def testEnhancedFor(self):
-        fragment = u'label:\nfor(String var2: var1) {}\n'
-        tokens = [
-            (Name.Label, u'label:'),
-            (Text, u'\n'),
-            (Keyword, u'for'),
-            (Operator, u'('),
-            (Name, u'String'),
-            (Text, u' '),
-            (Name, u'var2'),
-            (Operator, u':'),
-            (Text, u' '),
-            (Name, u'var1'),
-            (Operator, u')'),
-            (Text, u' '),
-            (Operator, u'{'),
-            (Operator, u'}'),
-            (Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+def test_enhanced_for(lexer):
+    fragment = u'label:\nfor(String var2: var1) {}\n'
+    tokens = [
+        (Name.Label, u'label:'),
+        (Text, u'\n'),
+        (Keyword, u'for'),
+        (Punctuation, u'('),
+        (Name, u'String'),
+        (Text, u' '),
+        (Name, u'var2'),
+        (Punctuation, u':'),
+        (Text, u' '),
+        (Name, u'var1'),
+        (Punctuation, u')'),
+        (Text, u' '),
+        (Punctuation, u'{'),
+        (Punctuation, u'}'),
+        (Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
 
-    def testNumericLiterals(self):
-        fragment = '0 5L 9__542_72l 0xbEEf 0X9_A 0_35 01 0b0___101_0'
-        fragment += ' 0. .7_17F 3e-1_3d 1f 6_01.9e+3 0x.1Fp3 0XEP8D\n'
-        tokens = [
-            (Number.Integer, '0'),
-            (Text, ' '),
-            (Number.Integer, '5L'),
-            (Text, ' '),
-            (Number.Integer, '9__542_72l'),
-            (Text, ' '),
-            (Number.Hex, '0xbEEf'),
-            (Text, ' '),
-            (Number.Hex, '0X9_A'),
-            (Text, ' '),
-            (Number.Oct, '0_35'),
-            (Text, ' '),
-            (Number.Oct, '01'),
-            (Text, ' '),
-            (Number.Bin, '0b0___101_0'),
-            (Text, ' '),
-            (Number.Float, '0.'),
-            (Text, ' '),
-            (Number.Float, '.7_17F'),
-            (Text, ' '),
-            (Number.Float, '3e-1_3d'),
-            (Text, ' '),
-            (Number.Float, '1f'),
-            (Text, ' '),
-            (Number.Float, '6_01.9e+3'),
-            (Text, ' '),
-            (Number.Float, '0x.1Fp3'),
-            (Text, ' '),
-            (Number.Float, '0XEP8D'),
-            (Text, '\n')
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+def test_numeric_literals(lexer):
+    fragment = '0 5L 9__542_72l 0xbEEf 0X9_A 0_35 01 0b0___101_0'
+    fragment += ' 0. .7_17F 3e-1_3d 1f 6_01.9e+3 0x.1Fp3 0XEP8D\n'
+    tokens = [
+        (Number.Integer, '0'),
+        (Text, ' '),
+        (Number.Integer, '5L'),
+        (Text, ' '),
+        (Number.Integer, '9__542_72l'),
+        (Text, ' '),
+        (Number.Hex, '0xbEEf'),
+        (Text, ' '),
+        (Number.Hex, '0X9_A'),
+        (Text, ' '),
+        (Number.Oct, '0_35'),
+        (Text, ' '),
+        (Number.Oct, '01'),
+        (Text, ' '),
+        (Number.Bin, '0b0___101_0'),
+        (Text, ' '),
+        (Number.Float, '0.'),
+        (Text, ' '),
+        (Number.Float, '.7_17F'),
+        (Text, ' '),
+        (Number.Float, '3e-1_3d'),
+        (Text, ' '),
+        (Number.Float, '1f'),
+        (Text, ' '),
+        (Number.Float, '6_01.9e+3'),
+        (Text, ' '),
+        (Number.Float, '0x.1Fp3'),
+        (Text, ' '),
+        (Number.Float, '0XEP8D'),
+        (Text, '\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
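
Every module converted in this commit follows the same shape: a module-scoped fixture yields one lexer instance, and each test function receives it as an argument, so the lexer is built once per file instead of once per test. The pattern in isolation (reusing JavaLexer from the hunk above, with a made-up assertion):

    import pytest
    from pygments.lexers import JavaLexer

    @pytest.fixture(scope='module')
    def lexer():
        # constructed once per test module, shared by every test that names it
        yield JavaLexer()

    def test_shared_lexer(lexer):
        assert list(lexer.get_tokens('int x;\n'))
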
index a2dfb7e1ca3beff5a3083be8bcc450c67661b75b..25e06fdc25ce6520a3c409647168f3bab0544db7 100644 (file)
@@ -7,7 +7,7 @@
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
 from pygments.lexers import CoffeeScriptLexer
 from pygments.token import Token
@@ -36,49 +36,50 @@ COFFEE_SLASH_GOLDEN = [
     ('a = 1 + /d/.test(a)', True),
 ]
 
-def test_coffee_slashes():
-    for input_str, slashes_are_regex_here in COFFEE_SLASH_GOLDEN:
-        yield coffee_runner, input_str, slashes_are_regex_here
 
-def coffee_runner(input_str, slashes_are_regex_here):
-    lex = CoffeeScriptLexer()
-    output = list(lex.get_tokens(input_str))
+@pytest.fixture(scope='module')
+def lexer():
+    yield CoffeeScriptLexer()
+
+
+@pytest.mark.parametrize('golden', COFFEE_SLASH_GOLDEN)
+def test_coffee_slashes(lexer, golden):
+    input_str, slashes_are_regex_here = golden
+    output = list(lexer.get_tokens(input_str))
     print(output)
     for t, s in output:
         if '/' in s:
             is_regex = t is Token.String.Regex
             assert is_regex == slashes_are_regex_here, (t, s)
 
-class CoffeeTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = CoffeeScriptLexer()
 
-    def testMixedSlashes(self):
-        fragment = u'a?/foo/:1/2;\n'
-        tokens = [
-            (Token.Name.Other, u'a'),
-            (Token.Operator, u'?'),
-            (Token.Literal.String.Regex, u'/foo/'),
-            (Token.Operator, u':'),
-            (Token.Literal.Number.Integer, u'1'),
-            (Token.Operator, u'/'),
-            (Token.Literal.Number.Integer, u'2'),
-            (Token.Punctuation, u';'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+def test_mixed_slashes(lexer):
+    fragment = u'a?/foo/:1/2;\n'
+    tokens = [
+        (Token.Name.Other, u'a'),
+        (Token.Operator, u'?'),
+        (Token.Literal.String.Regex, u'/foo/'),
+        (Token.Operator, u':'),
+        (Token.Literal.Number.Integer, u'1'),
+        (Token.Operator, u'/'),
+        (Token.Literal.Number.Integer, u'2'),
+        (Token.Punctuation, u';'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
 
-    def testBewareInfiniteLoop(self):
-        # This demonstrates the case that "This isn't really guarding" comment
-        # refers to.
-        fragment = '/a/x;\n'
-        tokens = [
-            (Token.Text, ''),
-            (Token.Operator, '/'),
-            (Token.Name.Other, 'a'),
-            (Token.Operator, '/'),
-            (Token.Name.Other, 'x'),
-            (Token.Punctuation, ';'),
-            (Token.Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+def test_beware_infinite_loop(lexer):
+    # This demonstrates the case that the "This isn't really guarding"
+    # comment refers to.
+    fragment = '/a/x;\n'
+    tokens = [
+        (Token.Text, ''),
+        (Token.Operator, '/'),
+        (Token.Name.Other, 'a'),
+        (Token.Operator, '/'),
+        (Token.Name.Other, 'x'),
+        (Token.Punctuation, ';'),
+        (Token.Text, '\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
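
The slash tests above replace nose-style yield tests with pytest.mark.parametrize, which turns each golden tuple into its own collected test item. The decorator can also unpack tuples straight into named parameters; a small sketch with made-up data:

    import pytest

    CASES = [('a = /foo/', True), ('a = 1 / 2', False)]  # hypothetical golden data

    @pytest.mark.parametrize('text,slash_is_regex', CASES)
    def test_cases(text, slash_is_regex):
        # one test item is generated per tuple in CASES
        assert isinstance(slash_is_regex, bool)
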
index eda04b1ca30c5b9ba86b070a99b1e2e8eb8e4a60..3f11593151ad3cfb81128d7a1a1ebd6882f6a9d4 100644 (file)
@@ -7,52 +7,53 @@
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
-from pygments.lexers import JuliaLexer
 from pygments.token import Token
+from pygments.lexers import JuliaLexer
+
 
+@pytest.fixture(scope='module')
+def lexer():
+    yield JuliaLexer()
 
-class JuliaTests(unittest.TestCase):
-    def setUp(self):
-        self.lexer = JuliaLexer()
 
-    def test_unicode(self):
-        """
-        Test that unicode character, √, in an expression is recognized
-        """
-        fragment = u's = \u221a((1/n) * sum(count .^ 2) - mu .^2)\n'
-        tokens = [
-            (Token.Name, u's'),
-            (Token.Text, u' '),
-            (Token.Operator, u'='),
-            (Token.Text, u' '),
-            (Token.Operator, u'\u221a'),
-            (Token.Punctuation, u'('),
-            (Token.Punctuation, u'('),
-            (Token.Literal.Number.Integer, u'1'),
-            (Token.Operator, u'/'),
-            (Token.Name, u'n'),
-            (Token.Punctuation, u')'),
-            (Token.Text, u' '),
-            (Token.Operator, u'*'),
-            (Token.Text, u' '),
-            (Token.Name, u'sum'),
-            (Token.Punctuation, u'('),
-            (Token.Name, u'count'),
-            (Token.Text, u' '),
-            (Token.Operator, u'.^'),
-            (Token.Text, u' '),
-            (Token.Literal.Number.Integer, u'2'),
-            (Token.Punctuation, u')'),
-            (Token.Text, u' '),
-            (Token.Operator, u'-'),
-            (Token.Text, u' '),
-            (Token.Name, u'mu'),
-            (Token.Text, u' '),
-            (Token.Operator, u'.^'),
-            (Token.Literal.Number.Integer, u'2'),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+def test_unicode(lexer):
+    """
+    Test that the Unicode character √ in an expression is recognized.
+    """
+    fragment = u's = \u221a((1/n) * sum(count .^ 2) - mu .^2)\n'
+    tokens = [
+        (Token.Name, u's'),
+        (Token.Text, u' '),
+        (Token.Operator, u'='),
+        (Token.Text, u' '),
+        (Token.Operator, u'\u221a'),
+        (Token.Punctuation, u'('),
+        (Token.Punctuation, u'('),
+        (Token.Literal.Number.Integer, u'1'),
+        (Token.Operator, u'/'),
+        (Token.Name, u'n'),
+        (Token.Punctuation, u')'),
+        (Token.Text, u' '),
+        (Token.Operator, u'*'),
+        (Token.Text, u' '),
+        (Token.Name, u'sum'),
+        (Token.Punctuation, u'('),
+        (Token.Name, u'count'),
+        (Token.Text, u' '),
+        (Token.Operator, u'.^'),
+        (Token.Text, u' '),
+        (Token.Literal.Number.Integer, u'2'),
+        (Token.Punctuation, u')'),
+        (Token.Text, u' '),
+        (Token.Operator, u'-'),
+        (Token.Text, u' '),
+        (Token.Name, u'mu'),
+        (Token.Text, u' '),
+        (Token.Operator, u'.^'),
+        (Token.Literal.Number.Integer, u'2'),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
index 417d0d9f99fcb4c9216b5724e8806e15805a697f..69b19c17d8cf8da4172bcdd42399d2fec6f34265 100644 (file)
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
-from pygments.token import Text, Name, Operator, Keyword, Number, Punctuation, String
+from pygments.token import Text, Name, Keyword, Punctuation, String
 from pygments.lexers import KotlinLexer
 
-class KotlinTest(unittest.TestCase):
-
-    def setUp(self):
-        self.lexer = KotlinLexer()
-        self.maxDiff = None
-    
-    def testCanCopeWithBackTickNamesInFunctions(self):
-        fragment = u'fun `wo bble`'
-        tokens = [
-            (Keyword, u'fun'),
-            (Text, u' '),
-            (Name.Function, u'`wo bble`'),
-            (Text, u'\n')
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testCanCopeWithCommasAndDashesInBackTickNames(self):
-        fragment = u'fun `wo,-bble`'
-        tokens = [
-            (Keyword, u'fun'),
-            (Text, u' '),
-            (Name.Function, u'`wo,-bble`'),
-            (Text, u'\n')
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-    
-    def testCanCopeWithDestructuring(self):
-        fragment = u'val (a, b) = '
-        tokens = [
-            (Keyword, u'val'),
-            (Text, u' '),
-            (Punctuation, u'('),
-            (Name.Property, u'a'),
-            (Punctuation, u','),
-            (Text, u' '),
-            (Name.Property, u'b'),
-            (Punctuation, u')'),
-            (Text, u' '),
-            (Punctuation, u'='),
-            (Text, u' '),
-            (Text, u'\n')
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-    
-    def testCanCopeGenericsInDestructuring(self):
-        fragment = u'val (a: List<Something>, b: Set<Wobble>) ='
-        tokens = [
-            (Keyword, u'val'),
-            (Text, u' '),
-            (Punctuation, u'('),
-            (Name.Property, u'a'),
-            (Punctuation, u':'),
-            (Text, u' '),
-            (Name.Property, u'List'),
-            (Punctuation, u'<'),
-            (Name, u'Something'),
-            (Punctuation, u'>'),
-            (Punctuation, u','),
-            (Text, u' '),
-            (Name.Property, u'b'),
-            (Punctuation, u':'),
-            (Text, u' '),
-            (Name.Property, u'Set'),
-            (Punctuation, u'<'),
-            (Name, u'Wobble'),
-            (Punctuation, u'>'),
-            (Punctuation, u')'),
-            (Text, u' '),
-            (Punctuation, u'='),
-            (Text, u'\n')
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testCanCopeWithGenerics(self):
-        fragment = u'inline fun <reified T : ContractState> VaultService.queryBy(): Vault.Page<T> {'
-        tokens = [
-            (Keyword, u'inline fun'),
-            (Text, u' '),
-            (Punctuation, u'<'),
-            (Keyword, u'reified'),
-            (Text, u' '),
-            (Name, u'T'),
-            (Text, u' '),
-            (Punctuation, u':'),
-            (Text, u' '),
-            (Name, u'ContractState'),
-            (Punctuation, u'>'),
-            (Text, u' '),
-            (Name.Class, u'VaultService'),
-            (Punctuation, u'.'),
-            (Name.Function, u'queryBy'),
-            (Punctuation, u'('),
-            (Punctuation, u')'),
-            (Punctuation, u':'),
-            (Text, u' '),
-            (Name, u'Vault'),
-            (Punctuation, u'.'),
-            (Name, u'Page'),
-            (Punctuation, u'<'),
-            (Name, u'T'),
-            (Punctuation, u'>'),
-            (Text, u' '),
-            (Punctuation, u'{'),
-            (Text, u'\n')
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testShouldCopeWithMultilineComments(self):
-        fragment = u'"""\nthis\nis\na\ncomment"""'
-        tokens = [
-            (String, u'"""\nthis\nis\na\ncomment"""'), 
-            (Text, u'\n')
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-if __name__ == '__main__':
-    unittest.main()
+
+@pytest.fixture(scope='module')
+def lexer():
+    yield KotlinLexer()
+
+
+def test_can_cope_with_backtick_names_in_functions(lexer):
+    fragment = u'fun `wo bble`'
+    tokens = [
+        (Keyword, u'fun'),
+        (Text, u' '),
+        (Name.Function, u'`wo bble`'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_can_cope_with_commas_and_dashes_in_backtick_names(lexer):
+    fragment = u'fun `wo,-bble`'
+    tokens = [
+        (Keyword, u'fun'),
+        (Text, u' '),
+        (Name.Function, u'`wo,-bble`'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_can_cope_with_destructuring(lexer):
+    fragment = u'val (a, b) = '
+    tokens = [
+        (Keyword, u'val'),
+        (Text, u' '),
+        (Punctuation, u'('),
+        (Name.Property, u'a'),
+        (Punctuation, u','),
+        (Text, u' '),
+        (Name.Property, u'b'),
+        (Punctuation, u')'),
+        (Text, u' '),
+        (Punctuation, u'='),
+        (Text, u' '),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_can_cope_generics_in_destructuring(lexer):
+    fragment = u'val (a: List<Something>, b: Set<Wobble>) ='
+    tokens = [
+        (Keyword, u'val'),
+        (Text, u' '),
+        (Punctuation, u'('),
+        (Name.Property, u'a'),
+        (Punctuation, u':'),
+        (Text, u' '),
+        (Name.Property, u'List'),
+        (Punctuation, u'<'),
+        (Name, u'Something'),
+        (Punctuation, u'>'),
+        (Punctuation, u','),
+        (Text, u' '),
+        (Name.Property, u'b'),
+        (Punctuation, u':'),
+        (Text, u' '),
+        (Name.Property, u'Set'),
+        (Punctuation, u'<'),
+        (Name, u'Wobble'),
+        (Punctuation, u'>'),
+        (Punctuation, u')'),
+        (Text, u' '),
+        (Punctuation, u'='),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_can_cope_with_generics(lexer):
+    fragment = u'inline fun <reified T : ContractState> VaultService.queryBy(): Vault.Page<T> {'
+    tokens = [
+        (Keyword, u'inline fun'),
+        (Text, u' '),
+        (Punctuation, u'<'),
+        (Keyword, u'reified'),
+        (Text, u' '),
+        (Name, u'T'),
+        (Text, u' '),
+        (Punctuation, u':'),
+        (Text, u' '),
+        (Name, u'ContractState'),
+        (Punctuation, u'>'),
+        (Text, u' '),
+        (Name.Class, u'VaultService'),
+        (Punctuation, u'.'),
+        (Name.Function, u'queryBy'),
+        (Punctuation, u'('),
+        (Punctuation, u')'),
+        (Punctuation, u':'),
+        (Text, u' '),
+        (Name, u'Vault'),
+        (Punctuation, u'.'),
+        (Name, u'Page'),
+        (Punctuation, u'<'),
+        (Name, u'T'),
+        (Punctuation, u'>'),
+        (Text, u' '),
+        (Punctuation, u'{'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_should_cope_with_multiline_comments(lexer):
+    fragment = u'"""\nthis\nis\na\ncomment"""'
+    tokens = [
+        (String, u'"""\nthis\nis\na\ncomment"""'),
+        (Text, u'\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
index aa4ac3bbef1df9049d9418d3ed91374e87ec1a10..7ab0d7d079926d039bada240642ebf5f6417eb4e 100644 (file)
 from __future__ import print_function
 
 import os
-import unittest
 import tempfile
+from os import path
+
+import pytest
 
 from pygments.formatters import LatexFormatter
 from pygments.lexers import PythonLexer
 
-import support
-
-TESTFILE, TESTDIR = support.location(__file__)
-
-
-class LatexFormatterTest(unittest.TestCase):
-
-    def test_valid_output(self):
-        with open(TESTFILE) as fp:
-            tokensource = list(PythonLexer().get_tokens(fp.read()))
-        fmt = LatexFormatter(full=True, encoding='latin1')
-
-        handle, pathname = tempfile.mkstemp('.tex')
-        # place all output files in /tmp too
-        old_wd = os.getcwd()
-        os.chdir(os.path.dirname(pathname))
-        tfile = os.fdopen(handle, 'wb')
-        fmt.format(tokensource, tfile)
-        tfile.close()
-        try:
-            import subprocess
-            po = subprocess.Popen(['latex', '-interaction=nonstopmode',
-                                   pathname], stdout=subprocess.PIPE)
-            ret = po.wait()
-            output = po.stdout.read()
-            po.stdout.close()
-        except OSError as e:
-            # latex not available
-            raise support.SkipTest(e)
-        else:
-            if ret:
-                print(output)
-            self.assertFalse(ret, 'latex run reported errors')
-
-        os.unlink(pathname)
-        os.chdir(old_wd)
+TESTDIR = path.dirname(path.abspath(__file__))
+TESTFILE = path.join(TESTDIR, 'test_latex_formatter.py')
+
+
+def test_valid_output():
+    with open(TESTFILE) as fp:
+        tokensource = list(PythonLexer().get_tokens(fp.read()))
+    fmt = LatexFormatter(full=True, encoding='latin1')
+
+    handle, pathname = tempfile.mkstemp('.tex')
+    # place all output files in /tmp too
+    old_wd = os.getcwd()
+    os.chdir(os.path.dirname(pathname))
+    tfile = os.fdopen(handle, 'wb')
+    fmt.format(tokensource, tfile)
+    tfile.close()
+    try:
+        import subprocess
+        po = subprocess.Popen(['latex', '-interaction=nonstopmode',
+                               pathname], stdout=subprocess.PIPE)
+        ret = po.wait()
+        output = po.stdout.read()
+        po.stdout.close()
+    except OSError as e:
+        # latex not available
+        pytest.skip(str(e))
+    else:
+        if ret:
+            print(output)
+        assert not ret, 'latex run reported errors'
+
+    os.unlink(pathname)
+    os.chdir(old_wd)
index 8d53c542914423a7c78af6488ab960629e6df711..3e8d3fc16b69bdb63d41dec485dc3989e0310444 100644 (file)
@@ -6,75 +6,65 @@
     :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
+
 import glob
 import os
-import unittest
+
+import pytest
 
 from pygments.lexers import guess_lexer
 from pygments.lexers.scripting import EasytrieveLexer, JclLexer, RexxLexer
 
 
-def _exampleFilePath(filename):
+def _example_file_path(filename):
     return os.path.join(os.path.dirname(__file__), 'examplefiles', filename)
 
 
-class AnalyseTextTest(unittest.TestCase):
-    def _testCanRecognizeAndGuessExampleFiles(self, lexer):
-        assert lexer is not None
-
-        for pattern in lexer.filenames:
-            exampleFilesPattern = _exampleFilePath(pattern)
-            for exampleFilePath in glob.glob(exampleFilesPattern):
-                with open(exampleFilePath, 'rb') as fp:
-                    text = fp.read().decode('utf-8')
-                probability = lexer.analyse_text(text)
-                self.assertTrue(probability > 0,
-                                '%s must recognize %r' % (
-                                    lexer.name, exampleFilePath))
-                guessedLexer = guess_lexer(text)
-                self.assertEqual(guessedLexer.name, lexer.name)
-
-    def testCanRecognizeAndGuessExampleFiles(self):
-        LEXERS_TO_TEST = [
-            EasytrieveLexer,
-            JclLexer,
-            RexxLexer,
-        ]
-        for lexerToTest in LEXERS_TO_TEST:
-            self._testCanRecognizeAndGuessExampleFiles(lexerToTest)
+@pytest.mark.parametrize('lexer', [
+    EasytrieveLexer,
+    JclLexer,
+    RexxLexer,
+])
+def test_can_recognize_and_guess_example_files(lexer):
+    for pattern in lexer.filenames:
+        exampleFilesPattern = _example_file_path(pattern)
+        for exampleFilePath in glob.glob(exampleFilesPattern):
+            with open(exampleFilePath, 'rb') as fp:
+                text = fp.read().decode('utf-8')
+            probability = lexer.analyse_text(text)
+            assert probability > 0, '%s must recognize %r' % (
+                lexer.name, exampleFilePath)
+            guessedLexer = guess_lexer(text)
+            assert guessedLexer.name == lexer.name
 
 
-class EasyTrieveLexerTest(unittest.TestCase):
-    def testCanGuessFromText(self):
-        self.assertTrue(EasytrieveLexer.analyse_text('MACRO'))
-        self.assertTrue(EasytrieveLexer.analyse_text('\nMACRO'))
-        self.assertTrue(EasytrieveLexer.analyse_text(' \nMACRO'))
-        self.assertTrue(EasytrieveLexer.analyse_text(' \n MACRO'))
-        self.assertTrue(EasytrieveLexer.analyse_text('*\nMACRO'))
-        self.assertTrue(EasytrieveLexer.analyse_text(
-            '*\n *\n\n \n*\n MACRO'))
+def test_easytrieve_can_guess_from_text():
+    assert EasytrieveLexer.analyse_text('MACRO')
+    assert EasytrieveLexer.analyse_text('\nMACRO')
+    assert EasytrieveLexer.analyse_text(' \nMACRO')
+    assert EasytrieveLexer.analyse_text(' \n MACRO')
+    assert EasytrieveLexer.analyse_text('*\nMACRO')
+    assert EasytrieveLexer.analyse_text('*\n *\n\n \n*\n MACRO')
 
 
-class RexxLexerTest(unittest.TestCase):
-    def testCanGuessFromText(self):
-        self.assertAlmostEqual(0.01, RexxLexer.analyse_text('/* */'))
-        self.assertAlmostEqual(1.0,
-                               RexxLexer.analyse_text('''/* Rexx */
-                say "hello world"'''))
-        val = RexxLexer.analyse_text('/* */\n'
-                                     'hello:pRoceduRe\n'
-                                     '  say "hello world"')
-        self.assertTrue(val > 0.5, val)
-        val = RexxLexer.analyse_text('''/* */
-                if 1 > 0 then do
-                    say "ok"
-                end
-                else do
-                    say "huh?"
-                end''')
-        self.assertTrue(val > 0.2, val)
-        val = RexxLexer.analyse_text('''/* */
-                greeting = "hello world!"
-                parse value greeting "hello" name "!"
-                say name''')
-        self.assertTrue(val > 0.2, val)
+def test_rexx_can_guess_from_text():
+    assert RexxLexer.analyse_text('/* */') == pytest.approx(0.01)
+    assert RexxLexer.analyse_text('''/* Rexx */
+            say "hello world"''') == pytest.approx(1.0)
+    val = RexxLexer.analyse_text('/* */\n'
+                                 'hello:pRoceduRe\n'
+                                 '  say "hello world"')
+    assert val > 0.5
+    val = RexxLexer.analyse_text('''/* */
+            if 1 > 0 then do
+                say "ok"
+            end
+            else do
+                say "huh?"
+            end''')
+    assert val > 0.2
+    val = RexxLexer.analyse_text('''/* */
+            greeting = "hello world!"
+            parse value greeting "hello" name "!"
+            say name''')
+    assert val > 0.2
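
The numbers asserted above are confidence scores returned by analyse_text, which guess_lexer uses to rank candidate lexers. A standalone check of the strongest case, using the same snippet as the test:

    from pygments.lexers.scripting import RexxLexer

    score = RexxLexer.analyse_text('/* Rexx */\n        say "hello world"')
    print(score)  # asserted above to be approximately 1.0
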
index c143586a259c8eb3779f56507b34e538b132be44..9024bf07c42a15c91a6ec984c88b424428da4d54 100644 (file)
@@ -1,31 +1,36 @@
 # -*- coding: utf-8 -*-
 """
-    Pygments regex lexer tests
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~
+    Pygments Markdown lexer tests
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
     :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
-import unittest
+
+import pytest
 
 from pygments.lexers.markup import MarkdownLexer
 
 
-class SameTextTests(unittest.TestCase):
+@pytest.fixture(scope='module')
+def lexer():
+    yield MarkdownLexer()
+
+
+def assert_same_text(lexer, text):
+    """Show that lexed markdown does not remove any content. """
+    tokens = list(lexer.get_tokens_unprocessed(text))
+    output = ''.join(t[2] for t in tokens)
+    assert text == output
+
 
-    lexer = MarkdownLexer()
+def test_code_fence(lexer):
+    assert_same_text(lexer, r'```\nfoo\n```\n')
 
-    def assert_same_text(self, text):
-        """Show that lexed markdown does not remove any content. """
-        tokens = list(self.lexer.get_tokens_unprocessed(text))
-        output = ''.join(t[2] for t in tokens)
-        self.assertEqual(text, output)
 
-    def test_code_fence(self):
-        self.assert_same_text(r'```\nfoo\n```\n')
+def test_code_fence_gsm(lexer):
+    assert_same_text(lexer, r'```markdown\nfoo\n```\n')
 
-    def test_code_fence_gsm(self):
-        self.assert_same_text(r'```markdown\nfoo\n```\n')
 
-    def test_code_fence_gsm_with_no_lexer(self):
-        self.assert_same_text(r'```invalid-lexer\nfoo\n```\n')
+def test_code_fence_gsm_with_no_lexer(lexer):
+    assert_same_text(lexer, r'```invalid-lexer\nfoo\n```\n')
index 6e1f16a4f453aa398729cdd2077c531582048792..b1206949062c9db1e7ad1946cf63f7ee60887c76 100644 (file)
@@ -12,10 +12,7 @@ from __future__ import print_function
 from pygments import modeline
 
 
-def test_lexer_classes():
-    def verify(buf):
-        assert modeline.get_filetype_from_buffer(buf) == 'python'
-
+def test_modelines():
     for buf in [
             'vi: ft=python' + '\n' * 8,
             'vi: ft=python' + '\n' * 8,
@@ -23,4 +20,4 @@ def test_lexer_classes():
             '\n' * 8 + 'ex: filetype=python',
             '\n' * 8 + 'vim: some,other,syn=python\n\n\n\n'
     ]:
-        yield verify, buf
+        assert modeline.get_filetype_from_buffer(buf) == 'python'
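
get_filetype_from_buffer looks for vi/vim/ex modelines near the start or end of a buffer and returns the declared filetype, which is why every buffer in the loop above resolves to 'python'. A one-off call with the first of those buffers:

    from pygments import modeline

    buf = 'vi: ft=python' + '\n' * 8      # same buffer as in the loop above
    print(modeline.get_filetype_from_buffer(buf))  # prints: python
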
index 3db6a9e0068d34578c501026439aaa47d58b5d62..54f31db404ec0ae514d5ba9d6b5094f108520ece 100644 (file)
@@ -7,86 +7,90 @@
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
-import os
+import pytest
 
 from pygments.token import Token
 from pygments.lexers import ObjectiveCLexer
 
 
-class ObjectiveCLexerTest(unittest.TestCase):
-
-    def setUp(self):
-        self.lexer = ObjectiveCLexer()
-
-    def testLiteralNumberInt(self):
-        fragment = u'@(1);\n'
-        expected = [
-            (Token.Literal, u'@('),
-            (Token.Literal.Number.Integer, u'1'),
-            (Token.Literal, u')'),
-            (Token.Punctuation, u';'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
-
-    def testLiteralNumberExpression(self):
-        fragment = u'@(1+2);\n'
-        expected = [
-            (Token.Literal, u'@('),
-            (Token.Literal.Number.Integer, u'1'),
-            (Token.Operator, u'+'),
-            (Token.Literal.Number.Integer, u'2'),
-            (Token.Literal, u')'),
-            (Token.Punctuation, u';'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
-
-    def testLiteralNumberNestedExpression(self):
-        fragment = u'@(1+(2+3));\n'
-        expected = [
-            (Token.Literal, u'@('),
-            (Token.Literal.Number.Integer, u'1'),
-            (Token.Operator, u'+'),
-            (Token.Punctuation, u'('),
-            (Token.Literal.Number.Integer, u'2'),
-            (Token.Operator, u'+'),
-            (Token.Literal.Number.Integer, u'3'),
-            (Token.Punctuation, u')'),
-            (Token.Literal, u')'),
-            (Token.Punctuation, u';'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
-
-    def testLiteralNumberBool(self):
-        fragment = u'@NO;\n'
-        expected = [
-            (Token.Literal.Number, u'@NO'),
-            (Token.Punctuation, u';'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
-
-    def testLiteralNumberBoolExpression(self):
-        fragment = u'@(YES);\n'
-        expected = [
-            (Token.Literal, u'@('),
-            (Token.Name.Builtin, u'YES'),
-            (Token.Literal, u')'),
-            (Token.Punctuation, u';'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
-
-    def testModuleImport(self):
-        fragment = u'@import ModuleA;\n'
-        expected = [
-            (Token.Keyword, u'@import'),
-            (Token.Text, u' '),
-            (Token.Name, u'ModuleA'),
-            (Token.Punctuation, u';'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
+@pytest.fixture(scope='module')
+def lexer():
+    yield ObjectiveCLexer()
+
+
+def test_literal_number_int(lexer):
+    fragment = u'@(1);\n'
+    expected = [
+        (Token.Literal, u'@('),
+        (Token.Literal.Number.Integer, u'1'),
+        (Token.Literal, u')'),
+        (Token.Punctuation, u';'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == expected
+
+
+def test_literal_number_expression(lexer):
+    fragment = u'@(1+2);\n'
+    expected = [
+        (Token.Literal, u'@('),
+        (Token.Literal.Number.Integer, u'1'),
+        (Token.Operator, u'+'),
+        (Token.Literal.Number.Integer, u'2'),
+        (Token.Literal, u')'),
+        (Token.Punctuation, u';'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == expected
+
+
+def test_literal_number_nested_expression(lexer):
+    fragment = u'@(1+(2+3));\n'
+    expected = [
+        (Token.Literal, u'@('),
+        (Token.Literal.Number.Integer, u'1'),
+        (Token.Operator, u'+'),
+        (Token.Punctuation, u'('),
+        (Token.Literal.Number.Integer, u'2'),
+        (Token.Operator, u'+'),
+        (Token.Literal.Number.Integer, u'3'),
+        (Token.Punctuation, u')'),
+        (Token.Literal, u')'),
+        (Token.Punctuation, u';'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == expected
+
+
+def test_literal_number_bool(lexer):
+    fragment = u'@NO;\n'
+    expected = [
+        (Token.Literal.Number, u'@NO'),
+        (Token.Punctuation, u';'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == expected
+
+
+def test_literal_number_bool_expression(lexer):
+    fragment = u'@(YES);\n'
+    expected = [
+        (Token.Literal, u'@('),
+        (Token.Name.Builtin, u'YES'),
+        (Token.Literal, u')'),
+        (Token.Punctuation, u';'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == expected
+
+
+def test_module_import(lexer):
+    fragment = u'@import ModuleA;\n'
+    expected = [
+        (Token.Keyword, u'@import'),
+        (Token.Text, u' '),
+        (Token.Name, u'ModuleA'),
+        (Token.Punctuation, u';'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == expected
index 30f9eca8097115e6019e4eea42af9a7c7162dab6..8849bacf045366356d1b7be7241e83c4f622715d 100644 (file)
 """
 
 import time
-import unittest
+
+import pytest
 
 from pygments.token import Keyword, Name, String, Text
 from pygments.lexers.perl import PerlLexer
 
 
-class RunawayRegexTest(unittest.TestCase):
-    # A previous version of the Perl lexer would spend a great deal of
-    # time backtracking when given particular strings.  These tests show that
-    # the runaway backtracking doesn't happen any more (at least for the given
-    # cases).
+@pytest.fixture(scope='module')
+def lexer():
+    yield PerlLexer()
+
+
+# Test runaway regexes.
+# A previous version of the Perl lexer would spend a great deal of
+# time backtracking when given particular strings.  These tests show that
+# the runaway backtracking doesn't happen any more (at least for the given
+# cases).
+
+
+# Test helpers.
+
+def assert_single_token(lexer, s, token):
+    """Show that a given string generates only one token."""
+    tokens = list(lexer.get_tokens_unprocessed(s))
+    assert len(tokens) == 1
+    assert s == tokens[0][2]
+    assert token == tokens[0][1]
+
+
+def assert_tokens(lexer, strings, expected_tokens):
+    """Show that a given string generates the expected tokens."""
+    tokens = list(lexer.get_tokens_unprocessed(''.join(strings)))
+    assert len(tokens) == len(expected_tokens)
+    for index, s in enumerate(strings):
+        assert s == tokens[index][2]
+        assert expected_tokens[index] == tokens[index][1]
+
+
+def assert_fast_tokenization(lexer, s):
+    """Show that a given string is tokenized quickly."""
+    start = time.time()
+    tokens = list(lexer.get_tokens_unprocessed(s))
+    end = time.time()
+    # Isn't 10 seconds kind of a long time?  Yes, but we don't want false
+    # positives when the tests are starved for CPU time.
+    if end-start > 10:
+        pytest.fail('tokenization took too long')
+    return tokens
+
+
+# Strings.
+
+def test_single_quote_strings(lexer):
+    assert_single_token(lexer, r"'foo\tbar\\\'baz'", String)
+    assert_fast_tokenization(lexer, "'" + '\\'*999)
+
+
+def test_double_quote_strings(lexer):
+    assert_single_token(lexer, r'"foo\tbar\\\"baz"', String)
+    assert_fast_tokenization(lexer, '"' + '\\'*999)
+
+
+def test_backtick_strings(lexer):
+    assert_single_token(lexer, r'`foo\tbar\\\`baz`', String.Backtick)
+    assert_fast_tokenization(lexer, '`' + '\\'*999)
+
+
+# Regex matches with various delimiters.
+
+def test_match(lexer):
+    assert_single_token(lexer, r'/aa\tbb/', String.Regex)
+    assert_fast_tokenization(lexer, '/' + '\\'*999)
+
+
+def test_match_with_slash(lexer):
+    assert_tokens(lexer, ['m', '/\n\\t\\\\/'], [String.Regex, String.Regex])
+    assert_fast_tokenization(lexer, 'm/xxx\n' + '\\'*999)
+
+
+def test_match_with_bang(lexer):
+    assert_tokens(lexer, ['m', r'!aa\t\!bb!'], [String.Regex, String.Regex])
+    assert_fast_tokenization(lexer, 'm!' + '\\'*999)
+
+
+def test_match_with_brace(lexer):
+    assert_tokens(lexer, ['m', r'{aa\t\}bb}'], [String.Regex, String.Regex])
+    assert_fast_tokenization(lexer, 'm{' + '\\'*999)
+
+
+def test_match_with_angle_brackets(lexer):
+    assert_tokens(lexer, ['m', r'<aa\t\>bb>'], [String.Regex, String.Regex])
+    assert_fast_tokenization(lexer, 'm<' + '\\'*999)
+
+
+def test_match_with_parenthesis(lexer):
+    assert_tokens(lexer, ['m', r'(aa\t\)bb)'], [String.Regex, String.Regex])
+    assert_fast_tokenization(lexer, 'm(' + '\\'*999)
 
-    lexer = PerlLexer()
 
-    ### Test helpers.
+def test_match_with_at_sign(lexer):
+    assert_tokens(lexer, ['m', r'@aa\t\@bb@'], [String.Regex, String.Regex])
+    assert_fast_tokenization(lexer, 'm@' + '\\'*999)
 
-    def assert_single_token(self, s, token):
-        """Show that a given string generates only one token."""
-        tokens = list(self.lexer.get_tokens_unprocessed(s))
-        self.assertEqual(len(tokens), 1, tokens)
-        self.assertEqual(s, tokens[0][2])
-        self.assertEqual(token, tokens[0][1])
 
-    def assert_tokens(self, strings, expected_tokens):
-        """Show that a given string generates the expected tokens."""
-        tokens = list(self.lexer.get_tokens_unprocessed(''.join(strings)))
-        self.assertEqual(len(tokens), len(expected_tokens), tokens)
-        for index, s in enumerate(strings):
-            self.assertEqual(s, tokens[index][2])
-            self.assertEqual(expected_tokens[index], tokens[index][1])
+def test_match_with_percent_sign(lexer):
+    assert_tokens(lexer, ['m', r'%aa\t\%bb%'], [String.Regex, String.Regex])
+    assert_fast_tokenization(lexer, 'm%' + '\\'*999)
 
-    def assert_fast_tokenization(self, s):
-        """Show that a given string is tokenized quickly."""
-        start = time.time()
-        tokens = list(self.lexer.get_tokens_unprocessed(s))
-        end = time.time()
-        # Isn't 10 seconds kind of a long time?  Yes, but we don't want false
-        # positives when the tests are starved for CPU time.
-        if end-start > 10:
-            self.fail('tokenization took too long')
-        return tokens
 
-    ### Strings.
+def test_match_with_dollar_sign(lexer):
+    assert_tokens(lexer, ['m', r'$aa\t\$bb$'], [String.Regex, String.Regex])
+    assert_fast_tokenization(lexer, 'm$' + '\\'*999)
 
-    def test_single_quote_strings(self):
-        self.assert_single_token(r"'foo\tbar\\\'baz'", String)
-        self.assert_fast_tokenization("'" + '\\'*999)
 
-    def test_double_quote_strings(self):
-        self.assert_single_token(r'"foo\tbar\\\"baz"', String)
-        self.assert_fast_tokenization('"' + '\\'*999)
+# Regex substitutions with various delimiters.
 
-    def test_backtick_strings(self):
-        self.assert_single_token(r'`foo\tbar\\\`baz`', String.Backtick)
-        self.assert_fast_tokenization('`' + '\\'*999)
+def test_substitution_with_slash(lexer):
+    assert_single_token(lexer, 's/aaa/bbb/g', String.Regex)
+    assert_fast_tokenization(lexer, 's/foo/' + '\\'*999)
 
-    ### Regex matches with various delimiters.
 
-    def test_match(self):
-        self.assert_single_token(r'/aa\tbb/', String.Regex)
-        self.assert_fast_tokenization('/' + '\\'*999)
+def test_substitution_with_at_sign(lexer):
+    assert_single_token(lexer, r's@aaa@bbb@g', String.Regex)
+    assert_fast_tokenization(lexer, 's@foo@' + '\\'*999)
 
-    def test_match_with_slash(self):
-        self.assert_tokens(['m', '/\n\\t\\\\/'], [String.Regex, String.Regex])
-        self.assert_fast_tokenization('m/xxx\n' + '\\'*999)
 
-    def test_match_with_bang(self):
-        self.assert_tokens(['m', r'!aa\t\!bb!'], [String.Regex, String.Regex])
-        self.assert_fast_tokenization('m!' + '\\'*999)
+def test_substitution_with_percent_sign(lexer):
+    assert_single_token(lexer, r's%aaa%bbb%g', String.Regex)
+    assert_fast_tokenization(lexer, 's%foo%' + '\\'*999)
 
-    def test_match_with_brace(self):
-        self.assert_tokens(['m', r'{aa\t\}bb}'], [String.Regex, String.Regex])
-        self.assert_fast_tokenization('m{' + '\\'*999)
 
-    def test_match_with_angle_brackets(self):
-        self.assert_tokens(['m', r'<aa\t\>bb>'], [String.Regex, String.Regex])
-        self.assert_fast_tokenization('m<' + '\\'*999)
+def test_substitution_with_brace(lexer):
+    assert_single_token(lexer, r's{aaa}', String.Regex)
+    assert_fast_tokenization(lexer, 's{' + '\\'*999)
 
-    def test_match_with_parenthesis(self):
-        self.assert_tokens(['m', r'(aa\t\)bb)'], [String.Regex, String.Regex])
-        self.assert_fast_tokenization('m(' + '\\'*999)
 
-    def test_match_with_at_sign(self):
-        self.assert_tokens(['m', r'@aa\t\@bb@'], [String.Regex, String.Regex])
-        self.assert_fast_tokenization('m@' + '\\'*999)
+def test_substitution_with_angle_bracket(lexer):
+    assert_single_token(lexer, r's<aaa>', String.Regex)
+    assert_fast_tokenization(lexer, 's<' + '\\'*999)
 
-    def test_match_with_percent_sign(self):
-        self.assert_tokens(['m', r'%aa\t\%bb%'], [String.Regex, String.Regex])
-        self.assert_fast_tokenization('m%' + '\\'*999)
 
-    def test_match_with_dollar_sign(self):
-        self.assert_tokens(['m', r'$aa\t\$bb$'], [String.Regex, String.Regex])
-        self.assert_fast_tokenization('m$' + '\\'*999)
+def test_substitution_with_square_bracket(lexer):
+    assert_single_token(lexer, r's[aaa]', String.Regex)
+    assert_fast_tokenization(lexer, 's[' + '\\'*999)
 
-    ### Regex substitutions with various delimeters.
 
-    def test_substitution_with_slash(self):
-        self.assert_single_token('s/aaa/bbb/g', String.Regex)
-        self.assert_fast_tokenization('s/foo/' + '\\'*999)
+def test_substitution_with_parenthesis(lexer):
+    assert_single_token(lexer, r's(aaa)', String.Regex)
+    assert_fast_tokenization(lexer, 's(' + '\\'*999)
 
-    def test_substitution_with_at_sign(self):
-        self.assert_single_token(r's@aaa@bbb@g', String.Regex)
-        self.assert_fast_tokenization('s@foo@' + '\\'*999)
 
-    def test_substitution_with_percent_sign(self):
-        self.assert_single_token(r's%aaa%bbb%g', String.Regex)
-        self.assert_fast_tokenization('s%foo%' + '\\'*999)
-
-    def test_substitution_with_brace(self):
-        self.assert_single_token(r's{aaa}', String.Regex)
-        self.assert_fast_tokenization('s{' + '\\'*999)
+# Namespaces/modules
 
-    def test_substitution_with_angle_bracket(self):
-        self.assert_single_token(r's<aaa>', String.Regex)
-        self.assert_fast_tokenization('s<' + '\\'*999)
-
-    def test_substitution_with_angle_bracket(self):
-        self.assert_single_token(r's<aaa>', String.Regex)
-        self.assert_fast_tokenization('s<' + '\\'*999)
-
-    def test_substitution_with_square_bracket(self):
-        self.assert_single_token(r's[aaa]', String.Regex)
-        self.assert_fast_tokenization('s[' + '\\'*999)
-
-    def test_substitution_with_parenthesis(self):
-        self.assert_single_token(r's(aaa)', String.Regex)
-        self.assert_fast_tokenization('s(' + '\\'*999)
+def test_package_statement(lexer):
+    assert_tokens(lexer, ['package', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
+    assert_tokens(lexer, ['package', '  ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
 
-    ### Namespaces/modules
 
-    def test_package_statement(self):
-        self.assert_tokens(['package', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
-        self.assert_tokens(['package', '  ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
+def test_use_statement(lexer):
+    assert_tokens(lexer, ['use', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
+    assert_tokens(lexer, ['use', '  ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
 
-    def test_use_statement(self):
-        self.assert_tokens(['use', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
-        self.assert_tokens(['use', '  ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
 
-    def test_no_statement(self):
-        self.assert_tokens(['no', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
-        self.assert_tokens(['no', '  ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
+def test_no_statement(lexer):
+    assert_tokens(lexer, ['no', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
+    assert_tokens(lexer, ['no', '  ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
 
-    def test_require_statement(self):
-        self.assert_tokens(['require', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
-        self.assert_tokens(['require', '  ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
-        self.assert_tokens(['require', ' ', '"Foo/Bar.pm"'], [Keyword, Text, String])
 
+def test_require_statement(lexer):
+    assert_tokens(lexer, ['require', ' ', 'Foo'], [Keyword, Text, Name.Namespace])
+    assert_tokens(lexer, ['require', '  ', 'Foo::Bar'], [Keyword, Text, Name.Namespace])
+    assert_tokens(lexer, ['require', ' ', '"Foo/Bar.pm"'], [Keyword, Text, String])
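
The fast-tokenization cases above all feed the lexer a delimiter followed by hundreds of backslashes, the shape of input that used to trigger runaway backtracking. A standalone timing sketch of one such case, using the same 10 second bound as assert_fast_tokenization:

    import time
    from pygments.lexers.perl import PerlLexer

    pathological = "'" + '\\' * 999       # an unterminated string made of escapes
    start = time.time()
    list(PerlLexer().get_tokens_unprocessed(pathological))
    elapsed = time.time() - start
    assert elapsed < 10, 'tokenization took too long (%.1fs)' % elapsed
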
index bb047b91b81e9c9c9d94e28d29344788baceb524..1660183a03918cca8158739d9ebaca482d037e97 100644 (file)
@@ -7,30 +7,31 @@
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
 from pygments.lexers import PhpLexer
 from pygments.token import Token
 
 
-class PhpTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = PhpLexer()
+@pytest.fixture(scope='module')
+def lexer():
+    yield PhpLexer()
 
-    def testStringEscapingRun(self):
-        fragment = '<?php $x="{\\""; ?>\n'
-        tokens = [
-            (Token.Comment.Preproc, '<?php'),
-            (Token.Text, ' '),
-            (Token.Name.Variable, '$x'),
-            (Token.Operator, '='),
-            (Token.Literal.String.Double, '"'),
-            (Token.Literal.String.Double, '{'),
-            (Token.Literal.String.Escape, '\\"'),
-            (Token.Literal.String.Double, '"'),
-            (Token.Punctuation, ';'),
-            (Token.Text, ' '),
-            (Token.Comment.Preproc, '?>'),
-            (Token.Other, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+def test_string_escaping_run(lexer):
+    fragment = '<?php $x="{\\""; ?>\n'
+    tokens = [
+        (Token.Comment.Preproc, '<?php'),
+        (Token.Text, ' '),
+        (Token.Name.Variable, '$x'),
+        (Token.Operator, '='),
+        (Token.Literal.String.Double, '"'),
+        (Token.Literal.String.Double, '{'),
+        (Token.Literal.String.Escape, '\\"'),
+        (Token.Literal.String.Double, '"'),
+        (Token.Punctuation, ';'),
+        (Token.Text, ' '),
+        (Token.Comment.Preproc, '?>'),
+        (Token.Other, '\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
index 9bf3ce76fc2ec3f2d5a043a9f0a9989d14bb534d..61ddfd57de1b43b48064afa85a57436ab360696d 100644 (file)
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
 from pygments.token import Token
 from pygments.lexers import PraatLexer
 
-class PraatTest(unittest.TestCase):
-
-    def setUp(self):
-        self.lexer = PraatLexer()
-        self.maxDiff = None
-
-    def testNumericAssignment(self):
-        fragment = u'var = -15e4\n'
-        tokens = [
-            (Token.Text, u'var'),
-            (Token.Text, u' '),
-            (Token.Operator, u'='),
-            (Token.Text, u' '),
-            (Token.Operator, u'-'),
-            (Token.Literal.Number, u'15e4'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testStringAssignment(self):
-        fragment = u'var$ = "foo"\n'
-        tokens = [
-            (Token.Text, u'var$'),
-            (Token.Text, u' '),
-            (Token.Operator, u'='),
-            (Token.Text, u' '),
-            (Token.Literal.String, u'"'),
-            (Token.Literal.String, u'foo'),
-            (Token.Literal.String, u'"'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testStringEscapedQuotes(self):
-        fragment = u'"it said ""foo"""\n'
-        tokens = [
-            (Token.Literal.String, u'"'),
-            (Token.Literal.String, u'it said '),
-            (Token.Literal.String, u'"'),
-            (Token.Literal.String, u'"'),
-            (Token.Literal.String, u'foo'),
-            (Token.Literal.String, u'"'),
-            (Token.Literal.String, u'"'),
-            (Token.Literal.String, u'"'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testFunctionCall(self):
-        fragment = u'selected("Sound", i+(a*b))\n'
-        tokens = [
-            (Token.Name.Function, u'selected'),
-            (Token.Punctuation, u'('),
-            (Token.Literal.String, u'"'),
-            (Token.Literal.String, u'Sound'),
-            (Token.Literal.String, u'"'),
-            (Token.Punctuation, u','),
-            (Token.Text, u' '),
-            (Token.Text, u'i'),
-            (Token.Operator, u'+'),
-            (Token.Text, u'('),
-            (Token.Text, u'a'),
-            (Token.Operator, u'*'),
-            (Token.Text, u'b'),
-            (Token.Text, u')'),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testBrokenUnquotedString(self):
-        fragment = u'printline string\n... \'interpolated\' string\n'
-        tokens = [
-            (Token.Keyword, u'printline'),
-            (Token.Text, u' '),
-            (Token.Literal.String, u'string'),
-            (Token.Text, u'\n'),
-            (Token.Punctuation, u'...'),
-            (Token.Text, u' '),
-            (Token.Literal.String.Interpol, u"'"),
-            (Token.Literal.String.Interpol, u'interpolated'),
-            (Token.Literal.String.Interpol, u"'"),
-            (Token.Text, u' '),
-            (Token.Literal.String, u'string'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testInlinIf(self):
-        fragment = u'var = if true == 1 then -1 else 0 fi'
-        tokens = [
-            (Token.Text, u'var'),
-            (Token.Text, u' '),
-            (Token.Operator, u'='),
-            (Token.Text, u' '),
-            (Token.Keyword, u'if'),
-            (Token.Text, u' '),
-            (Token.Text, u'true'),
-            (Token.Text, u' '),
-            (Token.Operator, u'=='),
-            (Token.Text, u' '),
-            (Token.Literal.Number, u'1'),
-            (Token.Text, u' '),
-            (Token.Keyword, u'then'),
-            (Token.Text, u' '),
-            (Token.Operator, u'-'),
-            (Token.Literal.Number, u'1'),
-            (Token.Text, u' '),
-            (Token.Keyword, u'else'),
-            (Token.Text, u' '),
-            (Token.Literal.Number, u'0'),
-            (Token.Text, u' '),
-            (Token.Keyword, u'fi'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+@pytest.fixture(scope='module')
+def lexer():
+    yield PraatLexer()
+
+
+def test_numeric_assignment(lexer):
+    fragment = u'var = -15e4\n'
+    tokens = [
+        (Token.Text, u'var'),
+        (Token.Text, u' '),
+        (Token.Operator, u'='),
+        (Token.Text, u' '),
+        (Token.Operator, u'-'),
+        (Token.Literal.Number, u'15e4'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_string_assignment(lexer):
+    fragment = u'var$ = "foo"\n'
+    tokens = [
+        (Token.Text, u'var$'),
+        (Token.Text, u' '),
+        (Token.Operator, u'='),
+        (Token.Text, u' '),
+        (Token.Literal.String, u'"'),
+        (Token.Literal.String, u'foo'),
+        (Token.Literal.String, u'"'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_string_escaped_quotes(lexer):
+    fragment = u'"it said ""foo"""\n'
+    tokens = [
+        (Token.Literal.String, u'"'),
+        (Token.Literal.String, u'it said '),
+        (Token.Literal.String, u'"'),
+        (Token.Literal.String, u'"'),
+        (Token.Literal.String, u'foo'),
+        (Token.Literal.String, u'"'),
+        (Token.Literal.String, u'"'),
+        (Token.Literal.String, u'"'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_function_call(lexer):
+    fragment = u'selected("Sound", i+(a*b))\n'
+    tokens = [
+        (Token.Name.Function, u'selected'),
+        (Token.Punctuation, u'('),
+        (Token.Literal.String, u'"'),
+        (Token.Literal.String, u'Sound'),
+        (Token.Literal.String, u'"'),
+        (Token.Punctuation, u','),
+        (Token.Text, u' '),
+        (Token.Text, u'i'),
+        (Token.Operator, u'+'),
+        (Token.Text, u'('),
+        (Token.Text, u'a'),
+        (Token.Operator, u'*'),
+        (Token.Text, u'b'),
+        (Token.Text, u')'),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_broken_unquoted_string(lexer):
+    fragment = u'printline string\n... \'interpolated\' string\n'
+    tokens = [
+        (Token.Keyword, u'printline'),
+        (Token.Text, u' '),
+        (Token.Literal.String, u'string'),
+        (Token.Text, u'\n'),
+        (Token.Punctuation, u'...'),
+        (Token.Text, u' '),
+        (Token.Literal.String.Interpol, u"'interpolated'"),
+        (Token.Text, u' '),
+        (Token.Literal.String, u'string'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_inline_if(lexer):
+    fragment = u'var = if true == 1 then -1 else 0 fi'
+    tokens = [
+        (Token.Text, u'var'),
+        (Token.Text, u' '),
+        (Token.Operator, u'='),
+        (Token.Text, u' '),
+        (Token.Keyword, u'if'),
+        (Token.Text, u' '),
+        (Token.Text, u'true'),
+        (Token.Text, u' '),
+        (Token.Operator, u'=='),
+        (Token.Text, u' '),
+        (Token.Literal.Number, u'1'),
+        (Token.Text, u' '),
+        (Token.Keyword, u'then'),
+        (Token.Text, u' '),
+        (Token.Operator, u'-'),
+        (Token.Literal.Number, u'1'),
+        (Token.Text, u' '),
+        (Token.Keyword, u'else'),
+        (Token.Text, u' '),
+        (Token.Literal.Number, u'0'),
+        (Token.Text, u' '),
+        (Token.Keyword, u'fi'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+def test_interpolation_boundary(lexer):
+    fragment = u'"\'" + "\'"'
+    tokens = [
+        (Token.Literal.String, u'"'),
+        (Token.Literal.String, u"'"),
+        (Token.Literal.String, u'"'),
+        (Token.Text, u' '),
+        (Token.Operator, u'+'),
+        (Token.Text, u' '),
+        (Token.Literal.String, u'"'),
+        (Token.Literal.String, u"'"),
+        (Token.Literal.String, u'"'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+def test_interpolated_numeric_indexed(lexer):
+    fragment = u"'a[3]'"
+    tokens = [
+        (Token.Literal.String.Interpol, u"'a[3]'"),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+def test_interpolated_numeric_hash(lexer):
+    fragment = u"'a[\"b\"]'"
+    tokens = [
+        (Token.Literal.String.Interpol, u"'a[\"b\"]'"),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+def test_interpolated_string_indexed(lexer):
+    fragment = u"'a$[3]'"
+    tokens = [
+        (Token.Literal.String.Interpol, u"'a$[3]'"),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+def test_interpolated_string_hash(lexer):
+    fragment = u"'a$[\"b\"]'"
+    tokens = [
+        (Token.Literal.String.Interpol, u"'a$[\"b\"]'"),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+def test_interpolated_numeric_with_precision(lexer):
+    fragment = u"'a:3'"
+    tokens = [
+        (Token.Literal.String.Interpol, u"'a:3'"),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+def test_interpolated_indexed_numeric_with_precision(lexer):
+    fragment = u"'a[3]:3'"
+    tokens = [
+        (Token.Literal.String.Interpol, u"'a[3]:3'"),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+def test_interpolated_local_numeric_with_precision(lexer):
+    fragment = u"'a.a:3'"
+    tokens = [
+        (Token.Literal.String.Interpol, u"'a.a:3'"),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
index aaa8ce29313d8777a5fd3c5cb8a25a211c63fef9..25368d93d3de98c7f632532f4c76d7be9370fe93 100644 (file)
@@ -7,83 +7,90 @@
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
 from pygments.lexers.configs import PropertiesLexer
 from pygments.token import Token
 
 
-class PropertiesTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = PropertiesLexer()
-
-    def test_comments(self):
-        """
-        Assures lines lead by either # or ! are recognized as a comment
-        """
-        fragment = '! a comment\n# also a comment\n'
-        tokens = [
-            (Token.Comment, '! a comment'),
-            (Token.Text, '\n'),
-            (Token.Comment, '# also a comment'),
-            (Token.Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def test_leading_whitespace_comments(self):
-        fragment = '    # comment\n'
-        tokens = [
-            (Token.Text, '    '),
-            (Token.Comment, '# comment'),
-            (Token.Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def test_escaped_space_in_key(self):
-        fragment = 'key = value\n'
-        tokens = [
-            (Token.Name.Attribute, 'key'),
-            (Token.Text, ' '),
-            (Token.Operator, '='),
-            (Token.Text, ' '),
-            (Token.Literal.String, 'value'),
-            (Token.Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def test_escaped_space_in_value(self):
-        fragment = 'key = doubleword\\ value\n'
-        tokens = [
-            (Token.Name.Attribute, 'key'),
-            (Token.Text, ' '),
-            (Token.Operator, '='),
-            (Token.Text, ' '),
-            (Token.Literal.String, 'doubleword\\ value'),
-            (Token.Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def test_space_delimited_kv_pair(self):
-        fragment = 'key value\n'
-        tokens = [
-            (Token.Name.Attribute, 'key'),
-            (Token.Text, ' '),
-            (Token.Literal.String, 'value\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def test_just_key(self):
-        fragment = 'justkey\n'
-        tokens = [
-            (Token.Name.Attribute, 'justkey'),
-            (Token.Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def test_just_key_with_space(self):
-        fragment = 'just\\ key\n'
-        tokens = [
-            (Token.Name.Attribute, 'just\\ key'),
-            (Token.Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+@pytest.fixture(scope='module')
+def lexer():
+    yield PropertiesLexer()
+
+
+def test_comments(lexer):
+    """
+    Asserts that lines led by either # or ! are recognized as comments.
+    """
+    fragment = '! a comment\n# also a comment\n'
+    tokens = [
+        (Token.Comment, '! a comment'),
+        (Token.Text, '\n'),
+        (Token.Comment, '# also a comment'),
+        (Token.Text, '\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_leading_whitespace_comments(lexer):
+    fragment = '    # comment\n'
+    tokens = [
+        (Token.Text, '    '),
+        (Token.Comment, '# comment'),
+        (Token.Text, '\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_escaped_space_in_key(lexer):
+    fragment = 'key = value\n'
+    tokens = [
+        (Token.Name.Attribute, 'key'),
+        (Token.Text, ' '),
+        (Token.Operator, '='),
+        (Token.Text, ' '),
+        (Token.Literal.String, 'value'),
+        (Token.Text, '\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_escaped_space_in_value(lexer):
+    fragment = 'key = doubleword\\ value\n'
+    tokens = [
+        (Token.Name.Attribute, 'key'),
+        (Token.Text, ' '),
+        (Token.Operator, '='),
+        (Token.Text, ' '),
+        (Token.Literal.String, 'doubleword\\ value'),
+        (Token.Text, '\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_space_delimited_kv_pair(lexer):
+    fragment = 'key value\n'
+    tokens = [
+        (Token.Name.Attribute, 'key'),
+        (Token.Text, ' '),
+        (Token.Literal.String, 'value\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_just_key(lexer):
+    fragment = 'justkey\n'
+    tokens = [
+        (Token.Name.Attribute, 'justkey'),
+        (Token.Text, '\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_just_key_with_space(lexer):
+    fragment = 'just\\ key\n'
+    tokens = [
+        (Token.Name.Attribute, 'just\\ key'),
+        (Token.Text, '\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
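Every test module touched by this patch follows the same unittest-to-pytest conversion: the TestCase subclass and its setUp() are replaced by a module-scoped fixture, and assertEqual calls become plain asserts. The following is a minimal standalone sketch of that pattern, reusing PropertiesLexer from the hunk above; it is illustrative only and not part of the patch itself.

import pytest

from pygments.lexers.configs import PropertiesLexer
from pygments.token import Token


@pytest.fixture(scope='module')
def lexer():
    # One lexer instance is created per test module and shared by all tests.
    yield PropertiesLexer()


def test_just_key_roundtrip(lexer):
    # Mirrors test_just_key above: the token values reassemble the input.
    fragment = 'justkey\n'
    assert list(lexer.get_tokens(fragment)) == [
        (Token.Name.Attribute, 'justkey'),
        (Token.Text, '\n'),
    ]
    assert ''.join(value for _, value in lexer.get_tokens(fragment)) == fragment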
index b9c6c49bf6b996836b09fc7bffb593e65ad2b11d..4e5d5bbfd149b13603b4b9e3913905ea0b6b8e70 100644 (file)
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
 from pygments.lexers import PythonLexer, Python3Lexer
 from pygments.token import Token
 
 
-class PythonTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = PythonLexer()
+@pytest.fixture(scope='module')
+def lexer2():
+    yield PythonLexer()
 
-    def test_cls_builtin(self):
-        """
-        Tests that a cls token gets interpreted as a Token.Name.Builtin.Pseudo
 
-        """
-        fragment = 'class TestClass():\n    @classmethod\n    def hello(cls):\n        pass\n'
-        tokens = [
-            (Token.Keyword, 'class'),
-            (Token.Text, ' '),
-            (Token.Name.Class, 'TestClass'),
-            (Token.Punctuation, '('),
-            (Token.Punctuation, ')'),
-            (Token.Punctuation, ':'),
-            (Token.Text, '\n'),
-            (Token.Text, '    '),
-            (Token.Name.Decorator, '@classmethod'),
-            (Token.Text, '\n'),
-            (Token.Text, '    '),
-            (Token.Keyword, 'def'),
-            (Token.Text, ' '),
-            (Token.Name.Function, 'hello'),
-            (Token.Punctuation, '('),
-            (Token.Name.Builtin.Pseudo, 'cls'),
-            (Token.Punctuation, ')'),
-            (Token.Punctuation, ':'),
-            (Token.Text, '\n'),
-            (Token.Text, '        '),
-            (Token.Keyword, 'pass'),
-            (Token.Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+@pytest.fixture(scope='module')
+def lexer3():
+    yield Python3Lexer()
+
+
+def test_cls_builtin(lexer2):
+    """
+    Tests that the 'cls' token is interpreted as Token.Name.Builtin.Pseudo
+    """
+    fragment = 'class TestClass():\n    @classmethod\n    def hello(cls):\n        pass\n'
+    tokens = [
+        (Token.Keyword, 'class'),
+        (Token.Text, ' '),
+        (Token.Name.Class, 'TestClass'),
+        (Token.Punctuation, '('),
+        (Token.Punctuation, ')'),
+        (Token.Punctuation, ':'),
+        (Token.Text, '\n'),
+        (Token.Text, '    '),
+        (Token.Name.Decorator, '@classmethod'),
+        (Token.Text, '\n'),
+        (Token.Text, '    '),
+        (Token.Keyword, 'def'),
+        (Token.Text, ' '),
+        (Token.Name.Function, 'hello'),
+        (Token.Punctuation, '('),
+        (Token.Name.Builtin.Pseudo, 'cls'),
+        (Token.Punctuation, ')'),
+        (Token.Punctuation, ':'),
+        (Token.Text, '\n'),
+        (Token.Text, '        '),
+        (Token.Keyword, 'pass'),
+        (Token.Text, '\n'),
+    ]
+    assert list(lexer2.get_tokens(fragment)) == tokens
+
 
+def test_needs_name(lexer3):
+    """
+    Tests that '@' is recognized as an Operator
+    """
+    fragment = u'S = (H @ beta - r).T @ inv(H @ V @ H.T) @ (H @ beta - r)\n'
+    tokens = [
+        (Token.Name, u'S'),
+        (Token.Text, u' '),
+        (Token.Operator, u'='),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'('),
+        (Token.Name, u'H'),
+        (Token.Text, u' '),
+        (Token.Operator, u'@'),
+        (Token.Text, u' '),
+        (Token.Name, u'beta'),
+        (Token.Text, u' '),
+        (Token.Operator, u'-'),
+        (Token.Text, u' '),
+        (Token.Name, u'r'),
+        (Token.Punctuation, u')'),
+        (Token.Operator, u'.'),
+        (Token.Name, u'T'),
+        (Token.Text, u' '),
+        (Token.Operator, u'@'),
+        (Token.Text, u' '),
+        (Token.Name, u'inv'),
+        (Token.Punctuation, u'('),
+        (Token.Name, u'H'),
+        (Token.Text, u' '),
+        (Token.Operator, u'@'),
+        (Token.Text, u' '),
+        (Token.Name, u'V'),
+        (Token.Text, u' '),
+        (Token.Operator, u'@'),
+        (Token.Text, u' '),
+        (Token.Name, u'H'),
+        (Token.Operator, u'.'),
+        (Token.Name, u'T'),
+        (Token.Punctuation, u')'),
+        (Token.Text, u' '),
+        (Token.Operator, u'@'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'('),
+        (Token.Name, u'H'),
+        (Token.Text, u' '),
+        (Token.Operator, u'@'),
+        (Token.Text, u' '),
+        (Token.Name, u'beta'),
+        (Token.Text, u' '),
+        (Token.Operator, u'-'),
+        (Token.Text, u' '),
+        (Token.Name, u'r'),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer3.get_tokens(fragment)) == tokens
 
-class Python3Test(unittest.TestCase):
-    def setUp(self):
-        self.lexer = Python3Lexer()
-        
-    def testNeedsName(self):
-        """
-        Tests that '@' is recognized as an Operator
-        """
-        fragment = u'S = (H @ beta - r).T @ inv(H @ V @ H.T) @ (H @ beta - r)\n'
+
+def test_pep_515(lexer3):
+    """
+    Tests that the lexer can parse numeric literals with underscores
+    """
+    fragments = (
+        (Token.Literal.Number.Integer, u'1_000_000'),
+        (Token.Literal.Number.Float, u'1_000.000_001'),
+        (Token.Literal.Number.Float, u'1_000e1_000j'),
+        (Token.Literal.Number.Hex, u'0xCAFE_F00D'),
+        (Token.Literal.Number.Bin, u'0b_0011_1111_0100_1110'),
+        (Token.Literal.Number.Oct, u'0o_777_123'),
+    )
+
+    for token, fragment in fragments:
         tokens = [
-            (Token.Name, u'S'),
-            (Token.Text, u' '),
-            (Token.Operator, u'='),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'('),
-            (Token.Name, u'H'),
-            (Token.Text, u' '),
-            (Token.Operator, u'@'),
-            (Token.Text, u' '),
-            (Token.Name, u'beta'),
-            (Token.Text, u' '),
-            (Token.Operator, u'-'),
-            (Token.Text, u' '),
-            (Token.Name, u'r'),
-            (Token.Punctuation, u')'),
-            (Token.Operator, u'.'),
-            (Token.Name, u'T'),
-            (Token.Text, u' '),
-            (Token.Operator, u'@'),
-            (Token.Text, u' '),
-            (Token.Name, u'inv'),
-            (Token.Punctuation, u'('),
-            (Token.Name, u'H'),
-            (Token.Text, u' '),
-            (Token.Operator, u'@'),
-            (Token.Text, u' '),
-            (Token.Name, u'V'),
-            (Token.Text, u' '),
-            (Token.Operator, u'@'),
-            (Token.Text, u' '),
-            (Token.Name, u'H'),
-            (Token.Operator, u'.'),
-            (Token.Name, u'T'),
-            (Token.Punctuation, u')'),
-            (Token.Text, u' '),
-            (Token.Operator, u'@'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'('),
-            (Token.Name, u'H'),
-            (Token.Text, u' '),
-            (Token.Operator, u'@'),
-            (Token.Text, u' '),
-            (Token.Name, u'beta'),
-            (Token.Text, u' '),
-            (Token.Operator, u'-'),
-            (Token.Text, u' '),
-            (Token.Name, u'r'),
-            (Token.Punctuation, u')'),
+            (token, fragment),
             (Token.Text, u'\n'),
         ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def test_pep_515(self):
-        """
-        Tests that the lexer can parse numeric literals with underscores
-        """
-        fragments = (
-            (Token.Literal.Number.Integer, u'1_000_000'),
-            (Token.Literal.Number.Float, u'1_000.000_001'),
-            (Token.Literal.Number.Float, u'1_000e1_000j'),
-            (Token.Literal.Number.Hex, u'0xCAFE_F00D'),
-            (Token.Literal.Number.Bin, u'0b_0011_1111_0100_1110'),
-            (Token.Literal.Number.Oct, u'0o_777_123'),
-        )
-
-        for token, fragment in fragments:
-            tokens = [
-                (token, fragment),
-                (Token.Text, u'\n'),
-            ]
-            self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+        assert list(lexer3.get_tokens(fragment)) == tokens
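The loop inside test_pep_515 above could also be expressed with pytest.mark.parametrize, which reports each literal as its own test case. This is a hedged alternative sketch, not what the patch itself does; the expected token pairs are taken directly from the table above.

import pytest

from pygments.lexers import Python3Lexer
from pygments.token import Token


@pytest.mark.parametrize('expected_token, literal', [
    (Token.Literal.Number.Integer, u'1_000_000'),
    (Token.Literal.Number.Float, u'1_000.000_001'),
    (Token.Literal.Number.Hex, u'0xCAFE_F00D'),
])
def test_pep_515_parametrized(expected_token, literal):
    # Each underscore-separated literal should lex to a single number token.
    assert list(Python3Lexer().get_tokens(literal)) == [
        (expected_token, literal),
        (Token.Text, u'\n'),
    ]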
index f40b8b63f0e66d7461f17b5cbae82fdbde3baf6b..3c64d69ef5bf560c3d1c01b881a1ea6c27755920 100644 (file)
@@ -7,37 +7,35 @@
     :license: BSD, see LICENSE for details.
 """
 
-import glob
-import os
-import unittest
+import pytest
 
 from pygments.token import Token
 from pygments.lexers.basic import QBasicLexer
 
 
-class QBasicTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = QBasicLexer()
-        self.maxDiff = None
+@pytest.fixture(scope='module')
+def lexer():
+    yield QBasicLexer()
 
-    def testKeywordsWithDollar(self):
-        fragment = u'DIM x\nx = RIGHT$("abc", 1)\n'
-        expected = [
-            (Token.Keyword.Declaration, u'DIM'),
-            (Token.Text.Whitespace, u' '),
-            (Token.Name.Variable.Global, u'x'),
-            (Token.Text, u'\n'),
-            (Token.Name.Variable.Global, u'x'),
-            (Token.Text.Whitespace, u' '),
-            (Token.Operator, u'='),
-            (Token.Text.Whitespace, u' '),
-            (Token.Keyword.Reserved, u'RIGHT$'),
-            (Token.Punctuation, u'('),
-            (Token.Literal.String.Double, u'"abc"'),
-            (Token.Punctuation, u','),
-            (Token.Text.Whitespace, u' '),
-            (Token.Literal.Number.Integer.Long, u'1'),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
+
+def test_keywords_with_dollar(lexer):
+    fragment = u'DIM x\nx = RIGHT$("abc", 1)\n'
+    expected = [
+        (Token.Keyword.Declaration, u'DIM'),
+        (Token.Text.Whitespace, u' '),
+        (Token.Name.Variable.Global, u'x'),
+        (Token.Text, u'\n'),
+        (Token.Name.Variable.Global, u'x'),
+        (Token.Text.Whitespace, u' '),
+        (Token.Operator, u'='),
+        (Token.Text.Whitespace, u' '),
+        (Token.Keyword.Reserved, u'RIGHT$'),
+        (Token.Punctuation, u'('),
+        (Token.Literal.String.Double, u'"abc"'),
+        (Token.Punctuation, u','),
+        (Token.Text.Whitespace, u' '),
+        (Token.Literal.Number.Integer.Long, u'1'),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == expected
index 70148e534bc35fff15c412ec920723ffa1700859..72cb8afc29e75d4c205b8b08130993f9dcb757f8 100644 (file)
@@ -1,70 +1,75 @@
 # -*- coding: utf-8 -*-
 """
     R Tests
-    ~~~~~~~~~
+    ~~~~~~~
 
-    :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
 from pygments.lexers import SLexer
 from pygments.token import Token, Name, Punctuation
 
 
-class RTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = SLexer()
+@pytest.fixture(scope='module')
+def lexer():
+    yield SLexer()
 
-    def testCall(self):
-        fragment = u'f(1, a)\n'
-        tokens = [
-            (Name.Function, u'f'),
-            (Punctuation, u'('),
-            (Token.Literal.Number, u'1'),
-            (Punctuation, u','),
-            (Token.Text, u' '),
-            (Token.Name, u'a'),
-            (Punctuation, u')'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
 
-    def testName1(self):
-        fragment = u'._a_2.c'
-        tokens = [
-            (Name, u'._a_2.c'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+def test_call(lexer):
+    fragment = u'f(1, a)\n'
+    tokens = [
+        (Name.Function, u'f'),
+        (Punctuation, u'('),
+        (Token.Literal.Number, u'1'),
+        (Punctuation, u','),
+        (Token.Text, u' '),
+        (Token.Name, u'a'),
+        (Punctuation, u')'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
 
-    def testName2(self):
-        # Invalid names are valid if backticks are used
-        fragment = u'`.1 blah`'
-        tokens = [
-            (Name, u'`.1 blah`'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
 
-    def testName3(self):
-        # Internal backticks can be escaped
-        fragment = u'`.1 \\` blah`'
-        tokens = [
-            (Name, u'`.1 \\` blah`'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+def test_name1(lexer):
+    fragment = u'._a_2.c'
+    tokens = [
+        (Name, u'._a_2.c'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
 
-    def testCustomOperator(self):
-        fragment = u'7 % and % 8'
-        tokens = [
-            (Token.Literal.Number, u'7'),
-            (Token.Text, u' '),
-            (Token.Operator, u'% and %'),
-            (Token.Text, u' '),
-            (Token.Literal.Number, u'8'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+def test_name2(lexer):
+    # Invalid names are valid if backticks are used
+    fragment = u'`.1 blah`'
+    tokens = [
+        (Name, u'`.1 blah`'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_name3(lexer):
+    # Internal backticks can be escaped
+    fragment = u'`.1 \\` blah`'
+    tokens = [
+        (Name, u'`.1 \\` blah`'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_custom_operator(lexer):
+    fragment = u'7 % and % 8'
+    tokens = [
+        (Token.Literal.Number, u'7'),
+        (Token.Text, u' '),
+        (Token.Operator, u'% and %'),
+        (Token.Text, u' '),
+        (Token.Literal.Number, u'8'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
index adc05a93750cc17ec46c7a47ba28aaa0cd7c1a05..4e832361e6a3bc10af656ecf505a00d1d999c4d4 100644 (file)
@@ -7,14 +7,18 @@
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
 from pygments.token import Text
-from pygments.lexer import RegexLexer
-from pygments.lexer import default
+from pygments.lexer import RegexLexer, default
 
 
-class TestLexer(RegexLexer):
+@pytest.fixture(scope='module')
+def lexer():
+    yield MyLexer()
+
+
+class MyLexer(RegexLexer):
     """Test tuple state transitions including #pop."""
     tokens = {
         'root': [
@@ -34,33 +38,29 @@ class TestLexer(RegexLexer):
     }
 
 
-class TupleTransTest(unittest.TestCase):
-    def test(self):
-        lx = TestLexer()
-        toks = list(lx.get_tokens_unprocessed('abcde'))
-        self.assertEqual(toks, [
-            (0, Text.Root, 'a'), (1, Text.Rag, 'b'), (2, Text.Rag, 'c'),
-            (3, Text.Beer, 'd'), (4, Text.Root, 'e')])
-
-    def test_multiline(self):
-        lx = TestLexer()
-        toks = list(lx.get_tokens_unprocessed('a\ne'))
-        self.assertEqual(toks, [
-            (0, Text.Root, 'a'), (1, Text, u'\n'), (2, Text.Root, 'e')])
-
-    def test_default(self):
-        lx = TestLexer()
-        toks = list(lx.get_tokens_unprocessed('d'))
-        self.assertEqual(toks, [(0, Text.Beer, 'd')])
-
-
-class PopEmptyTest(unittest.TestCase):
-    def test_regular(self):
-        lx = TestLexer()
-        toks = list(lx.get_tokens_unprocessed('#e'))
-        self.assertEqual(toks, [(0, Text.Root, '#'), (1, Text.Root, 'e')])
-
-    def test_tuple(self):
-        lx = TestLexer()
-        toks = list(lx.get_tokens_unprocessed('@e'))
-        self.assertEqual(toks, [(0, Text.Root, '@'), (1, Text.Root, 'e')])
+def test_tuple(lexer):
+    toks = list(lexer.get_tokens_unprocessed('abcde'))
+    assert toks == [
+        (0, Text.Root, 'a'), (1, Text.Rag, 'b'), (2, Text.Rag, 'c'),
+        (3, Text.Beer, 'd'), (4, Text.Root, 'e')]
+
+
+def test_multiline(lexer):
+    toks = list(lexer.get_tokens_unprocessed('a\ne'))
+    assert toks == [
+        (0, Text.Root, 'a'), (1, Text, u'\n'), (2, Text.Root, 'e')]
+
+
+def test_default(lexer):
+    toks = list(lexer.get_tokens_unprocessed('d'))
+    assert toks == [(0, Text.Beer, 'd')]
+
+
+def test_pop_empty_regular(lexer):
+    toks = list(lexer.get_tokens_unprocessed('#e'))
+    assert toks == [(0, Text.Root, '#'), (1, Text.Root, 'e')]
+
+
+def test_pop_empty_tuple(lexer):
+    toks = list(lexer.get_tokens_unprocessed('@e'))
+    assert toks == [(0, Text.Root, '@'), (1, Text.Root, 'e')]
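For readers unfamiliar with the API exercised above: RegexLexer.get_tokens_unprocessed() yields (start_index, token_type, value) tuples and, unlike get_tokens(), does not append a trailing newline token. A self-contained sketch with a throwaway lexer, for illustration only:

from pygments.lexer import RegexLexer
from pygments.token import Text


class TinyLexer(RegexLexer):
    """Throwaway lexer used only for this illustration."""
    tokens = {
        'root': [
            ('a', Text.A),       # each match becomes one (index, type, value)
            ('.', Text.Other),
        ],
    }


def test_unprocessed_tuples():
    toks = list(TinyLexer().get_tokens_unprocessed('ab'))
    assert toks == [(0, Text.A, 'a'), (1, Text.Other, 'b')]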
index 9c44f4987c3359d38b1fcff50af5e05990687c80..20d48dda87457e362f3f17a01c05644bb5c75a14 100644 (file)
 
 import re
 import random
-import unittest
-import itertools
+from itertools import combinations_with_replacement
 
 from pygments.regexopt import regex_opt
 
 ALPHABET = ['a', 'b', 'c', 'd', 'e']
 
-try:
-    from itertools import combinations_with_replacement
-    N_TRIES = 15
-except ImportError:
-    # Python 2.6
-    def combinations_with_replacement(iterable, r):
-        pool = tuple(iterable)
-        n = len(pool)
-        for indices in itertools.product(range(n), repeat=r):
-            if sorted(indices) == list(indices):
-                yield tuple(pool[i] for i in indices)
-    N_TRIES = 9
-
-
-class RegexOptTestCase(unittest.TestCase):
-
-    def generate_keywordlist(self, length):
-        return [''.join(p) for p in
-                combinations_with_replacement(ALPHABET, length)]
-
-    def test_randomly(self):
-        # generate a list of all possible keywords of a certain length using
-        # a restricted alphabet, then choose some to match and make sure only
-        # those do
-        for n in range(3, N_TRIES):
-            kwlist = self.generate_keywordlist(n)
-            to_match = random.sample(kwlist,
-                                     random.randint(1, len(kwlist) - 1))
-            no_match = set(kwlist) - set(to_match)
-            rex = re.compile(regex_opt(to_match))
-            self.assertEqual(rex.groups, 1)
-            for w in to_match:
-                self.assertTrue(rex.match(w))
-            for w in no_match:
-                self.assertFalse(rex.match(w))
-
-    def test_prefix(self):
-        opt = regex_opt(('a', 'b'), prefix=r':{1,2}')
-        print(opt)
-        rex = re.compile(opt)
-        self.assertFalse(rex.match('a'))
-        self.assertTrue(rex.match('::a'))
-        self.assertFalse(rex.match(':::')) # fullmatch
-
-    def test_suffix(self):
-        opt = regex_opt(('a', 'b'), suffix=r':{1,2}')
-        print(opt)
-        rex = re.compile(opt)
-        self.assertFalse(rex.match('a'))
-        self.assertTrue(rex.match('a::'))
-        self.assertFalse(rex.match(':::')) # fullmatch
-
-    def test_suffix_opt(self):
-        # test that detected suffixes remain sorted.
-        opt = regex_opt(('afoo', 'abfoo'))
-        print(opt)
-        rex = re.compile(opt)
-        m = rex.match('abfoo')
-        self.assertEqual(5, m.end())
-
-    def test_different_length_grouping(self):
-        opt = regex_opt(('a', 'xyz'))
-        print(opt)
-        rex = re.compile(opt)
-        self.assertTrue(rex.match('a'))
-        self.assertTrue(rex.match('xyz'))
-        self.assertFalse(rex.match('b'))
-        self.assertEqual(1, rex.groups)
-
-    def test_same_length_grouping(self):
-        opt = regex_opt(('a', 'b'))
-        print(opt)
-        rex = re.compile(opt)
-        self.assertTrue(rex.match('a'))
-        self.assertTrue(rex.match('b'))
-        self.assertFalse(rex.match('x'))
-
-        self.assertEqual(1, rex.groups)
-        groups = rex.match('a').groups()
-        self.assertEqual(('a',), groups)
-
-    def test_same_length_suffix_grouping(self):
-        opt = regex_opt(('a', 'b'), suffix='(m)')
-        print(opt)
-        rex = re.compile(opt)
-        self.assertTrue(rex.match('am'))
-        self.assertTrue(rex.match('bm'))
-        self.assertFalse(rex.match('xm'))
-        self.assertFalse(rex.match('ax'))
-        self.assertEqual(2, rex.groups)
-        groups = rex.match('am').groups()
-        self.assertEqual(('a', 'm'), groups)
+N_TRIES = 15
+
+
+def generate_keywordlist(length):
+    return [''.join(p) for p in
+            combinations_with_replacement(ALPHABET, length)]
+
+
+def test_randomly():
+    # generate a list of all possible keywords of a certain length using
+    # a restricted alphabet, then choose some to match and make sure only
+    # those do
+    for n in range(3, N_TRIES):
+        kwlist = generate_keywordlist(n)
+        to_match = random.sample(kwlist,
+                                 random.randint(1, len(kwlist) - 1))
+        no_match = set(kwlist) - set(to_match)
+        rex = re.compile(regex_opt(to_match))
+        assert rex.groups == 1
+        for w in to_match:
+            assert rex.match(w)
+        for w in no_match:
+            assert not rex.match(w)
+
+
+def test_prefix():
+    opt = regex_opt(('a', 'b'), prefix=r':{1,2}')
+    print(opt)
+    rex = re.compile(opt)
+    assert not rex.match('a')
+    assert rex.match('::a')
+    assert not rex.match(':::')  # fullmatch
+
+
+def test_suffix():
+    opt = regex_opt(('a', 'b'), suffix=r':{1,2}')
+    print(opt)
+    rex = re.compile(opt)
+    assert not rex.match('a')
+    assert rex.match('a::')
+    assert not rex.match(':::')  # fullmatch
+
+
+def test_suffix_opt():
+    # test that detected suffixes remain sorted.
+    opt = regex_opt(('afoo', 'abfoo'))
+    print(opt)
+    rex = re.compile(opt)
+    m = rex.match('abfoo')
+    assert m.end() == 5
+
+
+def test_different_length_grouping():
+    opt = regex_opt(('a', 'xyz'))
+    print(opt)
+    rex = re.compile(opt)
+    assert rex.match('a')
+    assert rex.match('xyz')
+    assert not rex.match('b')
+    assert rex.groups == 1
+
+
+def test_same_length_grouping():
+    opt = regex_opt(('a', 'b'))
+    print(opt)
+    rex = re.compile(opt)
+    assert rex.match('a')
+    assert rex.match('b')
+    assert not rex.match('x')
+
+    assert rex.groups == 1
+    groups = rex.match('a').groups()
+    assert groups == ('a',)
+
+
+def test_same_length_suffix_grouping():
+    opt = regex_opt(('a', 'b'), suffix='(m)')
+    print(opt)
+    rex = re.compile(opt)
+    assert rex.match('am')
+    assert rex.match('bm')
+    assert not rex.match('xm')
+    assert not rex.match('ax')
+    assert rex.groups == 2
+    groups = rex.match('am').groups()
+    assert groups == ('a', 'm')
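A short usage sketch for regex_opt(), based only on the behaviour the tests above exercise: it returns the source of a single-group regular expression that matches exactly the listed literal words, optionally wrapped in a prefix and suffix. Illustrative only:

import re

from pygments.regexopt import regex_opt


def test_keyword_group_sketch():
    # Build one optimized alternation for a small keyword set.
    rex = re.compile(regex_opt(('else', 'elif', 'end')))
    assert rex.groups == 1          # the whole alternation is one group
    assert rex.match('elif')        # listed keywords match ...
    assert not rex.match('elk')     # ... close non-keywords do not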
index 80ce01f532650d069884dcaca0beac730b6f8f28..35179df4d8fe09d41b78c46529529f4891c2c4d2 100644 (file)
 # -*- coding: utf-8 -*-
 """
     Pygments RTF formatter tests
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
     :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
-from string_asserts import StringTests
-
 from pygments.util import StringIO
 from pygments.formatters import RtfFormatter
 from pygments.lexers.special import TextLexer
 
-class RtfFormatterTest(StringTests, unittest.TestCase):
-    foot = (r'\par' '\n' r'}')
-
-    def _escape(self, string):
-        return(string.replace("\n", r"\n"))
-
-    def _build_message(self, *args, **kwargs):
-        string = kwargs.get('string', None)
-        t = self._escape(kwargs.get('t', ''))
-        expected = self._escape(kwargs.get('expected', ''))
-        result = self._escape(kwargs.get('result', ''))
-
-        if string is None:
-            string = (u"The expected output of '{t}'\n"
-                      u"\t\tShould be '{expected}'\n"
-                      u"\t\tActually outputs '{result}'\n"
-                      u"\t(WARNING: Partial Output of Result!)")
-
-        end = -(len(self._escape(self.foot)))
-        start = end-len(expected)
-
-        return string.format(t=t,
-                             result = result[start:end],
-                             expected = expected)
-
-    def format_rtf(self, t):
-        tokensource = list(TextLexer().get_tokens(t))
-        fmt = RtfFormatter()
-        buf = StringIO()
-        fmt.format(tokensource, buf)
-        result = buf.getvalue()
-        buf.close()
-        return result
-
-    def test_rtf_header(self):
-        t = u''
-        result = self.format_rtf(t)
-        expected = r'{\rtf1\ansi\uc0'
-        msg = (u"RTF documents are expected to start with '{expected}'\n"
-               u"\t\tStarts intead with '{result}'\n"
-               u"\t(WARNING: Partial Output of Result!)".format(
-                   expected = expected,
-                   result = result[:len(expected)]))
-        self.assertStartsWith(result, expected, msg)
-
-    def test_rtf_footer(self):
-        t = u''
-        result = self.format_rtf(t)
-        expected = self.foot
-        msg = (u"RTF documents are expected to end with '{expected}'\n"
-               u"\t\tEnds intead with '{result}'\n"
-               u"\t(WARNING: Partial Output of Result!)".format(
-                   expected = self._escape(expected),
-                   result = self._escape(result[-len(expected):])))
-        self.assertEndsWith(result, expected, msg)
-
-    def test_ascii_characters(self):
-        t = u'a b c d ~'
-        result = self.format_rtf(t)
-        expected = (r'a b c d ~')
-        if not result.endswith(self.foot):
-            return(unittest.skip('RTF Footer incorrect'))
-        msg = self._build_message(t=t, result=result, expected=expected)
-        self.assertEndsWith(result, expected+self.foot, msg)
-
-    def test_escape_characters(self):
-        t = u'\\ {{'
-        result = self.format_rtf(t)
-        expected = (r'\\ \{\{')
-        if not result.endswith(self.foot):
-            return(unittest.skip('RTF Footer incorrect'))
-        msg = self._build_message(t=t, result=result, expected=expected)
-        self.assertEndsWith(result, expected+self.foot, msg)
-
-    def test_single_characters(self):
-        t = u'â € ¤ каждой'
-        result = self.format_rtf(t)
-        expected = (r'{\u226} {\u8364} {\u164} '
-                    r'{\u1082}{\u1072}{\u1078}{\u1076}{\u1086}{\u1081}')
-        if not result.endswith(self.foot):
-            return(unittest.skip('RTF Footer incorrect'))
-        msg = self._build_message(t=t, result=result, expected=expected)
-        self.assertEndsWith(result, expected+self.foot, msg)
-
-    def test_double_characters(self):
-        t = u'က 힣 ↕ ↕︎ 鼖'
-        result = self.format_rtf(t)
-        expected = (r'{\u4096} {\u55203} {\u8597} '
-                    r'{\u8597}{\u65038} {\u55422}{\u56859}')
-        if not result.endswith(self.foot):
-            return(unittest.skip('RTF Footer incorrect'))
-        msg = self._build_message(t=t, result=result, expected=expected)
-        self.assertEndsWith(result, expected+self.foot, msg)
+
+foot = (r'\par' '\n' r'}')
+
+
+def _escape(string):
+    return string.replace("\n", r"\n")
+
+
+def _build_message(*args, **kwargs):
+    string = kwargs.get('string', None)
+    t = _escape(kwargs.get('t', ''))
+    expected = _escape(kwargs.get('expected', ''))
+    result = _escape(kwargs.get('result', ''))
+
+    if string is None:
+        string = (u"The expected output of '{t}'\n"
+                  u"\t\tShould be '{expected}'\n"
+                  u"\t\tActually outputs '{result}'\n"
+                  u"\t(WARNING: Partial Output of Result!)")
+
+    end = -len(_escape(foot))
+    start = end - len(expected)
+
+    return string.format(t=t,
+                         result = result[start:end],
+                         expected = expected)
+
+
+def format_rtf(t):
+    tokensource = list(TextLexer().get_tokens(t))
+    fmt = RtfFormatter()
+    buf = StringIO()
+    fmt.format(tokensource, buf)
+    result = buf.getvalue()
+    buf.close()
+    return result
+
+
+def test_rtf_header():
+    t = u''
+    result = format_rtf(t)
+    expected = r'{\rtf1\ansi\uc0'
+    msg = (u"RTF documents are expected to start with '{expected}'\n"
+           u"\t\tStarts intead with '{result}'\n"
+           u"\t(WARNING: Partial Output of Result!)".format(
+               expected=expected,
+               result=result[:len(expected)]))
+    assert result.startswith(expected), msg
+
+
+def test_rtf_footer():
+    t = u''
+    result = format_rtf(t)
+    expected = foot
+    msg = (u"RTF documents are expected to end with '{expected}'\n"
+           u"\t\tEnds intead with '{result}'\n"
+           u"\t(WARNING: Partial Output of Result!)".format(
+               expected=_escape(expected),
+               result=_escape(result[-len(expected):])))
+    assert result.endswith(expected), msg
+
+
+def test_ascii_characters():
+    t = u'a b c d ~'
+    result = format_rtf(t)
+    expected = (r'a b c d ~')
+    msg = _build_message(t=t, result=result, expected=expected)
+    assert result.endswith(expected+foot), msg
+
+
+def test_escape_characters():
+    t = u'\\ {{'
+    result = format_rtf(t)
+    expected = r'\\ \{\{'
+    msg = _build_message(t=t, result=result, expected=expected)
+    assert result.endswith(expected+foot), msg
+
+
+def test_single_characters():
+    t = u'â € ¤ каждой'
+    result = format_rtf(t)
+    expected = (r'{\u226} {\u8364} {\u164} '
+                r'{\u1082}{\u1072}{\u1078}{\u1076}{\u1086}{\u1081}')
+    msg = _build_message(t=t, result=result, expected=expected)
+    assert result.endswith(expected+foot), msg
+
+
+def test_double_characters():
+    t = u'က 힣 ↕ ↕︎ 鼖'
+    result = format_rtf(t)
+    expected = (r'{\u4096} {\u55203} {\u8597} '
+                r'{\u8597}{\u65038} {\u55422}{\u56859}')
+    msg = _build_message(t=t, result=result, expected=expected)
+    assert result.endswith(expected+foot), msg
index 45a77469706e4b87f9f6c902f8d6fbfe9e46daf2..a6da4bf9d808a9d72242477ab8aa889166242cc7 100644 (file)
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
 from pygments.token import Operator, Number, Text, Token
 from pygments.lexers import RubyLexer
 
 
-class RubyTest(unittest.TestCase):
-
-    def setUp(self):
-        self.lexer = RubyLexer()
-        self.maxDiff = None
-
-    def testRangeSyntax1(self):
-        fragment = u'1..3\n'
-        tokens = [
-            (Number.Integer, u'1'),
-            (Operator, u'..'),
-            (Number.Integer, u'3'),
-            (Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testRangeSyntax2(self):
-        fragment = u'1...3\n'
-        tokens = [
-            (Number.Integer, u'1'),
-            (Operator, u'...'),
-            (Number.Integer, u'3'),
-            (Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testRangeSyntax3(self):
-        fragment = u'1 .. 3\n'
-        tokens = [
-            (Number.Integer, u'1'),
-            (Text, u' '),
-            (Operator, u'..'),
-            (Text, u' '),
-            (Number.Integer, u'3'),
-            (Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testInterpolationNestedCurly(self):
-        fragment = (
-            u'"A#{ (3..5).group_by { |x| x/2}.map '
-            u'do |k,v| "#{k}" end.join }" + "Z"\n')
-
-        tokens = [
-            (Token.Literal.String.Double, u'"'),
-            (Token.Literal.String.Double, u'A'),
-            (Token.Literal.String.Interpol, u'#{'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'('),
-            (Token.Literal.Number.Integer, u'3'),
-            (Token.Operator, u'..'),
-            (Token.Literal.Number.Integer, u'5'),
-            (Token.Punctuation, u')'),
-            (Token.Operator, u'.'),
-            (Token.Name, u'group_by'),
-            (Token.Text, u' '),
-            (Token.Literal.String.Interpol, u'{'),
-            (Token.Text, u' '),
-            (Token.Operator, u'|'),
-            (Token.Name, u'x'),
-            (Token.Operator, u'|'),
-            (Token.Text, u' '),
-            (Token.Name, u'x'),
-            (Token.Operator, u'/'),
-            (Token.Literal.Number.Integer, u'2'),
-            (Token.Literal.String.Interpol, u'}'),
-            (Token.Operator, u'.'),
-            (Token.Name, u'map'),
-            (Token.Text, u' '),
-            (Token.Keyword, u'do'),
-            (Token.Text, u' '),
-            (Token.Operator, u'|'),
-            (Token.Name, u'k'),
-            (Token.Punctuation, u','),
-            (Token.Name, u'v'),
-            (Token.Operator, u'|'),
-            (Token.Text, u' '),
-            (Token.Literal.String.Double, u'"'),
-            (Token.Literal.String.Interpol, u'#{'),
-            (Token.Name, u'k'),
-            (Token.Literal.String.Interpol, u'}'),
-            (Token.Literal.String.Double, u'"'),
-            (Token.Text, u' '),
-            (Token.Keyword, u'end'),
-            (Token.Operator, u'.'),
-            (Token.Name, u'join'),
-            (Token.Text, u' '),
-            (Token.Literal.String.Interpol, u'}'),
-            (Token.Literal.String.Double, u'"'),
-            (Token.Text, u' '),
-            (Token.Operator, u'+'),
-            (Token.Text, u' '),
-            (Token.Literal.String.Double, u'"'),
-            (Token.Literal.String.Double, u'Z'),
-            (Token.Literal.String.Double, u'"'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testOperatorMethods(self):
-        fragment = u'x.==4\n'
-        tokens = [
-            (Token.Name, u'x'),
-            (Token.Operator, u'.'),
-            (Token.Name.Operator, u'=='),
-            (Token.Literal.Number.Integer, u'4'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testEscapedBracestring(self):
-        fragment = u'str.gsub(%r{\\\\\\\\}, "/")\n'
-        tokens = [
-            (Token.Name, u'str'),
-            (Token.Operator, u'.'),
-            (Token.Name, u'gsub'),
-            (Token.Punctuation, u'('),
-            (Token.Literal.String.Regex, u'%r{'),
-            (Token.Literal.String.Regex, u'\\\\'),
-            (Token.Literal.String.Regex, u'\\\\'),
-            (Token.Literal.String.Regex, u'}'),
-            (Token.Punctuation, u','),
-            (Token.Text, u' '),
-            (Token.Literal.String.Double, u'"'),
-            (Token.Literal.String.Double, u'/'),
-            (Token.Literal.String.Double, u'"'),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+@pytest.fixture(scope='module')
+def lexer():
+    yield RubyLexer()
+
+
+def test_range_syntax1(lexer):
+    fragment = u'1..3\n'
+    tokens = [
+        (Number.Integer, u'1'),
+        (Operator, u'..'),
+        (Number.Integer, u'3'),
+        (Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_range_syntax2(lexer):
+    fragment = u'1...3\n'
+    tokens = [
+        (Number.Integer, u'1'),
+        (Operator, u'...'),
+        (Number.Integer, u'3'),
+        (Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_range_syntax3(lexer):
+    fragment = u'1 .. 3\n'
+    tokens = [
+        (Number.Integer, u'1'),
+        (Text, u' '),
+        (Operator, u'..'),
+        (Text, u' '),
+        (Number.Integer, u'3'),
+        (Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_interpolation_nested_curly(lexer):
+    fragment = (
+        u'"A#{ (3..5).group_by { |x| x/2}.map '
+        u'do |k,v| "#{k}" end.join }" + "Z"\n')
+
+    tokens = [
+        (Token.Literal.String.Double, u'"'),
+        (Token.Literal.String.Double, u'A'),
+        (Token.Literal.String.Interpol, u'#{'),
+        (Token.Text, u' '),
+        (Token.Punctuation, u'('),
+        (Token.Literal.Number.Integer, u'3'),
+        (Token.Operator, u'..'),
+        (Token.Literal.Number.Integer, u'5'),
+        (Token.Punctuation, u')'),
+        (Token.Operator, u'.'),
+        (Token.Name, u'group_by'),
+        (Token.Text, u' '),
+        (Token.Literal.String.Interpol, u'{'),
+        (Token.Text, u' '),
+        (Token.Operator, u'|'),
+        (Token.Name, u'x'),
+        (Token.Operator, u'|'),
+        (Token.Text, u' '),
+        (Token.Name, u'x'),
+        (Token.Operator, u'/'),
+        (Token.Literal.Number.Integer, u'2'),
+        (Token.Literal.String.Interpol, u'}'),
+        (Token.Operator, u'.'),
+        (Token.Name, u'map'),
+        (Token.Text, u' '),
+        (Token.Keyword, u'do'),
+        (Token.Text, u' '),
+        (Token.Operator, u'|'),
+        (Token.Name, u'k'),
+        (Token.Punctuation, u','),
+        (Token.Name, u'v'),
+        (Token.Operator, u'|'),
+        (Token.Text, u' '),
+        (Token.Literal.String.Double, u'"'),
+        (Token.Literal.String.Interpol, u'#{'),
+        (Token.Name, u'k'),
+        (Token.Literal.String.Interpol, u'}'),
+        (Token.Literal.String.Double, u'"'),
+        (Token.Text, u' '),
+        (Token.Keyword, u'end'),
+        (Token.Operator, u'.'),
+        (Token.Name, u'join'),
+        (Token.Text, u' '),
+        (Token.Literal.String.Interpol, u'}'),
+        (Token.Literal.String.Double, u'"'),
+        (Token.Text, u' '),
+        (Token.Operator, u'+'),
+        (Token.Text, u' '),
+        (Token.Literal.String.Double, u'"'),
+        (Token.Literal.String.Double, u'Z'),
+        (Token.Literal.String.Double, u'"'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_operator_methods(lexer):
+    fragment = u'x.==4\n'
+    tokens = [
+        (Token.Name, u'x'),
+        (Token.Operator, u'.'),
+        (Token.Name.Operator, u'=='),
+        (Token.Literal.Number.Integer, u'4'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_escaped_bracestring(lexer):
+    fragment = u'str.gsub(%r{\\\\\\\\}, "/")\n'
+    tokens = [
+        (Token.Name, u'str'),
+        (Token.Operator, u'.'),
+        (Token.Name, u'gsub'),
+        (Token.Punctuation, u'('),
+        (Token.Literal.String.Regex, u'%r{'),
+        (Token.Literal.String.Regex, u'\\\\'),
+        (Token.Literal.String.Regex, u'\\\\'),
+        (Token.Literal.String.Regex, u'}'),
+        (Token.Punctuation, u','),
+        (Token.Text, u' '),
+        (Token.Literal.String.Double, u'"'),
+        (Token.Literal.String.Double, u'/'),
+        (Token.Literal.String.Double, u'"'),
+        (Token.Punctuation, u')'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
index 6b24eb4309a204634232080c5ec2f74b5e6adbdc..64918a2e69181e7d8aa775b2d49775239b69317c 100644 (file)
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
 from pygments.token import Token
 from pygments.lexers import BashLexer, BashSessionLexer, MSDOSSessionLexer
 
 
-class BashTest(unittest.TestCase):
-
-    def setUp(self):
-        self.lexer = BashLexer()
-        self.maxDiff = None
-
-    def testCurlyNoEscapeAndQuotes(self):
-        fragment = u'echo "${a//["b"]/}"\n'
-        tokens = [
-            (Token.Name.Builtin, u'echo'),
-            (Token.Text, u' '),
-            (Token.Literal.String.Double, u'"'),
-            (Token.String.Interpol, u'${'),
-            (Token.Name.Variable, u'a'),
-            (Token.Punctuation, u'//['),
-            (Token.Literal.String.Double, u'"b"'),
-            (Token.Punctuation, u']/'),
-            (Token.String.Interpol, u'}'),
-            (Token.Literal.String.Double, u'"'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testCurlyWithEscape(self):
-        fragment = u'echo ${a//[\\"]/}\n'
-        tokens = [
-            (Token.Name.Builtin, u'echo'),
-            (Token.Text, u' '),
-            (Token.String.Interpol, u'${'),
-            (Token.Name.Variable, u'a'),
-            (Token.Punctuation, u'//['),
-            (Token.Literal.String.Escape, u'\\"'),
-            (Token.Punctuation, u']/'),
-            (Token.String.Interpol, u'}'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testParsedSingle(self):
-        fragment = u"a=$'abc\\''\n"
-        tokens = [
-            (Token.Name.Variable, u'a'),
-            (Token.Operator, u'='),
-            (Token.Literal.String.Single, u"$'abc\\''"),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testShortVariableNames(self):
-        fragment = u'x="$"\ny="$_"\nz="$abc"\n'
-        tokens = [
-            # single lone $
-            (Token.Name.Variable, u'x'),
-            (Token.Operator, u'='),
-            (Token.Literal.String.Double, u'"'),
-            (Token.Text, u'$'),
-            (Token.Literal.String.Double, u'"'),
-            (Token.Text, u'\n'),
-            # single letter shell var
-            (Token.Name.Variable, u'y'),
-            (Token.Operator, u'='),
-            (Token.Literal.String.Double, u'"'),
-            (Token.Name.Variable, u'$_'),
-            (Token.Literal.String.Double, u'"'),
-            (Token.Text, u'\n'),
-            # multi-letter user var
-            (Token.Name.Variable, u'z'),
-            (Token.Operator, u'='),
-            (Token.Literal.String.Double, u'"'),
-            (Token.Name.Variable, u'$abc'),
-            (Token.Literal.String.Double, u'"'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testArrayNums(self):
-        fragment = u'a=(1 2 3)\n'
-        tokens = [
-            (Token.Name.Variable, u'a'),
-            (Token.Operator, u'='),
-            (Token.Operator, u'('),
-            (Token.Literal.Number, u'1'),
-            (Token.Text, u' '),
-            (Token.Literal.Number, u'2'),
-            (Token.Text, u' '),
-            (Token.Literal.Number, u'3'),
-            (Token.Operator, u')'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testEndOfLineNums(self):
-        fragment = u'a=1\nb=2 # comment\n'
-        tokens = [
-            (Token.Name.Variable, u'a'),
-            (Token.Operator, u'='),
-            (Token.Literal.Number, u'1'),
-            (Token.Text, u'\n'),
-            (Token.Name.Variable, u'b'),
-            (Token.Operator, u'='),
-            (Token.Literal.Number, u'2'),
-            (Token.Text, u' '),
-            (Token.Comment.Single, u'# comment\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-class BashSessionTest(unittest.TestCase):
-
-    def setUp(self):
-        self.lexer = BashSessionLexer()
-        self.maxDiff = None
-
-    def testNeedsName(self):
-        fragment = u'$ echo \\\nhi\nhi\n'
-        tokens = [
-            (Token.Text, u''),
-            (Token.Generic.Prompt, u'$'),
-            (Token.Text, u' '),
-            (Token.Name.Builtin, u'echo'),
-            (Token.Text, u' '),
-            (Token.Literal.String.Escape, u'\\\n'),
-            (Token.Text, u'hi'),
-            (Token.Text, u'\n'),
-            (Token.Generic.Output, u'hi\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-class MSDOSSessionTest(unittest.TestCase):
-
-    def setUp(self):
-        self.lexer = MSDOSSessionLexer()
-
-    def testGtOnlyPrompt(self):
-        fragment = u'> py\nhi\n'
-        tokens = [
-            (Token.Text, u''),
-            (Token.Generic.Prompt, u'>'),
-            (Token.Text, u' '),
-            (Token.Text, u'py'),
-            (Token.Text, u''),
-            (Token.Text, u'\n'),
-            (Token.Generic.Output, u'hi\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+@pytest.fixture(scope='module')
+def lexer_bash():
+    yield BashLexer()
+
+
+@pytest.fixture(scope='module')
+def lexer_session():
+    yield BashSessionLexer()
+
+
+@pytest.fixture(scope='module')
+def lexer_msdos():
+    yield MSDOSSessionLexer()
+
+
+def test_curly_no_escape_and_quotes(lexer_bash):
+    fragment = u'echo "${a//["b"]/}"\n'
+    tokens = [
+        (Token.Name.Builtin, u'echo'),
+        (Token.Text, u' '),
+        (Token.Literal.String.Double, u'"'),
+        (Token.String.Interpol, u'${'),
+        (Token.Name.Variable, u'a'),
+        (Token.Punctuation, u'//['),
+        (Token.Literal.String.Double, u'"b"'),
+        (Token.Punctuation, u']/'),
+        (Token.String.Interpol, u'}'),
+        (Token.Literal.String.Double, u'"'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer_bash.get_tokens(fragment)) == tokens
+
+
+def test_curly_with_escape(lexer_bash):
+    fragment = u'echo ${a//[\\"]/}\n'
+    tokens = [
+        (Token.Name.Builtin, u'echo'),
+        (Token.Text, u' '),
+        (Token.String.Interpol, u'${'),
+        (Token.Name.Variable, u'a'),
+        (Token.Punctuation, u'//['),
+        (Token.Literal.String.Escape, u'\\"'),
+        (Token.Punctuation, u']/'),
+        (Token.String.Interpol, u'}'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer_bash.get_tokens(fragment)) == tokens
+
+
+def test_parsed_single(lexer_bash):
+    fragment = u"a=$'abc\\''\n"
+    tokens = [
+        (Token.Name.Variable, u'a'),
+        (Token.Operator, u'='),
+        (Token.Literal.String.Single, u"$'abc\\''"),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer_bash.get_tokens(fragment)) == tokens
+
+
+def test_short_variable_names(lexer_bash):
+    fragment = u'x="$"\ny="$_"\nz="$abc"\n'
+    tokens = [
+        # single lone $
+        (Token.Name.Variable, u'x'),
+        (Token.Operator, u'='),
+        (Token.Literal.String.Double, u'"'),
+        (Token.Text, u'$'),
+        (Token.Literal.String.Double, u'"'),
+        (Token.Text, u'\n'),
+        # single letter shell var
+        (Token.Name.Variable, u'y'),
+        (Token.Operator, u'='),
+        (Token.Literal.String.Double, u'"'),
+        (Token.Name.Variable, u'$_'),
+        (Token.Literal.String.Double, u'"'),
+        (Token.Text, u'\n'),
+        # multi-letter user var
+        (Token.Name.Variable, u'z'),
+        (Token.Operator, u'='),
+        (Token.Literal.String.Double, u'"'),
+        (Token.Name.Variable, u'$abc'),
+        (Token.Literal.String.Double, u'"'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer_bash.get_tokens(fragment)) == tokens
+
+
+def test_array_nums(lexer_bash):
+    fragment = u'a=(1 2 3)\n'
+    tokens = [
+        (Token.Name.Variable, u'a'),
+        (Token.Operator, u'='),
+        (Token.Operator, u'('),
+        (Token.Literal.Number, u'1'),
+        (Token.Text, u' '),
+        (Token.Literal.Number, u'2'),
+        (Token.Text, u' '),
+        (Token.Literal.Number, u'3'),
+        (Token.Operator, u')'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer_bash.get_tokens(fragment)) == tokens
+
+
+def test_end_of_line_nums(lexer_bash):
+    fragment = u'a=1\nb=2 # comment\n'
+    tokens = [
+        (Token.Name.Variable, u'a'),
+        (Token.Operator, u'='),
+        (Token.Literal.Number, u'1'),
+        (Token.Text, u'\n'),
+        (Token.Name.Variable, u'b'),
+        (Token.Operator, u'='),
+        (Token.Literal.Number, u'2'),
+        (Token.Text, u' '),
+        (Token.Comment.Single, u'# comment\n'),
+    ]
+    assert list(lexer_bash.get_tokens(fragment)) == tokens
+
+
+def test_newline_in_echo(lexer_session):
+    fragment = u'$ echo \\\nhi\nhi\n'
+    tokens = [
+        (Token.Text, u''),
+        (Token.Generic.Prompt, u'$'),
+        (Token.Text, u' '),
+        (Token.Name.Builtin, u'echo'),
+        (Token.Text, u' '),
+        (Token.Literal.String.Escape, u'\\\n'),
+        (Token.Text, u'hi'),
+        (Token.Text, u'\n'),
+        (Token.Generic.Output, u'hi\n'),
+    ]
+    assert list(lexer_session.get_tokens(fragment)) == tokens
+
+
+def test_msdos_gt_only(lexer_msdos):
+    fragment = u'> py\nhi\n'
+    tokens = [
+        (Token.Text, u''),
+        (Token.Generic.Prompt, u'>'),
+        (Token.Text, u' '),
+        (Token.Text, u'py'),
+        (Token.Text, u''),
+        (Token.Text, u'\n'),
+        (Token.Generic.Output, u'hi\n'),
+    ]
+    assert list(lexer_msdos.get_tokens(fragment)) == tokens
+
+def test_virtualenv(lexer_session):
+    fragment = u'(env) [~/project]$ foo -h\n'
+    tokens = [
+        (Token.Text, u''),
+        (Token.Generic.Prompt.VirtualEnv, u'(env)'),
+        (Token.Text, u''),
+        (Token.Text, u' '),
+        (Token.Text, u''),
+        (Token.Generic.Prompt, u'[~/project]$'),
+        (Token.Text, u' '),
+        (Token.Text, u'foo'),
+        (Token.Text, u' '),
+        (Token.Text, u'-h'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer_session.get_tokens(fragment)) == tokens
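Outside the test suite, these session lexers are normally driven through the highlight() convenience function; a minimal, hedged usage sketch follows (the choice of TerminalFormatter here is arbitrary, not something this patch prescribes):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import BashSessionLexer

# Render a tiny session transcript with ANSI colours on stdout.
print(highlight(u'$ echo hi\nhi\n', BashSessionLexer(), TerminalFormatter()))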
index fb15f7f40673dbdd5e2e60b9ef3fd88b6ef897a3..2d172559f47b3bbe0688ab1e6ddd493f2ac04cf1 100644 (file)
@@ -1,40 +1,39 @@
 # -*- coding: utf-8 -*-
 """
     Basic SmartyLexer Test
-    ~~~~~~~~~~~~~~~~~~~~
+    ~~~~~~~~~~~~~~~~~~~~~~
 
     :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
-from pygments.token import Operator, Number, Text, Token
+from pygments.token import Token
 from pygments.lexers import SmartyLexer
 
 
-class SmartyTest(unittest.TestCase):
-
-    def setUp(self):
-        self.lexer = SmartyLexer()
-
-    def testNestedCurly(self):
-        fragment = u'{templateFunction param={anotherFunction} param2=$something}\n'
-        tokens = [
-            (Token.Comment.Preproc, u'{'),
-            (Token.Name.Function, u'templateFunction'),
-            (Token.Text, u' '),
-            (Token.Name.Attribute, u'param'),
-            (Token.Operator, u'='),
-            (Token.Comment.Preproc, u'{'),
-            (Token.Name.Attribute, u'anotherFunction'),
-            (Token.Comment.Preproc, u'}'),
-            (Token.Text, u' '),
-            (Token.Name.Attribute, u'param2'),
-            (Token.Operator, u'='),
-            (Token.Name.Variable, u'$something'),
-            (Token.Comment.Preproc, u'}'),
-            (Token.Other, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
+@pytest.fixture(scope='module')
+def lexer():
+    yield SmartyLexer()
+
+
+def test_nested_curly(lexer):
+    fragment = u'{templateFunction param={anotherFunction} param2=$something}\n'
+    tokens = [
+        (Token.Comment.Preproc, u'{'),
+        (Token.Name.Function, u'templateFunction'),
+        (Token.Text, u' '),
+        (Token.Name.Attribute, u'param'),
+        (Token.Operator, u'='),
+        (Token.Comment.Preproc, u'{'),
+        (Token.Name.Attribute, u'anotherFunction'),
+        (Token.Comment.Preproc, u'}'),
+        (Token.Text, u' '),
+        (Token.Name.Attribute, u'param2'),
+        (Token.Operator, u'='),
+        (Token.Name.Variable, u'$something'),
+        (Token.Comment.Preproc, u'}'),
+        (Token.Other, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
index 6be34006910359d9d2d7a4a53488b518c26b200d..efd63be603081f6fa055572335217dbc4ac9bc7d 100644 (file)
@@ -3,10 +3,11 @@
     Pygments SQL lexers tests
     ~~~~~~~~~~~~~~~~~~~~~~~~~
 
-    :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
-import unittest
+
+import pytest
 
 from pygments.lexers.sql import name_between_bracket_re, \
     name_between_backtick_re, tsql_go_re, tsql_declare_re, \
@@ -15,104 +16,102 @@ from pygments.lexers.sql import name_between_bracket_re, \
 from pygments.token import Comment, Name, Number, Punctuation, Whitespace
 
 
-class TransactSqlLexerTest(unittest.TestCase):
-
-    def setUp(self):
-        self.lexer = TransactSqlLexer()
-
-    def _assertAreTokensOfType(self, examples, expected_token_type):
-        for test_number, example in enumerate(examples.split(), 1):
-            token_count = 0
-            for token_type, token_value in self.lexer.get_tokens(example):
-                if token_type != Whitespace:
-                    token_count += 1
-                    self.assertEqual(
-                        token_type, expected_token_type,
-                        'token_type #%d for %s is be %s but must be %s' %
-                        (test_number, token_value, token_type, expected_token_type))
-            self.assertEqual(
-                token_count, 1,
-                '%s must yield exactly 1 token instead of %d' %
-                (example, token_count))
-
-    def _assertTokensMatch(self, text, expected_tokens_without_trailing_newline):
-        actual_tokens = tuple(self.lexer.get_tokens(text))
-        if (len(actual_tokens) >= 1) and (actual_tokens[-1] == (Whitespace, '\n')):
-            actual_tokens = tuple(actual_tokens[:-1])
-        self.assertEqual(
-            expected_tokens_without_trailing_newline, actual_tokens,
-            'text must yield expected tokens: %s' % text)
-
-    def test_can_lex_float(self):
-        self._assertAreTokensOfType(
-            '1. 1.e1 .1 1.2 1.2e3 1.2e+3 1.2e-3 1e2', Number.Float)
-        self._assertTokensMatch(
-            '1e2.1e2',
-            ((Number.Float, '1e2'), (Number.Float, '.1e2'))
+@pytest.fixture(scope='module')
+def lexer():
+    yield TransactSqlLexer()
+
+
+def _assert_are_tokens_of_type(lexer, examples, expected_token_type):
+    for test_number, example in enumerate(examples.split(), 1):
+        token_count = 0
+        for token_type, token_value in lexer.get_tokens(example):
+            if token_type != Whitespace:
+                token_count += 1
+                assert token_type == expected_token_type, \
+                    'token_type #%d for %s is %s but must be %s' % \
+                    (test_number, token_value, token_type, expected_token_type)
+        assert token_count == 1, \
+            '%s must yield exactly 1 token instead of %d' % \
+            (example, token_count)
+
+
+def _assert_tokens_match(lexer, text, expected_tokens_without_trailing_newline):
+    actual_tokens = tuple(lexer.get_tokens(text))
+    if (len(actual_tokens) >= 1) and (actual_tokens[-1] == (Whitespace, '\n')):
+        actual_tokens = tuple(actual_tokens[:-1])
+    assert expected_tokens_without_trailing_newline == actual_tokens, \
+        'text must yield expected tokens: %s' % text
+
+
+def test_can_lex_float(lexer):
+    _assert_are_tokens_of_type(lexer,
+                               '1. 1.e1 .1 1.2 1.2e3 1.2e+3 1.2e-3 1e2',
+                               Number.Float)
+    _assert_tokens_match(lexer,
+                         '1e2.1e2',
+                         ((Number.Float, '1e2'), (Number.Float, '.1e2')))
+
+
+def test_can_reject_almost_float(lexer):
+    _assert_tokens_match(lexer, '.e1', ((Punctuation, '.'), (Name, 'e1')))
+
+
+def test_can_lex_integer(lexer):
+    _assert_are_tokens_of_type(lexer, '1 23 456', Number.Integer)
+
+
+def test_can_lex_names(lexer):
+    _assert_are_tokens_of_type(lexer,
+                               u'thingy thingy123 _thingy _ _123 Ähnliches Müll #temp1 ##temp2',
+                               Name)
+
+
+def test_can_lex_comments(lexer):
+    _assert_tokens_match(lexer, '--\n', ((Comment.Single, '--\n'),))
+    _assert_tokens_match(lexer, '/**/', (
+        (Comment.Multiline, '/*'), (Comment.Multiline, '*/')
+    ))
+    _assert_tokens_match(lexer, '/*/**/*/', (
+        (Comment.Multiline, '/*'),
+        (Comment.Multiline, '/*'),
+        (Comment.Multiline, '*/'),
+        (Comment.Multiline, '*/'),
+    ))
+
+
+def test_can_match_analyze_text_res():
+    assert ['`a`', '`bc`'] == \
+        name_between_backtick_re.findall('select `a`, `bc` from some')
+    assert ['[a]', '[bc]'] == \
+        name_between_bracket_re.findall('select [a], [bc] from some')
+    assert tsql_declare_re.search('--\nDeClaRe @some int;')
+    assert tsql_go_re.search('select 1\ngo\n--')
+    assert tsql_variable_re.search('create procedure dbo.usp_x @a int, @b int')
+
+
+def test_can_analyze_text():
+    mysql_lexer = MySqlLexer()
+    sql_lexer = SqlLexer()
+    tsql_lexer = TransactSqlLexer()
+    code_to_expected_lexer_map = {
+        'select `a`, `bc` from some': mysql_lexer,
+        'select a, bc from some': sql_lexer,
+        'select [a], [bc] from some': tsql_lexer,
+        '-- `a`, `bc`\nselect [a], [bc] from some': tsql_lexer,
+        '-- `a`, `bc`\nselect [a], [bc] from some; go': tsql_lexer,
+    }
+    sql_lexers = set(code_to_expected_lexer_map.values())
+    for code, expected_lexer in code_to_expected_lexer_map.items():
+        ratings_and_lexers = list((lexer.analyse_text(code), lexer.name) for lexer in sql_lexers)
+        best_rating, best_lexer_name = sorted(ratings_and_lexers, reverse=True)[0]
+        expected_rating = expected_lexer.analyse_text(code)
+        message = (
+            'lexer must be %s (rating %.2f) instead of '
+            '%s (rating %.2f) for analyse_text() on code:\n%s') % (
+            expected_lexer.name,
+            expected_rating,
+            best_lexer_name,
+            best_rating,
+            code
         )
-
-    def test_can_reject_almost_float(self):
-        self._assertTokensMatch(
-            '.e1',
-            ((Punctuation, '.'), (Name, 'e1')))
-
-    def test_can_lex_integer(self):
-        self._assertAreTokensOfType(
-            '1 23 456', Number.Integer)
-
-    def test_can_lex_names(self):
-        self._assertAreTokensOfType(
-            u'thingy thingy123 _thingy _ _123 Ähnliches Müll #temp1 ##temp2', Name)
-
-    def test_can_lex_comments(self):
-        self._assertTokensMatch('--\n', ((Comment.Single, '--\n'),))
-        self._assertTokensMatch('/**/', (
-            (Comment.Multiline, '/*'), (Comment.Multiline, '*/')
-        ))
-        self._assertTokensMatch('/*/**/*/', (
-            (Comment.Multiline, '/*'),
-            (Comment.Multiline, '/*'),
-            (Comment.Multiline, '*/'),
-            (Comment.Multiline, '*/'),
-        ))
-
-
-class SqlAnalyzeTextTest(unittest.TestCase):
-    def test_can_match_analyze_text_res(self):
-        self.assertEqual(['`a`', '`bc`'],
-            name_between_backtick_re.findall('select `a`, `bc` from some'))
-        self.assertEqual(['[a]', '[bc]'],
-            name_between_bracket_re.findall('select [a], [bc] from some'))
-        self.assertTrue(tsql_declare_re.search('--\nDeClaRe @some int;'))
-        self.assertTrue(tsql_go_re.search('select 1\ngo\n--'))
-        self.assertTrue(tsql_variable_re.search(
-            'create procedure dbo.usp_x @a int, @b int'))
-
-    def test_can_analyze_text(self):
-        mysql_lexer = MySqlLexer()
-        sql_lexer = SqlLexer()
-        tsql_lexer = TransactSqlLexer()
-        code_to_expected_lexer_map = {
-            'select `a`, `bc` from some': mysql_lexer,
-            'select a, bc from some': sql_lexer,
-            'select [a], [bc] from some': tsql_lexer,
-            '-- `a`, `bc`\nselect [a], [bc] from some': tsql_lexer,
-            '-- `a`, `bc`\nselect [a], [bc] from some; go': tsql_lexer,
-        }
-        sql_lexers = set(code_to_expected_lexer_map.values())
-        for code, expected_lexer in code_to_expected_lexer_map.items():
-            ratings_and_lexers = list((lexer.analyse_text(code), lexer.name) for lexer in sql_lexers)
-            best_rating, best_lexer_name  = sorted(ratings_and_lexers, reverse=True)[0]
-            expected_rating = expected_lexer.analyse_text(code)
-            message = (
-                'lexer must be %s (rating %.2f) instead of '
-                '%s (rating %.2f) for analyse_text() on code:\n%s') % (
-                expected_lexer.name,
-                expected_rating,
-                best_lexer_name,
-                best_rating,
-                code
-            )
-            self.assertEqual(
-                expected_lexer.name, best_lexer_name, message
-            )
+        assert expected_lexer.name == best_lexer_name, message
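The helper methods above become plain functions taking the lexer fixture explicitly, and test_can_analyze_text picks the lexer whose analyse_text() rating is highest. A hedged sketch of that ranking logic in isolation (the expected winner is stated as an expectation, not a guarantee):

from pygments.lexers.sql import MySqlLexer, SqlLexer, TransactSqlLexer

code = 'select [a], [bc] from some'
candidates = [MySqlLexer(), SqlLexer(), TransactSqlLexer()]
# analyse_text() returns a confidence rating; the highest-rated lexer wins
best = max(candidates, key=lambda lx: lx.analyse_text(code))
print(best.name)  # bracketed identifiers should favour the Transact-SQL lexer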
diff --git a/tests/test_string_asserts.py b/tests/test_string_asserts.py
deleted file mode 100644 (file)
index 737ba20..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    Pygments string assert utility tests
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import unittest
-from string_asserts import StringTests
-
-class TestStringTests(StringTests, unittest.TestCase):
-
-    def test_startswith_correct(self):
-        self.assertStartsWith("AAA", "A")
-
-    # @unittest.expectedFailure not supported by nose
-    def test_startswith_incorrect(self):
-        self.assertRaises(AssertionError, self.assertStartsWith, "AAA", "B")
-
-    # @unittest.expectedFailure not supported by nose
-    def test_startswith_short(self):
-        self.assertRaises(AssertionError, self.assertStartsWith, "A", "AA")
-
-    def test_endswith_correct(self):
-        self.assertEndsWith("AAA", "A")
-
-    # @unittest.expectedFailure not supported by nose
-    def test_endswith_incorrect(self):
-        self.assertRaises(AssertionError, self.assertEndsWith, "AAA", "B")
-
-    # @unittest.expectedFailure not supported by nose
-    def test_endswith_short(self):
-        self.assertRaises(AssertionError, self.assertEndsWith, "A", "AA")
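tests/test_string_asserts.py only exercised the nose-era StringTests mixin, which is no longer needed: under pytest the same checks are plain assertions on str methods, roughly as sketched here (illustrative only, not part of the patch):

def test_startswith_and_endswith():
    # plain assert plus str methods replace the deleted assertStartsWith/assertEndsWith helpers
    assert "AAA".startswith("A")
    assert "AAA".endswith("A")
    assert not "A".startswith("AA")
    assert not "AAA".endswith("B")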
index e5a1343190f6f200f8327d748bd390e903c8e317..98eab581df058c888f6611a41d00c2660e73ddfb 100644 (file)
@@ -9,7 +9,6 @@
 
 from __future__ import print_function
 
-import unittest
 import re
 
 from pygments.util import StringIO
@@ -37,26 +36,26 @@ def strip_ansi(x):
     return ANSI_RE.sub('', x)
 
 
-class TerminalFormatterTest(unittest.TestCase):
-    def test_reasonable_output(self):
-        out = StringIO()
-        TerminalFormatter().format(DEMO_TOKENS, out)
-        plain = strip_ansi(out.getvalue())
-        self.assertEqual(DEMO_TEXT.count('\n'), plain.count('\n'))
-        print(repr(plain))
+def test_reasonable_output():
+    out = StringIO()
+    TerminalFormatter().format(DEMO_TOKENS, out)
+    plain = strip_ansi(out.getvalue())
+    assert DEMO_TEXT.count('\n') == plain.count('\n')
+    print(repr(plain))
 
-        for a, b in zip(DEMO_TEXT.splitlines(), plain.splitlines()):
-            self.assertEqual(a, b)
+    for a, b in zip(DEMO_TEXT.splitlines(), plain.splitlines()):
+        assert a == b
 
-    def test_reasonable_output_lineno(self):
-        out = StringIO()
-        TerminalFormatter(linenos=True).format(DEMO_TOKENS, out)
-        plain = strip_ansi(out.getvalue())
-        self.assertEqual(DEMO_TEXT.count('\n') + 1, plain.count('\n'))
-        print(repr(plain))
 
-        for a, b in zip(DEMO_TEXT.splitlines(), plain.splitlines()):
-            self.assertTrue(a in b)
+def test_reasonable_output_lineno():
+    out = StringIO()
+    TerminalFormatter(linenos=True).format(DEMO_TOKENS, out)
+    plain = strip_ansi(out.getvalue())
+    assert DEMO_TEXT.count('\n') + 1 == plain.count('\n')
+    print(repr(plain))
+
+    for a, b in zip(DEMO_TEXT.splitlines(), plain.splitlines()):
+        assert a in b
 
 
 class MyStyle(Style):
@@ -68,8 +67,7 @@ class MyStyle(Style):
     }
 
 
-class Terminal256FormatterTest(unittest.TestCase):
-    code = '''
+CODE = '''
 # this should be a comment
 print("Hello World")
 async def function(a,b,c, *d, **kwarg:Bool)->Bool:
@@ -78,25 +76,28 @@ async def function(a,b,c, *d, **kwarg:Bool)->Bool:
 
 '''
 
-    def test_style_html(self):
-        style = HtmlFormatter(style=MyStyle).get_style_defs()
-        self.assertTrue('#555555' in style,
-                        "ansigray for comment not html css style")
-
-    def test_others_work(self):
-        """check other formatters don't crash"""
-        highlight(self.code, Python3Lexer(), LatexFormatter(style=MyStyle))
-        highlight(self.code, Python3Lexer(), HtmlFormatter(style=MyStyle))
-
-    def test_256esc_seq(self):
-        """
-        test that a few escape sequences are actually used when using ansi<> color codes
-        """
-        def termtest(x):
-            return highlight(x, Python3Lexer(),
-                             Terminal256Formatter(style=MyStyle))
-
-        self.assertTrue('32;101' in termtest('0x123'))
-        self.assertTrue('92;42' in termtest('123'))
-        self.assertTrue('90' in termtest('#comment'))
-        self.assertTrue('94;41' in termtest('"String"'))
+
+def test_style_html():
+    style = HtmlFormatter(style=MyStyle).get_style_defs()
+    assert '#555555' in style, "ansigray for comment not in HTML CSS style"
+
+
+def test_others_work():
+    """Check other formatters don't crash."""
+    highlight(CODE, Python3Lexer(), LatexFormatter(style=MyStyle))
+    highlight(CODE, Python3Lexer(), HtmlFormatter(style=MyStyle))
+
+
+def test_256esc_seq():
+    """
+    Test that a few escape sequences are actually used when using ansi<> color
+    codes.
+    """
+    def termtest(x):
+        return highlight(x, Python3Lexer(),
+                         Terminal256Formatter(style=MyStyle))
+
+    assert '32;101' in termtest('0x123')
+    assert '92;42' in termtest('123')
+    assert '90' in termtest('#comment')
+    assert '94;41' in termtest('"String"')
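For context, the escape-sequence checks above come from running highlight() with a 256-colour terminal formatter; a small hedged sketch using the default style rather than the MyStyle class defined in this test file:

from pygments import highlight
from pygments.lexers import Python3Lexer
from pygments.formatters import Terminal256Formatter

# the formatter wraps tokens in ANSI escape sequences (ESC [ ... m)
out = highlight('print("hi")  # a comment\n', Python3Lexer(),
                Terminal256Formatter())
assert '\x1b[' in out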
index 8a1b8edac612248fabeea7317c5d40bd8b79b3c5..5f36900716647d2fafa97d32dfc6b669840ae9b8 100644 (file)
@@ -7,35 +7,32 @@
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
-from pygments.token import Operator, Number, Text, Token
+from pygments.token import Token
 from pygments.lexers.textfmts import HttpLexer
 
 
-class RubyTest(unittest.TestCase):
-
-    def setUp(self):
-        self.lexer = HttpLexer()
-        self.maxDiff = None
-
-    def testApplicationXml(self):
-        fragment = u'GET / HTTP/1.0\nContent-Type: application/xml\n\n<foo>\n'
-        tokens = [
-            (Token.Name.Tag, u'<foo'),
-            (Token.Name.Tag, u'>'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(
-            tokens, list(self.lexer.get_tokens(fragment))[-len(tokens):])
-
-    def testApplicationCalendarXml(self):
-        fragment = u'GET / HTTP/1.0\nContent-Type: application/calendar+xml\n\n<foo>\n'
-        tokens = [
-            (Token.Name.Tag, u'<foo'),
-            (Token.Name.Tag, u'>'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(
-            tokens, list(self.lexer.get_tokens(fragment))[-len(tokens):])
+@pytest.fixture(scope='module')
+def lexer():
+    yield HttpLexer()
 
+
+def test_application_xml(lexer):
+    fragment = u'GET / HTTP/1.0\nContent-Type: application/xml\n\n<foo>\n'
+    tokens = [
+        (Token.Name.Tag, u'<foo'),
+        (Token.Name.Tag, u'>'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment))[-len(tokens):] == tokens
+
+
+def test_application_calendar_xml(lexer):
+    fragment = u'GET / HTTP/1.0\nContent-Type: application/calendar+xml\n\n<foo>\n'
+    tokens = [
+        (Token.Name.Tag, u'<foo'),
+        (Token.Name.Tag, u'>'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment))[-len(tokens):] == tokens
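The HTTP lexer tests compare only the tail of the token stream, because the request line and headers before the XML body are not what is under test; the slicing trick in isolation:

from pygments.lexers.textfmts import HttpLexer
from pygments.token import Token

fragment = u'GET / HTTP/1.0\nContent-Type: application/xml\n\n<foo>\n'
tokens = list(HttpLexer().get_tokens(fragment))
# slicing from the end keeps the check independent of the header tokens
assert tokens[-3] == (Token.Name.Tag, u'<foo')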
index fdbcabd14f8bdb74f80e967fafd91b2faee5e9e0..11e4d375560c9741332bd1be6321a4e0f3112733 100644 (file)
@@ -8,47 +8,45 @@
 """
 
 import copy
-import unittest
 
-from pygments import token
+import pytest
 
+from pygments import token
 
-class TokenTest(unittest.TestCase):
 
-    def test_tokentype(self):
-        e = self.assertEqual
+def test_tokentype():
+    t = token.String
+    assert t.split() == [token.Token, token.Literal, token.String]
+    assert t.__class__ is token._TokenType
 
-        t = token.String
 
-        e(t.split(), [token.Token, token.Literal, token.String])
+def test_functions():
+    assert token.is_token_subtype(token.String, token.String)
+    assert token.is_token_subtype(token.String, token.Literal)
+    assert not token.is_token_subtype(token.Literal, token.String)
 
-        e(t.__class__, token._TokenType)
+    assert token.string_to_tokentype(token.String) is token.String
+    assert token.string_to_tokentype('') is token.Token
+    assert token.string_to_tokentype('String') is token.String
 
-    def test_functions(self):
-        self.assertTrue(token.is_token_subtype(token.String, token.String))
-        self.assertTrue(token.is_token_subtype(token.String, token.Literal))
-        self.assertFalse(token.is_token_subtype(token.Literal, token.String))
 
-        self.assertTrue(token.string_to_tokentype(token.String) is token.String)
-        self.assertTrue(token.string_to_tokentype('') is token.Token)
-        self.assertTrue(token.string_to_tokentype('String') is token.String)
+def test_sanity_check():
+    stp = token.STANDARD_TYPES.copy()
+    stp[token.Token] = '---'  # Token and Text do conflict, that is okay
+    t = {}
+    for k, v in stp.items():
+        t.setdefault(v, []).append(k)
+    if len(t) == len(stp):
+        return  # Okay
 
-    def test_sanity_check(self):
-        stp = token.STANDARD_TYPES.copy()
-        stp[token.Token] = '---' # Token and Text do conflict, that is okay
-        t = {}
-        for k, v in stp.items():
-            t.setdefault(v, []).append(k)
-        if len(t) == len(stp):
-            return # Okay
+    for k, v in t.items():
+        if len(v) > 1:
+            pytest.fail("%r has more than one key: %r" % (k, v))
 
-        for k, v in t.items():
-            if len(v) > 1:
-                self.fail("%r has more than one key: %r" % (k, v))
 
-    def test_copying(self):
-        # Token instances are supposed to be singletons, so copying or even
-        # deepcopying should return themselves
-        t = token.String
-        self.assertIs(t, copy.copy(t))
-        self.assertIs(t, copy.deepcopy(t))
+def test_copying():
+    # Token instances are supposed to be singletons, so copying or even
+    # deepcopying should return themselves
+    t = token.String
+    assert t is copy.copy(t)
+    assert t is copy.deepcopy(t)
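Token types are lazily created singletons, which is why the rewritten tests can use `is` comparisons and why copying returns the same object; a short sketch:

import copy
from pygments.token import Token, Literal, String, is_token_subtype

# attribute access creates (and caches) subtypes, so identity comparisons hold
assert String is Token.Literal.String
assert String.Double is Token.Literal.String.Double
assert is_token_subtype(String.Double, Literal)
assert copy.deepcopy(String) is String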
index 82d74ed6ad4a4cebdec02ba166c596582b7e2ab3..a4b58827ce295f259876de454aae8380bc0d8444 100644 (file)
@@ -8,41 +8,40 @@
 """
 
 import re
-import unittest
 import random
 
 from pygments import unistring as uni
 from pygments.util import unichr
 
 
-class UnistringTest(unittest.TestCase):
-    def test_cats_exist_and_compilable(self):
-        for cat in uni.cats:
-            s = getattr(uni, cat)
-            if s == '':  # Probably Cs on Jython
-                continue
-            print("%s %r" % (cat, s))
-            re.compile('[%s]' % s)
-
-    def _cats_that_match(self, c):
-        matching_cats = []
-        for cat in uni.cats:
-            s = getattr(uni, cat)
-            if s == '':  # Probably Cs on Jython
-                continue
-            if re.compile('[%s]' % s).match(c):
-                matching_cats.append(cat)
-        return matching_cats
-
-    def test_spot_check_types(self):
-        # Each char should match one, and precisely one, category
-        random.seed(0)
-        for i in range(1000):
-            o = random.randint(0, 65535)
-            c = unichr(o)
-            if o > 0xd800 and o <= 0xdfff and not uni.Cs:
-                continue  # Bah, Jython.
-            print(hex(o))
-            cats = self._cats_that_match(c)
-            self.assertEqual(len(cats), 1,
-                             "%d (%s): %s" % (o, c, cats))
+def test_cats_exist_and_compilable():
+    for cat in uni.cats:
+        s = getattr(uni, cat)
+        if s == '':  # Probably Cs on Jython
+            continue
+        print("%s %r" % (cat, s))
+        re.compile('[%s]' % s)
+
+
+def _cats_that_match(c):
+    matching_cats = []
+    for cat in uni.cats:
+        s = getattr(uni, cat)
+        if s == '':  # Probably Cs on Jython
+            continue
+        if re.compile('[%s]' % s).match(c):
+            matching_cats.append(cat)
+    return matching_cats
+
+
+def test_spot_check_types():
+    # Each char should match one, and precisely one, category
+    random.seed(0)
+    for i in range(1000):
+        o = random.randint(0, 65535)
+        c = unichr(o)
+        if o > 0xd800 and o <= 0xdfff and not uni.Cs:
+            continue  # Bah, Jython.
+        print(hex(o))
+        cats = _cats_that_match(c)
+        assert len(cats) == 1, "%d (%s): %s" % (o, c, cats)
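pygments.unistring exposes one ready-to-embed character class per Unicode category (uni.Lu, uni.Ll, ...), which is what the compilability test above relies on. A hedged usage sketch; the identifier-like pattern is illustrative, not one Pygments itself uses:

import re
from pygments import unistring as uni

# the category strings are designed to be dropped into [...] character classes
word = re.compile(u'[%s%s][%s%s0-9_]*' % (uni.Lu, uni.Ll, uni.Lu, uni.Ll))
assert word.match(u'Ähnliches')
assert not word.match(u'#temp1')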
index 2ab70d098b3e042f2feea023b9d6f7c61107bc9b..b5310aa8ef0f1d8336e36f541224f3c0df34d47d 100644 (file)
@@ -7,12 +7,13 @@
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+from pytest import raises
 
 from pygments.lexer import using, bygroups, this, RegexLexer
 from pygments.token import String, Text, Keyword
 
-class TestLexer(RegexLexer):
+
+class MyLexer(RegexLexer):
     tokens = {
         'root': [
             (r'#.*',
@@ -27,14 +28,13 @@ class TestLexer(RegexLexer):
     }
 
 
-class UsingStateTest(unittest.TestCase):
-    def test_basic(self):
-        expected = [(Text, 'a'), (String, '"'), (Keyword, 'bcd'),
-                    (String, '"'), (Text, 'e\n')]
-        t = list(TestLexer().get_tokens('a"bcd"e'))
-        self.assertEqual(t, expected)
+def test_basic():
+    expected = [(Text, 'a'), (String, '"'), (Keyword, 'bcd'),
+                (String, '"'), (Text, 'e\n')]
+    assert list(MyLexer().get_tokens('a"bcd"e')) == expected
+
 
-    def test_error(self):
-        def gen():
-            return list(TestLexer().get_tokens('#a'))
-        self.assertRaises(KeyError, gen)
+def test_error():
+    def gen():
+        return list(MyLexer().get_tokens('#a'))
+    assert raises(KeyError, gen)
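`assert raises(KeyError, gen)` relies on the call form of pytest.raises, which returns a truthy ExceptionInfo when the expected exception is raised; the context-manager form is the more common pytest idiom, sketched here with a stand-in body rather than the MyLexer defined in this file:

import pytest

def test_error_context_manager_style():
    # the enclosed block must raise KeyError for the test to pass
    with pytest.raises(KeyError):
        {}['missing']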
index 646a403b8e60a6b757881bda1ab18b2121edac1a..aa7b7acb899b3256e82f2151d89c76fea641145f 100644 (file)
@@ -8,7 +8,8 @@
 """
 
 import re
-import unittest
+
+from pytest import raises
 
 from pygments import util, console
 
@@ -19,195 +20,201 @@ class FakeLexer(object):
     analyse = util.make_analysator(analyse)
 
 
-class UtilTest(unittest.TestCase):
-
-    def test_getoptions(self):
-        raises = self.assertRaises
-        equals = self.assertEqual
-
-        equals(util.get_bool_opt({}, 'a', True), True)
-        equals(util.get_bool_opt({}, 'a', 1), True)
-        equals(util.get_bool_opt({}, 'a', 'true'), True)
-        equals(util.get_bool_opt({}, 'a', 'no'), False)
-        raises(util.OptionError, util.get_bool_opt, {}, 'a', [])
-        raises(util.OptionError, util.get_bool_opt, {}, 'a', 'foo')
-
-        equals(util.get_int_opt({}, 'a', 1), 1)
-        raises(util.OptionError, util.get_int_opt, {}, 'a', [])
-        raises(util.OptionError, util.get_int_opt, {}, 'a', 'bar')
-
-        equals(util.get_list_opt({}, 'a', [1]), [1])
-        equals(util.get_list_opt({}, 'a', '1 2'), ['1', '2'])
-        raises(util.OptionError, util.get_list_opt, {}, 'a', 1)
-
-        equals(util.get_choice_opt({}, 'a', ['foo', 'bar'], 'bar'), 'bar')
-        equals(util.get_choice_opt({}, 'a', ['foo', 'bar'], 'Bar', True), 'bar')
-        raises(util.OptionError, util.get_choice_opt, {}, 'a',
-               ['foo', 'bar'], 'baz')
-
-    def test_docstring_headline(self):
-        def f1():
-            """
-            docstring headline
-
-            other text
-            """
-        def f2():
-            """
-            docstring
-            headline
-
-            other text
-            """
-        def f3():
-            pass
-
-        self.assertEqual(util.docstring_headline(f1), 'docstring headline')
-        self.assertEqual(util.docstring_headline(f2), 'docstring headline')
-        self.assertEqual(util.docstring_headline(f3), '')
-
-    def test_analysator_returns_float(self):
-        # If an analysator wrapped by make_analysator returns a floating point
-        # number, then that number will be returned by the wrapper.
-        self.assertEqual(FakeLexer.analyse('0.5'), 0.5)
-
-    def test_analysator_returns_boolean(self):
-        # If an analysator wrapped by make_analysator returns a boolean value,
-        # then the wrapper will return 1.0 if the boolean was True or 0.0 if
-        # it was False.
-        self.assertEqual(FakeLexer.analyse(True), 1.0)
-        self.assertEqual(FakeLexer.analyse(False), 0.0)
-
-    def test_analysator_raises_exception(self):
-        # If an analysator wrapped by make_analysator raises an exception,
-        # then the wrapper will return 0.0.
-        class ErrorLexer(object):
-            def analyse(text):
-                raise RuntimeError('something bad happened')
-            analyse = util.make_analysator(analyse)
-        self.assertEqual(ErrorLexer.analyse(''), 0.0)
-
-    def test_analysator_value_error(self):
-        # When converting the analysator's return value to a float a
-        # ValueError may occur.  If that happens 0.0 is returned instead.
-        self.assertEqual(FakeLexer.analyse('bad input'), 0.0)
-
-    def test_analysator_type_error(self):
-        # When converting the analysator's return value to a float a
-        # TypeError may occur.  If that happens 0.0 is returned instead.
-        self.assertEqual(FakeLexer.analyse('xxx'), 0.0)
-
-    def test_shebang_matches(self):
-        self.assertTrue(util.shebang_matches('#!/usr/bin/env python\n', r'python(2\.\d)?'))
-        self.assertTrue(util.shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?'))
-        self.assertTrue(util.shebang_matches('#!/usr/bin/startsomethingwith python',
-                                             r'python(2\.\d)?'))
-        self.assertTrue(util.shebang_matches('#!C:\\Python2.4\\Python.exe',
-                                             r'python(2\.\d)?'))
-
-        self.assertFalse(util.shebang_matches('#!/usr/bin/python-ruby',
-                                              r'python(2\.\d)?'))
-        self.assertFalse(util.shebang_matches('#!/usr/bin/python/ruby',
-                                              r'python(2\.\d)?'))
-        self.assertFalse(util.shebang_matches('#!', r'python'))
-
-    def test_doctype_matches(self):
-        self.assertTrue(util.doctype_matches(
-            '<!DOCTYPE html> <html>', 'html.*'))
-        self.assertFalse(util.doctype_matches(
-            '<?xml ?> <DOCTYPE html PUBLIC "a"> <html>', 'html.*'))
-        self.assertTrue(util.html_doctype_matches(
-            '<?xml ?><!DOCTYPE html PUBLIC  "-//W3C//DTD XHTML 1.0 Strict//EN">'))
-
-    def test_xml(self):
-        self.assertTrue(util.looks_like_xml(
-            '<?xml ?><!DOCTYPE html PUBLIC  "-//W3C//DTD XHTML 1.0 Strict//EN">'))
-        self.assertTrue(util.looks_like_xml('<html xmlns>abc</html>'))
-        self.assertFalse(util.looks_like_xml('<html>'))
-
-    def test_unirange(self):
-        first_non_bmp = u'\U00010000'
-        r = re.compile(util.unirange(0x10000, 0x20000))
-        m = r.match(first_non_bmp)
-        self.assertTrue(m)
-        self.assertEqual(m.end(), len(first_non_bmp))
-        self.assertFalse(r.match(u'\uffff'))
-        self.assertFalse(r.match(u'xxx'))
-        # Tests that end is inclusive
-        r = re.compile(util.unirange(0x10000, 0x10000) + '+')
-        # Tests that the plus works for the entire unicode point, if narrow
-        # build
-        m = r.match(first_non_bmp * 2)
-        self.assertTrue(m)
-        self.assertEqual(m.end(), len(first_non_bmp) * 2)
-
-    def test_format_lines(self):
-        lst = ['cat', 'dog']
-        output = util.format_lines('var', lst)
-        d = {}
-        exec(output, d)
-        self.assertTrue(isinstance(d['var'], tuple))
-        self.assertEqual(('cat', 'dog'), d['var'])
-
-    def test_duplicates_removed_seq_types(self):
-        # tuple
-        x = util.duplicates_removed(('a', 'a', 'b'))
-        self.assertEqual(['a', 'b'], x)
-        # list
-        x = util.duplicates_removed(['a', 'a', 'b'])
-        self.assertEqual(['a', 'b'], x)
-        # iterator
-        x = util.duplicates_removed(iter(('a', 'a', 'b')))
-        self.assertEqual(['a', 'b'], x)
-
-    def test_duplicates_removed_nonconsecutive(self):
-        # keeps first
-        x = util.duplicates_removed(('a', 'b', 'a'))
-        self.assertEqual(['a', 'b'], x)
-
-    def test_guess_decode(self):
-        # UTF-8 should be decoded as UTF-8
-        s = util.guess_decode(u'\xff'.encode('utf-8'))
-        self.assertEqual(s, (u'\xff', 'utf-8'))
-
-        # otherwise, it could be latin1 or the locale encoding...
-        import locale
-        s = util.guess_decode(b'\xff')
-        self.assertTrue(s[1] in ('latin1', locale.getpreferredencoding()))
-
-    def test_guess_decode_from_terminal(self):
-        class Term:
-            encoding = 'utf-7'
-
-        s = util.guess_decode_from_terminal(u'\xff'.encode('utf-7'), Term)
-        self.assertEqual(s, (u'\xff', 'utf-7'))
-
-        s = util.guess_decode_from_terminal(u'\xff'.encode('utf-8'), Term)
-        self.assertEqual(s, (u'\xff', 'utf-8'))
-
-    def test_add_metaclass(self):
-        class Meta(type):
-            pass
-
-        @util.add_metaclass(Meta)
-        class Cls:
-            pass
-
-        self.assertEqual(type(Cls), Meta)
-
-
-class ConsoleTest(unittest.TestCase):
-
-    def test_ansiformat(self):
-        f = console.ansiformat
-        c = console.codes
-        all_attrs = f('+*_blue_*+', 'text')
-        self.assertTrue(c['blue'] in all_attrs and c['blink'] in all_attrs
-                        and c['bold'] in all_attrs and c['underline'] in all_attrs
-                        and c['reset'] in all_attrs)
-        self.assertRaises(KeyError, f, '*mauve*', 'text')
-
-    def test_functions(self):
-        self.assertEqual(console.reset_color(), console.codes['reset'])
-        self.assertEqual(console.colorize('blue', 'text'),
-                         console.codes['blue'] + 'text' + console.codes['reset'])
+def test_getoptions():
+    assert util.get_bool_opt({}, 'a', True) is True
+    assert util.get_bool_opt({}, 'a', 1) is True
+    assert util.get_bool_opt({}, 'a', 'true') is True
+    assert util.get_bool_opt({}, 'a', 'no') is False
+    assert raises(util.OptionError, util.get_bool_opt, {}, 'a', [])
+    assert raises(util.OptionError, util.get_bool_opt, {}, 'a', 'foo')
+
+    assert util.get_int_opt({}, 'a', 1) == 1
+    assert raises(util.OptionError, util.get_int_opt, {}, 'a', [])
+    assert raises(util.OptionError, util.get_int_opt, {}, 'a', 'bar')
+
+    assert util.get_list_opt({}, 'a', [1]) == [1]
+    assert util.get_list_opt({}, 'a', '1 2') == ['1', '2']
+    assert raises(util.OptionError, util.get_list_opt, {}, 'a', 1)
+
+    assert util.get_choice_opt({}, 'a', ['foo', 'bar'], 'bar') == 'bar'
+    assert util.get_choice_opt({}, 'a', ['foo', 'bar'], 'Bar', True) == 'bar'
+    assert raises(util.OptionError, util.get_choice_opt, {}, 'a',
+                  ['foo', 'bar'], 'baz')
+
+
+def test_docstring_headline():
+    def f1():
+        """
+        docstring headline
+
+        other text
+        """
+    def f2():
+        """
+        docstring
+        headline
+
+        other text
+        """
+    def f3():
+        pass
+
+    assert util.docstring_headline(f1) == 'docstring headline'
+    assert util.docstring_headline(f2) == 'docstring headline'
+    assert util.docstring_headline(f3) == ''
+
+
+def test_analysator_returns_float():
+    # If an analysator wrapped by make_analysator returns a floating point
+    # number, then that number will be returned by the wrapper.
+    assert FakeLexer.analyse('0.5') == 0.5
+
+
+def test_analysator_returns_boolean():
+    # If an analysator wrapped by make_analysator returns a boolean value,
+    # then the wrapper will return 1.0 if the boolean was True or 0.0 if
+    # it was False.
+    assert FakeLexer.analyse(True) == 1.0
+    assert FakeLexer.analyse(False) == 0.0
+
+
+def test_analysator_raises_exception():
+    # If an analysator wrapped by make_analysator raises an exception,
+    # then the wrapper will return 0.0.
+    class ErrorLexer(object):
+        def analyse(text):
+            raise RuntimeError('something bad happened')
+        analyse = util.make_analysator(analyse)
+    assert ErrorLexer.analyse('') == 0.0
+
+
+def test_analysator_value_error():
+    # When converting the analysator's return value to a float a
+    # ValueError may occur.  If that happens 0.0 is returned instead.
+    assert FakeLexer.analyse('bad input') == 0.0
+
+
+def test_analysator_type_error():
+    # When converting the analysator's return value to a float a
+    # TypeError may occur.  If that happens 0.0 is returned instead.
+    assert FakeLexer.analyse('xxx') == 0.0
+
+
+def test_shebang_matches():
+    assert util.shebang_matches('#!/usr/bin/env python\n', r'python(2\.\d)?')
+    assert util.shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?')
+    assert util.shebang_matches('#!/usr/bin/startsomethingwith python',
+                                r'python(2\.\d)?')
+    assert util.shebang_matches('#!C:\\Python2.4\\Python.exe', r'python(2\.\d)?')
+
+    assert not util.shebang_matches('#!/usr/bin/python-ruby', r'python(2\.\d)?')
+    assert not util.shebang_matches('#!/usr/bin/python/ruby', r'python(2\.\d)?')
+    assert not util.shebang_matches('#!', r'python')
+
+
+def test_doctype_matches():
+    assert util.doctype_matches('<!DOCTYPE html> <html>', 'html.*')
+    assert not util.doctype_matches(
+        '<?xml ?> <DOCTYPE html PUBLIC "a"> <html>', 'html.*')
+    assert util.html_doctype_matches(
+        '<?xml ?><!DOCTYPE html PUBLIC  "-//W3C//DTD XHTML 1.0 Strict//EN">')
+
+
+def test_xml():
+    assert util.looks_like_xml(
+        '<?xml ?><!DOCTYPE html PUBLIC  "-//W3C//DTD XHTML 1.0 Strict//EN">')
+    assert util.looks_like_xml('<html xmlns>abc</html>')
+    assert not util.looks_like_xml('<html>')
+
+
+def test_unirange():
+    first_non_bmp = u'\U00010000'
+    r = re.compile(util.unirange(0x10000, 0x20000))
+    m = r.match(first_non_bmp)
+    assert m
+    assert m.end() == len(first_non_bmp)
+    assert not r.match(u'\uffff')
+    assert not r.match(u'xxx')
+    # Tests that end is inclusive
+    r = re.compile(util.unirange(0x10000, 0x10000) + '+')
+    # Tests that the plus works for the entire unicode point, if narrow
+    # build
+    m = r.match(first_non_bmp * 2)
+    assert m
+    assert m.end() == len(first_non_bmp) * 2
+
+
+def test_format_lines():
+    lst = ['cat', 'dog']
+    output = util.format_lines('var', lst)
+    d = {}
+    exec(output, d)
+    assert isinstance(d['var'], tuple)
+    assert ('cat', 'dog') == d['var']
+
+
+def test_duplicates_removed_seq_types():
+    # tuple
+    x = util.duplicates_removed(('a', 'a', 'b'))
+    assert ['a', 'b'] == x
+    # list
+    x = util.duplicates_removed(['a', 'a', 'b'])
+    assert ['a', 'b'] == x
+    # iterator
+    x = util.duplicates_removed(iter(('a', 'a', 'b')))
+    assert ['a', 'b'] == x
+
+
+def test_duplicates_removed_nonconsecutive():
+    # keeps first
+    x = util.duplicates_removed(('a', 'b', 'a'))
+    assert ['a', 'b'] == x
+
+
+def test_guess_decode():
+    # UTF-8 should be decoded as UTF-8
+    s = util.guess_decode(u'\xff'.encode('utf-8'))
+    assert s == (u'\xff', 'utf-8')
+
+    # otherwise, it could be latin1 or the locale encoding...
+    import locale
+    s = util.guess_decode(b'\xff')
+    assert s[1] in ('latin1', locale.getpreferredencoding())
+
+
+def test_guess_decode_from_terminal():
+    class Term:
+        encoding = 'utf-7'
+
+    s = util.guess_decode_from_terminal(u'\xff'.encode('utf-7'), Term)
+    assert s == (u'\xff', 'utf-7')
+
+    s = util.guess_decode_from_terminal(u'\xff'.encode('utf-8'), Term)
+    assert s == (u'\xff', 'utf-8')
+
+
+def test_add_metaclass():
+    class Meta(type):
+        pass
+
+    @util.add_metaclass(Meta)
+    class Cls:
+        pass
+
+    assert type(Cls) is Meta
+
+
+def test_console_ansiformat():
+    f = console.ansiformat
+    c = console.codes
+    all_attrs = f('+*_blue_*+', 'text')
+    assert c['blue'] in all_attrs and c['blink'] in all_attrs
+    assert c['bold'] in all_attrs and c['underline'] in all_attrs
+    assert c['reset'] in all_attrs
+    assert raises(KeyError, f, '*mauve*', 'text')
+
+
+def test_console_functions():
+    assert console.reset_color() == console.codes['reset']
+    assert console.colorize('blue', 'text') == \
+        console.codes['blue'] + 'text' + console.codes['reset']
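A short sketch exercising a few of the pygments.util option helpers covered above, to make their accepted inputs concrete (the option names in the dict are arbitrary):

from pygments import util

opts = {'stripnl': 'no', 'tabsize': '4'}
# string spellings like 'yes'/'no'/'true'/'false' are accepted for booleans
assert util.get_bool_opt(opts, 'stripnl', True) is False
assert util.get_int_opt(opts, 'tabsize', 8) == 4
# a string default for a list option is whitespace-split
assert util.get_list_opt(opts, 'filters', 'a b') == ['a', 'b']
assert util.duplicates_removed(['a', 'b', 'a']) == ['a', 'b']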
index f447ffecda043f06656b2e8839aaa3d4660adbb4..84fef25b7d1fee4638ff4d30ec708fa56ecaa4ea 100644 (file)
@@ -3,28 +3,29 @@
     Whiley Test
     ~~~~~~~~~~~
 
-    :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
 from pygments.lexers import WhileyLexer
 from pygments.token import Token
 
 
-class WhileyTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = WhileyLexer()
+@pytest.fixture(scope='module')
+def lexer():
+    yield WhileyLexer()
 
-    def testWhileyOperator(self):
-        fragment = u'123 \u2200 x\n'
-        tokens = [
-            (Token.Literal.Number.Integer, u'123'),
-            (Token.Text, u' '),
-            (Token.Operator, u'\u2200'),
-            (Token.Text, u' '),
-            (Token.Name, u'x'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+def test_whiley_operator(lexer):
+    fragment = u'123 \u2200 x\n'
+    tokens = [
+        (Token.Literal.Number.Integer, u'123'),
+        (Token.Text, u' '),
+        (Token.Operator, u'\u2200'),
+        (Token.Text, u' '),
+        (Token.Name, u'x'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
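The Whiley conversion follows the same fixture pattern as the other lexer tests. For completeness, a hedged sketch of looking the lexer up the way client code usually does (assuming 'whiley' is the alias registered for WhileyLexer in this release):

from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name('whiley')  # alias assumed; see the lexer's aliases list
assert lexer.name == 'Whiley'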
diff --git a/tox.ini b/tox.ini
deleted file mode 100644 (file)
index 2c63c29..0000000
--- a/tox.ini
+++ /dev/null
@@ -1,7 +0,0 @@
-[tox]
-envlist = py27, py35, py36, py37
-[testenv]
-deps =
-    nose
-    coverage
-commands = python -d tests/run.py {posargs}
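The deleted tox.ini still pointed every environment at the old nose-based tests/run.py entry point. With the suite now on pytest, a replacement config would look roughly like the sketch below (not taken from this patch; the actual upstream file may differ):

[tox]
envlist = py27, py35, py36, py37

[testenv]
deps =
    pytest
    coverage
commands = pytest {posargs}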