author    Georg Brandl <georg@python.org>  2015-08-08 07:07:10 +0200
committer Georg Brandl <georg@python.org>  2015-08-08 07:07:10 +0200
commit    f3817e5d42209595fa787ef81814a8deedfb40ec (patch)
tree      5da9b02514aa1c4da31ce5b808f46825fcde43f5
parent    62ed942986ae720e1df237c7d96700e93a2aaa11 (diff)
parent    151707358274accc260f64e590879f8cd8b77465 (diff)
download  pygments-f3817e5d42209595fa787ef81814a8deedfb40ec.tar.gz

Merged in ylikx/pygments-main (pull request #213)
-rw-r--r--.hgignore13
-rw-r--r--AUTHORS25
-rw-r--r--CHANGES193
-rw-r--r--LICENSE2
-rw-r--r--MANIFEST.in2
-rw-r--r--Makefile25
-rw-r--r--README.rst39
-rw-r--r--doc/Makefile153
-rw-r--r--doc/_static/favicon.icobin0 -> 16958 bytes
-rw-r--r--doc/_static/logo_new.pngbin0 -> 40944 bytes
-rw-r--r--doc/_static/logo_only.pngbin0 -> 16424 bytes
-rw-r--r--doc/_templates/docssidebar.html3
-rw-r--r--doc/_templates/indexsidebar.html25
-rw-r--r--doc/_themes/pygments14/layout.html98
-rw-r--r--doc/_themes/pygments14/static/bodybg.pngbin0 -> 51903 bytes
-rw-r--r--doc/_themes/pygments14/static/docbg.pngbin0 -> 61296 bytes
-rw-r--r--doc/_themes/pygments14/static/listitem.pngbin0 -> 207 bytes
-rw-r--r--doc/_themes/pygments14/static/logo.pngbin0 -> 26933 bytes
-rw-r--r--doc/_themes/pygments14/static/pocoo.pngbin0 -> 2154 bytes
-rw-r--r--doc/_themes/pygments14/static/pygments14.css_t401
-rw-r--r--doc/_themes/pygments14/theme.conf15
-rw-r--r--doc/conf.py241
-rw-r--r--doc/docs/api.rst316
-rw-r--r--doc/docs/authors.rst4
-rw-r--r--doc/docs/changelog.rst1
-rw-r--r--doc/docs/cmdline.rst (renamed from docs/src/cmdline.txt)28
-rw-r--r--doc/docs/filterdevelopment.rst (renamed from docs/src/filterdevelopment.txt)9
-rw-r--r--doc/docs/filters.rst (renamed from docs/src/filters.txt)9
-rw-r--r--doc/docs/formatterdevelopment.rst (renamed from docs/src/formatterdevelopment.txt)2
-rw-r--r--doc/docs/formatters.rst (renamed from docs/src/formatters.txt)12
-rw-r--r--doc/docs/index.rst66
-rw-r--r--doc/docs/integrate.rst (renamed from docs/src/integrate.txt)10
-rw-r--r--doc/docs/java.rst (renamed from docs/src/java.txt)14
-rw-r--r--doc/docs/lexerdevelopment.rst (renamed from docs/src/lexerdevelopment.txt)429
-rw-r--r--doc/docs/lexers.rst (renamed from docs/src/lexers.txt)16
-rw-r--r--doc/docs/moinmoin.rst (renamed from docs/src/moinmoin.txt)0
-rw-r--r--doc/docs/plugins.rst (renamed from docs/src/plugins.txt)0
-rw-r--r--doc/docs/quickstart.rst (renamed from docs/src/quickstart.txt)41
-rw-r--r--doc/docs/rstdirective.rst (renamed from docs/src/rstdirective.txt)0
-rw-r--r--doc/docs/styles.rst (renamed from docs/src/styles.txt)6
-rw-r--r--doc/docs/tokens.rst (renamed from docs/src/tokens.txt)25
-rw-r--r--doc/docs/unicode.rst (renamed from docs/src/unicode.txt)35
-rw-r--r--doc/download.rst41
-rw-r--r--doc/faq.rst142
-rw-r--r--doc/index.rst54
-rw-r--r--doc/languages.rst151
-rw-r--r--doc/make.bat190
-rw-r--r--doc/pygmentize.1 (renamed from docs/pygmentize.1)0
-rwxr-xr-xdocs/generate.py472
-rw-r--r--docs/src/api.txt270
-rw-r--r--docs/src/authors.txt5
-rw-r--r--docs/src/changelog.txt5
-rw-r--r--docs/src/index.txt69
-rw-r--r--docs/src/installation.txt71
-rwxr-xr-xexternal/autopygmentize119
-rw-r--r--external/markdown-processor.py30
-rw-r--r--external/moin-parser.py2
-rw-r--r--external/rst-directive-old.py77
-rw-r--r--external/rst-directive.py5
-rw-r--r--[-rwxr-xr-x]ez_setup.py574
-rwxr-xr-xpygmentize3
-rw-r--r--pygments/__init__.py19
-rw-r--r--pygments/cmdline.py401
-rw-r--r--pygments/console.py2
-rw-r--r--pygments/filter.py4
-rw-r--r--pygments/filters/__init__.py74
-rw-r--r--pygments/formatter.py12
-rw-r--r--pygments/formatters/__init__.py134
-rwxr-xr-xpygments/formatters/_mapping.py106
-rw-r--r--pygments/formatters/bbcode.py2
-rw-r--r--pygments/formatters/html.py101
-rw-r--r--pygments/formatters/img.py53
-rw-r--r--pygments/formatters/latex.py137
-rw-r--r--pygments/formatters/other.py63
-rw-r--r--pygments/formatters/rtf.py91
-rw-r--r--pygments/formatters/svg.py5
-rw-r--r--pygments/formatters/terminal.py43
-rw-r--r--pygments/formatters/terminal256.py51
-rw-r--r--pygments/lexer.py227
-rw-r--r--pygments/lexers/__init__.py135
-rw-r--r--pygments/lexers/_asy_builtins.py (renamed from pygments/lexers/_asybuiltins.py)14
-rw-r--r--pygments/lexers/_cl_builtins.py (renamed from pygments/lexers/_clbuiltins.py)34
-rw-r--r--pygments/lexers/_cocoa_builtins.py73
-rw-r--r--pygments/lexers/_lasso_builtins.py (renamed from pygments/lexers/_lassobuiltins.py)40
-rw-r--r--pygments/lexers/_lua_builtins.py (renamed from pygments/lexers/_luabuiltins.py)82
-rw-r--r--pygments/lexers/_mapping.py489
-rw-r--r--pygments/lexers/_mql_builtins.py1172
-rw-r--r--pygments/lexers/_openedge_builtins.py2547
-rw-r--r--pygments/lexers/_openedgebuiltins.py562
-rw-r--r--pygments/lexers/_php_builtins.py (renamed from pygments/lexers/_phpbuiltins.py)1737
-rw-r--r--pygments/lexers/_postgres_builtins.py812
-rw-r--r--pygments/lexers/_scilab_builtins.py3114
-rw-r--r--pygments/lexers/_sourcemod_builtins.py1163
-rw-r--r--pygments/lexers/_sourcemodbuiltins.py1072
-rw-r--r--pygments/lexers/_stan_builtins.py201
-rw-r--r--pygments/lexers/_vim_builtins.py1939
-rw-r--r--pygments/lexers/_vimbuiltins.py13
-rw-r--r--pygments/lexers/actionscript.py240
-rw-r--r--pygments/lexers/agile.py2294
-rw-r--r--pygments/lexers/algebra.py187
-rw-r--r--pygments/lexers/ambient.py76
-rw-r--r--pygments/lexers/apl.py101
-rw-r--r--pygments/lexers/asm.py119
-rw-r--r--pygments/lexers/automation.py374
-rw-r--r--pygments/lexers/basic.py500
-rw-r--r--pygments/lexers/business.py592
-rw-r--r--pygments/lexers/c_cpp.py235
-rw-r--r--pygments/lexers/c_like.py539
-rw-r--r--pygments/lexers/chapel.py99
-rw-r--r--pygments/lexers/compiled.py3778
-rw-r--r--pygments/lexers/configs.py546
-rw-r--r--pygments/lexers/console.py114
-rw-r--r--pygments/lexers/css.py499
-rw-r--r--pygments/lexers/d.py251
-rw-r--r--pygments/lexers/dalvik.py43
-rw-r--r--pygments/lexers/data.py530
-rw-r--r--pygments/lexers/diff.py106
-rw-r--r--pygments/lexers/dotnet.py197
-rw-r--r--pygments/lexers/dsls.py514
-rw-r--r--pygments/lexers/dylan.py289
-rw-r--r--pygments/lexers/ecl.py125
-rw-r--r--pygments/lexers/eiffel.py65
-rw-r--r--pygments/lexers/erlang.py511
-rw-r--r--pygments/lexers/esoteric.py114
-rw-r--r--pygments/lexers/factor.py344
-rw-r--r--pygments/lexers/fantom.py250
-rw-r--r--pygments/lexers/felix.py273
-rw-r--r--pygments/lexers/fortran.py203
-rw-r--r--pygments/lexers/foxpro.py6
-rw-r--r--pygments/lexers/functional.py2732
-rw-r--r--pygments/lexers/go.py101
-rw-r--r--pygments/lexers/graph.py79
-rw-r--r--pygments/lexers/graphics.py553
-rw-r--r--pygments/lexers/haskell.py840
-rw-r--r--pygments/lexers/haxe.py936
-rw-r--r--pygments/lexers/hdl.py343
-rw-r--r--pygments/lexers/html.py601
-rw-r--r--pygments/lexers/idl.py262
-rw-r--r--pygments/lexers/igor.py280
-rw-r--r--pygments/lexers/inferno.py96
-rw-r--r--pygments/lexers/installers.py322
-rw-r--r--pygments/lexers/int_fiction.py1342
-rw-r--r--pygments/lexers/iolang.py63
-rw-r--r--pygments/lexers/javascript.py1199
-rw-r--r--pygments/lexers/julia.py196
-rw-r--r--pygments/lexers/jvm.py1068
-rw-r--r--pygments/lexers/lisp.py2123
-rw-r--r--pygments/lexers/make.py201
-rw-r--r--pygments/lexers/markup.py502
-rw-r--r--pygments/lexers/math.py1919
-rw-r--r--pygments/lexers/matlab.py663
-rw-r--r--pygments/lexers/ml.py769
-rw-r--r--pygments/lexers/modeling.py356
-rw-r--r--pygments/lexers/modula2.py1566
-rw-r--r--pygments/lexers/nimrod.py159
-rw-r--r--pygments/lexers/nit.py64
-rw-r--r--pygments/lexers/nix.py136
-rw-r--r--pygments/lexers/objective.py501
-rw-r--r--pygments/lexers/ooc.py85
-rw-r--r--pygments/lexers/other.py3796
-rw-r--r--pygments/lexers/parsers.py327
-rw-r--r--pygments/lexers/pascal.py641
-rw-r--r--pygments/lexers/pawn.py199
-rw-r--r--pygments/lexers/perl.py615
-rw-r--r--pygments/lexers/php.py245
-rw-r--r--pygments/lexers/prolog.py306
-rw-r--r--pygments/lexers/python.py848
-rw-r--r--pygments/lexers/r.py453
-rw-r--r--pygments/lexers/rdf.py99
-rw-r--r--pygments/lexers/rebol.py431
-rw-r--r--pygments/lexers/resource.py84
-rw-r--r--pygments/lexers/robotframework.py (renamed from pygments/lexers/_robotframeworklexer.py)45
-rw-r--r--pygments/lexers/ruby.py519
-rw-r--r--pygments/lexers/rust.py190
-rw-r--r--pygments/lexers/scripting.py923
-rw-r--r--pygments/lexers/shell.py88
-rw-r--r--pygments/lexers/smalltalk.py195
-rw-r--r--pygments/lexers/snobol.py83
-rw-r--r--pygments/lexers/special.py18
-rw-r--r--pygments/lexers/sql.py289
-rw-r--r--pygments/lexers/tcl.py145
-rw-r--r--pygments/lexers/templates.py650
-rw-r--r--pygments/lexers/testing.py131
-rw-r--r--pygments/lexers/text.py1898
-rw-r--r--pygments/lexers/textedit.py169
-rw-r--r--pygments/lexers/textfmts.py292
-rw-r--r--pygments/lexers/theorem.py452
-rw-r--r--pygments/lexers/urbi.py133
-rw-r--r--pygments/lexers/web.py4047
-rw-r--r--pygments/lexers/webmisc.py920
-rw-r--r--pygments/modeline.py2
-rw-r--r--pygments/plugin.py2
-rw-r--r--pygments/regexopt.py92
-rw-r--r--pygments/scanner.py2
-rw-r--r--pygments/sphinxext.py155
-rw-r--r--pygments/style.py7
-rw-r--r--pygments/styles/__init__.py9
-rw-r--r--pygments/styles/algol.py63
-rw-r--r--pygments/styles/algol_nu.py63
-rw-r--r--pygments/styles/arduino.py97
-rw-r--r--pygments/styles/autumn.py2
-rw-r--r--pygments/styles/borland.py2
-rw-r--r--pygments/styles/bw.py2
-rw-r--r--pygments/styles/colorful.py2
-rw-r--r--pygments/styles/default.py2
-rw-r--r--pygments/styles/emacs.py2
-rw-r--r--pygments/styles/friendly.py2
-rw-r--r--pygments/styles/fruity.py2
-rw-r--r--pygments/styles/igor.py29
-rw-r--r--pygments/styles/lovelace.py90
-rw-r--r--pygments/styles/manni.py2
-rw-r--r--pygments/styles/monokai.py8
-rw-r--r--pygments/styles/murphy.py2
-rw-r--r--pygments/styles/native.py2
-rw-r--r--pygments/styles/paraiso_dark.py125
-rw-r--r--pygments/styles/paraiso_light.py125
-rw-r--r--pygments/styles/pastie.py2
-rw-r--r--pygments/styles/perldoc.py2
-rw-r--r--pygments/styles/rrt.py2
-rw-r--r--pygments/styles/tango.py2
-rw-r--r--pygments/styles/trac.py2
-rw-r--r--pygments/styles/vim.py2
-rw-r--r--pygments/styles/vs.py2
-rw-r--r--pygments/styles/xcode.py51
-rw-r--r--pygments/token.py6
-rw-r--r--pygments/unistring.py207
-rw-r--r--pygments/util.py212
-rw-r--r--requirements.txt5
-rwxr-xr-xscripts/check_sources.py123
-rwxr-xr-xscripts/debug_lexer.py244
-rw-r--r--scripts/detect_missing_analyse_text.py9
-rwxr-xr-xscripts/find_codetags.py205
l---------[-rwxr-xr-x]scripts/find_error.py171
-rw-r--r--scripts/get_vimkw.py43
-rwxr-xr-xscripts/reindent.py291
-rwxr-xr-x[-rw-r--r--]scripts/vim2pygments.py16
-rwxr-xr-xsetup.py55
-rw-r--r--tests/examplefiles/99_bottles_of_beer.chpl174
-rw-r--r--tests/examplefiles/Blink.ino24
-rw-r--r--tests/examplefiles/Error.pmod38
-rw-r--r--tests/examplefiles/Errors.scala5
-rw-r--r--tests/examplefiles/FakeFile.pike360
-rw-r--r--tests/examplefiles/all.nit1986
-rw-r--r--tests/examplefiles/antlr_ANTLRv3.g (renamed from tests/examplefiles/ANTLRv3.g)0
-rw-r--r--tests/examplefiles/autoit_submit.au32
-rw-r--r--tests/examplefiles/automake.mk7
-rw-r--r--tests/examplefiles/clojure-weird-keywords.clj5
-rw-r--r--tests/examplefiles/core.cljs52
-rw-r--r--tests/examplefiles/demo.cfm14
-rw-r--r--tests/examplefiles/demo.css.in6
-rw-r--r--tests/examplefiles/demo.hbs12
-rw-r--r--tests/examplefiles/demo.js.in6
-rw-r--r--tests/examplefiles/demo.xul.in7
-rw-r--r--tests/examplefiles/docker.docker5
-rw-r--r--tests/examplefiles/ember.handlebars33
-rw-r--r--tests/examplefiles/eval.rs606
-rw-r--r--tests/examplefiles/example.als217
-rw-r--r--tests/examplefiles/example.c2
-rw-r--r--tests/examplefiles/example.chai6
-rw-r--r--tests/examplefiles/example.cob936
-rw-r--r--tests/examplefiles/example.coffee27
-rw-r--r--tests/examplefiles/example.e124
-rw-r--r--tests/examplefiles/example.f908
-rw-r--r--tests/examplefiles/example.feature16
-rw-r--r--tests/examplefiles/example.gd23
-rw-r--r--tests/examplefiles/example.gi64
-rw-r--r--tests/examplefiles/example.golo113
-rw-r--r--tests/examplefiles/example.groovy2
-rw-r--r--tests/examplefiles/example.hs31
-rw-r--r--tests/examplefiles/example.hx52
-rw-r--r--tests/examplefiles/example.i6t32
-rw-r--r--tests/examplefiles/example.i7x45
-rw-r--r--tests/examplefiles/example.inf374
-rw-r--r--tests/examplefiles/example.j564
-rw-r--r--tests/examplefiles/example.java16
-rw-r--r--tests/examplefiles/example.jsonld27
-rw-r--r--tests/examplefiles/example.kal75
-rw-r--r--tests/examplefiles/example.liquid42
-rw-r--r--tests/examplefiles/example.ma8
-rw-r--r--tests/examplefiles/example.mq4187
-rw-r--r--tests/examplefiles/example.mqh123
-rw-r--r--tests/examplefiles/example.ni57
-rw-r--r--tests/examplefiles/example.nix80
-rw-r--r--tests/examplefiles/example.pp8
-rw-r--r--tests/examplefiles/example.red257
-rw-r--r--tests/examplefiles/example.reds150
-rw-r--r--tests/examplefiles/example.rkt706
-rw-r--r--tests/examplefiles/example.sh22
-rw-r--r--tests/examplefiles/example.slim31
-rw-r--r--tests/examplefiles/example.sls51
-rw-r--r--tests/examplefiles/example.stan16
-rw-r--r--tests/examplefiles/example.thy751
-rw-r--r--tests/examplefiles/example.todotxt9
-rw-r--r--tests/examplefiles/example.weechatlog4
-rw-r--r--tests/examplefiles/exampleScript.cfc241
-rw-r--r--tests/examplefiles/exampleTag.cfc18
-rw-r--r--tests/examplefiles/example_coq.v4
-rw-r--r--tests/examplefiles/example_elixir.ex570
-rw-r--r--tests/examplefiles/hash_syntax.rb5
-rw-r--r--tests/examplefiles/hello.at6
-rw-r--r--tests/examplefiles/hello.golo5
-rw-r--r--tests/examplefiles/hello.lsl12
-rw-r--r--tests/examplefiles/hybris_File.hy (renamed from tests/examplefiles/File.hy)0
-rw-r--r--tests/examplefiles/idl_sample.pro (renamed from tests/examplefiles/mg_sample.pro)0
-rw-r--r--tests/examplefiles/iex_example23
-rw-r--r--tests/examplefiles/import.hs4
-rw-r--r--tests/examplefiles/inet_pton6.dg48
-rw-r--r--tests/examplefiles/interp.scala10
-rw-r--r--tests/examplefiles/language.hy165
-rw-r--r--tests/examplefiles/limbo.b456
-rw-r--r--tests/examplefiles/livescript-demo.ls4
-rw-r--r--tests/examplefiles/main.cmake2
-rw-r--r--tests/examplefiles/matlab_sample4
-rw-r--r--tests/examplefiles/modula2_test_cases.def354
-rw-r--r--tests/examplefiles/objc_example.m179
-rw-r--r--tests/examplefiles/objc_example2.m24
-rw-r--r--tests/examplefiles/openedge_example (renamed from tests/examplefiles/example.p)0
-rw-r--r--tests/examplefiles/pawn_example25
-rw-r--r--tests/examplefiles/pycon_test.pycon5
-rw-r--r--tests/examplefiles/qbasic_example2
-rw-r--r--tests/examplefiles/r6rs-comments.scm23
-rw-r--r--tests/examplefiles/resourcebundle_demo9
-rw-r--r--tests/examplefiles/robotframework_test.txt (renamed from tests/examplefiles/robotframework.txt)1
-rw-r--r--tests/examplefiles/rql-queries.rql34
-rw-r--r--tests/examplefiles/rust_example.rs233
-rw-r--r--tests/examplefiles/scope.cirru211
-rw-r--r--tests/examplefiles/simple.croc (renamed from tests/examplefiles/simple.md)0
-rw-r--r--tests/examplefiles/sparql.rq23
-rw-r--r--tests/examplefiles/subr.el4868
-rw-r--r--tests/examplefiles/tads3_example.t1248
-rw-r--r--tests/examplefiles/test.R42
-rw-r--r--tests/examplefiles/test.agda7
-rw-r--r--tests/examplefiles/test.apl26
-rw-r--r--tests/examplefiles/test.cyp123
-rw-r--r--tests/examplefiles/test.gradle20
-rw-r--r--tests/examplefiles/test.idr101
-rw-r--r--tests/examplefiles/test.lean217
-rw-r--r--tests/examplefiles/test.mask41
-rw-r--r--tests/examplefiles/test.pan54
-rw-r--r--tests/examplefiles/test.php12
-rw-r--r--tests/examplefiles/test.pig148
-rw-r--r--tests/examplefiles/test.pwn253
-rw-r--r--tests/examplefiles/test.pypylog839
-rw-r--r--tests/examplefiles/test.r334
-rw-r--r--tests/examplefiles/test.rsl111
-rw-r--r--tests/examplefiles/test.swift65
-rw-r--r--tests/examplefiles/test.zep33
-rw-r--r--tests/examplefiles/twig_test4612
-rw-r--r--tests/examplefiles/unicode.go10
-rw-r--r--tests/examplefiles/unicode.js6
-rw-r--r--tests/examplefiles/vbnet_test.bas (renamed from tests/examplefiles/test.bas)0
-rw-r--r--tests/examplefiles/vctreestatus_hg4
-rw-r--r--tests/examplefiles/vimrc21
-rw-r--r--tests/examplefiles/vpath.mk16
-rw-r--r--tests/old_run.py138
-rw-r--r--tests/run.py53
-rw-r--r--tests/string_asserts.py22
-rw-r--r--tests/support.py2
-rw-r--r--tests/test_basic_api.py166
-rw-r--r--tests/test_cfm.py46
-rw-r--r--tests/test_clexer.py211
-rw-r--r--tests/test_cmdline.py263
-rw-r--r--tests/test_examplefiles.py111
-rw-r--r--tests/test_html_formatter.py58
-rw-r--r--tests/test_inherit.py94
-rw-r--r--tests/test_java.py42
-rw-r--r--tests/test_latex_formatter.py11
-rw-r--r--tests/test_lexers_other.py41
-rw-r--r--tests/test_objectiveclexer.py81
-rw-r--r--tests/test_perllexer.py4
-rw-r--r--tests/test_qbasiclexer.py43
-rw-r--r--tests/test_regexlexer.py9
-rw-r--r--tests/test_regexopt.py76
-rw-r--r--tests/test_rtf_formatter.py109
-rw-r--r--tests/test_ruby.py145
-rw-r--r--tests/test_shell.py63
-rw-r--r--tests/test_smarty.py40
-rw-r--r--tests/test_string_asserts.py35
-rw-r--r--tests/test_textfmts.py41
-rw-r--r--tests/test_token.py6
-rw-r--r--tests/test_unistring.py48
-rw-r--r--tests/test_using_api.py2
-rw-r--r--tests/test_util.py98
-rw-r--r--tox.ini7
384 files changed, 73528 insertions, 29953 deletions
diff --git a/.hgignore b/.hgignore
index f5d9f0c2..6823314d 100644
--- a/.hgignore
+++ b/.hgignore
@@ -1,9 +1,16 @@
syntax: glob
+*.egg
*.pyc
*.pyo
-*.egg
+.idea/
+.ropeproject
+.tags
+.tox
+Pygments.egg-info/*
+TAGS
build/*
dist/*
-Pygments.egg-info/*
-.ropeproject
+doc/_build
+tests/.coverage
+tests/cover
tests/examplefiles/output
diff --git a/AUTHORS b/AUTHORS
index a6d9e986..96a346ba 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -14,8 +14,10 @@ Other contributors, listed alphabetically, are:
* Jeffrey Arnold -- R/S, Rd, BUGS, Jags, and Stan lexers
* Jeremy Ashkenas -- CoffeeScript lexer
* Stefan Matthias Aust -- Smalltalk lexer
+* Lucas Bajolet -- Nit lexer
* Ben Bangert -- Mako lexers
* Max Battcher -- Darcs patch lexer
+* Thomas Baruchel -- APL lexer
* Tim Baumann -- (Literate) Agda lexer
* Paul Baumgart, 280 North, Inc. -- Objective-J lexer
* Michael Bayer -- Myghty lexers
@@ -27,12 +29,14 @@ Other contributors, listed alphabetically, are:
* Pierre Bourdon -- bugfixes
* Hiram Chirino -- Scaml and Jade lexers
* Ian Cooper -- VGL lexer
+* David Corbett -- Inform, Jasmin, and TADS 3 lexers
* Leaf Corcoran -- MoonScript lexer
-* Christian Jann -- ShellSession lexer
* Christopher Creutzig -- MuPAD lexer
+* Daniël W. Crompton -- Pike lexer
* Pete Curry -- bugfixes
* Bryan Davis -- EBNF lexer
* Owen Durni -- Haxe lexer
+* Alexander Dutton, Oxford University Computing Services -- SPARQL lexer
* Nick Efford -- Python 3 lexer
* Sven Efftinge -- Xtend lexer
* Artem Egorkine -- terminal256 formatter
@@ -63,18 +67,25 @@ Other contributors, listed alphabetically, are:
* Rob Hoelz -- Perl 6 lexer
* Doug Hogan -- Mscgen lexer
* Ben Hollis -- Mason lexer
+* Max Horn -- GAP lexer
* Dustin Howett -- Logos lexer
* Alastair Houghton -- Lexer inheritance facility
* Tim Howard -- BlitzMax lexer
* Ivan Inozemtsev -- Fantom lexer
* Brian R. Jackson -- Tea lexer
+* Christian Jann -- ShellSession lexer
* Dennis Kaarsemaker -- sources.list lexer
+* Dmitri Kabak -- Inferno Limbo lexer
* Igor Kalnitsky -- vhdl lexer
+* Alexander Kit -- MaskJS lexer
* Pekka Klärck -- Robot Framework lexer
+* Gerwin Klein -- Isabelle lexer
* Eric Knibbe -- Lasso lexer
* Stepan Koltsov -- Clay lexer
* Adam Koprowski -- Opa lexer
* Benjamin Kowarsch -- Modula-2 lexer
+* Domen Kožar -- Nix lexer
+* Oleh Krekel -- Emacs Lisp lexer
* Alexander Kriegisch -- Kconfig and AspectJ lexers
* Marek Kubica -- Scheme lexer
* Jochen Kupperschmidt -- Markdown processor
@@ -85,6 +96,7 @@ Other contributors, listed alphabetically, are:
* Mark Lee -- Vala lexer
* Ben Mabey -- Gherkin lexer
* Angus MacArthur -- QML lexer
+* Louis Marchand -- Eiffel lexer
* Simone Margaritelli -- Hybris lexer
* Kirk McDonald -- D lexer
* Gordon McGregor -- SystemVerilog lexer
@@ -92,6 +104,7 @@ Other contributors, listed alphabetically, are:
* Brian McKenna -- F# lexer
* Charles McLaughlin -- Puppet lexer
* Lukas Meuser -- BBCode formatter, Lua lexer
+* Cat Miller -- Pig lexer
* Paul Miller -- LiveScript lexer
* Hong Minhee -- HTTP lexer
* Michael Mior -- Awk lexer
@@ -105,21 +118,26 @@ Other contributors, listed alphabetically, are:
* Jesper Noehr -- HTML formatter "anchorlinenos"
* Mike Nolta -- Julia lexer
* Jonas Obrist -- BBCode lexer
+* Edward O'Callaghan -- Cryptol lexer
* David Oliva -- Rebol lexer
* Pat Pannuto -- nesC lexer
* Jon Parise -- Protocol buffers lexer
* Ronny Pfannschmidt -- BBCode lexer
* Benjamin Peterson -- Test suite refactoring
* Dominik Picheta -- Nimrod lexer
+* Andrew Pinkham -- RTF Formatter Refactoring
* Clément Prévost -- UrbiScript lexer
* Elias Rabel -- Fortran fixed form lexer
+* raichoo -- Idris lexer
* Kashif Rasul -- CUDA lexer
* Justin Reidy -- MXML lexer
* Norman Richards -- JSON lexer
+* Corey Richardson -- Rust lexer updates
* Lubomir Rintel -- GoodData MAQL and CL lexers
* Andre Roberge -- Tango style
* Konrad Rudolph -- LaTeX formatter enhancements
* Mario Ruggier -- Evoque lexers
+* Miikka Salminen -- Lovelace style, lexer enhancements
* Stou Sandalski -- NumPy, FORTRAN, tcsh and XSLT lexers
* Matteo Sasso -- Common Lisp lexer
* Joe Schafer -- Ada lexer
@@ -137,12 +155,16 @@ Other contributors, listed alphabetically, are:
* Tiberius Teng -- default style overhaul
* Jeremy Thurgood -- Erlang, Squid config lexers
* Brian Tiffin -- OpenCOBOL lexer
+* Bob Tolbert -- Hy lexer
* Erick Tryzelaar -- Felix lexer
+* Alexander Udalov -- Kotlin lexer improvements
+* Thomas Van Doren -- Chapel lexer
* Daniele Varrazzo -- PostgreSQL lexers
* Abe Voelker -- OpenEdge ABL lexer
* Pepijn de Vos -- HTML formatter CTags support
* Whitney Young -- ObjectiveC lexer
* Matthias Vallentin -- Bro lexer
+* Linh Vu Hong -- RSL lexer
* Nathan Weizenbaum -- Haml and Sass lexers
* Dietmar Winkler -- Modelica lexer
* Nils Winter -- Smalltalk lexer
@@ -150,5 +172,6 @@ Other contributors, listed alphabetically, are:
* Diego Zamboni -- CFengine3 lexer
* Enrique Zamudio -- Ceylon lexer
* Alex Zimin -- Nemerle lexer
+* Rob Zimmerman -- Kal lexer
Many thanks for all contributions!
diff --git a/CHANGES b/CHANGES
index 9288b306..ad5bba49 100644
--- a/CHANGES
+++ b/CHANGES
@@ -2,29 +2,174 @@ Pygments changelog
==================
Issue numbers refer to the tracker at
-<http://bitbucket.org/birkenfeld/pygments-main/issues>,
+<https://bitbucket.org/birkenfeld/pygments-main/issues>,
pull request numbers to the requests at
-<http://bitbucket.org/birkenfeld/pygments-main/pull-requests/merged>.
+<https://bitbucket.org/birkenfeld/pygments-main/pull-requests/merged>.
-Version 1.7
+
+Version 2.1
-----------
-(under development)
+(not released yet)
+
+- Added lexers:
+
+ * Emacs Lisp (PR#431)
+ * Arduino (PR#442)
+ * Modula-2 with multi-dialect support (#1090)
+
+- Added styles:
+
+ * Lovelace (PR#456)
+ * Algol and Algol-nu (#1090)
+
+- Updated autopygmentize script (PR#445)
+
+- Fixed style inheritance for non-standard token types in HTML output.
+
+
+Version 2.0.3
+-------------
+(not released yet)
+
+- Fix highlighting of certain IRC log formats (#1076).
+
+
+Version 2.0.2
+-------------
+(released Jan 20, 2015)
+
+- Fix Python tracebacks getting duplicated in the console lexer (#1068).
+
+- Backquote-delimited identifiers are now recognized in F# (#1062).
+
+
+Version 2.0.1
+-------------
+(released Nov 10, 2014)
+
+- Fix an encoding issue when using ``pygmentize`` with the ``-o`` option.
+
+
+Version 2.0
+-----------
+(released Nov 9, 2014)
+
+- Default lexer encoding is now "guess", i.e. UTF-8 / Locale / Latin1 is
+ tried in that order.
+
+- Major update to Swift lexer (PR#410).
+
+- Multiple fixes to lexer guessing in conflicting cases:
+
+ * recognize HTML5 by doctype
+ * recognize XML by XML declaration
+ * don't recognize C/C++ as SystemVerilog
+
+- Simplified regexes and builtin lists.
+
+
+Version 2.0rc1
+--------------
+(released Oct 16, 2014)
+
+- Dropped Python 2.4 and 2.5 compatibility. This is in favor of single-source
+ compatibility between Python 2.6, 2.7 and 3.3+.
+
+- New website and documentation based on Sphinx (finally!)
- Lexers added:
- * Clay (PR#184)
- * Perl 6 (PR#181)
- * Swig (PR#168)
- * nesC (PR#166)
+ * APL (#969)
+ * Agda and Literate Agda (PR#203)
+ * Alloy (PR#355)
+ * AmbientTalk
* BlitzBasic (PR#197)
+ * ChaiScript (PR#24)
+ * Chapel (PR#256)
+ * Cirru (PR#275)
+ * Clay (PR#184)
+ * ColdFusion CFC (PR#283)
+ * Cryptol and Literate Cryptol (PR#344)
+ * Cypher (PR#257)
+ * Docker config files
* EBNF (PR#193)
+ * Eiffel (PR#273)
+ * GAP (PR#311)
+ * Golo (PR#309)
+ * Handlebars (PR#186)
+ * Hy (PR#238)
+ * Idris and Literate Idris (PR#210)
* Igor Pro (PR#172)
+ * Inform 6/7 (PR#281)
+ * Intel objdump (PR#279)
+ * Isabelle (PR#386)
+ * Jasmin (PR#349)
+ * JSON-LD (PR#289)
+ * Kal (PR#233)
+ * Lean (PR#399)
+ * LSL (PR#296)
+ * Limbo (PR#291)
+ * Liquid (#977)
+ * MQL (PR#285)
+ * MaskJS (PR#280)
+ * Mozilla preprocessors
+ * Mathematica (PR#245)
+ * NesC (PR#166)
+ * Nit (PR#375)
+ * Nix (PR#267)
+ * Pan
+ * Pawn (PR#211)
+ * Perl 6 (PR#181)
+ * Pig (PR#304)
+ * Pike (PR#237)
+ * QBasic (PR#182)
+ * Red (PR#341)
+ * ResourceBundle (#1038)
* Rexx (PR#199)
- * Agda and Literate Agda (PR#203)
+ * Rql (PR#251)
+ * Rsl
+ * SPARQL (PR#78)
+ * Slim (PR#366)
+ * Swift (PR#371)
+ * Swig (PR#168)
+ * TADS 3 (PR#407)
+ * Todo.txt todo lists
+ * Twig (PR#404)
+
+- Added a helper to "optimize" regular expressions that match one of many
+  literal words; this can save 20% or more lexing time with lexers that
+  highlight many keywords or builtins.
+
+- New styles: "xcode" and "igor", similar to the default highlighting of
+ the respective IDEs.
+
+- The command-line "pygmentize" tool now tries a little harder to find the
+ correct encoding for files and the terminal (#979).
+
+- Added "inencoding" option for lexers to override "encoding" analogous
+ to "outencoding" (#800).
+
+- Added line-by-line "streaming" mode for pygmentize with the "-s" option
+  (PR#165). This only fully works for lexers that have no constructs
+  spanning lines!
+
+- Added an "envname" option to the LaTeX formatter to select a replacement
+ verbatim environment (PR#235).
+
+- Updated the Makefile lexer to yield a little more useful highlighting.
+
+- Lexer aliases passed to ``get_lexer_by_name()`` are now case-insensitive.
+
+- File name matching in lexers and formatters will now use a regex cache
+ for speed (PR#205).
- Pygments will now recognize "vim" modelines when guessing the lexer for
a file based on content (PR#118).
+- Major restructure of the ``pygments.lexers`` module namespace. There are now
+ many more modules with less lexers per module. Old modules are still around
+ and re-export the lexers they previously contained.
+
- The NameHighlightFilter now works with any Name.* token type (#790).
- Python 3 lexer: add new exceptions from PEP 3151.
@@ -39,9 +184,10 @@ Version 1.7
- Objective C/C++ lexers: allow "@" prefixing any expression (#871).
-- Ruby lexer: fix lexing of Name::Space tokens (#860).
+- Ruby lexer: fix lexing of Name::Space tokens (#860) and of symbols
+ in hashes (#873).
-- Stan lexer: update for version 1.3.0 of the language (PR#162).
+- Stan lexer: update for version 2.4.0 of the language (PR#162, PR#255, PR#377).
- JavaScript lexer: add the "yield" keyword (PR#196).
@@ -53,7 +199,30 @@ Version 1.7
- Prolog lexer: add different kinds of numeric literals (#864).
-- F# lexer: rewrite with newest spec for F# 3.0 (#842).
+- F# lexer: rewrite with newest spec for F# 3.0 (#842), fix a bug with
+ dotted chains (#948).
+
+- Kotlin lexer: general update (PR#271).
+
+- Rebol lexer: fix comment detection and analyse_text (PR#261).
+
+- LLVM lexer: update keywords to v3.4 (PR#258).
+
+- PHP lexer: add new keywords and binary literals (PR#222).
+
+- external/markdown-processor.py updated to newest python-markdown (PR#221).
+
+- CSS lexer: some highlighting order fixes (PR#231).
+
+- Ceylon lexer: fix parsing of nested multiline comments (#915).
+
+- C family lexers: fix parsing of indented preprocessor directives (#944).
+
+- Rust lexer: update to 0.9 language version (PR#270, PR#388).
+
+- Elixir lexer: update to 0.15 language version (PR#392).
+
+- Fix swallowing incomplete tracebacks in Python console lexer (#874).
Version 1.6
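
Two of the changes listed above are easy to exercise from the Python API once
this version is installed: lexer aliases passed to ``get_lexer_by_name()`` are
now matched case-insensitively, and the new ``pygments/regexopt.py`` module
provides the word-list optimizer mentioned in the changelog. A minimal sketch,
assuming Pygments 2.0+; the keyword list is made up, and the ``regex_opt``
name refers to the helper's public function rather than text quoted from this
diff::

    from pygments.lexers import get_lexer_by_name
    from pygments.regexopt import regex_opt

    # Aliases are matched case-insensitively since 2.0, so both spellings
    # resolve to the same Python lexer.
    print(get_lexer_by_name("Python").name)   # same result as "python"

    # regex_opt() folds a list of literal words into one optimized regex;
    # lexers use it to speed up large keyword and builtin lists.
    pattern = regex_opt(["if", "elif", "else", "import"], suffix=r"\b")
    print(pattern)
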
diff --git a/LICENSE b/LICENSE
index 1e091194..10b8e916 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,4 +1,4 @@
-Copyright (c) 2006-2013 by the respective authors (see AUTHORS file).
+Copyright (c) 2006-2015 by the respective authors (see AUTHORS file).
All rights reserved.
Redistribution and use in source and binary forms, with or without
diff --git a/MANIFEST.in b/MANIFEST.in
index 312c1504..cfec4e94 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -2,5 +2,5 @@ include pygmentize
include external/*
include Makefile CHANGES LICENSE AUTHORS TODO ez_setup.py
recursive-include tests *
-recursive-include docs *
+recursive-include doc *
recursive-include scripts *
diff --git a/Makefile b/Makefile
index f24dd084..efae8577 100644
--- a/Makefile
+++ b/Makefile
@@ -4,7 +4,7 @@
#
# Combines scripts for common tasks.
#
-# :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+# :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
# :license: BSD, see LICENSE for details.
#
@@ -19,9 +19,9 @@ all: clean-pyc check test
check:
@$(PYTHON) scripts/detect_missing_analyse_text.py || true
+ @pyflakes pygments | grep -v 'but unused' || true
@$(PYTHON) scripts/check_sources.py -i build -i dist -i pygments/lexers/_mapping.py \
- -i docs/build -i pygments/formatters/_mapping.py -i pygments/unistring.py \
- -i pygments/lexers/_vimbuiltins.py
+ -i docs/build -i pygments/formatters/_mapping.py -i pygments/unistring.py
clean: clean-pyc
-rm -rf build
@@ -36,15 +36,12 @@ codetags:
@$(PYTHON) scripts/find_codetags.py -i tests/examplefiles -i scripts/pylintrc \
-i scripts/find_codetags.py -o codetags.html .
-docs: docs/build
-
-docs/build: docs/src/*.txt
- $(PYTHON) docs/generate.py html docs/build $?
- touch docs/build
+docs:
+ make -C doc html
mapfiles:
- (cd pygments/lexers; $(PYTHON) _mapping.py)
(cd pygments/formatters; $(PYTHON) _mapping.py)
+ (cd pygments/lexers; $(PYTHON) _mapping.py)
pylint:
@pylint --rcfile scripts/pylintrc pygments
@@ -53,7 +50,13 @@ reindent:
@$(PYTHON) scripts/reindent.py -r -B .
test:
- @$(PYTHON) tests/run.py $(TESTS)
+ @$(PYTHON) tests/run.py -d $(TEST)
test-coverage:
- @$(PYTHON) tests/run.py -C $(TESTS)
+ @$(PYTHON) tests/run.py -d --with-coverage --cover-package=pygments --cover-erase $(TEST)
+
+tox-test:
+ @tox -- $(TEST)
+
+tox-test-coverage:
+ @tox -- --with-coverage --cover-package=pygments --cover-erase $(TEST)
diff --git a/README.rst b/README.rst
new file mode 100644
index 00000000..e6c03926
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,39 @@
+README for Pygments
+===================
+
+This is the source of Pygments. It is a generic syntax highlighter that
+supports over 300 languages and text formats, for use in code hosting, forums,
+wikis or other applications that need to prettify source code.
+
+Installing
+----------
+
+... works as usual, use ``python setup.py install``.
+
+Documentation
+-------------
+
+... can be found online at http://pygments.org/ or created by ::
+
+ cd doc
+ make html
+
+Development
+-----------
+
+... takes place on `Bitbucket
+<https://bitbucket.org/birkenfeld/pygments-main>`_, where the Mercurial
+repository, tickets and pull requests can be viewed.
+
+Continuous testing runs on drone.io:
+
+.. image:: https://drone.io/bitbucket.org/birkenfeld/pygments-main/status.png
+ :target: https://drone.io/bitbucket.org/birkenfeld/pygments-main/
+
+The authors
+-----------
+
+Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python.org*.
+
+Many lexers and fixes have been contributed by **Armin Ronacher**, the rest of
+the `Pocoo <http://dev.pocoo.org/>`_ team and **Tim Hatch**.
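
The install and documentation steps in the README are shell commands; a quick
way to smoke-test an installed copy is to render a small snippet to HTML
through the high-level API. A minimal sketch, assuming Pygments is importable;
the sample code and output file name are arbitrary::

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    code = 'print("Hello, Pygments!")'
    # full=True produces a complete standalone HTML page with embedded CSS.
    html = highlight(code, PythonLexer(), HtmlFormatter(full=True))
    with open("hello.html", "w") as outfile:
        outfile.write(html)
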
diff --git a/doc/Makefile b/doc/Makefile
new file mode 100644
index 00000000..7fb75411
--- /dev/null
+++ b/doc/Makefile
@@ -0,0 +1,153 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = PYTHONPATH=.. sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Pygments.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Pygments.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/Pygments"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Pygments"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/doc/_static/favicon.ico b/doc/_static/favicon.ico
new file mode 100644
index 00000000..777f617d
--- /dev/null
+++ b/doc/_static/favicon.ico
Binary files differ
diff --git a/doc/_static/logo_new.png b/doc/_static/logo_new.png
new file mode 100644
index 00000000..0ae4b209
--- /dev/null
+++ b/doc/_static/logo_new.png
Binary files differ
diff --git a/doc/_static/logo_only.png b/doc/_static/logo_only.png
new file mode 100644
index 00000000..fdebcc47
--- /dev/null
+++ b/doc/_static/logo_only.png
Binary files differ
diff --git a/doc/_templates/docssidebar.html b/doc/_templates/docssidebar.html
new file mode 100644
index 00000000..913acaaf
--- /dev/null
+++ b/doc/_templates/docssidebar.html
@@ -0,0 +1,3 @@
+{% if pagename != 'docs/index' %}
+<strong>&laquo; <a href="{{ pathto('docs/index') }}">Back to docs index</a></strong>
+{% endif %}
diff --git a/doc/_templates/indexsidebar.html b/doc/_templates/indexsidebar.html
new file mode 100644
index 00000000..29954554
--- /dev/null
+++ b/doc/_templates/indexsidebar.html
@@ -0,0 +1,25 @@
+<h3>Download</h3>
+{% if version.endswith('(hg)') %}
+<p>This documentation is for version <b>{{ version }}</b>, which is
+ not released yet.</p>
+<p>You can use it from the
+  <a href="https://bitbucket.org/birkenfeld/pygments-main">Mercurial repo</a> or look for
+  released versions in the <a href="http://pypi.python.org/pypi/Pygments">Python
+  Package Index</a>.</p>
+{% else %}
+<p>Current version: <b>{{ version }}</b></p>
+<p>Get Pygments from the <a href="http://pypi.python.org/pypi/Pygments">Python Package
+Index</a>, or install it with:</p>
+<pre>pip install Pygments</pre>
+{% endif %}
+
+<h3>Questions? Suggestions?</h3>
+
+<p>Clone at <a href="https://bitbucket.org/birkenfeld/pygments-main">Bitbucket</a>
+or come to the <tt>#pocoo</tt> channel on FreeNode.</p>
+<p>You can also open an issue at the
+ <a href="https://www.bitbucket.org/birkenfeld/pygments-main/issues/">tracker</a>.</p>
+
+<p class="logo">A <a href="http://pocoo.org/">
+  <img src="{{ pathto("_static/pocoo.png", 1) }}" /></a> project</p>
+
diff --git a/doc/_themes/pygments14/layout.html b/doc/_themes/pygments14/layout.html
new file mode 100644
index 00000000..2cc03e03
--- /dev/null
+++ b/doc/_themes/pygments14/layout.html
@@ -0,0 +1,98 @@
+{#
+ sphinxdoc/layout.html
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Sphinx layout template for the sphinxdoc theme.
+
+ :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+#}
+{%- extends "basic/layout.html" %}
+
+{# put the sidebar before the body #}
+{% block sidebar1 %}{{ sidebar() }}{% endblock %}
+{% block sidebar2 %}{% endblock %}
+
+{% block relbar1 %}{% endblock %}
+{% block relbar2 %}{% endblock %}
+
+{% block extrahead %}
+ <link href='http://fonts.googleapis.com/css?family={{ theme_font|replace(' ', '+') }}:300,400,700'
+ rel='stylesheet' type='text/css'>
+{{ super() }}
+{%- if not embedded %}
+ <style type="text/css">
+ table.right { float: right; margin-left: 20px; }
+ table.right td { border: 1px solid #ccc; }
+ {% if pagename == 'index' %}
+ .related { display: none; }
+ {% endif %}
+ </style>
+ <script type="text/javascript">
+ // intelligent scrolling of the sidebar content
+ $(window).scroll(function() {
+ var sb = $('.sphinxsidebarwrapper');
+ var win = $(window);
+ var sbh = sb.height();
+ var offset = $('.sphinxsidebar').position()['top'];
+ var wintop = win.scrollTop();
+ var winbot = wintop + win.innerHeight();
+ var curtop = sb.position()['top'];
+ var curbot = curtop + sbh;
+ // does sidebar fit in window?
+ if (sbh < win.innerHeight()) {
+ // yes: easy case -- always keep at the top
+ sb.css('top', $u.min([$u.max([0, wintop - offset - 10]),
+ $(document).height() - sbh - 200]));
+ } else {
+ // no: only scroll if top/bottom edge of sidebar is at
+ // top/bottom edge of window
+ if (curtop > wintop && curbot > winbot) {
+ sb.css('top', $u.max([wintop - offset - 10, 0]));
+ } else if (curtop < wintop && curbot < winbot) {
+ sb.css('top', $u.min([winbot - sbh - offset - 20,
+ $(document).height() - sbh - 200]));
+ }
+ }
+ });
+ </script>
+{%- endif %}
+{% endblock %}
+
+{% block header %}
+<div class="outerwrapper">
+<div class="pageheader">
+ <ul>
+ <li><a href="{{ pathto('index') }}">Home</a></li>
+ {% if demo_active %}
+ <li><a href="{{ pathto('demo') }}">Demo</a></li>
+ {% endif %}
+ <li><a href="{{ pathto('languages') }}">Languages</a></li>
+ <li><a href="{{ pathto('faq') }}">FAQ</a></li>
+ <li><a href="{{ pathto('download') }}">Get it</a></li>
+ <li><a href="{{ pathto('docs/index') }}">Docs</a></li>
+ </ul>
+ <div>
+ <a href="{{ pathto('index') }}">
+ <img src="{{ pathto('_static/logo.png', 1) }}" alt="Pygments logo" />
+ </a>
+ </div>
+</div>
+{% endblock %}
+
+{% block footer %}
+ <div class="footer" role="contentinfo">
+ &copy; Copyright 2006-2015, Georg Brandl and Pygments contributors.
+ Created using <a href="http://sphinx-doc.org/">Sphinx</a> {{
+ sphinx_version }}. <br/>
+ Pygments logo created by <a href="http://joelunger.com">Joel Unger</a>.
+ Backgrounds from <a href="http://subtlepatterns.com">subtlepatterns.com</a>.
+ </div>
+ </div> {# closes "outerwrapper" div #}
+{% endblock %}
+
+{% block sidebarrel %}
+{% endblock %}
+
+{% block sidebarsourcelink %}
+{% endblock %}
diff --git a/doc/_themes/pygments14/static/bodybg.png b/doc/_themes/pygments14/static/bodybg.png
new file mode 100644
index 00000000..46892b80
--- /dev/null
+++ b/doc/_themes/pygments14/static/bodybg.png
Binary files differ
diff --git a/doc/_themes/pygments14/static/docbg.png b/doc/_themes/pygments14/static/docbg.png
new file mode 100644
index 00000000..13e61f32
--- /dev/null
+++ b/doc/_themes/pygments14/static/docbg.png
Binary files differ
diff --git a/doc/_themes/pygments14/static/listitem.png b/doc/_themes/pygments14/static/listitem.png
new file mode 100644
index 00000000..e45715f9
--- /dev/null
+++ b/doc/_themes/pygments14/static/listitem.png
Binary files differ
diff --git a/doc/_themes/pygments14/static/logo.png b/doc/_themes/pygments14/static/logo.png
new file mode 100644
index 00000000..2c1a24dc
--- /dev/null
+++ b/doc/_themes/pygments14/static/logo.png
Binary files differ
diff --git a/doc/_themes/pygments14/static/pocoo.png b/doc/_themes/pygments14/static/pocoo.png
new file mode 100644
index 00000000..41741494
--- /dev/null
+++ b/doc/_themes/pygments14/static/pocoo.png
Binary files differ
diff --git a/doc/_themes/pygments14/static/pygments14.css_t b/doc/_themes/pygments14/static/pygments14.css_t
new file mode 100644
index 00000000..5c37aaf9
--- /dev/null
+++ b/doc/_themes/pygments14/static/pygments14.css_t
@@ -0,0 +1,401 @@
+/*
+ * pygments14.css
+ * ~~~~~~~~~~~~~~
+ *
+ * Sphinx stylesheet -- pygments14 theme. Heavily copied from sphinx13.
+ *
+ * :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+@import url("basic.css");
+
+/* -- page layout ----------------------------------------------------------- */
+
+body {
+ font-family: {{ theme_font }}, 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
+ 'Verdana', sans-serif;
+ font-size: 14px;
+ text-align: center;
+ background-image: url(bodybg.png);
+ background-color: {{ theme_background }};
+ color: black;
+ padding: 0;
+ /*
+ border-right: 1px solid {{ theme_border }};
+ border-left: 1px solid {{ theme_border }};
+ */
+
+ margin: 0 auto;
+ min-width: 780px;
+ max-width: 1080px;
+}
+
+.outerwrapper {
+ background-image: url(docbg.png);
+ background-attachment: fixed;
+}
+
+.pageheader {
+ text-align: left;
+ padding: 10px 15px;
+}
+
+.pageheader ul {
+ float: right;
+ color: white;
+ list-style-type: none;
+ padding-left: 0;
+ margin-top: 40px;
+ margin-right: 10px;
+}
+
+.pageheader li {
+ float: left;
+ margin: 0 0 0 10px;
+}
+
+.pageheader li a {
+ border-radius: 3px;
+ padding: 8px 12px;
+ color: {{ theme_darkgray }};
+ text-shadow: 0 0 5px rgba(0, 0, 0, 0.2);
+}
+
+.pageheader li a:hover {
+ background-color: {{ theme_yellow }};
+ color: black;
+ text-shadow: none;
+}
+
+div.document {
+ text-align: left;
+ /*border-left: 1em solid {{ theme_lightyellow }};*/
+}
+
+div.bodywrapper {
+ margin: 0 12px 0 240px;
+ background-color: white;
+/* border-right: 1px solid {{ theme_border }}; */
+}
+
+div.body {
+ margin: 0;
+ padding: 0.5em 20px 20px 20px;
+}
+
+div.related {
+ font-size: 1em;
+ color: {{ theme_darkgray }};
+}
+
+div.related ul {
+ background-image: url(relbg.png);
+ background-repeat: repeat-y;
+ background-color: {{ theme_yellow }};
+ height: 1.9em;
+ /*
+ border-top: 1px solid {{ theme_border }};
+ border-bottom: 1px solid {{ theme_border }};
+ */
+}
+
+div.related ul li {
+ margin: 0 5px 0 0;
+ padding: 0;
+ float: left;
+}
+
+div.related ul li.right {
+ float: right;
+ margin-right: 5px;
+}
+
+div.related ul li a {
+ margin: 0;
+ padding: 0 5px 0 5px;
+ line-height: 1.75em;
+ color: {{ theme_darkgray }};
+ /*text-shadow: 0px 0px 1px rgba(0, 0, 0, 0.5);*/
+}
+
+div.related ul li a:hover {
+ text-decoration: underline;
+ text-shadow: 0px 0px 1px rgba(255, 255, 255, 0.5);
+}
+
+div.sphinxsidebarwrapper {
+ position: relative;
+ top: 0px;
+ padding: 0;
+}
+
+div.sphinxsidebar {
+ margin: 0;
+ padding: 0 0px 15px 15px;
+ width: 210px;
+ float: left;
+ font-size: 1em;
+ text-align: left;
+}
+
+div.sphinxsidebar .logo {
+ font-size: 1.8em;
+ color: #666;
+ font-weight: 300;
+ text-align: center;
+}
+
+div.sphinxsidebar .logo img {
+ vertical-align: middle;
+}
+
+div.sphinxsidebar input {
+ border: 1px solid #aaa;
+ font-family: {{ theme_font }}, 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
+ 'Verdana', sans-serif;
+ font-size: 1em;
+}
+
+div.sphinxsidebar h3 {
+ font-size: 1.5em;
+ /* border-top: 1px solid {{ theme_border }}; */
+ margin-top: 1em;
+ margin-bottom: 0.5em;
+ padding-top: 0.5em;
+}
+
+div.sphinxsidebar h4 {
+ font-size: 1.2em;
+ margin-bottom: 0;
+}
+
+div.sphinxsidebar h3, div.sphinxsidebar h4 {
+ margin-right: -15px;
+ margin-left: -15px;
+ padding-right: 14px;
+ padding-left: 14px;
+ color: #333;
+ font-weight: 300;
+ /*text-shadow: 0px 0px 0.5px rgba(0, 0, 0, 0.4);*/
+}
+
+div.sphinxsidebarwrapper > h3:first-child {
+ margin-top: 0.5em;
+ border: none;
+}
+
+div.sphinxsidebar h3 a {
+ color: #333;
+}
+
+div.sphinxsidebar ul {
+ color: #444;
+ margin-top: 7px;
+ padding: 0;
+ line-height: 130%;
+}
+
+div.sphinxsidebar ul ul {
+ margin-left: 20px;
+ list-style-image: url(listitem.png);
+}
+
+div.footer {
+ color: {{ theme_darkgray }};
+ text-shadow: 0 0 .2px rgba(255, 255, 255, 0.8);
+ padding: 2em;
+ text-align: center;
+ clear: both;
+ font-size: 0.8em;
+}
+
+/* -- body styles ----------------------------------------------------------- */
+
+p {
+ margin: 0.8em 0 0.5em 0;
+}
+
+a {
+ color: {{ theme_darkgreen }};
+ text-decoration: none;
+}
+
+a:hover {
+ color: {{ theme_darkyellow }};
+}
+
+div.body a {
+ text-decoration: underline;
+}
+
+h1 {
+ margin: 10px 0 0 0;
+ font-size: 2.4em;
+ color: {{ theme_darkgray }};
+ font-weight: 300;
+}
+
+h2 {
+ margin: 1.em 0 0.2em 0;
+ font-size: 1.5em;
+ font-weight: 300;
+ padding: 0;
+ color: {{ theme_darkgreen }};
+}
+
+h3 {
+ margin: 1em 0 -0.3em 0;
+ font-size: 1.3em;
+ font-weight: 300;
+}
+
+div.body h1 a, div.body h2 a, div.body h3 a, div.body h4 a, div.body h5 a, div.body h6 a {
+ text-decoration: none;
+}
+
+div.body h1 a tt, div.body h2 a tt, div.body h3 a tt, div.body h4 a tt, div.body h5 a tt, div.body h6 a tt {
+ color: {{ theme_darkgreen }} !important;
+ font-size: inherit !important;
+}
+
+a.headerlink {
+ color: {{ theme_green }} !important;
+ font-size: 12px;
+ margin-left: 6px;
+ padding: 0 4px 0 4px;
+ text-decoration: none !important;
+ float: right;
+}
+
+a.headerlink:hover {
+ background-color: #ccc;
+ color: white!important;
+}
+
+cite, code, tt {
+ font-family: 'Consolas', 'DejaVu Sans Mono',
+ 'Bitstream Vera Sans Mono', monospace;
+ font-size: 14px;
+ letter-spacing: -0.02em;
+}
+
+tt {
+ background-color: #f2f2f2;
+ border: 1px solid #ddd;
+ border-radius: 2px;
+ color: #333;
+ padding: 1px;
+}
+
+tt.descname, tt.descclassname, tt.xref {
+ border: 0;
+}
+
+hr {
+ border: 1px solid #abc;
+ margin: 2em;
+}
+
+a tt {
+ border: 0;
+ color: {{ theme_darkgreen }};
+}
+
+a tt:hover {
+ color: {{ theme_darkyellow }};
+}
+
+pre {
+ font-family: 'Consolas', 'DejaVu Sans Mono',
+ 'Bitstream Vera Sans Mono', monospace;
+ font-size: 13px;
+ letter-spacing: 0.015em;
+ line-height: 120%;
+ padding: 0.5em;
+ border: 1px solid #ccc;
+ border-radius: 2px;
+ background-color: #f8f8f8;
+}
+
+pre a {
+ color: inherit;
+ text-decoration: underline;
+}
+
+td.linenos pre {
+ padding: 0.5em 0;
+}
+
+div.quotebar {
+ background-color: #f8f8f8;
+ max-width: 250px;
+ float: right;
+ padding: 0px 7px;
+ border: 1px solid #ccc;
+ margin-left: 1em;
+}
+
+div.topic {
+ background-color: #f8f8f8;
+}
+
+table {
+ border-collapse: collapse;
+ margin: 0 -0.5em 0 -0.5em;
+}
+
+table td, table th {
+ padding: 0.2em 0.5em 0.2em 0.5em;
+}
+
+div.admonition, div.warning {
+ font-size: 0.9em;
+ margin: 1em 0 1em 0;
+ border: 1px solid #86989B;
+ border-radius: 2px;
+ background-color: #f7f7f7;
+ padding: 0;
+}
+
+div.admonition p, div.warning p {
+ margin: 0.5em 1em 0.5em 1em;
+ padding: 0;
+}
+
+div.admonition pre, div.warning pre {
+ margin: 0.4em 1em 0.4em 1em;
+}
+
+div.admonition p.admonition-title,
+div.warning p.admonition-title {
+ margin-top: 1em;
+ padding-top: 0.5em;
+ font-weight: bold;
+}
+
+div.warning {
+ border: 1px solid #940000;
+/* background-color: #FFCCCF;*/
+}
+
+div.warning p.admonition-title {
+}
+
+div.admonition ul, div.admonition ol,
+div.warning ul, div.warning ol {
+ margin: 0.1em 0.5em 0.5em 3em;
+ padding: 0;
+}
+
+.viewcode-back {
+ font-family: {{ theme_font }}, 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
+ 'Verdana', sans-serif;
+}
+
+div.viewcode-block:target {
+ background-color: #f4debf;
+ border-top: 1px solid #ac9;
+ border-bottom: 1px solid #ac9;
+}
diff --git a/doc/_themes/pygments14/theme.conf b/doc/_themes/pygments14/theme.conf
new file mode 100644
index 00000000..fffe66d6
--- /dev/null
+++ b/doc/_themes/pygments14/theme.conf
@@ -0,0 +1,15 @@
+[theme]
+inherit = basic
+stylesheet = pygments14.css
+pygments_style = friendly
+
+[options]
+green = #66b55e
+darkgreen = #36852e
+darkgray = #666666
+border = #66b55e
+yellow = #f4cd00
+darkyellow = #d4ad00
+lightyellow = #fffbe3
+background = #f9f9f9
+font = PT Sans
diff --git a/doc/conf.py b/doc/conf.py
new file mode 100644
index 00000000..51a91617
--- /dev/null
+++ b/doc/conf.py
@@ -0,0 +1,241 @@
+# -*- coding: utf-8 -*-
+#
+# Pygments documentation build configuration file
+#
+
+import sys, os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath('..'))
+
+import pygments
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'pygments.sphinxext']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'Pygments'
+copyright = u'2015, Georg Brandl'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = pygments.__version__
+# The full version, including alpha/beta/rc tags.
+release = version
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+#pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'pygments14'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+html_theme_path = ['_themes']
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+html_favicon = '_static/favicon.ico'
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+html_sidebars = {'index': 'indexsidebar.html',
+ 'docs/*': 'docssidebar.html'}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Pygmentsdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ('index', 'Pygments.tex', u'Pygments Documentation',
+ u'Georg Brandl', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'pygments', u'Pygments Documentation',
+ [u'Georg Brandl'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output ------------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ ('index', 'Pygments', u'Pygments Documentation',
+ u'Georg Brandl', 'Pygments', 'Pygments is a syntax highlighting package.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+#intersphinx_mapping = {'http://docs.python.org/': None}
diff --git a/doc/docs/api.rst b/doc/docs/api.rst
new file mode 100644
index 00000000..123a4643
--- /dev/null
+++ b/doc/docs/api.rst
@@ -0,0 +1,316 @@
+.. -*- mode: rst -*-
+
+=====================
+The full Pygments API
+=====================
+
+This page describes the Pygments API.
+
+High-level API
+==============
+
+.. module:: pygments
+
+Functions from the :mod:`pygments` module:
+
+.. function:: lex(code, lexer)
+
+ Lex `code` with the `lexer` (must be a `Lexer` instance)
+ and return an iterable of tokens. Currently, this only calls
+ `lexer.get_tokens()`.
+
+.. function:: format(tokens, formatter, outfile=None)
+
+ Format a token stream (iterable of tokens) `tokens` with the
+ `formatter` (must be a `Formatter` instance). The result is
+ written to `outfile`, or if that is ``None``, returned as a
+ string.
+
+.. function:: highlight(code, lexer, formatter, outfile=None)
+
+ This is the most high-level highlighting function.
+ It combines `lex` and `format` in one function.
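+
+    As a quick usage sketch (any lexer/formatter pair will do; the Python
+    lexer and HTML formatter are used here just as examples)::
+
+        from pygments import highlight
+        from pygments.lexers import PythonLexer
+        from pygments.formatters import HtmlFormatter
+
+        # no outfile given, so the highlighted HTML is returned as a string
+        html = highlight('print "Hello World"', PythonLexer(), HtmlFormatter())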
+
+
+.. module:: pygments.lexers
+
+Functions from :mod:`pygments.lexers`:
+
+.. function:: get_lexer_by_name(alias, **options)
+
+ Return an instance of a `Lexer` subclass that has `alias` in its
+ aliases list. The lexer is given the `options` at its
+ instantiation.
+
+ Will raise :exc:`pygments.util.ClassNotFound` if no lexer with that alias is
+ found.
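+
+    For example (a small sketch; ``'python'`` is one of the registered
+    aliases, and ``stripall`` is a standard lexer option)::
+
+        from pygments.lexers import get_lexer_by_name
+
+        lexer = get_lexer_by_name('python', stripall=True)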
+
+.. function:: get_lexer_for_filename(fn, **options)
+
+ Return a `Lexer` subclass instance that has a filename pattern
+ matching `fn`. The lexer is given the `options` at its
+ instantiation.
+
+ Will raise :exc:`pygments.util.ClassNotFound` if no lexer for that filename
+ is found.
+
+.. function:: get_lexer_for_mimetype(mime, **options)
+
+ Return a `Lexer` subclass instance that has `mime` in its mimetype
+ list. The lexer is given the `options` at its instantiation.
+
+ Will raise :exc:`pygments.util.ClassNotFound` if no lexer for that mimetype
+ is found.
+
+.. function:: guess_lexer(text, **options)
+
+ Return a `Lexer` subclass instance that's guessed from the text in
+ `text`. For that, the :meth:`.analyse_text()` method of every known lexer
+ class is called with the text as argument, and the lexer which returned the
+ highest value will be instantiated and returned.
+
+ :exc:`pygments.util.ClassNotFound` is raised if no lexer thinks it can
+ handle the content.
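+
+    A minimal sketch of typical usage::
+
+        from pygments.lexers import guess_lexer
+
+        # the shebang line should make the Python lexer score highest
+        lexer = guess_lexer('#!/usr/bin/env python\nprint "Hello"')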
+
+.. function:: guess_lexer_for_filename(filename, text, **options)
+
+ As :func:`guess_lexer()`, but only lexers which have a pattern in `filenames`
+ or `alias_filenames` that matches `filename` are taken into consideration.
+
+ :exc:`pygments.util.ClassNotFound` is raised if no lexer thinks it can
+ handle the content.
+
+.. function:: get_all_lexers()
+
+ Return an iterable over all registered lexers, yielding tuples in the
+ format::
+
+ (longname, tuple of aliases, tuple of filename patterns, tuple of mimetypes)
+
+ .. versionadded:: 0.6
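+
+    For instance, to list every known lexer and its aliases (a sketch)::
+
+        from pygments.lexers import get_all_lexers
+
+        for name, aliases, patterns, mimetypes in get_all_lexers():
+            print name, aliases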
+
+
+.. module:: pygments.formatters
+
+Functions from :mod:`pygments.formatters`:
+
+.. function:: get_formatter_by_name(alias, **options)
+
+ Return an instance of a :class:`.Formatter` subclass that has `alias` in its
+ aliases list. The formatter is given the `options` at its instantiation.
+
+ Will raise :exc:`pygments.util.ClassNotFound` if no formatter with that
+ alias is found.
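+
+    For example (a sketch; ``'html'`` is the alias of the HTML formatter and
+    ``linenos`` one of its options)::
+
+        from pygments.formatters import get_formatter_by_name
+
+        formatter = get_formatter_by_name('html', linenos=True)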
+
+.. function:: get_formatter_for_filename(fn, **options)
+
+ Return a :class:`.Formatter` subclass instance that has a filename pattern
+ matching `fn`. The formatter is given the `options` at its instantiation.
+
+ Will raise :exc:`pygments.util.ClassNotFound` if no formatter for that filename
+ is found.
+
+
+.. module:: pygments.styles
+
+Functions from :mod:`pygments.styles`:
+
+.. function:: get_style_by_name(name)
+
+ Return a style class by its short name. The names of the builtin styles
+ are listed in :data:`pygments.styles.STYLE_MAP`.
+
+ Will raise :exc:`pygments.util.ClassNotFound` if no style of that name is
+ found.
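+
+    For example (``'default'`` is one of the builtin style names)::
+
+        from pygments.styles import get_style_by_name
+
+        style = get_style_by_name('default')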
+
+.. function:: get_all_styles()
+
+ Return an iterable over all registered styles, yielding their names.
+
+ .. versionadded:: 0.6
+
+
+.. module:: pygments.lexer
+
+Lexers
+======
+
+The base lexer class from which all lexers are derived is:
+
+.. class:: Lexer(**options)
+
+ The constructor takes a \*\*keywords dictionary of options.
+ Every subclass must first process its own options and then call
+ the `Lexer` constructor, since it processes the `stripnl`,
+ `stripall` and `tabsize` options.
+
+ An example looks like this:
+
+ .. sourcecode:: python
+
+ def __init__(self, **options):
+ self.compress = options.get('compress', '')
+ Lexer.__init__(self, **options)
+
+ As these options must all be specifiable as strings (due to the
+ command line usage), there are various utility functions
+ available to help with that; see `Option processing`_.
+
+ .. method:: get_tokens(text)
+
+ This method is the basic interface of a lexer. It is called by
+ the `highlight()` function. It must process the text and return an
+ iterable of ``(tokentype, value)`` pairs from `text`.
+
+ Normally, you don't need to override this method. The default
+ implementation processes the `stripnl`, `stripall` and `tabsize`
+ options and then yields all tokens from `get_tokens_unprocessed()`,
+ with the ``index`` dropped.
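+
+      A small usage sketch::
+
+          from pygments.lexers import PythonLexer
+
+          # each item is a (tokentype, value) pair
+          tokens = list(PythonLexer().get_tokens(u'x = 1\n'))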
+
+ .. method:: get_tokens_unprocessed(text)
+
+ This method should process the text and return an iterable of
+ ``(index, tokentype, value)`` tuples where ``index`` is the starting
+ position of the token within the input text.
+
+ This method must be overridden by subclasses.
+
+ .. staticmethod:: analyse_text(text)
+
+ A static method which is called for lexer guessing. It should analyse
+ the text and return a float in the range from ``0.0`` to ``1.0``.
+ If it returns ``0.0``, the lexer will not be selected as the most
+ probable one; if it returns ``1.0``, it will be selected immediately.
+
+ .. note:: You don't have to add ``@staticmethod`` to the definition of
+ this method; this will be taken care of by the Lexer's metaclass.
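+
+      For example, a hypothetical lexer for an INI-like format might implement
+      it roughly like this (just a sketch, not the builtin implementation)::
+
+          def analyse_text(text):
+              # files usually start with a '[section]' header
+              if text.lstrip().startswith('['):
+                  return 0.3
+              return 0.0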
+
+ For a list of known tokens, have a look at the :doc:`tokens` page.
+
+ A lexer can also have the following attributes (in fact, all of them except
+ `alias_filenames` are mandatory); they are used by the builtin lookup
+ mechanism.
+
+ .. attribute:: name
+
+ Full name for the lexer, in human-readable form.
+
+ .. attribute:: aliases
+
+ A list of short, unique identifiers that can be used to look up
+ the lexer from a list, e.g. using `get_lexer_by_name()`.
+
+ .. attribute:: filenames
+
+ A list of `fnmatch` patterns that match filenames which contain
+ content for this lexer. The patterns in this list should be unique among
+ all lexers.
+
+ .. attribute:: alias_filenames
+
+ A list of `fnmatch` patterns that match filenames which may or may not
+ contain content for this lexer. This list is used by the
+ :func:`.guess_lexer_for_filename()` function, to determine which lexers
+ are then included in guessing the correct one. That means that
+ e.g. every lexer for HTML and a template language should include
+ ``*.html`` in this list.
+
+ .. attribute:: mimetypes
+
+ A list of MIME types for content that can be lexed with this
+ lexer.
+
+
+.. module:: pygments.formatter
+
+Formatters
+==========
+
+A formatter is derived from this class:
+
+
+.. class:: Formatter(**options)
+
+ As with lexers, this constructor processes options and then must call the
+ base class :meth:`__init__`.
+
+ The :class:`Formatter` class recognizes the options `style`, `full` and
+ `title`. It is up to the formatter class whether it uses them.
+
+ .. method:: get_style_defs(arg='')
+
+ This method must return statements or declarations suitable to define
+ the current style for subsequent highlighted text (e.g. CSS classes
+ in the `HTMLFormatter`).
+
+ The optional argument `arg` can be used to modify the generation and
+ is formatter dependent (it is standardized because it can be given on
+ the command line).
+
+ This method is called by the ``-S`` :doc:`command-line option <cmdline>`,
+ the `arg` is then given by the ``-a`` option.
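+
+      For example, with the HTML formatter the returned statements are CSS
+      rules (a small sketch)::
+
+          from pygments.formatters import HtmlFormatter
+
+          css = HtmlFormatter().get_style_defs('.highlight')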
+
+ .. method:: format(tokensource, outfile)
+
+ This method must format the tokens from the `tokensource` iterable and
+ write the formatted version to the file object `outfile`.
+
+ Formatter options can control how exactly the tokens are converted.
+
+ .. versionadded:: 0.7
+ A formatter must have the following attributes that are used by the
+ builtin lookup mechanism.
+
+ .. attribute:: name
+
+ Full name for the formatter, in human-readable form.
+
+ .. attribute:: aliases
+
+ A list of short, unique identifiers that can be used to look up
+ the formatter from a list, e.g. using :func:`.get_formatter_by_name()`.
+
+ .. attribute:: filenames
+
+ A list of :mod:`fnmatch` patterns that match filenames for which this
+ formatter can produce output. The patterns in this list should be unique
+ among all formatters.
+
+
+.. module:: pygments.util
+
+Option processing
+=================
+
+The :mod:`pygments.util` module has some utility functions usable for option
+processing:
+
+.. exception:: OptionError
+
+ This exception will be raised by all option processing functions if
+ the type or value of the argument is not correct.
+
+.. function:: get_bool_opt(options, optname, default=None)
+
+ Interpret the key `optname` from the dictionary `options` as a boolean and
+ return it. Return `default` if `optname` is not in `options`.
+
+ The valid string values for ``True`` are ``1``, ``yes``, ``true`` and
+ ``on``, the ones for ``False`` are ``0``, ``no``, ``false`` and ``off``
+ (matched case-insensitively).
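+
+    A tiny sketch of the accepted spellings::
+
+        from pygments.util import get_bool_opt
+
+        get_bool_opt({'stripall': 'yes'}, 'stripall', False)  # -> True
+        get_bool_opt({}, 'stripall', False)                   # -> False (default)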
+
+.. function:: get_int_opt(options, optname, default=None)
+
+ As :func:`get_bool_opt`, but interpret the value as an integer.
+
+.. function:: get_list_opt(options, optname, default=None)
+
+ If the key `optname` from the dictionary `options` is a string,
+ split it at whitespace and return it. If it is already a list
+ or a tuple, it is returned as a list.
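+
+    For example (a sketch)::
+
+        from pygments.util import get_list_opt
+
+        get_list_opt({'dirs': 'a b c'}, 'dirs', [])  # -> ['a', 'b', 'c']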
+
+.. function:: get_choice_opt(options, optname, allowed, default=None)
+
+ If the value of the key `optname` in the dictionary `options` is not in the
+ sequence `allowed`, raise an error; otherwise, return it.
+
+ .. versionadded:: 0.8
diff --git a/doc/docs/authors.rst b/doc/docs/authors.rst
new file mode 100644
index 00000000..f8373f0a
--- /dev/null
+++ b/doc/docs/authors.rst
@@ -0,0 +1,4 @@
+Full contributor list
+=====================
+
+.. include:: ../../AUTHORS
diff --git a/doc/docs/changelog.rst b/doc/docs/changelog.rst
new file mode 100644
index 00000000..f264cab0
--- /dev/null
+++ b/doc/docs/changelog.rst
@@ -0,0 +1 @@
+.. include:: ../../CHANGES
diff --git a/docs/src/cmdline.txt b/doc/docs/cmdline.rst
index a48a5c27..165af969 100644
--- a/docs/src/cmdline.txt
+++ b/doc/docs/cmdline.rst
@@ -4,8 +4,8 @@
Command Line Interface
======================
-You can use Pygments from the shell, provided you installed the `pygmentize`
-script::
+You can use Pygments from the shell, provided you installed the
+:program:`pygmentize` script::
$ pygmentize test.py
print "Hello World"
@@ -28,7 +28,7 @@ written to stdout.
The ``-f`` option selects a formatter (as with ``-l``, it can also be omitted
if an output file name is given and has a supported extension).
If no output file name is given and ``-f`` is omitted, the
-`TerminalFormatter` is used.
+:class:`.TerminalFormatter` is used.
The above command could therefore also be given as::
@@ -82,14 +82,15 @@ Usage is as follows::
generates a CSS style sheet (because you selected the HTML formatter) for
the "colorful" style prepending a ".syntax" selector to all style rules.
-For an explanation what ``-a`` means for `a particular formatter`_, look for
-the `arg` argument for the formatter's `get_style_defs()` method.
+For an explanation of what ``-a`` means for :doc:`a particular formatter
+<formatters>`, look for the `arg` argument for the formatter's
+:meth:`.get_style_defs()` method.
Getting lexer names
-------------------
-*New in Pygments 1.0.*
+.. versionadded:: 1.0
The ``-N`` option guesses a lexer name for a given filename, so that ::
@@ -125,7 +126,7 @@ will print the help for the Python lexer, etc.
A note on encodings
-------------------
-*New in Pygments 0.9.*
+.. versionadded:: 0.9
Pygments tries to be smart regarding encodings in the formatting process:
@@ -135,13 +136,14 @@ Pygments tries to be smart regarding encodings in the formatting process:
* If you give an ``outencoding`` option, it will override ``encoding``
as the output encoding.
+* If you give an ``inencoding`` option, it will override ``encoding``
+ as the input encoding.
+
* If you don't give an encoding and have given an output file, the default
- encoding for lexer and formatter is ``latin1`` (which will pass through
- all non-ASCII characters).
+ encoding for lexer and formatter is the terminal encoding or the default
+ locale encoding of the system. As a last resort, ``latin1`` is used (which
+ will pass through all non-ASCII characters).
* If you don't give an encoding and haven't given an output file (that means
output is written to the console), the default encoding for lexer and
- formatter is the terminal encoding (`sys.stdout.encoding`).
-
-
-.. _a particular formatter: formatters.txt
+ formatter is the terminal encoding (``sys.stdout.encoding``).
diff --git a/docs/src/filterdevelopment.txt b/doc/docs/filterdevelopment.rst
index c60e1e84..fbcd0a09 100644
--- a/docs/src/filterdevelopment.txt
+++ b/doc/docs/filterdevelopment.rst
@@ -4,11 +4,11 @@
Write your own filter
=====================
-*New in Pygments 0.7.*
+.. versionadded:: 0.7
Writing own filters is very easy. All you have to do is to subclass
the `Filter` class and override the `filter` method. Additionally a
-filter is instanciated with some keyword arguments you can use to
+filter is instantiated with some keyword arguments you can use to
adjust the behavior of your filter.
@@ -58,7 +58,7 @@ You can also use the `simplefilter` decorator from the `pygments.filter` module:
@simplefilter
- def uncolor(lexer, stream, options):
+ def uncolor(self, lexer, stream, options):
class_too = get_bool_opt(options, 'classtoo')
for ttype, value in stream:
if ttype is Name.Function or (class_too and
@@ -67,4 +67,5 @@ You can also use the `simplefilter` decorator from the `pygments.filter` module:
yield ttype, value
The decorator automatically subclasses an internal filter class and uses the
-decorated function for filtering.
+decorated function as a method for filtering. (That's why there is a `self`
+argument that you probably won't end up using in the method.)
diff --git a/docs/src/filters.txt b/doc/docs/filters.rst
index 522f6330..ff2519a3 100644
--- a/docs/src/filters.txt
+++ b/doc/docs/filters.rst
@@ -4,7 +4,7 @@
Filters
=======
-*New in Pygments 0.7.*
+.. versionadded:: 0.7
You can filter token streams coming from lexers to improve or annotate the
output. For example, you can highlight special words in comments, convert
@@ -31,12 +31,11 @@ To get a list of all registered filters by name, you can use the
`get_all_filters()` function from the `pygments.filters` module that returns an
iterable for all known filters.
-If you want to write your own filter, have a look at `Write your own filter`_.
-
-.. _Write your own filter: filterdevelopment.txt
+If you want to write your own filter, have a look at :doc:`Write your own filter
+<filterdevelopment>`.
Builtin Filters
===============
-[builtin_filter_docs]
+.. pygmentsdoc:: filters
diff --git a/docs/src/formatterdevelopment.txt b/doc/docs/formatterdevelopment.rst
index 83a13b6a..2bfac05c 100644
--- a/docs/src/formatterdevelopment.txt
+++ b/doc/docs/formatterdevelopment.rst
@@ -4,7 +4,7 @@
Write your own formatter
========================
-As well as creating `your own lexer <lexerdevelopment.txt>`_, writing a new
+As well as creating :doc:`your own lexer <lexerdevelopment>`, writing a new
formatter for Pygments is easy and straightforward.
A formatter is a class that is initialized with some keyword arguments (the
diff --git a/docs/src/formatters.txt b/doc/docs/formatters.rst
index 7a590648..9e7074e8 100644
--- a/docs/src/formatters.txt
+++ b/doc/docs/formatters.rst
@@ -12,8 +12,6 @@ Common options
All formatters support these options:
`encoding`
- *New in Pygments 0.6.*
-
If given, must be an encoding name (such as ``"utf-8"``). This will
be used to convert the token strings (which are Unicode strings)
to byte strings in the output (default: ``None``).
@@ -30,19 +28,21 @@ All formatters support these options:
supports Unicode arguments to `write()`. Using a regular file object
wouldn't work.
-`outencoding`
- *New in Pygments 0.7.*
+ .. versionadded:: 0.6
+`outencoding`
When using Pygments from the command line, any `encoding` option given is
passed to the lexer and the formatter. This is sometimes not desirable,
for example if you want to set the input encoding to ``"guess"``.
Therefore, `outencoding` has been introduced which overrides `encoding`
for the formatter if given.
+ .. versionadded:: 0.7
+
Formatter classes
=================
-All these classes are importable from `pygments.formatters`.
+All these classes are importable from :mod:`pygments.formatters`.
-[builtin_formatter_docs]
+.. pygmentsdoc:: formatters
diff --git a/doc/docs/index.rst b/doc/docs/index.rst
new file mode 100644
index 00000000..30d5c085
--- /dev/null
+++ b/doc/docs/index.rst
@@ -0,0 +1,66 @@
+Pygments documentation
+======================
+
+**Starting with Pygments**
+
+.. toctree::
+ :maxdepth: 1
+
+ ../download
+ quickstart
+ cmdline
+
+**Builtin components**
+
+.. toctree::
+ :maxdepth: 1
+
+ lexers
+ filters
+ formatters
+ styles
+
+**Reference**
+
+.. toctree::
+ :maxdepth: 1
+
+ unicode
+ tokens
+ api
+
+**Hacking for Pygments**
+
+.. toctree::
+ :maxdepth: 1
+
+ lexerdevelopment
+ formatterdevelopment
+ filterdevelopment
+ plugins
+
+**Hints and tricks**
+
+.. toctree::
+ :maxdepth: 1
+
+ rstdirective
+ moinmoin
+ java
+ integrate
+
+**About Pygments**
+
+.. toctree::
+ :maxdepth: 1
+
+ changelog
+ authors
+
+
+If you find bugs or have suggestions for the documentation, please look
+:ref:`here <contribute>` for info on how to contact the team.
+
+.. XXX You can download an offline version of this documentation from the
+ :doc:`download page </download>`.
+
diff --git a/docs/src/integrate.txt b/doc/docs/integrate.rst
index 6f8c1253..03fc268f 100644
--- a/docs/src/integrate.txt
+++ b/doc/docs/integrate.rst
@@ -23,8 +23,9 @@ Markdown
--------
Since Pygments 0.9, the distribution ships Markdown_ preprocessor sample code
-that uses Pygments to render source code in `external/markdown-processor.py`.
-You can copy and adapt it to your liking.
+that uses Pygments to render source code in
+:file:`external/markdown-processor.py`. You can copy and adapt it to your
+liking.
.. _Markdown: http://www.freewisdom.org/projects/python-markdown/
@@ -41,8 +42,3 @@ Bash completion
The source distribution contains a file ``external/pygments.bashcomp`` that
sets up completion for the ``pygmentize`` command in bash.
-
-Java
-----
-
-See the `Java quickstart <java.txt>`_ document.
diff --git a/docs/src/java.txt b/doc/docs/java.rst
index 5eb6196a..f553463c 100644
--- a/docs/src/java.txt
+++ b/doc/docs/java.rst
@@ -2,18 +2,18 @@
Use Pygments in Java
=====================
-Thanks to `Jython <http://www.jython.org>`__ it is possible to use Pygments in
+Thanks to `Jython <http://www.jython.org>`_ it is possible to use Pygments in
Java.
-This page is a simple tutorial to get an idea of how this is working. You can
-then look at the `Jython documentation <http://www.jython.org/docs/>`__ for more
-advanced use.
+This page is a simple tutorial to get an idea of how this works. You can
+then look at the `Jython documentation <http://www.jython.org/docs/>`_ for more
+advanced uses.
Since version 1.5, Pygments is deployed on `Maven Central
-<http://repo1.maven.org/maven2/org/pygments/pygments/>`__ as a JAR so is Jython
-which makes it a lot easier to create the Java project.
+<http://repo1.maven.org/maven2/org/pygments/pygments/>`_ as a JAR, as is Jython,
+which makes it a lot easier to create a Java project.
-Here is an example of a `Maven <http://www.maven.org>`__ ``pom.xml`` file for a
+Here is an example of a `Maven <http://www.maven.org>`_ ``pom.xml`` file for a
project running Pygments:
.. sourcecode:: xml
diff --git a/docs/src/lexerdevelopment.txt b/doc/docs/lexerdevelopment.rst
index 730a08b2..08069889 100644
--- a/docs/src/lexerdevelopment.txt
+++ b/doc/docs/lexerdevelopment.rst
@@ -1,55 +1,56 @@
.. -*- mode: rst -*-
+.. highlight:: python
+
====================
Write your own lexer
====================
-If a lexer for your favorite language is missing in the Pygments package, you can
-easily write your own and extend Pygments.
+If a lexer for your favorite language is missing in the Pygments package, you
+can easily write your own and extend Pygments.
-All you need can be found inside the `pygments.lexer` module. As you can read in
-the `API documentation <api.txt>`_, a lexer is a class that is initialized with
-some keyword arguments (the lexer options) and that provides a
-`get_tokens_unprocessed()` method which is given a string or unicode object with
-the data to parse.
+All you need can be found inside the :mod:`pygments.lexer` module. As you can
+read in the :doc:`API documentation <api>`, a lexer is a class that is
+initialized with some keyword arguments (the lexer options) and that provides a
+:meth:`.get_tokens_unprocessed()` method which is given a string or unicode
+object with the data to lex.
-The `get_tokens_unprocessed()` method must return an iterator or iterable
-containing tuples in the form ``(index, token, value)``. Normally you don't need
-to do this since there are numerous base lexers you can subclass.
+The :meth:`.get_tokens_unprocessed()` method must return an iterator or iterable
+containing tuples in the form ``(index, token, value)``. Normally you don't
+need to do this since there are base lexers that do most of the work and that
+you can subclass.
RegexLexer
==========
-A very powerful (but quite easy to use) lexer is the `RegexLexer`. This lexer
-base class allows you to define lexing rules in terms of *regular expressions*
-for different *states*.
+The lexer base class used by almost all of Pygments' lexers is the
+:class:`RegexLexer`. This class allows you to define lexing rules in terms of
+*regular expressions* for different *states*.
States are groups of regular expressions that are matched against the input
-string at the *current position*. If one of these expressions matches, a
-corresponding action is performed (normally yielding a token with a specific
-type), the current position is set to where the last match ended and the
-matching process continues with the first regex of the current state.
+string at the *current position*. If one of these expressions matches, a
+corresponding action is performed (such as yielding a token with a specific
+type, or changing state), the current position is set to where the last match
+ended and the matching process continues with the first regex of the current
+state.
-Lexer states are kept in a state stack: each time a new state is entered, the
-new state is pushed onto the stack. The most basic lexers (like the
-`DiffLexer`) just need one state.
+Lexer states are kept on a stack: each time a new state is entered, the new
+state is pushed onto the stack. The most basic lexers (like the `DiffLexer`)
+just need one state.
Each state is defined as a list of tuples in the form (`regex`, `action`,
`new_state`) where the last item is optional. In the most basic form, `action`
is a token type (like `Name.Builtin`). That means: When `regex` matches, emit a
token with the match text and type `tokentype` and push `new_state` on the state
stack. If the new state is ``'#pop'``, the topmost state is popped from the
-stack instead. (To pop more than one state, use ``'#pop:2'`` and so on.)
-``'#push'`` is a synonym for pushing the current state on the
-stack.
+stack instead. To pop more than one state, use ``'#pop:2'`` and so on.
+``'#push'`` is a synonym for pushing the current state on the stack.
-The following example shows the `DiffLexer` from the builtin lexers. Note that
+The following example shows the `DiffLexer` from the builtin lexers. Note that
it contains some additional attributes `name`, `aliases` and `filenames` which
-aren't required for a lexer. They are used by the builtin lexer lookup
-functions.
-
-.. sourcecode:: python
+aren't required for a lexer. They are used by the builtin lexer lookup
+functions. ::
from pygments.lexer import RegexLexer
from pygments.token import *
@@ -72,56 +73,57 @@ functions.
}
As you can see this lexer only uses one state. When the lexer starts scanning
-the text, it first checks if the current character is a space. If this is true
-it scans everything until newline and returns the parsed data as `Text` token.
+the text, it first checks if the current character is a space. If this is true
+it scans everything until newline and returns the data as a `Text` token (which
+is the "no special highlighting" token).
If this rule doesn't match, it checks if the current char is a plus sign. And
so on.
If no rule matches at the current position, the current char is emitted as an
-`Error` token that indicates a parsing error, and the position is increased by
-1.
+`Error` token that indicates a lexing error, and the position is increased by
+one.
Adding and testing a new lexer
==============================
-To make pygments aware of your new lexer, you have to perform the following
+To make Pygments aware of your new lexer, you have to perform the following
steps:
-First, change to the current directory containing the pygments source code:
+First, change to the current directory containing the Pygments source code:
-.. sourcecode:: console
+.. code-block:: console
$ cd .../pygments-main
-Next, make sure the lexer is known from outside of the module. All modules in
-the ``pygments.lexers`` specify ``__all__``. For example, ``other.py`` sets:
+Select a matching module under ``pygments/lexers``, or create a new module for
+your lexer class.
-.. sourcecode:: python
+Next, make sure the lexer is known from outside of the module. All modules in
+the ``pygments.lexers`` specify ``__all__``. For example, ``esoteric.py`` sets::
__all__ = ['BrainfuckLexer', 'BefungeLexer', ...]
Simply add the name of your lexer class to this list.
-Finally the lexer can be made publically known by rebuilding the lexer
-mapping:
+Finally the lexer can be made publicly known by rebuilding the lexer mapping:
-.. sourcecode:: console
+.. code-block:: console
$ make mapfiles
To test the new lexer, store an example file with the proper extension in
-``tests/examplefiles``. For example, to test your ``DiffLexer``, add a
+``tests/examplefiles``. For example, to test your ``DiffLexer``, add a
``tests/examplefiles/example.diff`` containing a sample diff output.
Now you can use pygmentize to render your example to HTML:
-.. sourcecode:: console
+.. code-block:: console
$ ./pygmentize -O full -f html -o /tmp/example.html tests/examplefiles/example.diff
-Note that this explicitely calls the ``pygmentize`` in the current directory
+Note that this explicitly calls the ``pygmentize`` in the current directory
by preceding it with ``./``. This ensures your modifications are used.
Otherwise a possibly already installed, unmodified version without your new
lexer would have been called from the system search path (``$PATH``).
@@ -130,29 +132,35 @@ To view the result, open ``/tmp/example.html`` in your browser.
Once the example renders as expected, you should run the complete test suite:
-.. sourcecode:: console
+.. code-block:: console
$ make test
+It also tests that your lexer fulfills the lexer API and certain invariants,
+such as that the concatenation of all token text is the same as the input text.
+
Regex Flags
===========
-You can either define regex flags in the regex (``r'(?x)foo bar'``) or by adding
-a `flags` attribute to your lexer class. If no attribute is defined, it defaults
-to `re.MULTILINE`. For more informations about regular expression flags see the
-`regular expressions`_ help page in the python documentation.
+You can either define regex flags locally in the regex (``r'(?x)foo bar'``) or
+globally by adding a `flags` attribute to your lexer class. If no attribute is
+defined, it defaults to `re.MULTILINE`. For more information about regular
+expression flags see the page about `regular expressions`_ in the Python
+documentation.
-.. _regular expressions: http://docs.python.org/lib/re-syntax.html
+.. _regular expressions: http://docs.python.org/library/re.html#regular-expression-syntax
Scanning multiple tokens at once
================================
-Here is a more complex lexer that highlights INI files. INI files consist of
-sections, comments and key = value pairs:
+So far, the `action` element in the rule tuple of regex, action and state has
+been a single token type. Now we look at the first of several other possible
+values.
-.. sourcecode:: python
+Here is a more complex lexer that highlights INI files. INI files consist of
+sections, comments and ``key = value`` pairs::
from pygments.lexer import RegexLexer, bygroups
from pygments.token import *
@@ -172,43 +180,41 @@ sections, comments and key = value pairs:
]
}
-The lexer first looks for whitespace, comments and section names. And later it
+The lexer first looks for whitespace, comments and section names. Later it
looks for a line that looks like a key, value pair, separated by an ``'='``
sign, and optional whitespace.
-The `bygroups` helper makes sure that each group is yielded with a different
-token type. First the `Name.Attribute` token, then a `Text` token for the
+The `bygroups` helper yields each capturing group in the regex with a different
+token type. First the `Name.Attribute` token, then a `Text` token for the
optional whitespace, after that a `Operator` token for the equals sign. Then a
-`Text` token for the whitespace again. The rest of the line is returned as
+`Text` token for the whitespace again. The rest of the line is returned as
`String`.
Note that for this to work, every part of the match must be inside a capturing
group (a ``(...)``), and there must not be any nested capturing groups. If you
nevertheless need a group, use a non-capturing group defined using this syntax:
-``r'(?:some|words|here)'`` (note the ``?:`` after the beginning parenthesis).
+``(?:some|words|here)`` (note the ``?:`` after the beginning parenthesis).
-If you find yourself needing a capturing group inside the regex which
-shouldn't be part of the output but is used in the regular expressions for
-backreferencing (eg: ``r'(<(foo|bar)>)(.*?)(</\2>)'``), you can pass `None`
-to the bygroups function and it will skip that group will be skipped in the
-output.
+If you find yourself needing a capturing group inside the regex which shouldn't
+be part of the output but is used in the regular expressions for backreferencing
+(eg: ``r'(<(foo|bar)>)(.*?)(</\2>)'``), you can pass `None` to the bygroups
+function and that group will be skipped in the output.
Changing states
===============
-Many lexers need multiple states to work as expected. For example, some
-languages allow multiline comments to be nested. Since this is a recursive
+Many lexers need multiple states to work as expected. For example, some
+languages allow multiline comments to be nested. Since this is a recursive
pattern it's impossible to lex just using regular expressions.
-Here is the solution:
-
-.. sourcecode:: python
+Here is a lexer that recognizes C++ style comments (multi-line with ``/* */``
+and single-line with ``//`` until end of line)::
from pygments.lexer import RegexLexer
from pygments.token import *
- class ExampleLexer(RegexLexer):
+ class CppCommentLexer(RegexLexer):
name = 'Example Lexer with states'
tokens = {
@@ -227,28 +233,29 @@ Here is the solution:
}
This lexer starts lexing in the ``'root'`` state. It tries to match as much as
-possible until it finds a slash (``'/'``). If the next character after the slash
-is a star (``'*'``) the `RegexLexer` sends those two characters to the output
-stream marked as `Comment.Multiline` and continues parsing with the rules
+possible until it finds a slash (``'/'``). If the next character after the slash
+is an asterisk (``'*'``) the `RegexLexer` sends those two characters to the
+output stream marked as `Comment.Multiline` and continues lexing with the rules
defined in the ``'comment'`` state.
-If there wasn't a star after the slash, the `RegexLexer` checks if it's a
-singleline comment (eg: followed by a second slash). If this also wasn't the
-case it must be a single slash (the separate regex for a single slash must also
-be given, else the slash would be marked as an error token).
+If there wasn't an asterisk after the slash, the `RegexLexer` checks if it's a
+single-line comment (i.e. followed by a second slash). If this also wasn't the
+case it must be a single slash, which is not a comment starter (the separate
+regex for a single slash must also be given, else the slash would be marked as
+an error token).
-Inside the ``'comment'`` state, we do the same thing again. Scan until the lexer
-finds a star or slash. If it's the opening of a multiline comment, push the
-``'comment'`` state on the stack and continue scanning, again in the
-``'comment'`` state. Else, check if it's the end of the multiline comment. If
+Inside the ``'comment'`` state, we do the same thing again. Scan until the
+lexer finds a star or slash. If it's the opening of a multiline comment, push
+the ``'comment'`` state on the stack and continue scanning, again in the
+``'comment'`` state. Else, check if it's the end of the multiline comment. If
yes, pop one state from the stack.
-Note: If you pop from an empty stack you'll get an `IndexError`. (There is an
+Note: If you pop from an empty stack you'll get an `IndexError`. (There is an
easy way to prevent this from happening: don't ``'#pop'`` in the root state).
If the `RegexLexer` encounters a newline that is flagged as an error token, the
-stack is emptied and the lexer continues scanning in the ``'root'`` state. This
-helps producing error-tolerant highlighting for erroneous input, e.g. when a
+stack is emptied and the lexer continues scanning in the ``'root'`` state. This
+can help produce error-tolerant highlighting for erroneous input, e.g. when a
single-line string is not closed.
@@ -258,14 +265,14 @@ Advanced state tricks
There are a few more things you can do with states:
- You can push multiple states onto the stack if you give a tuple instead of a
- simple string as the third item in a rule tuple. For example, if you want to
- match a comment containing a directive, something like::
+ simple string as the third item in a rule tuple. For example, if you want to
+ match a comment containing a directive, something like:
- /* <processing directive> rest of comment */
+ .. code-block:: text
- you can use this rule:
+ /* <processing directive> rest of comment */
- .. sourcecode:: python
+ you can use this rule::
tokens = {
'root': [
@@ -286,17 +293,16 @@ There are a few more things you can do with states:
When this encounters the above sample, first ``'comment'`` and ``'directive'``
are pushed onto the stack, then the lexer continues in the directive state
until it finds the closing ``>``, then it continues in the comment state until
- the closing ``*/``. Then, both states are popped from the stack again and
+ the closing ``*/``. Then, both states are popped from the stack again and
lexing continues in the root state.
- *New in Pygments 0.9:* The tuple can contain the special ``'#push'`` and
- ``'#pop'`` (but not ``'#pop:n'``) directives.
+ .. versionadded:: 0.9
+ The tuple can contain the special ``'#push'`` and ``'#pop'`` (but not
+ ``'#pop:n'``) directives.
- You can include the rules of a state in the definition of another. This is
- done by using `include` from `pygments.lexer`:
-
- .. sourcecode:: python
+ done by using `include` from `pygments.lexer`::
from pygments.lexer import RegexLexer, bygroups, include
from pygments.token import *
@@ -317,20 +323,19 @@ There are a few more things you can do with states:
(r'[^}/]+', Text),
include('comments'),
(r'/', Text),
- (r'}', Keyword, '#pop'),
+ (r'\}', Keyword, '#pop'),
]
}
This is a hypothetical lexer for a language that consist of functions and
- comments. Because comments can occur at toplevel and in functions, we need
- rules for comments in both states. As you can see, the `include` helper saves
+ comments. Because comments can occur at toplevel and in functions, we need
+ rules for comments in both states. As you can see, the `include` helper saves
repeating rules that occur more than once (in this example, the state
``'comment'`` will never be entered by the lexer, as it's only there to be
included in ``'root'`` and ``'function'``).
-
- Sometimes, you may want to "combine" a state from existing ones. This is
- possible with the `combine` helper from `pygments.lexer`.
+ possible with the `combined` helper from `pygments.lexer`.
If you, instead of a new state, write ``combined('state1', 'state2')`` as the
third item of a rule tuple, a new anonymous state will be formed from state1
@@ -339,14 +344,12 @@ There are a few more things you can do with states:
This is not used very often, but can be helpful in some cases, such as the
`PythonLexer`'s string literal processing.
-- If you want your lexer to start lexing in a different state you can modify
- the stack by overloading the `get_tokens_unprocessed()` method:
-
- .. sourcecode:: python
+- If you want your lexer to start lexing in a different state you can modify the
+ stack by overloading the `get_tokens_unprocessed()` method::
from pygments.lexer import RegexLexer
- class MyLexer(RegexLexer):
+ class ExampleLexer(RegexLexer):
tokens = {...}
def get_tokens_unprocessed(self, text):
@@ -355,29 +358,88 @@ There are a few more things you can do with states:
yield item
Some lexers like the `PhpLexer` use this to make the leading ``<?php``
- preprocessor comments optional. Note that you can crash the lexer easily
- by putting values into the stack that don't exist in the token map. Also
+ preprocessor comments optional. Note that you can crash the lexer easily by
+ putting values into the stack that don't exist in the token map. Also
removing ``'root'`` from the stack can result in strange errors!
-- An empty regex at the end of a state list, combined with ``'#pop'``, can
- act as a return point from a state that doesn't have a clear end marker.
+- In some lexers, a state should be popped if anything is encountered that isn't
+ matched by a rule in the state. You could use an empty regex at the end of
+ the state list, but Pygments provides a more obvious way of spelling that:
+ ``default('#pop')`` is equivalent to ``('', Text, '#pop')``.
+
+ .. versionadded:: 2.0
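+
+  A minimal sketch of a lexer using it (the state layout and token types here
+  are just illustrative)::
+
+      from pygments.lexer import RegexLexer, default
+      from pygments.token import Comment, Text
+
+      class CommentPopLexer(RegexLexer):
+          tokens = {
+              'root': [
+                  (r'#', Comment.Single, 'comment'),
+                  (r'[^#]+', Text),
+              ],
+              'comment': [
+                  (r'[^\n]+', Comment.Single),
+                  # nothing here matches the newline, so pop back to 'root'
+                  default('#pop'),
+              ],
+          }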
+
+
+Subclassing lexers derived from RegexLexer
+==========================================
+
+.. versionadded:: 1.6
+
+Sometimes multiple languages are very similar, but should still be lexed by
+different lexer classes.
+
+When subclassing a lexer derived from RegexLexer, the ``tokens`` dictionaries
+defined in the parent and child class are merged. For example::
+
+ from pygments.lexer import RegexLexer, inherit
+ from pygments.token import *
+
+ class BaseLexer(RegexLexer):
+ tokens = {
+ 'root': [
+ ('[a-z]+', Name),
+ (r'/\*', Comment, 'comment'),
+ ('"', String, 'string'),
+ (r'\s+', Text),
+ ],
+ 'string': [
+ ('[^"]+', String),
+ ('"', String, '#pop'),
+ ],
+ 'comment': [
+ ...
+ ],
+ }
+
+ class DerivedLexer(BaseLexer):
+ tokens = {
+ 'root': [
+ ('[0-9]+', Number),
+ inherit,
+ ],
+ 'string': [
+ (r'[^"\\]+', String),
+ (r'\\.', String.Escape),
+ ('"', String, '#pop'),
+ ],
+ }
+
+The `BaseLexer` defines two states, lexing names and strings. The
+`DerivedLexer` defines its own tokens dictionary, which extends the definitions
+of the base lexer:
+
+* The "root" state has an additional rule and then the special object `inherit`,
+ which tells Pygments to insert the token definitions of the parent class at
+ that point.
+
+* The "string" state is replaced entirely, since there is not `inherit` rule.
+
+* The "comment" state is inherited entirely.
Using multiple lexers
=====================
-Using multiple lexers for the same input can be tricky. One of the easiest
-combination techniques is shown here: You can replace the token type entry in a
-rule tuple (the second item) with a lexer class. The matched text will then be
-lexed with that lexer, and the resulting tokens will be yielded.
-
-For example, look at this stripped-down HTML lexer:
+Using multiple lexers for the same input can be tricky. One of the easiest
+combination techniques is shown here: You can replace the action entry in a rule
+tuple with a lexer class. The matched text will then be lexed with that lexer,
+and the resulting tokens will be yielded.
-.. sourcecode:: python
+For example, look at this stripped-down HTML lexer::
from pygments.lexer import RegexLexer, bygroups, using
from pygments.token import *
- from pygments.lexers.web import JavascriptLexer
+ from pygments.lexers.javascript import JavascriptLexer
class HtmlLexer(RegexLexer):
name = 'HTML'
@@ -401,26 +463,29 @@ For example, look at this stripped-down HTML lexer:
}
Here the content of a ``<script>`` tag is passed to a newly created instance of
-a `JavascriptLexer` and not processed by the `HtmlLexer`. This is done using the
-`using` helper that takes the other lexer class as its parameter.
+a `JavascriptLexer` and not processed by the `HtmlLexer`. This is done using
+the `using` helper that takes the other lexer class as its parameter.
-Note the combination of `bygroups` and `using`. This makes sure that the content
-up to the ``</script>`` end tag is processed by the `JavascriptLexer`, while the
-end tag is yielded as a normal token with the `Name.Tag` type.
-
-As an additional goodie, if the lexer class is replaced by `this` (imported from
-`pygments.lexer`), the "other" lexer will be the current one (because you cannot
-refer to the current class within the code that runs at class definition time).
+Note the combination of `bygroups` and `using`. This makes sure that the
+content up to the ``</script>`` end tag is processed by the `JavascriptLexer`,
+while the end tag is yielded as a normal token with the `Name.Tag` type.
Also note the ``(r'<\s*script\s*', Name.Tag, ('script-content', 'tag'))`` rule.
Here, two states are pushed onto the state stack, ``'script-content'`` and
-``'tag'``. That means that first ``'tag'`` is processed, which will parse
+``'tag'``. That means that first ``'tag'`` is processed, which will lex
attributes and the closing ``>``, then the ``'tag'`` state is popped and the
next state on top of the stack will be ``'script-content'``.
+Since you cannot refer to the class currently being defined, use `this`
+(imported from `pygments.lexer`) to refer to the current lexer class, i.e.
+``using(this)``. This construct may seem unnecessary, but this is often the
+most obvious way of lexing arbitrary syntax between fixed delimiters without
+introducing deeply nested states.
+
The `using()` helper has a special keyword argument, `state`, which works as
follows: if given, the lexer to use initially is not in the ``"root"`` state,
-but in the state given by this argument. This *only* works with a `RegexLexer`.
+but in the state given by this argument. This does not work with advanced
+`RegexLexer` subclasses such as `ExtendedRegexLexer` (see below).
Any other keywords arguments passed to `using()` are added to the keyword
arguments used to create the lexer.
@@ -429,17 +494,15 @@ arguments used to create the lexer.
Delegating Lexer
================
-Another approach for nested lexers is the `DelegatingLexer` which is for
-example used for the template engine lexers. It takes two lexers as
-arguments on initialisation: a `root_lexer` and a `language_lexer`.
+Another approach for nested lexers is the `DelegatingLexer` which is for example
+used for the template engine lexers. It takes two lexers as arguments on
+initialisation: a `root_lexer` and a `language_lexer`.
The input is processed as follows: First, the whole text is lexed with the
-`language_lexer`. All tokens yielded with a type of ``Other`` are then
-concatenated and given to the `root_lexer`. The language tokens of the
-`language_lexer` are then inserted into the `root_lexer`'s token stream
-at the appropriate positions.
-
-.. sourcecode:: python
+`language_lexer`. All tokens yielded with the special type of ``Other`` are
+then concatenated and given to the `root_lexer`. The language tokens of the
+`language_lexer` are then inserted into the `root_lexer`'s token stream at the
+appropriate positions. ::
from pygments.lexer import DelegatingLexer
from pygments.lexers.web import HtmlLexer, PhpLexer
@@ -451,10 +514,8 @@ at the appropriate positions.
This procedure ensures that e.g. HTML with template tags in it is highlighted
correctly even if the template tags are put into HTML tags or attributes.
-If you want to change the needle token ``Other`` to something else, you can
-give the lexer another token type as the third parameter:
-
-.. sourcecode:: python
+If you want to change the needle token ``Other`` to something else, you can give
+the lexer another token type as the third parameter::
DelegatingLexer.__init__(MyLexer, OtherLexer, Text, **options)
@@ -463,24 +524,22 @@ Callbacks
=========
Sometimes the grammar of a language is so complex that a lexer would be unable
-to parse it just by using regular expressions and stacks.
+to process it just by using regular expressions and stacks.
For this, the `RegexLexer` allows callbacks to be given in rule tuples, instead
of token types (`bygroups` and `using` are nothing else but preimplemented
-callbacks). The callback must be a function taking two arguments:
+callbacks). The callback must be a function taking two arguments:
* the lexer itself
* the match object for the last matched rule
The callback must then return an iterable of (or simply yield) ``(index,
tokentype, value)`` tuples, which are then just passed through by
-`get_tokens_unprocessed()`. The ``index`` here is the position of the token in
+`get_tokens_unprocessed()`. The ``index`` here is the position of the token in
the input string, ``tokentype`` is the normal token type (like `Name.Builtin`),
and ``value`` the associated part of the input string.
-You can see an example here:
-
-.. sourcecode:: python
+You can see an example here::
from pygments.lexer import RegexLexer
from pygments.token import Generic
@@ -498,26 +557,25 @@ You can see an example here:
]
}
-If the regex for the `headline_callback` matches, the function is called with the
-match object. Note that after the callback is done, processing continues
-normally, that is, after the end of the previous match. The callback has no
+If the regex for the `headline_callback` matches, the function is called with
+the match object. Note that after the callback is done, processing continues
+normally, that is, after the end of the previous match. The callback has no
possibility to influence the position.
There are not really any simple examples for lexer callbacks, but you can see
-them in action e.g. in the `compiled.py`_ source code in the `CLexer` and
-`JavaLexer` classes.
+them in action e.g. in the `SMLLexer` class in `ml.py`_.
-.. _compiled.py: http://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/compiled.py
+.. _ml.py: http://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/ml.py
The ExtendedRegexLexer class
============================
The `RegexLexer`, even with callbacks, unfortunately isn't powerful enough for
-the funky syntax rules of some languages that will go unnamed, such as Ruby.
+the funky syntax rules of languages such as Ruby.
But fear not; even then you don't have to abandon the regular expression
-approach. For Pygments has a subclass of `RegexLexer`, the `ExtendedRegexLexer`.
+approach: Pygments has a subclass of `RegexLexer`, the `ExtendedRegexLexer`.
All features known from RegexLexers are available here too, and the tokens are
specified in exactly the same way, *except* for one detail:
@@ -541,9 +599,7 @@ creating a new one for the string argument.
Note that because you can set the current position to anything in the callback,
it won't be automatically be set by the caller after the callback is finished.
For example, this is how the hypothetical lexer above would be written with the
-`ExtendedRegexLexer`:
-
-.. sourcecode:: python
+`ExtendedRegexLexer`::
from pygments.lexer import ExtendedRegexLexer
from pygments.token import Generic
@@ -563,31 +619,58 @@ For example, this is how the hypothetical lexer above would be written with the
}
This might sound confusing (and it can really be). But it is needed, and for an
-example look at the Ruby lexer in `agile.py`_.
+example look at the Ruby lexer in `ruby.py`_.
+
+.. _ruby.py: https://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/ruby.py
-.. _agile.py: https://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/agile.py
+Handling Lists of Keywords
+==========================
-Filtering Token Streams
+For a relatively short list (hundreds) you can construct an optimized regular
+expression directly using ``words()`` (for longer lists, see the next section).
+This function handles a few things for you automatically, including escaping
+metacharacters and working around Python's first-match (rather than
+longest-match) behavior in alternations. Feel free to put the lists themselves
+in ``pygments/lexers/_$lang_builtins.py`` (see examples there), and to generate
+them by code if possible.
+
+An example of using ``words()`` is something like::
+
+ from pygments.lexer import RegexLexer, words
+ from pygments.token import Name
+
+ class MyLexer(RegexLexer):
+
+ tokens = {
+ 'root': [
+ (words(('else', 'elseif'), suffix=r'\b'), Name.Builtin),
+ (r'\w+', Name),
+ ],
+ }
+
+As you can see, you can add ``prefix`` and ``suffix`` parts to the constructed
+regex.
+
+
+Modifying Token Streams
=======================
-Some languages ship a lot of builtin functions (for example PHP). The total
+Some languages ship a lot of builtin functions (for example PHP). The total
amount of those functions differs from system to system because not everybody
-has every extension installed. In the case of PHP there are over 3000 builtin
-functions. That's an incredible huge amount of functions, much more than you
-can put into a regular expression.
+has every extension installed. In the case of PHP there are over 3000 builtin
+functions. That's an incredibly large number of functions, much more than you
+want to put into a regular expression.
-But because only `Name` tokens can be function names it's solvable by overriding
-the ``get_tokens_unprocessed()`` method. The following lexer subclasses the
-`PythonLexer` so that it highlights some additional names as pseudo keywords:
+But because only `Name` tokens can be function names this is solvable by
+overriding the ``get_tokens_unprocessed()`` method. The following lexer
+subclasses the `PythonLexer` so that it highlights some additional names as
+pseudo keywords::
-.. sourcecode:: python
-
- from pygments.lexers.agile import PythonLexer
+ from pygments.lexers.python import PythonLexer
from pygments.token import Name, Keyword
class MyPythonLexer(PythonLexer):
- EXTRA_KEYWORDS = ['foo', 'bar', 'foobar', 'barfoo', 'spam', 'eggs']
+ EXTRA_KEYWORDS = set(('foo', 'bar', 'foobar', 'barfoo', 'spam', 'eggs'))
def get_tokens_unprocessed(self, text):
for index, token, value in PythonLexer.get_tokens_unprocessed(self, text):
@@ -597,7 +680,3 @@ the ``get_tokens_unprocessed()`` method. The following lexer subclasses the
yield index, token, value
The `PhpLexer` and `LuaLexer` use this method to resolve builtin functions.
-
-**Note** Do not confuse this with the `filter`_ system.
-
-.. _filter: filters.txt
diff --git a/docs/src/lexers.txt b/doc/docs/lexers.rst
index 016de6ce..9262efb0 100644
--- a/docs/src/lexers.txt
+++ b/doc/docs/lexers.rst
@@ -18,35 +18,37 @@ Currently, **all lexers** support these options:
`ensurenl`
Make sure that the input ends with a newline (default: ``True``). This
is required for some lexers that consume input linewise.
- *New in Pygments 1.3.*
+
+ .. versionadded:: 1.3
`tabsize`
If given and greater than 0, expand tabs in the input (default: ``0``).
`encoding`
- *New in Pygments 0.6.*
-
If given, must be an encoding name (such as ``"utf-8"``). This encoding
will be used to convert the input string to Unicode (if it is not already
- a Unicode string). The default is ``"latin1"``.
+ a Unicode string). The default is ``"guess"``.
If this option is set to ``"guess"``, a simple UTF-8 vs. Latin-1
detection is used, if it is set to ``"chardet"``, the
- `chardet library <http://chardet.feedparser.org/>`__ is used to
+ `chardet library <http://chardet.feedparser.org/>`_ is used to
guess the encoding of the input.
+ .. versionadded:: 0.6
+
The "Short Names" field lists the identifiers that can be used with the
`get_lexer_by_name()` function.
These lexers are builtin and can be imported from `pygments.lexers`:
-[builtin_lexer_docs]
+.. pygmentsdoc:: lexers
+
Iterating over all lexers
-------------------------
-*New in Pygments 0.6.*
+.. versionadded:: 0.6
To get all lexers (both the builtin and the plugin ones), you can
use the `get_all_lexers()` function from the `pygments.lexers`
diff --git a/docs/src/moinmoin.txt b/doc/docs/moinmoin.rst
index 8b2216b3..8b2216b3 100644
--- a/docs/src/moinmoin.txt
+++ b/doc/docs/moinmoin.rst
diff --git a/docs/src/plugins.txt b/doc/docs/plugins.rst
index a6f8d7b0..a6f8d7b0 100644
--- a/docs/src/plugins.txt
+++ b/doc/docs/plugins.rst
diff --git a/docs/src/quickstart.txt b/doc/docs/quickstart.rst
index 40409104..dba7698a 100644
--- a/docs/src/quickstart.txt
+++ b/doc/docs/quickstart.rst
@@ -58,8 +58,8 @@ can be produced by:
print HtmlFormatter().get_style_defs('.highlight')
-The argument to `get_style_defs` is used as an additional CSS selector: the output
-may look like this:
+The argument to :func:`get_style_defs` is used as an additional CSS selector:
+the output may look like this:
.. sourcecode:: css
@@ -71,9 +71,9 @@ may look like this:
Options
=======
-The `highlight()` function supports a fourth argument called `outfile`, it must be
-a file object if given. The formatted output will then be written to this file
-instead of being returned as a string.
+The :func:`highlight()` function supports a fourth argument called *outfile*; it
+must be a file object if given. The formatted output will then be written to
+this file instead of being returned as a string.
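+
+For example, a minimal sketch (the output file name is only an illustration)::
+
+    from pygments import highlight
+    from pygments.lexers import PythonLexer
+    from pygments.formatters import HtmlFormatter
+
+    with open('out.html', 'w') as outfile:
+        # the formatted result is written to the file instead of being returned
+        highlight('print "Hello World"', PythonLexer(), HtmlFormatter(), outfile)
+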
Lexers and formatters both support options. They are given to them as keyword
arguments either to the class or to the lookup method:
@@ -103,9 +103,9 @@ Important options include:
For an overview of builtin lexers and formatters and their options, visit the
-`lexer <lexers.txt>`_ and `formatters <formatters.txt>`_ lists.
+:doc:`lexer <lexers>` and :doc:`formatters <formatters>` lists.
-For a documentation on filters, see `this page <filters.txt>`_.
+For documentation on filters, see :doc:`this page <filters>`.
Lexer and formatter lookup
@@ -131,9 +131,9 @@ one of the following methods:
All these functions accept keyword arguments; they will be passed to the lexer
as options.
-A similar API is available for formatters: use `get_formatter_by_name()` and
-`get_formatter_for_filename()` from the `pygments.formatters` module
-for this purpose.
+A similar API is available for formatters: use :func:`.get_formatter_by_name()`
+and :func:`.get_formatter_for_filename()` from the :mod:`pygments.formatters`
+module for this purpose.
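+
+A short sketch (the alias, option and file name are only examples)::
+
+    from pygments.formatters import get_formatter_by_name, get_formatter_for_filename
+
+    html_formatter = get_formatter_by_name('html', linenos=True)
+    latex_formatter = get_formatter_for_filename('output.tex')
+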
Guessing lexers
@@ -153,16 +153,17 @@ or some template tags), use these functions:
>>> guess_lexer_for_filename('test.py', 'print "Hello World!"')
<pygments.lexers.PythonLexer>
-`guess_lexer()` passes the given content to the lexer classes' `analyse_text()`
-method and returns the one for which it returns the highest number.
+:func:`.guess_lexer()` passes the given content to the lexer classes'
+:meth:`analyse_text()` method and returns the one for which it returns the
+highest number.
All lexers have two different filename pattern lists: the primary and the
-secondary one. The `get_lexer_for_filename()` function only uses the primary
-list, whose entries are supposed to be unique among all lexers.
-`guess_lexer_for_filename()`, however, will first loop through all lexers and
-look at the primary and secondary filename patterns if the filename matches.
+secondary one. The :func:`.get_lexer_for_filename()` function only uses the
+primary list, whose entries are supposed to be unique among all lexers.
+:func:`.guess_lexer_for_filename()`, however, will first loop through all lexers
+and look at the primary and secondary filename patterns if the filename matches.
If only one lexer matches, it is returned, else the guessing mechanism of
-`guess_lexer()` is used with the matching lexers.
+:func:`.guess_lexer()` is used with the matching lexers.
As usual, keyword arguments to these functions are given to the created lexer
as options.
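+
+For instance, a sketch of lookup and guessing with extra options (the sample
+snippet and option values are only illustrative)::
+
+    from pygments.lexers import guess_lexer, get_lexer_for_filename
+
+    lexer = get_lexer_for_filename('example.py', stripall=True)
+    other = guess_lexer('#!/usr/bin/env python\nprint "hi"', tabsize=4)
+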
@@ -171,7 +172,8 @@ as options.
Command line usage
==================
-You can use Pygments from the command line, using the `pygmentize` script::
+You can use Pygments from the command line, using the :program:`pygmentize`
+script::
$ pygmentize test.py
@@ -199,4 +201,5 @@ it can be created with::
where ``default`` is the style name.
-More options and tricks and be found in the `command line reference <cmdline.txt>`_.
+More options and tricks can be found in the :doc:`command line reference
+<cmdline>`.
diff --git a/docs/src/rstdirective.txt b/doc/docs/rstdirective.rst
index c0d503b3..c0d503b3 100644
--- a/docs/src/rstdirective.txt
+++ b/doc/docs/rstdirective.rst
diff --git a/docs/src/styles.txt b/doc/docs/styles.rst
index e3e9cfb3..d56db0db 100644
--- a/docs/src/styles.txt
+++ b/doc/docs/styles.rst
@@ -21,6 +21,7 @@ option in form of a string:
.. sourcecode:: pycon
>>> from pygments.styles import get_style_by_name
+ >>> from pygments.formatters import HtmlFormatter
>>> HtmlFormatter(style='colorful').style
<class 'pygments.styles.colorful.ColorfulStyle'>
@@ -30,6 +31,7 @@ Or you can also import your own style (which must be a subclass of
.. sourcecode:: pycon
>>> from yourapp.yourmodule import YourStyle
+ >>> from pygments.formatters import HtmlFormatter
>>> HtmlFormatter(style=YourStyle).style
<class 'yourapp.yourmodule.YourStyle'>
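+
+Either way, the resulting style class is what the formatter uses when emitting
+style definitions, e.g. (a small sketch using the builtin ``colorful`` style)::
+
+    from pygments.formatters import HtmlFormatter
+
+    css = HtmlFormatter(style='colorful').get_style_defs('.highlight')
+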
@@ -68,7 +70,7 @@ they can be used for a variety of formatters.)
To make the style usable for Pygments, you must
-* either register it as a plugin (see `the plugin docs <plugins.txt>`_)
+* either register it as a plugin (see :doc:`the plugin docs <plugins>`)
* or drop it into the `styles` subpackage of your Pygments distribution one style
class per style, where the file name is the style name and the class name is
`StylenameClass`. For example, if your style should be called
@@ -132,7 +134,7 @@ To get a list of known styles you can use this snippet:
Getting a list of available styles
==================================
-*New in Pygments 0.6.*
+.. versionadded:: 0.6
Because it could be that a plugin registered a style, there is
a way to iterate over all styles:
diff --git a/docs/src/tokens.txt b/doc/docs/tokens.rst
index 4900a9ab..6455a501 100644
--- a/docs/src/tokens.txt
+++ b/doc/docs/tokens.rst
@@ -4,7 +4,9 @@
Builtin Tokens
==============
-Inside the `pygments.token` module, there is a special object called `Token`
+.. module:: pygments.token
+
+In the :mod:`pygments.token` module, there is a special object called `Token`
that is used to create token types.
You can create a new token type by accessing an attribute of `Token`:
@@ -30,8 +32,8 @@ As of Pygments 0.7 you can also use the ``in`` operator to perform set tests:
>>> Comment in Comment.Multi
False
-This can be useful in `filters`_ and if you write lexers on your own without
-using the base lexers.
+This can be useful in :doc:`filters <filters>` and if you write lexers on your
+own without using the base lexers.
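+
+A quick illustration of the set test (a sketch using the standard token
+aliases)::
+
+    from pygments.token import Comment, Name
+
+    assert Comment.Multi in Comment   # a subtype is "in" each of its parents
+    assert Comment not in Comment.Multi
+    assert Name.Tag in Name
+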
You can also split a token type into a hierarchy, and get the parent of it:
@@ -55,7 +57,7 @@ For some tokens aliases are already defined:
>>> String
Token.Literal.String
-Inside the `pygments.token` module the following aliases are defined:
+Inside the :mod:`pygments.token` module the following aliases are defined:
============= ============================ ====================================
`Text` `Token.Text` for any type of text data
@@ -87,7 +89,7 @@ The `is_token_subtype()` function in the `pygments.token` module can be used to
test if a token type is a subtype of another (such as `Name.Tag` and `Name`).
(This is the same as ``Name.Tag in Name``. The overloaded `in` operator was newly
introduced in Pygments 0.7, the function still exists for backwards
-compatiblity.)
+compatibility.)
With Pygments 0.7, it's also possible to convert strings to token types (for example
if you want to supply a token from the command line):
@@ -158,7 +160,7 @@ Name Tokens
other languages constants are uppercase by definition (Ruby).
`Name.Decorator`
- Token type for decorators. Decorators are synatic elements in the Python
+ Token type for decorators. Decorators are syntactic elements in the Python
language. Similar syntax elements exist in C# and Java.
`Name.Entity`
@@ -251,6 +253,9 @@ Literals
`Number`
Token type for any number literal.
+`Number.Bin`
+ Token type for binary literals (e.g. ``0b101010``).
+
`Number.Float`
Token type for float literals (e.g. ``42.0``).
@@ -280,7 +285,7 @@ Operators
Punctuation
===========
-*New in Pygments 0.7.*
+.. versionadded:: 0.7
`Punctuation`
For any punctuation which is not an operator (e.g. ``[``, ``(``...)
@@ -292,6 +297,10 @@ Comments
`Comment`
Token type for any comment.
+`Comment.Hashbang`
+ Token type for hashbang comments (i.e. first lines of files that start with
+ ``#!``).
+
`Comment.Multiline`
Token type for multiline comments.
@@ -345,5 +354,3 @@ highlight a programming language but a patch file.
`Generic.Traceback`
Marks the token value as a part of an error traceback.
-
-.. _filters: filters.txt
diff --git a/docs/src/unicode.txt b/doc/docs/unicode.rst
index dc6394a9..17853a36 100644
--- a/docs/src/unicode.txt
+++ b/doc/docs/unicode.rst
@@ -3,15 +3,23 @@ Unicode and Encodings
=====================
Since Pygments 0.6, all lexers use unicode strings internally. Because of that
-you might encounter the occasional `UnicodeDecodeError` if you pass strings with the
-wrong encoding.
+you might encounter the occasional :exc:`UnicodeDecodeError` if you pass strings
+with the wrong encoding.
-Per default all lexers have their input encoding set to `latin1`.
-If you pass a lexer a string object (not unicode), it tries to decode the data
-using this encoding.
-You can override the encoding using the `encoding` lexer option. If you have the
-`chardet`_ library installed and set the encoding to ``chardet`` if will ananlyse
-the text and use the encoding it thinks is the right one automatically:
+By default, all lexers have their input encoding set to `guess`. This means
+that the following encodings are tried:
+
+* UTF-8 (including BOM handling)
+* The locale encoding (i.e. the result of `locale.getpreferredencoding()`)
+* As a last resort, `latin1`
+
+If you pass a lexer a byte string object (not unicode), it tries to decode the
+data using this encoding.
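+
+For example, with the default ``guess`` setting, byte input is decoded
+automatically (a minimal sketch)::
+
+    from pygments.lexers import PythonLexer
+
+    lexer = PythonLexer()   # input encoding defaults to 'guess'
+    # UTF-8, then the locale encoding, then latin1 are tried
+    tokens = list(lexer.get_tokens(b'print "Hello"'))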
+
+You can override the encoding using the `encoding` or `inencoding` lexer
+options. If you have the `chardet`_ library installed and set the encoding to
+``chardet``, it will analyse the text and use the encoding it thinks is the
+right one automatically:
.. sourcecode:: python
@@ -39,11 +47,12 @@ Unicode string with this encoding before writing it. This is the case for
`sys.stdout`, for example. The other formatters don't have that behavior.
Another note: If you call Pygments via the command line (`pygmentize`),
-encoding is handled differently, see `the command line docs <cmdline.txt>`_.
+encoding is handled differently, see :doc:`the command line docs <cmdline>`.
-*New in Pygments 0.7*: the formatters now also accept an `outencoding` option
-which will override the `encoding` option if given. This makes it possible to
-use a single options dict with lexers and formatters, and still have different
-input and output encodings.
+.. versionadded:: 0.7
+ The formatters now also accept an `outencoding` option which will override
+ the `encoding` option if given. This makes it possible to use a single
+ options dict with lexers and formatters, and still have different input and
+ output encodings.
.. _chardet: http://chardet.feedparser.org/
diff --git a/doc/download.rst b/doc/download.rst
new file mode 100644
index 00000000..cf32f481
--- /dev/null
+++ b/doc/download.rst
@@ -0,0 +1,41 @@
+Download and installation
+=========================
+
+The current release is version |version|.
+
+Packaged versions
+-----------------
+
+You can download it `from the Python Package Index
+<http://pypi.python.org/pypi/Pygments>`_. For installation of packages from
+PyPI, we recommend `Pip <http://www.pip-installer.org>`_, which works on all
+major platforms.
+
+Under Linux, most distributions include a package for Pygments, usually called
+``pygments`` or ``python-pygments``. You can install it with the package
+manager as usual.
+
+Development sources
+-------------------
+
+We're using the `Mercurial <http://selenic.com/mercurial>`_ version control
+system. You can get the development source using this command::
+
+ hg clone http://bitbucket.org/birkenfeld/pygments-main pygments
+
+Development takes place at `Bitbucket
+<http://bitbucket.org/birkenfeld/pygments-main>`_; you can browse the source
+online `here <http://bitbucket.org/birkenfeld/pygments-main/src>`_.
+
+The latest changes in the development source code are listed in the `changelog
+<http://bitbucket.org/birkenfeld/pygments-main/src/tip/CHANGES>`_.
+
+.. Documentation
+ -------------
+
+.. XXX todo
+
+ You can download the <a href="/docs/">documentation</a> either as
+ a bunch of rst files from the Mercurial repository, see above, or
+ as a tar.gz containing rendered HTML files:</p>
+ <p><a href="/docs/download/pygmentsdocs.tar.gz">pygmentsdocs.tar.gz</a></p>
diff --git a/doc/faq.rst b/doc/faq.rst
new file mode 100644
index 00000000..aeba9259
--- /dev/null
+++ b/doc/faq.rst
@@ -0,0 +1,142 @@
+:orphan:
+
+Pygments FAQ
+=============
+
+What is Pygments?
+-----------------
+
+Pygments is a syntax highlighting engine written in Python. That means it will
+take source code (or other markup) in a supported language and output a
+processed version (in different formats) containing syntax highlighting markup.
+
+Its features include:
+
+* a wide range of common languages and markup formats is supported (look here
+ for a list)
+* new languages and formats are added easily
+* a number of output formats are available, including:
+
+ - HTML
+ - ANSI sequences (console output)
+ - LaTeX
+ - RTF
+
+* it is usable as a command-line tool and as a library
+* parsing and formatting is fast
+
+Pygments is licensed under the BSD license.
+
+Where does the name Pygments come from?
+---------------------------------------
+
+*Py* of course stands for Python, while *pigments* are used for coloring paint,
+and in this case, source code!
+
+What are the system requirements?
+---------------------------------
+
+Pygments only needs a standard Python install, version 2.6 or higher, or version
+3.3 or higher for Python 3. No additional libraries are needed.
+
+How can I use Pygments?
+-----------------------
+
+Pygments is usable as a command-line tool as well as a library.
+
+From the command-line, usage looks like this (assuming the pygmentize script is
+properly installed)::
+
+ pygmentize -f html /path/to/file.py
+
+This will print an HTML-highlighted version of /path/to/file.py to standard output.
+
+For complete help, please run ``pygmentize -h``.
+
+Usage as a library is thoroughly demonstrated in the Documentation section.
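+
+As a very small library sketch (the snippet to highlight is arbitrary)::
+
+    from pygments import highlight
+    from pygments.lexers import PythonLexer
+    from pygments.formatters import HtmlFormatter
+
+    html = highlight('print "Hello World!"', PythonLexer(), HtmlFormatter())
+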
+
+How do I make a new style?
+--------------------------
+
+Please see the :doc:`documentation on styles <docs/styles>`.
+
+How can I report a bug or suggest a feature?
+--------------------------------------------
+
+Please report bugs and feature wishes in the tracker at Bitbucket.
+
+You can also e-mail the author or use IRC; see the contact details.
+
+I want support for this language!
+--------------------------------------
+
+Instead of waiting for others to include language support, why not write it
+yourself? All you have to know is :doc:`outlined in the docs
+<docs/lexerdevelopment>`.
+
+Can I use Pygments for programming language processing?
+-------------------------------------------------------
+
+The Pygments lexing machinery is quite powerful and can be used to build lexers for
+basically all languages. However, parsing them is not possible, though some
+lexers go some steps in this direction in order to e.g. highlight function names
+differently.
+
+Also, error reporting is not within the scope of Pygments. It focuses on
+correctly highlighting syntactically valid documents, not on finding and
+compensating for errors.
+
+Who uses Pygments?
+------------------
+
+This is an (incomplete) list of projects and sites known to use the Pygments highlighter.
+
+* `Pygments API <http://pygments.appspot.com/>`_, an HTTP POST interface to Pygments
+* `The Sphinx documentation builder <http://sphinx.pocoo.org/>`_, for embedded source examples
+* `rst2pdf <http://code.google.com/p/rst2pdf/>`_, a reStructuredText to PDF converter
+* `Zine <http://zine.pocoo.org/>`_, a Python blogging system
+* `Trac <http://trac.edgewall.org/>`_, the universal project management tool
+* `Bruce <http://r1chardj0n3s.googlepages.com/bruce>`_, a reStructuredText presentation tool
+* `AsciiDoc <http://www.methods.co.nz/asciidoc/>`_, a text-based documentation generator
+* `ActiveState Code <http://code.activestate.com/>`_, the Python Cookbook successor
+* `ViewVC <http://viewvc.org/>`_, a web-based version control repository browser
+* `BzrFruit <http://repo.or.cz/w/bzrfruit.git>`_, a Bazaar branch viewer
+* `QBzr <http://bazaar-vcs.org/QBzr>`_, a cross-platform Qt-based GUI front end for Bazaar
+* `BitBucket <http://bitbucket.org/>`_, a Mercurial and Git hosting site
+* `Review Board <http://www.review-board.org/>`_, a collaborative code reviewing tool
+* `skeletonz <http://orangoo.com/skeletonz/>`_, a Python powered content management system
+* `Diamanda <http://code.google.com/p/diamanda/>`_, a Django powered wiki system with support for Pygments
+* `Progopedia <http://progopedia.ru/>`_ (`English <http://progopedia.com/>`_),
+ an encyclopedia of programming languages
+* `Postmarkup <http://code.google.com/p/postmarkup/>`_, a BBCode to XHTML generator
+* `Language Comparison <http://michaelsilver.us/lc>`_, a site that compares different programming languages
+* `BPython <http://www.noiseforfree.com/bpython/>`_, a curses-based intelligent Python shell
+* `Challenge-You! <http://challenge-you.appspot.com/>`_, a site offering programming challenges
+* `PIDA <http://pida.co.uk/>`_, a universal IDE written in Python
+* `PuDB <http://pypi.python.org/pypi/pudb>`_, a console Python debugger
+* `XWiki <http://www.xwiki.org/>`_, a wiki-based development framework in Java, using Jython
+* `roux <http://ananelson.com/software/roux/>`_, a script for running R scripts
+ and creating beautiful output including graphs
+* `hurl <http://hurl.it/>`_, a web service for making HTTP requests
+* `wxHTMLPygmentizer <http://colinbarnette.net/projects/wxHTMLPygmentizer>`_ is
+ a GUI utility, used to make code-colorization easier
+* `WpPygments <http://blog.mirotin.net/?page_id=49>`_, a highlighter plugin for WordPress
+* `LodgeIt <http://paste.pocoo.org/>`_, a pastebin with XMLRPC support and diffs
+* `SpammCan <http://chrisarndt.de/projects/spammcan/>`_, a pastebin (demo see
+ `here <http://paste.chrisarndt.de/>`_)
+* `WowAce.com pastes <http://www.wowace.com/paste/>`_, a pastebin
+* `Siafoo <http://siafoo.net>`_, a tool for sharing and storing useful code and programming experience
+* `D source <http://www.dsource.org/>`_, a community for the D programming language
+* `dumpz.org <http://dumpz.org/>`_, a pastebin
+* `dpaste.com <http://dpaste.com/>`_, another Django pastebin
+* `PylonsHQ Pasties <http://pylonshq.com/pasties/new>`_, a pastebin
+* `Django snippets <http://www.djangosnippets.org/>`_, a pastebin for Django code
+* `Fayaa <http://www.fayaa.com/code/>`_, a Chinese pastebin
+* `Incollo.com <http://incollo.com>`_, a free collaborative debugging tool
+* `PasteBox <http://p.boxnet.eu/>`_, a pastebin focused on privacy
+* `xinotes.org <http://www.xinotes.org/>`_, a site to share notes, code snippets etc.
+* `hilite.me <http://www.hilite.me/>`_, a site to highlight code snippets
+* `patx.me <http://patx.me/paste>`_, a pastebin
+
+If you have a project or web site using Pygments, drop me a line, and I'll add a
+link here.
+
diff --git a/doc/index.rst b/doc/index.rst
new file mode 100644
index 00000000..26114045
--- /dev/null
+++ b/doc/index.rst
@@ -0,0 +1,54 @@
+Welcome!
+========
+
+This is the home of Pygments. It is a generic syntax highlighter suitable for
+use in code hosting, forums, wikis or other applications that need to prettify
+source code. Highlights are:
+
+* a wide range of over 300 languages and other text formats is supported
+* special attention is paid to details that increase highlighting quality
+* support for new languages and formats is added easily; most languages use a
+ simple regex-based lexing mechanism
+* a number of output formats are available, among them HTML, RTF, LaTeX and ANSI
+ sequences
+* it is usable as a command-line tool and as a library
+* ... and it highlights even Perl 6!
+
+Read more in the :doc:`FAQ list <faq>` or the :doc:`documentation <docs/index>`,
+or `download the latest release <http://pypi.python.org/pypi/Pygments>`_.
+
+.. _contribute:
+
+Contribute
+----------
+
+Like every open-source project, we are always looking for volunteers to help us
+with programming. Python knowledge is required, but don't fear: Python is a very
+clear and easy to learn language.
+
+Development takes place on `Bitbucket
+<https://bitbucket.org/birkenfeld/pygments-main>`_, where the Mercurial
+repository, tickets and pull requests can be viewed.
+
+Our primary communication instrument is the IRC channel **#pocoo** on the
+Freenode network. To join it, let your IRC client connect to
+``irc.freenode.net`` and do ``/join #pocoo``.
+
+If you found a bug, just open a ticket in the Bitbucket tracker. Be sure to log
+in to be notified when the issue is fixed -- development is not fast-paced as
+the library is quite stable. You can also send an e-mail to the developers, see
+below.
+
+The authors
+-----------
+
+Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python.org*.
+
+Many lexers and fixes have been contributed by **Armin Ronacher**, the rest of
+the `Pocoo <http://dev.pocoo.org/>`_ team and **Tim Hatch**.
+
+.. toctree::
+ :maxdepth: 1
+ :hidden:
+
+ docs/index
diff --git a/doc/languages.rst b/doc/languages.rst
new file mode 100644
index 00000000..13555ccf
--- /dev/null
+++ b/doc/languages.rst
@@ -0,0 +1,151 @@
+:orphan:
+
+Supported languages
+===================
+
+Pygments supports an ever-growing range of languages. Watch this space...
+
+Programming languages
+---------------------
+
+* ActionScript
+* Ada
+* ANTLR
+* AppleScript
+* Assembly (various)
+* Asymptote
+* Awk
+* Befunge
+* Boo
+* BrainFuck
+* C, C++
+* C#
+* Clojure
+* CoffeeScript
+* ColdFusion
+* Common Lisp
+* Coq
+* Cryptol (incl. Literate Cryptol)
+* `Cython <http://cython.org>`_
+* `D <http://dlang.org>`_
+* Dart
+* Delphi
+* Dylan
+* Erlang
+* Factor
+* Fancy
+* Fortran
+* F#
+* GAP
+* Gherkin (Cucumber)
+* GL shaders
+* Groovy
+* `Haskell <http://www.haskell.org>`_ (incl. Literate Haskell)
+* IDL
+* Io
+* Java
+* JavaScript
+* Lasso
+* LLVM
+* Logtalk
+* `Lua <http://www.lua.org>`_
+* Matlab
+* MiniD
+* Modelica
+* Modula-2
+* MuPad
+* Nemerle
+* Nimrod
+* Objective-C
+* Objective-J
+* Octave
+* OCaml
+* PHP
+* `Perl <http://perl.org>`_
+* PovRay
+* PostScript
+* PowerShell
+* Prolog
+* `Python <http://www.python.org>`_ 2.x and 3.x (incl. console sessions and tracebacks)
+* `REBOL <http://www.rebol.com>`_
+* `Red <http://www.red-lang.org>`_
+* Redcode
+* `Ruby <http://www.ruby-lang.org>`_ (incl. irb sessions)
+* Rust
+* S, S-Plus, R
+* Scala
+* Scheme
+* Scilab
+* Smalltalk
+* SNOBOL
+* Tcl
+* Vala
+* Verilog
+* VHDL
+* Visual Basic.NET
+* Visual FoxPro
+* XQuery
+* Zephir
+
+Template languages
+------------------
+
+* Cheetah templates
+* `Django <http://www.djangoproject.com>`_ / `Jinja
+ <http://jinja.pocoo.org/jinja>`_ templates
+* ERB (Ruby templating)
+* `Genshi <http://genshi.edgewall.org>`_ (the Trac template language)
+* JSP (Java Server Pages)
+* `Myghty <http://www.myghty.org>`_ (the HTML::Mason based framework)
+* `Mako <http://www.makotemplates.org>`_ (the Myghty successor)
+* `Smarty <http://www.smarty.net>`_ templates (PHP templating)
+* Tea
+
+Other markup
+------------
+
+* Apache config files
+* Bash shell scripts
+* BBCode
+* CMake
+* CSS
+* Debian control files
+* Diff files
+* DTD
+* Gettext catalogs
+* Gnuplot script
+* Groff markup
+* HTML
+* HTTP sessions
+* INI-style config files
+* IRC logs (irssi style)
+* Lighttpd config files
+* Makefiles
+* MoinMoin/Trac Wiki markup
+* MySQL
+* Nginx config files
+* POV-Ray scenes
+* Ragel
+* Redcode
+* ReST
+* Robot Framework
+* RPM spec files
+* SQL, also MySQL, SQLite
+* Squid configuration
+* TeX
+* tcsh
+* Vim Script
+* Windows batch files
+* XML
+* XSLT
+* YAML
+
+... that's all?
+---------------
+
+Well, why not write your own? Contributing to Pygments is easy and fun. Look
+:doc:`here <docs/lexerdevelopment>` for the docs on lexer development and
+:ref:`here <contribute>` for contact details.
+
+Note: the languages listed here are supported in the development version. The
+latest release may lack a few of them.
diff --git a/doc/make.bat b/doc/make.bat
new file mode 100644
index 00000000..8803c985
--- /dev/null
+++ b/doc/make.bat
@@ -0,0 +1,190 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+set I18NSPHINXOPTS=%SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+ set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+ echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. dirhtml to make HTML files named index.html in directories
+ echo. singlehtml to make a single large HTML file
+ echo. pickle to make pickle files
+ echo. json to make JSON files
+ echo. htmlhelp to make HTML files and a HTML help project
+ echo. qthelp to make HTML files and a qthelp project
+ echo. devhelp to make HTML files and a Devhelp project
+ echo. epub to make an epub
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+ echo. text to make text files
+ echo. man to make manual pages
+ echo. texinfo to make Texinfo files
+ echo. gettext to make PO message catalogs
+ echo. changes to make an overview over all changed/added/deprecated items
+ echo. linkcheck to check all external links for integrity
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+ del /q /s %BUILDDIR%\*
+ goto end
+)
+
+if "%1" == "html" (
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+ goto end
+)
+
+if "%1" == "dirhtml" (
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+ goto end
+)
+
+if "%1" == "singlehtml" (
+ %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+ goto end
+)
+
+if "%1" == "pickle" (
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the pickle files.
+ goto end
+)
+
+if "%1" == "json" (
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the JSON files.
+ goto end
+)
+
+if "%1" == "htmlhelp" (
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+ goto end
+)
+
+if "%1" == "qthelp" (
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+ echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Pygments.qhcp
+ echo.To view the help file:
+ echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Pygments.ghc
+ goto end
+)
+
+if "%1" == "devhelp" (
+ %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished.
+ goto end
+)
+
+if "%1" == "epub" (
+ %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The epub file is in %BUILDDIR%/epub.
+ goto end
+)
+
+if "%1" == "latex" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "text" (
+ %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The text files are in %BUILDDIR%/text.
+ goto end
+)
+
+if "%1" == "man" (
+ %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The manual pages are in %BUILDDIR%/man.
+ goto end
+)
+
+if "%1" == "texinfo" (
+ %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+ goto end
+)
+
+if "%1" == "gettext" (
+ %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+ goto end
+)
+
+if "%1" == "changes" (
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.The overview file is in %BUILDDIR%/changes.
+ goto end
+)
+
+if "%1" == "linkcheck" (
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+ goto end
+)
+
+:end
diff --git a/docs/pygmentize.1 b/doc/pygmentize.1
index 71bb6f9c..71bb6f9c 100644
--- a/docs/pygmentize.1
+++ b/doc/pygmentize.1
diff --git a/docs/generate.py b/docs/generate.py
deleted file mode 100755
index f5405074..00000000
--- a/docs/generate.py
+++ /dev/null
@@ -1,472 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
- Generate Pygments Documentation
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Generates a bunch of html files containing the documentation.
-
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import os
-import sys
-from datetime import datetime
-from cgi import escape
-
-from docutils import nodes
-from docutils.parsers.rst import directives
-from docutils.core import publish_parts
-from docutils.writers import html4css1
-
-from jinja2 import Template
-
-# try to use the right Pygments to build the docs
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
-
-from pygments import highlight, __version__
-from pygments.lexers import get_lexer_by_name
-from pygments.formatters import HtmlFormatter
-
-
-LEXERDOC = '''
-`%s`
-%s
- :Short names: %s
- :Filename patterns: %s
- :Mimetypes: %s
-
-'''
-
-def generate_lexer_docs():
- from pygments.lexers import LEXERS
-
- out = []
-
- modules = {}
- moduledocstrings = {}
- for classname, data in sorted(LEXERS.iteritems(), key=lambda x: x[0]):
- module = data[0]
- mod = __import__(module, None, None, [classname])
- cls = getattr(mod, classname)
- if not cls.__doc__:
- print "Warning: %s does not have a docstring." % classname
- modules.setdefault(module, []).append((
- classname,
- cls.__doc__,
- ', '.join(data[2]) or 'None',
- ', '.join(data[3]).replace('*', '\\*').replace('_', '\\') or 'None',
- ', '.join(data[4]) or 'None'))
- if module not in moduledocstrings:
- moduledocstrings[module] = mod.__doc__
-
- for module, lexers in sorted(modules.iteritems(), key=lambda x: x[0]):
- heading = moduledocstrings[module].splitlines()[4].strip().rstrip('.')
- out.append('\n' + heading + '\n' + '-'*len(heading) + '\n')
- for data in lexers:
- out.append(LEXERDOC % data)
- return ''.join(out).decode('utf-8')
-
-def generate_formatter_docs():
- from pygments.formatters import FORMATTERS
-
- out = []
- for cls, data in sorted(FORMATTERS.iteritems(),
- key=lambda x: x[0].__name__):
- heading = cls.__name__
- out.append('`' + heading + '`\n' + '-'*(2+len(heading)) + '\n')
- out.append(cls.__doc__)
- out.append('''
- :Short names: %s
- :Filename patterns: %s
-
-
-''' % (', '.join(data[1]) or 'None', ', '.join(data[2]).replace('*', '\\*') or 'None'))
- return ''.join(out).decode('utf-8')
-
-def generate_filter_docs():
- from pygments.filters import FILTERS
-
- out = []
- for name, cls in FILTERS.iteritems():
- out.append('''
-`%s`
-%s
- :Name: %s
-''' % (cls.__name__, cls.__doc__, name))
- return ''.join(out).decode('utf-8')
-
-def generate_changelog():
- fn = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',
- 'CHANGES'))
- f = file(fn)
- result = []
- in_header = False
- header = True
- for line in f:
- if header:
- if not in_header and line.strip():
- in_header = True
- elif in_header and not line.strip():
- header = False
- else:
- result.append(line.rstrip())
- f.close()
- return '\n'.join(result).decode('utf-8')
-
-def generate_authors():
- fn = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',
- 'AUTHORS'))
- f = file(fn)
- r = f.read().rstrip().decode('utf-8')
- f.close()
- return r
-
-LEXERDOCS = generate_lexer_docs()
-FORMATTERDOCS = generate_formatter_docs()
-FILTERDOCS = generate_filter_docs()
-CHANGELOG = generate_changelog()
-AUTHORS = generate_authors()
-
-
-PYGMENTS_FORMATTER = HtmlFormatter(style='pastie', cssclass='syntax')
-
-USAGE = '''\
-Usage: %s <mode> <destination> [<source.txt> ...]
-
-Generate either python or html files out of the documentation.
-
-Mode can either be python or html.\
-''' % sys.argv[0]
-
-TEMPLATE = '''\
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
- "http://www.w3.org/TR/html4/strict.dtd">
-<html>
-<head>
- <title>{{ title }} &mdash; Pygments</title>
- <meta http-equiv="content-type" content="text/html; charset=utf-8">
- <style type="text/css">
- {{ style }}
- </style>
-</head>
-<body>
- <div id="content">
- <h1 class="heading">Pygments</h1>
- <h2 class="subheading">{{ title }}</h2>
- {% if file_id != "index" %}
- <a id="backlink" href="index.html">&laquo; Back To Index</a>
- {% endif %}
- {% if toc %}
- <div class="toc">
- <h2>Contents</h2>
- <ul class="contents">
- {% for key, value in toc %}
- <li><a href="{{ key }}">{{ value }}</a></li>
- {% endfor %}
- </ul>
- </div>
- {% endif %}
- {{ body }}
- </div>
-</body>
-<!-- generated on: {{ generation_date }}
- file id: {{ file_id }} -->
-</html>\
-'''
-
-STYLESHEET = '''\
-body {
- background-color: #f2f2f2;
- margin: 0;
- padding: 0;
- font-family: 'Georgia', serif;
- color: #111;
-}
-
-#content {
- background-color: white;
- padding: 20px;
- margin: 20px auto 20px auto;
- max-width: 800px;
- border: 4px solid #ddd;
-}
-
-h1 {
- font-weight: normal;
- font-size: 40px;
- color: #09839A;
-}
-
-h2 {
- font-weight: normal;
- font-size: 30px;
- color: #C73F00;
-}
-
-h1.heading {
- margin: 0 0 30px 0;
-}
-
-h2.subheading {
- margin: -30px 0 0 45px;
-}
-
-h3 {
- margin-top: 30px;
-}
-
-table.docutils {
- border-collapse: collapse;
- border: 2px solid #aaa;
- margin: 0.5em 1.5em 0.5em 1.5em;
-}
-
-table.docutils td {
- padding: 2px;
- border: 1px solid #ddd;
-}
-
-p, li, dd, dt, blockquote {
- font-size: 15px;
- color: #333;
-}
-
-p {
- line-height: 150%;
- margin-bottom: 0;
- margin-top: 10px;
-}
-
-hr {
- border-top: 1px solid #ccc;
- border-bottom: 0;
- border-right: 0;
- border-left: 0;
- margin-bottom: 10px;
- margin-top: 20px;
-}
-
-dl {
- margin-left: 10px;
-}
-
-li, dt {
- margin-top: 5px;
-}
-
-dt {
- font-weight: bold;
-}
-
-th {
- text-align: left;
-}
-
-a {
- color: #990000;
-}
-
-a:hover {
- color: #c73f00;
-}
-
-pre {
- background-color: #f9f9f9;
- border-top: 1px solid #ccc;
- border-bottom: 1px solid #ccc;
- padding: 5px;
- font-size: 13px;
- font-family: Bitstream Vera Sans Mono,monospace;
-}
-
-tt {
- font-size: 13px;
- font-family: Bitstream Vera Sans Mono,monospace;
- color: black;
- padding: 1px 2px 1px 2px;
- background-color: #f0f0f0;
-}
-
-cite {
- /* abusing <cite>, it's generated by ReST for `x` */
- font-size: 13px;
- font-family: Bitstream Vera Sans Mono,monospace;
- font-weight: bold;
- font-style: normal;
-}
-
-#backlink {
- float: right;
- font-size: 11px;
- color: #888;
-}
-
-div.toc {
- margin: 0 0 10px 0;
-}
-
-div.toc h2 {
- font-size: 20px;
-}
-''' #'
-
-
-def pygments_directive(name, arguments, options, content, lineno,
- content_offset, block_text, state, state_machine):
- try:
- lexer = get_lexer_by_name(arguments[0])
- except ValueError:
- # no lexer found
- lexer = get_lexer_by_name('text')
- parsed = highlight(u'\n'.join(content), lexer, PYGMENTS_FORMATTER)
- return [nodes.raw('', parsed, format="html")]
-pygments_directive.arguments = (1, 0, 1)
-pygments_directive.content = 1
-directives.register_directive('sourcecode', pygments_directive)
-
-
-def create_translator(link_style):
- class Translator(html4css1.HTMLTranslator):
- def visit_reference(self, node):
- refuri = node.get('refuri')
- if refuri is not None and '/' not in refuri and refuri.endswith('.txt'):
- node['refuri'] = link_style(refuri[:-4])
- html4css1.HTMLTranslator.visit_reference(self, node)
- return Translator
-
-
-class DocumentationWriter(html4css1.Writer):
-
- def __init__(self, link_style):
- html4css1.Writer.__init__(self)
- self.translator_class = create_translator(link_style)
-
- def translate(self):
- html4css1.Writer.translate(self)
- # generate table of contents
- contents = self.build_contents(self.document)
- contents_doc = self.document.copy()
- contents_doc.children = contents
- contents_visitor = self.translator_class(contents_doc)
- contents_doc.walkabout(contents_visitor)
- self.parts['toc'] = self._generated_toc
-
- def build_contents(self, node, level=0):
- sections = []
- i = len(node) - 1
- while i >= 0 and isinstance(node[i], nodes.section):
- sections.append(node[i])
- i -= 1
- sections.reverse()
- toc = []
- for section in sections:
- try:
- reference = nodes.reference('', '', refid=section['ids'][0], *section[0])
- except IndexError:
- continue
- ref_id = reference['refid']
- text = escape(reference.astext())
- toc.append((ref_id, text))
-
- self._generated_toc = [('#%s' % href, caption) for href, caption in toc]
- # no further processing
- return []
-
-
-def generate_documentation(data, link_style):
- writer = DocumentationWriter(link_style)
- data = data.replace('[builtin_lexer_docs]', LEXERDOCS).\
- replace('[builtin_formatter_docs]', FORMATTERDOCS).\
- replace('[builtin_filter_docs]', FILTERDOCS).\
- replace('[changelog]', CHANGELOG).\
- replace('[authors]', AUTHORS)
- parts = publish_parts(
- data,
- writer=writer,
- settings_overrides={
- 'initial_header_level': 3,
- 'field_name_limit': 50,
- }
- )
- return {
- 'title': parts['title'],
- 'body': parts['body'],
- 'toc': parts['toc']
- }
-
-
-def handle_python(filename, fp, dst):
- now = datetime.now()
- title = os.path.basename(filename)[:-4]
- content = fp.read()
- def urlize(href):
- # create links for the pygments webpage
- if href == 'index.txt':
- return '/docs/'
- else:
- return '/docs/%s/' % href
- parts = generate_documentation(content, urlize)
- result = file(os.path.join(dst, title + '.py'), 'w')
- result.write('# -*- coding: utf-8 -*-\n')
- result.write('"""\n Pygments Documentation - %s\n' % title)
- result.write(' %s\n\n' % ('~' * (24 + len(title))))
- result.write(' Generated on: %s\n"""\n\n' % now)
- result.write('import datetime\n')
- result.write('DATE = %r\n' % now)
- result.write('TITLE = %r\n' % parts['title'])
- result.write('TOC = %r\n' % parts['toc'])
- result.write('BODY = %r\n' % parts['body'])
- result.close()
-
-
-def handle_html(filename, fp, dst):
- now = datetime.now()
- title = os.path.basename(filename)[:-4]
- content = fp.read().decode('utf-8')
- c = generate_documentation(content, (lambda x: './%s.html' % x))
- result = file(os.path.join(dst, title + '.html'), 'w')
- c['style'] = STYLESHEET + PYGMENTS_FORMATTER.get_style_defs('.syntax')
- c['generation_date'] = now
- c['file_id'] = title
- t = Template(TEMPLATE)
- result.write(t.render(c).encode('utf-8'))
- result.close()
-
-
-def run(handle_file, dst, sources=()):
- path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'src'))
- if not sources:
- sources = [os.path.join(path, fn) for fn in os.listdir(path)]
- if not os.path.isdir(dst):
- os.makedirs(dst)
- print 'Making docs for Pygments %s in %s' % (__version__, dst)
- for fn in sources:
- if not os.path.isfile(fn):
- continue
- print 'Processing %s' % fn
- f = open(fn)
- try:
- handle_file(fn, f, dst)
- finally:
- f.close()
-
-
-def main(mode, dst='build/', *sources):
- try:
- handler = {
- 'html': handle_html,
- 'python': handle_python
- }[mode]
- except KeyError:
- print 'Error: unknown mode "%s"' % mode
- sys.exit(1)
- run(handler, os.path.realpath(dst), sources)
-
-
-if __name__ == '__main__':
- if len(sys.argv) == 1:
- print USAGE
- else:
- main(*sys.argv[1:])
diff --git a/docs/src/api.txt b/docs/src/api.txt
deleted file mode 100644
index 4276eea2..00000000
--- a/docs/src/api.txt
+++ /dev/null
@@ -1,270 +0,0 @@
-.. -*- mode: rst -*-
-
-=====================
-The full Pygments API
-=====================
-
-This page describes the Pygments API.
-
-High-level API
-==============
-
-Functions from the `pygments` module:
-
-def `lex(code, lexer):`
- Lex `code` with the `lexer` (must be a `Lexer` instance)
- and return an iterable of tokens. Currently, this only calls
- `lexer.get_tokens()`.
-
-def `format(tokens, formatter, outfile=None):`
- Format a token stream (iterable of tokens) `tokens` with the
- `formatter` (must be a `Formatter` instance). The result is
- written to `outfile`, or if that is ``None``, returned as a
- string.
-
-def `highlight(code, lexer, formatter, outfile=None):`
- This is the most high-level highlighting function.
- It combines `lex` and `format` in one function.
-
-
-Functions from `pygments.lexers`:
-
-def `get_lexer_by_name(alias, **options):`
- Return an instance of a `Lexer` subclass that has `alias` in its
- aliases list. The lexer is given the `options` at its
- instantiation.
-
- Will raise `pygments.util.ClassNotFound` if no lexer with that alias is
- found.
-
-def `get_lexer_for_filename(fn, **options):`
- Return a `Lexer` subclass instance that has a filename pattern
- matching `fn`. The lexer is given the `options` at its
- instantiation.
-
- Will raise `pygments.util.ClassNotFound` if no lexer for that filename is
- found.
-
-def `get_lexer_for_mimetype(mime, **options):`
- Return a `Lexer` subclass instance that has `mime` in its mimetype
- list. The lexer is given the `options` at its instantiation.
-
- Will raise `pygments.util.ClassNotFound` if not lexer for that mimetype is
- found.
-
-def `guess_lexer(text, **options):`
- Return a `Lexer` subclass instance that's guessed from the text
- in `text`. For that, the `analyse_text()` method of every known
- lexer class is called with the text as argument, and the lexer
- which returned the highest value will be instantiated and returned.
-
- `pygments.util.ClassNotFound` is raised if no lexer thinks it can handle the
- content.
-
-def `guess_lexer_for_filename(filename, text, **options):`
- As `guess_lexer()`, but only lexers which have a pattern in `filenames`
- or `alias_filenames` that matches `filename` are taken into consideration.
-
- `pygments.util.ClassNotFound` is raised if no lexer thinks it can handle the
- content.
-
-def `get_all_lexers():`
- Return an iterable over all registered lexers, yielding tuples in the
- format::
-
- (longname, tuple of aliases, tuple of filename patterns, tuple of mimetypes)
-
- *New in Pygments 0.6.*
-
-
-Functions from `pygments.formatters`:
-
-def `get_formatter_by_name(alias, **options):`
- Return an instance of a `Formatter` subclass that has `alias` in its
- aliases list. The formatter is given the `options` at its
- instantiation.
-
- Will raise `pygments.util.ClassNotFound` if no formatter with that alias is
- found.
-
-def `get_formatter_for_filename(fn, **options):`
- Return a `Formatter` subclass instance that has a filename pattern
- matching `fn`. The formatter is given the `options` at its
- instantiation.
-
- Will raise `pygments.util.ClassNotFound` if no formatter for that filename
- is found.
-
-
-Functions from `pygments.styles`:
-
-def `get_style_by_name(name):`
- Return a style class by its short name. The names of the builtin styles
- are listed in `pygments.styles.STYLE_MAP`.
-
- Will raise `pygments.util.ClassNotFound` if no style of that name is found.
-
-def `get_all_styles():`
- Return an iterable over all registered styles, yielding their names.
-
- *New in Pygments 0.6.*
-
-
-Lexers
-======
-
-A lexer (derived from `pygments.lexer.Lexer`) has the following functions:
-
-def `__init__(self, **options):`
- The constructor. Takes a \*\*keywords dictionary of options.
- Every subclass must first process its own options and then call
- the `Lexer` constructor, since it processes the `stripnl`,
- `stripall` and `tabsize` options.
-
- An example looks like this:
-
- .. sourcecode:: python
-
- def __init__(self, **options):
- self.compress = options.get('compress', '')
- Lexer.__init__(self, **options)
-
- As these options must all be specifiable as strings (due to the
- command line usage), there are various utility functions
- available to help with that, see `Option processing`_.
-
-def `get_tokens(self, text):`
- This method is the basic interface of a lexer. It is called by
- the `highlight()` function. It must process the text and return an
- iterable of ``(tokentype, value)`` pairs from `text`.
-
- Normally, you don't need to override this method. The default
- implementation processes the `stripnl`, `stripall` and `tabsize`
- options and then yields all tokens from `get_tokens_unprocessed()`,
- with the ``index`` dropped.
-
-def `get_tokens_unprocessed(self, text):`
- This method should process the text and return an iterable of
- ``(index, tokentype, value)`` tuples where ``index`` is the starting
- position of the token within the input text.
-
- This method must be overridden by subclasses.
-
-def `analyse_text(text):`
- A static method which is called for lexer guessing. It should analyse
- the text and return a float in the range from ``0.0`` to ``1.0``.
- If it returns ``0.0``, the lexer will not be selected as the most
- probable one, if it returns ``1.0``, it will be selected immediately.
-
-For a list of known tokens have a look at the `Tokens`_ page.
-
-A lexer also can have the following attributes (in fact, they are mandatory
-except `alias_filenames`) that are used by the builtin lookup mechanism.
-
-`name`
- Full name for the lexer, in human-readable form.
-
-`aliases`
- A list of short, unique identifiers that can be used to lookup
- the lexer from a list, e.g. using `get_lexer_by_name()`.
-
-`filenames`
- A list of `fnmatch` patterns that match filenames which contain
- content for this lexer. The patterns in this list should be unique among
- all lexers.
-
-`alias_filenames`
- A list of `fnmatch` patterns that match filenames which may or may not
- contain content for this lexer. This list is used by the
- `guess_lexer_for_filename()` function, to determine which lexers are
- then included in guessing the correct one. That means that e.g. every
- lexer for HTML and a template language should include ``\*.html`` in
- this list.
-
-`mimetypes`
- A list of MIME types for content that can be lexed with this
- lexer.
-
-
-.. _Tokens: tokens.txt
-
-
-Formatters
-==========
-
-A formatter (derived from `pygments.formatter.Formatter`) has the following
-functions:
-
-def `__init__(self, **options):`
- As with lexers, this constructor processes options and then must call
- the base class `__init__`.
-
- The `Formatter` class recognizes the options `style`, `full` and
- `title`. It is up to the formatter class whether it uses them.
-
-def `get_style_defs(self, arg=''):`
- This method must return statements or declarations suitable to define
- the current style for subsequent highlighted text (e.g. CSS classes
- in the `HTMLFormatter`).
-
- The optional argument `arg` can be used to modify the generation and
- is formatter dependent (it is standardized because it can be given on
- the command line).
-
- This method is called by the ``-S`` `command-line option`_, the `arg`
- is then given by the ``-a`` option.
-
-def `format(self, tokensource, outfile):`
- This method must format the tokens from the `tokensource` iterable and
- write the formatted version to the file object `outfile`.
-
- Formatter options can control how exactly the tokens are converted.
-
-.. _command-line option: cmdline.txt
-
-A formatter must have the following attributes that are used by the
-builtin lookup mechanism. (*New in Pygments 0.7.*)
-
-`name`
- Full name for the formatter, in human-readable form.
-
-`aliases`
- A list of short, unique identifiers that can be used to lookup
- the formatter from a list, e.g. using `get_formatter_by_name()`.
-
-`filenames`
- A list of `fnmatch` patterns that match filenames for which this formatter
- can produce output. The patterns in this list should be unique among
- all formatters.
-
-
-Option processing
-=================
-
-The `pygments.util` module has some utility functions usable for option
-processing:
-
-class `OptionError`
- This exception will be raised by all option processing functions if
- the type or value of the argument is not correct.
-
-def `get_bool_opt(options, optname, default=None):`
- Interpret the key `optname` from the dictionary `options`
- as a boolean and return it. Return `default` if `optname`
- is not in `options`.
-
- The valid string values for ``True`` are ``1``, ``yes``,
- ``true`` and ``on``, the ones for ``False`` are ``0``,
- ``no``, ``false`` and ``off`` (matched case-insensitively).
-
-def `get_int_opt(options, optname, default=None):`
- As `get_bool_opt`, but interpret the value as an integer.
-
-def `get_list_opt(options, optname, default=None):`
- If the key `optname` from the dictionary `options` is a string,
- split it at whitespace and return it. If it is already a list
- or a tuple, it is returned as a list.
-
-def `get_choice_opt(options, optname, allowed, default=None):`
- If the key `optname` from the dictionary is not in the sequence
- `allowed`, raise an error, otherwise return it. *New in Pygments 0.8.*
diff --git a/docs/src/authors.txt b/docs/src/authors.txt
deleted file mode 100644
index c8c532aa..00000000
--- a/docs/src/authors.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-=======
-Authors
-=======
-
-[authors]
diff --git a/docs/src/changelog.txt b/docs/src/changelog.txt
deleted file mode 100644
index 6caf0a32..00000000
--- a/docs/src/changelog.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-=========
-Changelog
-=========
-
-[changelog]
diff --git a/docs/src/index.txt b/docs/src/index.txt
deleted file mode 100644
index b1e099c7..00000000
--- a/docs/src/index.txt
+++ /dev/null
@@ -1,69 +0,0 @@
-.. -*- mode: rst -*-
-
-========
-Overview
-========
-
-Welcome to the Pygments documentation.
-
-- Starting with Pygments
-
- - `Installation <installation.txt>`_
-
- - `Introduction and Quickstart <quickstart.txt>`_
-
- - `Command line interface <cmdline.txt>`_
-
-- Builtin components
-
- - `Lexers <lexers.txt>`_
-
- - `Formatters <formatters.txt>`_
-
- - `Filters <filters.txt>`_
-
- - `Styles <styles.txt>`_
-
-- Reference
-
- - `Unicode and encodings <unicode.txt>`_
-
- - `Builtin tokens <tokens.txt>`_
-
- - `API documentation <api.txt>`_
-
-- Hacking for Pygments
-
- - `Write your own lexer <lexerdevelopment.txt>`_
-
- - `Write your own formatter <formatterdevelopment.txt>`_
-
- - `Write your own filter <filterdevelopment.txt>`_
-
- - `Register plugins <plugins.txt>`_
-
-- Hints and Tricks
-
- - `Using Pygments in ReST documents <rstdirective.txt>`_
-
- - `Using Pygments with MoinMoin <moinmoin.txt>`_
-
- - `Using Pygments in other contexts <integrate.txt>`_
-
-- About Pygments
-
- - `Changelog <changelog.txt>`_
-
- - `Authors <authors.txt>`_
-
-
---------------
-
-If you find bugs or have suggestions for the documentation, please
-look `here`_ for info on how to contact the team.
-
-You can download an offline version of this documentation from the
-`download page`_.
-
-.. _here: http://pygments.org/contribute/
-.. _download page: http://pygments.org/download/
diff --git a/docs/src/installation.txt b/docs/src/installation.txt
deleted file mode 100644
index 17a9aad5..00000000
--- a/docs/src/installation.txt
+++ /dev/null
@@ -1,71 +0,0 @@
-.. -*- mode: rst -*-
-
-============
-Installation
-============
-
-Pygments requires at least Python 2.4 to work correctly. Just to clarify:
-there *won't* ever be support for Python versions below 2.4. However, there
-are no other dependencies.
-
-
-Installing a released version
-=============================
-
-As a Python egg (via easy_install)
-----------------------------------
-
-You can install the most recent Pygments version using `easy_install`_::
-
- sudo easy_install Pygments
-
-This will install a Pygments egg in your Python installation's site-packages
-directory.
-
-
-From the tarball release
--------------------------
-
-1. Download the most recent tarball from the `download page`_
-2. Unpack the tarball
-3. ``sudo python setup.py install``
-
-Note that the last command will automatically download and install
-`setuptools`_ if you don't already have it installed. This requires a working
-internet connection.
-
-This will install Pygments into your Python installation's site-packages directory.
-
-
-Installing the development version
-==================================
-
-If you want to play around with the code
-----------------------------------------
-
-1. Install `Mercurial`_
-2. ``hg clone http://bitbucket.org/birkenfeld/pygments-main pygments``
-3. ``cd pygments``
-4. ``ln -s pygments /usr/lib/python2.X/site-packages``
-5. ``ln -s pygmentize /usr/local/bin``
-
-As an alternative to steps 4 and 5 you can also do ``python setup.py develop``
-which will install the package via setuptools in development mode.
-
-..
- If you just want the latest features and use them
- -------------------------------------------------
-
- ::
-
- sudo easy_install Pygments==dev
-
- This will install a Pygments egg containing the latest Subversion trunk code
- in your Python installation's site-packages directory. Every time the command
- is run, the sources are updated from Subversion.
-
-
-.. _download page: http://pygments.org/download/
-.. _setuptools: http://peak.telecommunity.com/DevCenter/setuptools
-.. _easy_install: http://peak.telecommunity.com/DevCenter/EasyInstall
-.. _Mercurial: http://selenic.com/mercurial/
diff --git a/external/autopygmentize b/external/autopygmentize
index 85c8dfd2..d2f969a1 100755
--- a/external/autopygmentize
+++ b/external/autopygmentize
@@ -1,64 +1,83 @@
-#!/bin/sh
+#!/bin/bash
# Best effort auto-pygmentization with transparent decompression
-# (c) Reuben Thomas 2012-2013
+# by Reuben Thomas 2008-2015
# This program is in the public domain.
# Strategy: first see if pygmentize can find a lexer; if not, ask file; if that finds nothing, fail
-# Set the environment variable PYGMENTIZE_OPTS to configure pygments.
+# Set the environment variable PYGMENTIZE_OPTS or pass options before the file path to configure pygments.
# This program can be used as a .lessfilter for the less pager to auto-color less's output
-lexer=`pygmentize -N "$1"`
-if [ "$lexer" = "text" ]; then
- file_common_opts="--brief --dereference --uncompress"
+file="${!#}" # last argument
+options=${@:1:$(($#-1))} # handle others args as options to pass to pygmentize
- unset lexer
- case `file --mime-type $file_common_opts "$1"` in
- application/xml|image/svg+xml) lexer=xml;;
- text/html) lexer=html;;
- text/troff) lexer=nroff;;
- text/x-asm) lexer=nasm;;
- text/x-awk) lexer=awk;;
- text/x-c) lexer=c;;
- text/x-c++) lexer=cpp;;
- text/x-diff) lexer=diff;;
- text/x-fortran) lexer=fortran;;
- text/x-gawk) lexer=gawk;;
- text/x-java) lexer=java;;
- text/x-lisp) lexer=common-lisp;;
- text/x-lua) lexer=lua;;
- text/x-makefile) lexer=make;;
- text/x-msdos-batch) lexer=bat;;
- text/x-nawk) lexer=nawk;;
- text/x-pascal) lexer=pascal;;
- text/x-perl) lexer=perl;;
- text/x-php) lexer=php;;
- text/x-po) lexer=po;;
- text/x-python) lexer=python;;
- text/x-ruby) lexer=ruby;;
- text/x-shellscript) lexer=sh;;
- text/x-tcl) lexer=tcl;;
- text/x-tex|text/x-texinfo) lexer=latex;; # FIXME: texinfo really needs its own lexer
-
- # Types that file outputs which pygmentize didn't support as of file 5.11, pygments 1.6rc1
- # text/calendar
- # text/PGP
- # text/rtf
- # text/texmacs
- # text/x-bcpl
- # text/x-info
- # text/x-m4
- # text/x-vcard
- # text/x-xmcd
- esac
+file_common_opts="--brief --dereference"
+
+lexer=$(pygmentize -N "$file")
+if [[ "$lexer" == text ]]; then
+ unset lexer
+ case $(file --mime-type --uncompress $file_common_opts "$file") in
+ application/xml|image/svg+xml) lexer=xml;;
+ application/javascript) lexer=javascript;;
+ text/html) lexer=html;;
+ text/troff) lexer=nroff;;
+ text/x-asm) lexer=nasm;;
+ text/x-awk) lexer=awk;;
+ text/x-c) lexer=c;;
+ text/x-c++) lexer=cpp;;
+ text/x-diff) lexer=diff;;
+ text/x-fortran) lexer=fortran;;
+ text/x-gawk) lexer=gawk;;
+ text/x-java) lexer=java;;
+ text/x-lisp) lexer=common-lisp;;
+ text/x-lua) lexer=lua;;
+ text/x-makefile) lexer=make;;
+ text/x-msdos-batch) lexer=bat;;
+ text/x-nawk) lexer=nawk;;
+ text/x-pascal) lexer=pascal;;
+ text/x-perl) lexer=perl;;
+ text/x-php) lexer=php;;
+ text/x-po) lexer=po;;
+ text/x-python) lexer=python;;
+ text/x-ruby) lexer=ruby;;
+ text/x-shellscript) lexer=sh;;
+ text/x-tcl) lexer=tcl;;
+ text/x-tex|text/x-texinfo) lexer=latex;; # FIXME: texinfo really needs its own lexer
+
+ # Types that file outputs which pygmentize didn't support as of file 5.20, pygments 2.0
+ # text/calendar
+ # text/inf
+ # text/PGP
+ # text/rtf
+ # text/texmacs
+ # text/vnd.graphviz
+ # text/x-bcpl
+ # text/x-info
+ # text/x-m4
+ # text/x-vcard
+ # text/x-xmcd
+
+ text/plain) # special filenames. TODO: insert more
+ case $(basename "$file") in
+ .zshrc) lexer=sh;;
+ esac
+ ;;
+ esac
fi
-encoding=`file --brief --mime-encoding $file_common_opts "$1"`
+encoding=$(file --mime-encoding --uncompress $file_common_opts "$file")
+if [[ $encoding == "us-asciibinarybinary" ]]; then
+ encoding="us-ascii"
+fi
-if [ -n "$lexer" ]; then
- # FIXME: Specify input encoding rather than output encoding https://bitbucket.org/birkenfeld/pygments-main/issue/800
- zcat "$1" | pygmentize -O encoding=$encoding,outencoding=UTF-8 $PYGMENTIZE_OPTS -l $lexer
- exit 0
+if [[ -n "$lexer" ]]; then
+ concat=cat
+ case $(file $file_common_opts --mime-type "$file") in
+ application/x-gzip) concat=zcat;;
+ application/x-bzip2) concat=bzcat;;
+ application/x-xz) concat=xzcat;;
+ esac
+ exec $concat "$file" | pygmentize -O inencoding=$encoding $PYGMENTIZE_OPTS $options -l $lexer
fi
exit 1
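
For readers who prefer the library to the shell script, the same fallback strategy can be sketched in Python (an illustrative approximation only; the MIME table below is a small hypothetical subset of the one in the script)::

    # Ask Pygments for a lexer by file name first; if that fails, fall back
    # to file(1)'s MIME type, mirroring the script's strategy.
    import subprocess

    from pygments.lexers import get_lexer_for_filename
    from pygments.util import ClassNotFound

    MIME_TO_LEXER = {'text/x-python': 'python', 'text/x-c': 'c', 'text/html': 'html'}

    def guess_lexer_name(path):
        try:
            return get_lexer_for_filename(path).aliases[0]
        except ClassNotFound:
            pass
        mime = subprocess.check_output(
            ['file', '--brief', '--dereference', '--mime-type', path]).decode().strip()
        return MIME_TO_LEXER.get(mime)  # None means "give up", like the script's exit 1
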
diff --git a/external/markdown-processor.py b/external/markdown-processor.py
index 12e64680..a3e178ec 100644
--- a/external/markdown-processor.py
+++ b/external/markdown-processor.py
@@ -6,14 +6,9 @@
This fragment is a Markdown_ preprocessor that renders source code
to HTML via Pygments. To use it, invoke Markdown like so::
- from markdown import Markdown
+ import markdown
- md = Markdown()
- md.textPreprocessors.insert(0, CodeBlockPreprocessor())
- html = md.convert(someText)
-
- markdown is then a callable that can be passed to the context of
- a template and used in that template, for example.
+ html = markdown.markdown(someText, extensions=[CodeBlockExtension()])
This uses CSS classes by default, so use
``pygmentize -S <some style> -f html > pygments.css``
@@ -25,9 +20,9 @@
some code
[/sourcecode]
- .. _Markdown: http://www.freewisdom.org/projects/python-markdown/
+ .. _Markdown: https://pypi.python.org/pypi/Markdown
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -40,17 +35,17 @@ INLINESTYLES = False
import re
-from markdown import TextPreprocessor
+from markdown.preprocessors import Preprocessor
+from markdown.extensions import Extension
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name, TextLexer
-class CodeBlockPreprocessor(TextPreprocessor):
+class CodeBlockPreprocessor(Preprocessor):
- pattern = re.compile(
- r'\[sourcecode:(.+?)\](.+?)\[/sourcecode\]', re.S)
+ pattern = re.compile(r'\[sourcecode:(.+?)\](.+?)\[/sourcecode\]', re.S)
formatter = HtmlFormatter(noclasses=INLINESTYLES)
@@ -63,5 +58,10 @@ class CodeBlockPreprocessor(TextPreprocessor):
code = highlight(m.group(2), lexer, self.formatter)
code = code.replace('\n\n', '\n&nbsp;\n').replace('\n', '<br />')
return '\n\n<div class="code">%s</div>\n\n' % code
- return self.pattern.sub(
- repl, lines)
+ joined_lines = "\n".join(lines)
+ joined_lines = self.pattern.sub(repl, joined_lines)
+ return joined_lines.split("\n")
+
+class CodeBlockExtension(Extension):
+ def extendMarkdown(self, md, md_globals):
+ md.preprocessors.add('CodeBlockPreprocessor', CodeBlockPreprocessor(), '_begin')
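
A minimal end-to-end use of the new extension API might look like the following sketch; ``markdown_processor`` is a placeholder for however this fragment is saved and imported::

    # Hypothetical usage: convert text containing [sourcecode:...] blocks to HTML.
    import markdown
    from markdown_processor import CodeBlockExtension  # placeholder module name

    text = "Intro text.\n\n[sourcecode:python]\nprint('hi')\n[/sourcecode]\n"
    html = markdown.markdown(text, extensions=[CodeBlockExtension()])
    print(html)
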
diff --git a/external/moin-parser.py b/external/moin-parser.py
index 6544da1b..9cb082a2 100644
--- a/external/moin-parser.py
+++ b/external/moin-parser.py
@@ -31,7 +31,7 @@
If you do not want to do that and are willing to accept larger HTML
output, you can set the INLINESTYLES option below to True.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/external/rst-directive-old.py b/external/rst-directive-old.py
deleted file mode 100644
index a074536f..00000000
--- a/external/rst-directive-old.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- The Pygments reStructuredText directive
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This fragment is a Docutils_ 0.4 directive that renders source code
- (to HTML only, currently) via Pygments.
-
- To use it, adjust the options below and copy the code into a module
- that you import on initialization. The code then automatically
- registers a ``sourcecode`` directive that you can use instead of
- normal code blocks like this::
-
- .. sourcecode:: python
-
- My code goes here.
-
- If you want to have different code styles, e.g. one with line numbers
- and one without, add formatters with their names in the VARIANTS dict
- below. You can invoke them instead of the DEFAULT one by using a
- directive option::
-
- .. sourcecode:: python
- :linenos:
-
- My code goes here.
-
- Look at the `directive documentation`_ to get all the gory details.
-
- .. _Docutils: http://docutils.sf.net/
- .. _directive documentation:
- http://docutils.sourceforge.net/docs/howto/rst-directives.html
-
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# Options
-# ~~~~~~~
-
-# Set to True if you want inline CSS styles instead of classes
-INLINESTYLES = False
-
-from pygments.formatters import HtmlFormatter
-
-# The default formatter
-DEFAULT = HtmlFormatter(noclasses=INLINESTYLES)
-
-# Add name -> formatter pairs for every variant you want to use
-VARIANTS = {
- # 'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True),
-}
-
-
-from docutils import nodes
-from docutils.parsers.rst import directives
-
-from pygments import highlight
-from pygments.lexers import get_lexer_by_name, TextLexer
-
-def pygments_directive(name, arguments, options, content, lineno,
- content_offset, block_text, state, state_machine):
- try:
- lexer = get_lexer_by_name(arguments[0])
- except ValueError:
- # no lexer found - use the text one instead of an exception
- lexer = TextLexer()
- # take an arbitrary option if more than one is given
- formatter = options and VARIANTS[options.keys()[0]] or DEFAULT
- parsed = highlight(u'\n'.join(content), lexer, formatter)
- return [nodes.raw('', parsed, format='html')]
-
-pygments_directive.arguments = (1, 0, 1)
-pygments_directive.content = 1
-pygments_directive.options = dict([(key, directives.flag) for key in VARIANTS])
-
-directives.register_directive('sourcecode', pygments_directive)
diff --git a/external/rst-directive.py b/external/rst-directive.py
index 5c04038d..f81677b6 100644
--- a/external/rst-directive.py
+++ b/external/rst-directive.py
@@ -31,7 +31,7 @@
.. _directive documentation:
http://docutils.sourceforge.net/docs/howto/rst-directives.html
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -75,9 +75,8 @@ class Pygments(Directive):
# no lexer found - use the text one instead of an exception
lexer = TextLexer()
# take an arbitrary option if more than one is given
- formatter = self.options and VARIANTS[self.options.keys()[0]] or DEFAULT
+ formatter = self.options and VARIANTS[list(self.options)[0]] or DEFAULT
parsed = highlight(u'\n'.join(self.content), lexer, formatter)
return [nodes.raw('', parsed, format='html')]
directives.register_directive('sourcecode', Pygments)
-
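
The switch from ``self.options.keys()[0]`` to ``list(self.options)[0]`` is the routine Python 3 fix, since dict views are no longer indexable; for example::

    # Why the change was needed: dict.keys() is a non-indexable view on Python 3.
    options = {'linenos': None}
    # options.keys()[0]      # TypeError on Python 3
    first = list(options)[0]  # works on both Python 2 and 3
    assert first == 'linenos'
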
diff --git a/ez_setup.py b/ez_setup.py
index e33744ba..9dc2c872 100755..100644
--- a/ez_setup.py
+++ b/ez_setup.py
@@ -13,264 +13,370 @@ the appropriate options to ``use_setuptools()``.
This file can also be run as a script to install or upgrade setuptools.
"""
+import os
+import shutil
import sys
-DEFAULT_VERSION = "0.6c9"
-DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3]
-
-md5_data = {
- 'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca',
- 'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb',
- 'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b',
- 'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a',
- 'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618',
- 'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac',
- 'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5',
- 'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4',
- 'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c',
- 'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b',
- 'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27',
- 'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277',
- 'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa',
- 'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e',
- 'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e',
- 'setuptools-0.6c4-py2.3.egg': 'b0b9131acab32022bfac7f44c5d7971f',
- 'setuptools-0.6c4-py2.4.egg': '2a1f9656d4fbf3c97bf946c0a124e6e2',
- 'setuptools-0.6c4-py2.5.egg': '8f5a052e32cdb9c72bcf4b5526f28afc',
- 'setuptools-0.6c5-py2.3.egg': 'ee9fd80965da04f2f3e6b3576e9d8167',
- 'setuptools-0.6c5-py2.4.egg': 'afe2adf1c01701ee841761f5bcd8aa64',
- 'setuptools-0.6c5-py2.5.egg': 'a8d3f61494ccaa8714dfed37bccd3d5d',
- 'setuptools-0.6c6-py2.3.egg': '35686b78116a668847237b69d549ec20',
- 'setuptools-0.6c6-py2.4.egg': '3c56af57be3225019260a644430065ab',
- 'setuptools-0.6c6-py2.5.egg': 'b2f8a7520709a5b34f80946de5f02f53',
- 'setuptools-0.6c7-py2.3.egg': '209fdf9adc3a615e5115b725658e13e2',
- 'setuptools-0.6c7-py2.4.egg': '5a8f954807d46a0fb67cf1f26c55a82e',
- 'setuptools-0.6c7-py2.5.egg': '45d2ad28f9750e7434111fde831e8372',
- 'setuptools-0.6c8-py2.3.egg': '50759d29b349db8cfd807ba8303f1902',
- 'setuptools-0.6c8-py2.4.egg': 'cba38d74f7d483c06e9daa6070cce6de',
- 'setuptools-0.6c8-py2.5.egg': '1721747ee329dc150590a58b3e1ac95b',
- 'setuptools-0.6c9-py2.3.egg': 'a83c4020414807b496e4cfbe08507c03',
- 'setuptools-0.6c9-py2.4.egg': '260a2be2e5388d66bdaee06abec6342a',
- 'setuptools-0.6c9-py2.5.egg': 'fe67c3e5a17b12c0e7c541b7ea43a8e6',
- 'setuptools-0.6c9-py2.6.egg': 'ca37b1ff16fa2ede6e19383e7b59245a',
-}
-
-import sys, os
-try: from hashlib import md5
-except ImportError: from md5 import md5
-
-def _validate_md5(egg_name, data):
- if egg_name in md5_data:
- digest = md5(data).hexdigest()
- if digest != md5_data[egg_name]:
- print >>sys.stderr, (
- "md5 validation of %s failed! (Possible download problem?)"
- % egg_name
- )
- sys.exit(2)
- return data
-
-def use_setuptools(
- version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
- download_delay=15
-):
- """Automatically find/download setuptools and make it available on sys.path
-
- `version` should be a valid setuptools version number that is available
- as an egg for download under the `download_base` URL (which should end with
- a '/'). `to_dir` is the directory where setuptools will be downloaded, if
- it is not already available. If `download_delay` is specified, it should
- be the number of seconds that will be paused before initiating a download,
- should one be required. If an older version of setuptools is installed,
- this routine will print a message to ``sys.stderr`` and raise SystemExit in
- an attempt to abort the calling script.
- """
- was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules
- def do_download():
- egg = download_setuptools(version, download_base, to_dir, download_delay)
- sys.path.insert(0, egg)
- import setuptools; setuptools.bootstrap_install_from = egg
+import tempfile
+import tarfile
+import optparse
+import subprocess
+import platform
+
+from distutils import log
+
+try:
+ from site import USER_SITE
+except ImportError:
+ USER_SITE = None
+
+DEFAULT_VERSION = "1.4.2"
+DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
+
+def _python_cmd(*args):
+ args = (sys.executable,) + args
+ return subprocess.call(args) == 0
+
+def _check_call_py24(cmd, *args, **kwargs):
+ res = subprocess.call(cmd, *args, **kwargs)
+ class CalledProcessError(Exception):
+ pass
+ if not res == 0:
+ msg = "Command '%s' return non-zero exit status %d" % (cmd, res)
+ raise CalledProcessError(msg)
+vars(subprocess).setdefault('check_call', _check_call_py24)
+
+def _install(tarball, install_args=()):
+ # extracting the tarball
+ tmpdir = tempfile.mkdtemp()
+ log.warn('Extracting in %s', tmpdir)
+ old_wd = os.getcwd()
+ try:
+ os.chdir(tmpdir)
+ tar = tarfile.open(tarball)
+ _extractall(tar)
+ tar.close()
+
+ # going in the directory
+ subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+ os.chdir(subdir)
+ log.warn('Now working in %s', subdir)
+
+ # installing
+ log.warn('Installing Setuptools')
+ if not _python_cmd('setup.py', 'install', *install_args):
+ log.warn('Something went wrong during the installation.')
+ log.warn('See the error message above.')
+ # exitcode will be 2
+ return 2
+ finally:
+ os.chdir(old_wd)
+ shutil.rmtree(tmpdir)
+
+
+def _build_egg(egg, tarball, to_dir):
+ # extracting the tarball
+ tmpdir = tempfile.mkdtemp()
+ log.warn('Extracting in %s', tmpdir)
+ old_wd = os.getcwd()
+ try:
+ os.chdir(tmpdir)
+ tar = tarfile.open(tarball)
+ _extractall(tar)
+ tar.close()
+
+ # going in the directory
+ subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+ os.chdir(subdir)
+ log.warn('Now working in %s', subdir)
+
+ # building an egg
+ log.warn('Building a Setuptools egg in %s', to_dir)
+ _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
+
+ finally:
+ os.chdir(old_wd)
+ shutil.rmtree(tmpdir)
+ # returning the result
+ log.warn(egg)
+ if not os.path.exists(egg):
+ raise IOError('Could not build the egg.')
+
+
+def _do_download(version, download_base, to_dir, download_delay):
+ egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg'
+ % (version, sys.version_info[0], sys.version_info[1]))
+ if not os.path.exists(egg):
+ tarball = download_setuptools(version, download_base,
+ to_dir, download_delay)
+ _build_egg(egg, tarball, to_dir)
+ sys.path.insert(0, egg)
+
+ # Remove previously-imported pkg_resources if present (see
+ # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details).
+ if 'pkg_resources' in sys.modules:
+ del sys.modules['pkg_resources']
+
+ import setuptools
+ setuptools.bootstrap_install_from = egg
+
+
+def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
+ to_dir=os.curdir, download_delay=15):
+ # making sure we use the absolute path
+ to_dir = os.path.abspath(to_dir)
+ was_imported = 'pkg_resources' in sys.modules or \
+ 'setuptools' in sys.modules
try:
import pkg_resources
except ImportError:
- return do_download()
+ return _do_download(version, download_base, to_dir, download_delay)
try:
- pkg_resources.require("setuptools>="+version); return
- except pkg_resources.VersionConflict, e:
+ pkg_resources.require("setuptools>=" + version)
+ return
+ except pkg_resources.VersionConflict:
+ e = sys.exc_info()[1]
if was_imported:
- print >>sys.stderr, (
- "The required version of setuptools (>=%s) is not available, and\n"
- "can't be installed while this script is running. Please install\n"
- " a more recent version first, using 'easy_install -U setuptools'."
- "\n\n(Currently using %r)"
- ) % (version, e.args[0])
+ sys.stderr.write(
+ "The required version of setuptools (>=%s) is not available,\n"
+ "and can't be installed while this script is running. Please\n"
+ "install a more recent version first, using\n"
+ "'easy_install -U setuptools'."
+ "\n\n(Currently using %r)\n" % (version, e.args[0]))
sys.exit(2)
else:
del pkg_resources, sys.modules['pkg_resources'] # reload ok
- return do_download()
+ return _do_download(version, download_base, to_dir,
+ download_delay)
except pkg_resources.DistributionNotFound:
- return do_download()
+ return _do_download(version, download_base, to_dir,
+ download_delay)
-def download_setuptools(
- version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
- delay = 15
-):
- """Download setuptools from a specified location and return its filename
+def _clean_check(cmd, target):
+ """
+ Run the command to download target. If the command fails, clean up before
+ re-raising the error.
+ """
+ try:
+ subprocess.check_call(cmd)
+ except subprocess.CalledProcessError:
+ if os.access(target, os.F_OK):
+ os.unlink(target)
+ raise
- `version` should be a valid setuptools version number that is available
- as an egg for download under the `download_base` URL (which should end
- with a '/'). `to_dir` is the directory where the egg will be downloaded.
- `delay` is the number of seconds to pause before an actual download attempt.
+def download_file_powershell(url, target):
"""
- import urllib2, shutil
- egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3])
- url = download_base + egg_name
- saveto = os.path.join(to_dir, egg_name)
- src = dst = None
- if not os.path.exists(saveto): # Avoid repeated downloads
+ Download the file at url to target using Powershell (which will validate
+ trust). Raise an exception if the command cannot complete.
+ """
+ target = os.path.abspath(target)
+ cmd = [
+ 'powershell',
+ '-Command',
+ "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(),
+ ]
+ _clean_check(cmd, target)
+
+def has_powershell():
+ if platform.system() != 'Windows':
+ return False
+ cmd = ['powershell', '-Command', 'echo test']
+ devnull = open(os.path.devnull, 'wb')
+ try:
try:
- from distutils import log
- if delay:
- log.warn("""
----------------------------------------------------------------------------
-This script requires setuptools version %s to run (even to display
-help). I will attempt to download it for you (from
-%s), but
-you may need to enable firewall access for this script first.
-I will start the download in %d seconds.
-
-(Note: if this machine does not have network access, please obtain the file
-
- %s
-
-and place it in this directory before rerunning this script.)
----------------------------------------------------------------------------""",
- version, download_base, delay, url
- ); from time import sleep; sleep(delay)
- log.warn("Downloading %s", url)
- src = urllib2.urlopen(url)
- # Read/write all in one block, so we don't create a corrupt file
- # if the download is interrupted.
- data = _validate_md5(egg_name, src.read())
- dst = open(saveto,"wb"); dst.write(data)
- finally:
- if src: src.close()
- if dst: dst.close()
- return os.path.realpath(saveto)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-def main(argv, version=DEFAULT_VERSION):
- """Install or upgrade setuptools and EasyInstall"""
+ subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
+ except:
+ return False
+ finally:
+ devnull.close()
+ return True
+
+download_file_powershell.viable = has_powershell
+
+def download_file_curl(url, target):
+ cmd = ['curl', url, '--silent', '--output', target]
+ _clean_check(cmd, target)
+
+def has_curl():
+ cmd = ['curl', '--version']
+ devnull = open(os.path.devnull, 'wb')
try:
- import setuptools
- except ImportError:
- egg = None
try:
- egg = download_setuptools(version, delay=0)
- sys.path.insert(0,egg)
- from setuptools.command.easy_install import main
- return main(list(argv)+[egg]) # we're done here
- finally:
- if egg and os.path.exists(egg):
- os.unlink(egg)
- else:
- if setuptools.__version__ == '0.0.1':
- print >>sys.stderr, (
- "You have an obsolete version of setuptools installed. Please\n"
- "remove it from your system entirely before rerunning this script."
- )
- sys.exit(2)
-
- req = "setuptools>="+version
- import pkg_resources
+ subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
+ except:
+ return False
+ finally:
+ devnull.close()
+ return True
+
+download_file_curl.viable = has_curl
+
+def download_file_wget(url, target):
+ cmd = ['wget', url, '--quiet', '--output-document', target]
+ _clean_check(cmd, target)
+
+def has_wget():
+ cmd = ['wget', '--version']
+ devnull = open(os.path.devnull, 'wb')
try:
- pkg_resources.require(req)
- except pkg_resources.VersionConflict:
try:
- from setuptools.command.easy_install import main
- except ImportError:
- from easy_install import main
- main(list(argv)+[download_setuptools(delay=0)])
- sys.exit(0) # try to force an exit
- else:
- if argv:
- from setuptools.command.easy_install import main
- main(argv)
- else:
- print "Setuptools version",version,"or greater has been installed."
- print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)'
+ subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
+ except:
+ return False
+ finally:
+ devnull.close()
+ return True
-def update_md5(filenames):
- """Update our built-in md5 registry"""
+download_file_wget.viable = has_wget
- import re
-
- for name in filenames:
- base = os.path.basename(name)
- f = open(name,'rb')
- md5_data[base] = md5(f.read()).hexdigest()
- f.close()
-
- data = [" %r: %r,\n" % it for it in md5_data.items()]
- data.sort()
- repl = "".join(data)
-
- import inspect
- srcfile = inspect.getsourcefile(sys.modules[__name__])
- f = open(srcfile, 'rb'); src = f.read(); f.close()
+def download_file_insecure(url, target):
+ """
+ Use Python to download the file, even though it cannot authenticate the
+ connection.
+ """
+ try:
+ from urllib.request import urlopen
+ except ImportError:
+ from urllib2 import urlopen
+ src = dst = None
+ try:
+ src = urlopen(url)
+ # Read/write all in one block, so we don't create a corrupt file
+ # if the download is interrupted.
+ data = src.read()
+ dst = open(target, "wb")
+ dst.write(data)
+ finally:
+ if src:
+ src.close()
+ if dst:
+ dst.close()
+
+download_file_insecure.viable = lambda: True
+
+def get_best_downloader():
+ downloaders = [
+ download_file_powershell,
+ download_file_curl,
+ download_file_wget,
+ download_file_insecure,
+ ]
+
+ for dl in downloaders:
+ if dl.viable():
+ return dl
+
+def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
+ to_dir=os.curdir, delay=15,
+ downloader_factory=get_best_downloader):
+ """Download setuptools from a specified location and return its filename
- match = re.search("\nmd5_data = {\n([^}]+)}", src)
- if not match:
- print >>sys.stderr, "Internal error!"
- sys.exit(2)
+ `version` should be a valid setuptools version number that is available
+ as an egg for download under the `download_base` URL (which should end
+ with a '/'). `to_dir` is the directory where the egg will be downloaded.
+ `delay` is the number of seconds to pause before an actual download
+ attempt.
- src = src[:match.start(1)] + repl + src[match.end(1):]
- f = open(srcfile,'w')
- f.write(src)
- f.close()
+ ``downloader_factory`` should be a function taking no arguments and
+ returning a function for downloading a URL to a target.
+ """
+ # making sure we use the absolute path
+ to_dir = os.path.abspath(to_dir)
+ tgz_name = "setuptools-%s.tar.gz" % version
+ url = download_base + tgz_name
+ saveto = os.path.join(to_dir, tgz_name)
+ if not os.path.exists(saveto): # Avoid repeated downloads
+ log.warn("Downloading %s", url)
+ downloader = downloader_factory()
+ downloader(url, saveto)
+ return os.path.realpath(saveto)
-if __name__=='__main__':
- if len(sys.argv)>2 and sys.argv[1]=='--md5update':
- update_md5(sys.argv[2:])
+def _extractall(self, path=".", members=None):
+ """Extract all members from the archive to the current working
+ directory and set owner, modification time and permissions on
+ directories afterwards. `path' specifies a different directory
+ to extract to. `members' is optional and must be a subset of the
+ list returned by getmembers().
+ """
+ import copy
+ import operator
+ from tarfile import ExtractError
+ directories = []
+
+ if members is None:
+ members = self
+
+ for tarinfo in members:
+ if tarinfo.isdir():
+ # Extract directories with a safe mode.
+ directories.append(tarinfo)
+ tarinfo = copy.copy(tarinfo)
+ tarinfo.mode = 448 # decimal for oct 0700
+ self.extract(tarinfo, path)
+
+ # Reverse sort directories.
+ if sys.version_info < (2, 4):
+ def sorter(dir1, dir2):
+ return cmp(dir1.name, dir2.name)
+ directories.sort(sorter)
+ directories.reverse()
else:
- main(sys.argv[1:])
-
-
-
-
+ directories.sort(key=operator.attrgetter('name'), reverse=True)
+ # Set correct owner, mtime and filemode on directories.
+ for tarinfo in directories:
+ dirpath = os.path.join(path, tarinfo.name)
+ try:
+ self.chown(tarinfo, dirpath)
+ self.utime(tarinfo, dirpath)
+ self.chmod(tarinfo, dirpath)
+ except ExtractError:
+ e = sys.exc_info()[1]
+ if self.errorlevel > 1:
+ raise
+ else:
+ self._dbg(1, "tarfile: %s" % e)
+
+
+def _build_install_args(options):
+ """
+ Build the arguments to 'python setup.py install' on the setuptools package
+ """
+ install_args = []
+ if options.user_install:
+ if sys.version_info < (2, 6):
+ log.warn("--user requires Python 2.6 or later")
+ raise SystemExit(1)
+ install_args.append('--user')
+ return install_args
+
+def _parse_args():
+ """
+ Parse the command line for options
+ """
+ parser = optparse.OptionParser()
+ parser.add_option(
+ '--user', dest='user_install', action='store_true', default=False,
+ help='install in user site package (requires Python 2.6 or later)')
+ parser.add_option(
+ '--download-base', dest='download_base', metavar="URL",
+ default=DEFAULT_URL,
+ help='alternative URL from where to download the setuptools package')
+ parser.add_option(
+ '--insecure', dest='downloader_factory', action='store_const',
+ const=lambda: download_file_insecure, default=get_best_downloader,
+ help='Use internal, non-validating downloader'
+ )
+ options, args = parser.parse_args()
+ # positional arguments are ignored
+ return options
+
+def main(version=DEFAULT_VERSION):
+ """Install or upgrade setuptools and EasyInstall"""
+ options = _parse_args()
+ tarball = download_setuptools(download_base=options.download_base,
+ downloader_factory=options.downloader_factory)
+ return _install(tarball, _build_install_args(options))
+if __name__ == '__main__':
+ sys.exit(main())
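
The download helpers above share a small convention worth noting: each candidate function carries a zero-argument ``viable`` attribute, and ``get_best_downloader()`` returns the first candidate that reports itself viable. A stripped-down, purely illustrative sketch of that pattern::

    import shutil

    def fetch_with_curl(url, target):          # illustrative stand-in
        print('would run: curl', url, '--output', target)
    fetch_with_curl.viable = lambda: shutil.which('curl') is not None

    def fetch_insecure(url, target):           # always-available fallback
        print('would fetch', url, 'with urllib')
    fetch_insecure.viable = lambda: True

    def best_fetcher(candidates=(fetch_with_curl, fetch_insecure)):
        # mirrors get_best_downloader(): first viable candidate wins
        for candidate in candidates:
            if candidate.viable():
                return candidate

    best_fetcher()('https://example.org/pkg.tar.gz', 'pkg.tar.gz')
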
diff --git a/pygmentize b/pygmentize
index 8b3b2067..aea38727 100755
--- a/pygmentize
+++ b/pygmentize
@@ -1,6 +1,7 @@
#!/usr/bin/env python2
-import sys, pygments.cmdline
+import sys
+import pygments.cmdline
try:
sys.exit(pygments.cmdline.main(sys.argv))
except KeyboardInterrupt:
diff --git a/pygments/__init__.py b/pygments/__init__.py
index 2bfd8ba5..1ce34b2a 100644
--- a/pygments/__init__.py
+++ b/pygments/__init__.py
@@ -22,11 +22,11 @@
.. _Pygments tip:
http://bitbucket.org/birkenfeld/pygments-main/get/tip.zip#egg=Pygments-dev
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-__version__ = '1.6'
+__version__ = '2.1a0'
__docformat__ = 'restructuredtext'
__all__ = ['lex', 'format', 'highlight']
@@ -43,9 +43,10 @@ def lex(code, lexer):
"""
try:
return lexer.get_tokens(code)
- except TypeError, err:
+ except TypeError as err:
if isinstance(err.args[0], str) and \
- 'unbound method get_tokens' in err.args[0]:
+ ('unbound method get_tokens' in err.args[0] or
+ 'missing 1 required positional argument' in err.args[0]):
raise TypeError('lex() argument must be a lexer instance, '
'not a class')
raise
@@ -61,15 +62,15 @@ def format(tokens, formatter, outfile=None):
"""
try:
if not outfile:
- #print formatter, 'using', formatter.encoding
- realoutfile = formatter.encoding and BytesIO() or StringIO()
+ realoutfile = getattr(formatter, 'encoding', None) and BytesIO() or StringIO()
formatter.format(tokens, realoutfile)
return realoutfile.getvalue()
else:
formatter.format(tokens, outfile)
- except TypeError, err:
+ except TypeError as err:
if isinstance(err.args[0], str) and \
- 'unbound method format' in err.args[0]:
+ ('unbound method format' in err.args[0] or
+ 'missing 1 required positional argument' in err.args[0]):
raise TypeError('format() argument must be a formatter instance, '
'not a class')
raise
@@ -86,6 +87,6 @@ def highlight(code, lexer, formatter, outfile=None):
return format(lex(code, lexer), formatter, outfile)
-if __name__ == '__main__':
+if __name__ == '__main__': # pragma: no cover
from pygments.cmdline import main
sys.exit(main(sys.argv))
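
The extra ``'missing 1 required positional argument'`` check keeps the friendly error message on Python 3 when a lexer or formatter *class* is passed instead of an instance. A quick sketch of both cases::

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    # Normal use: instances are passed in.
    print(highlight('print("hi")', PythonLexer(), HtmlFormatter()))

    # Passing the class by mistake now raises the explanatory TypeError on
    # Python 3 as well ("lex() argument must be a lexer instance, not a class").
    try:
        highlight('print("hi")', PythonLexer, HtmlFormatter())
    except TypeError as err:
        print(err)
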
diff --git a/pygments/cmdline.py b/pygments/cmdline.py
index c25204bf..f5ea5653 100644
--- a/pygments/cmdline.py
+++ b/pygments/cmdline.py
@@ -5,27 +5,32 @@
Command line interface.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+
+from __future__ import print_function
+
import sys
import getopt
from textwrap import dedent
from pygments import __version__, highlight
-from pygments.util import ClassNotFound, OptionError, docstring_headline
-from pygments.lexers import get_all_lexers, get_lexer_by_name, get_lexer_for_filename, \
- find_lexer_class, guess_lexer, TextLexer
+from pygments.util import ClassNotFound, OptionError, docstring_headline, \
+ guess_decode, guess_decode_from_terminal, terminal_encoding
+from pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \
+ get_lexer_for_filename, find_lexer_class_for_filename, TextLexer
+from pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter
from pygments.formatters import get_all_formatters, get_formatter_by_name, \
- get_formatter_for_filename, find_formatter_class, \
- TerminalFormatter # pylint:disable-msg=E0611
+ get_formatter_for_filename, find_formatter_class, \
+ TerminalFormatter # pylint:disable-msg=E0611
from pygments.filters import get_all_filters, find_filter_class
from pygments.styles import get_all_styles, get_style_by_name
USAGE = """\
Usage: %s [-l <lexer> | -g] [-F <filter>[:<options>]] [-f <formatter>]
- [-O <options>] [-P <option=value>] [-o <outfile>] [<infile>]
+ [-O <options>] [-P <option=value>] [-s] [-v] [-o <outfile>] [<infile>]
%s -S <style> -f <formatter> [-a <arg>] [-O <options>] [-P <option=value>]
%s -L [<which> ...]
@@ -37,6 +42,10 @@ Highlight the input file and write the result to <outfile>.
If no input file is given, use stdin, if -o is not given, use stdout.
+If -s is passed, lexing will be done in "streaming" mode, reading and
+highlighting one line at a time. This will only work properly with
+lexers that have no constructs spanning multiple lines!
+
<lexer> is a lexer name (query all lexer names with -L). If -l is not
given, the lexer is guessed from the extension of the input file name
(this obviously doesn't work if the input is stdin). If -g is passed,
@@ -76,6 +85,14 @@ If no specific lexer can be determined "text" is returned.
The -H option prints detailed help for the object <name> of type <type>,
where <type> is one of "lexer", "formatter" or "filter".
+The -s option processes lines one at a time until EOF, rather than
+waiting to process the entire file. This only works for stdin, and
+is intended for streaming input such as you get from 'tail -f'.
+Example usage: "tail -f sql.log | pygmentize -s -l sql"
+
+The -v option prints a detailed traceback on unhandled exceptions,
+which is useful for debugging and bug reports.
+
The -h option prints this help.
The -V option prints the package version.
"""
@@ -86,13 +103,13 @@ def _parse_options(o_strs):
if not o_strs:
return opts
for o_str in o_strs:
- if not o_str:
+ if not o_str.strip():
continue
o_args = o_str.split(',')
for o_arg in o_args:
o_arg = o_arg.strip()
try:
- o_key, o_val = o_arg.split('=')
+ o_key, o_val = o_arg.split('=', 1)
o_key = o_key.strip()
o_val = o_val.strip()
except ValueError:
@@ -118,26 +135,28 @@ def _parse_filters(f_strs):
def _print_help(what, name):
try:
if what == 'lexer':
- cls = find_lexer_class(name)
- print "Help on the %s lexer:" % cls.name
- print dedent(cls.__doc__)
+ cls = get_lexer_by_name(name)
+ print("Help on the %s lexer:" % cls.name)
+ print(dedent(cls.__doc__))
elif what == 'formatter':
cls = find_formatter_class(name)
- print "Help on the %s formatter:" % cls.name
- print dedent(cls.__doc__)
+ print("Help on the %s formatter:" % cls.name)
+ print(dedent(cls.__doc__))
elif what == 'filter':
cls = find_filter_class(name)
- print "Help on the %s filter:" % name
- print dedent(cls.__doc__)
- except AttributeError:
- print >>sys.stderr, "%s not found!" % what
+ print("Help on the %s filter:" % name)
+ print(dedent(cls.__doc__))
+ return 0
+ except (AttributeError, ValueError):
+ print("%s not found!" % what, file=sys.stderr)
+ return 1
def _print_list(what):
if what == 'lexer':
- print
- print "Lexers:"
- print "~~~~~~~"
+ print()
+ print("Lexers:")
+ print("~~~~~~~")
info = []
for fullname, names, exts, _ in get_all_lexers():
@@ -146,12 +165,12 @@ def _print_list(what):
info.append(tup)
info.sort()
for i in info:
- print ('* %s\n %s %s') % i
+ print(('* %s\n %s %s') % i)
elif what == 'formatter':
- print
- print "Formatters:"
- print "~~~~~~~~~~~"
+ print()
+ print("Formatters:")
+ print("~~~~~~~~~~~")
info = []
for cls in get_all_formatters():
@@ -161,50 +180,30 @@ def _print_list(what):
info.append(tup)
info.sort()
for i in info:
- print ('* %s\n %s %s') % i
+ print(('* %s\n %s %s') % i)
elif what == 'filter':
- print
- print "Filters:"
- print "~~~~~~~~"
+ print()
+ print("Filters:")
+ print("~~~~~~~~")
for name in get_all_filters():
cls = find_filter_class(name)
- print "* " + name + ':'
- print " %s" % docstring_headline(cls)
+ print("* " + name + ':')
+ print(" %s" % docstring_headline(cls))
elif what == 'style':
- print
- print "Styles:"
- print "~~~~~~~"
+ print()
+ print("Styles:")
+ print("~~~~~~~")
for name in get_all_styles():
cls = get_style_by_name(name)
- print "* " + name + ':'
- print " %s" % docstring_headline(cls)
-
-
-def main(args=sys.argv):
- """
- Main command line entry point.
- """
- # pylint: disable-msg=R0911,R0912,R0915
+ print("* " + name + ':')
+ print(" %s" % docstring_headline(cls))
- usage = USAGE % ((args[0],) * 6)
- if sys.platform in ['win32', 'cygwin']:
- try:
- # Provide coloring under Windows, if possible
- import colorama
- colorama.init()
- except ImportError:
- pass
-
- try:
- popts, args = getopt.getopt(args[1:], "l:f:F:o:O:P:LS:a:N:hVHg")
- except getopt.GetoptError, err:
- print >>sys.stderr, usage
- return 2
+def main_inner(popts, args, usage):
opts = {}
O_opts = []
P_opts = []
@@ -218,23 +217,19 @@ def main(args=sys.argv):
F_opts.append(arg)
opts[opt] = arg
- if not opts and not args:
- print usage
- return 0
-
if opts.pop('-h', None) is not None:
- print usage
+ print(usage)
return 0
if opts.pop('-V', None) is not None:
- print 'Pygments version %s, (c) 2006-2013 by Georg Brandl.' % __version__
+ print('Pygments version %s, (c) 2006-2015 by Georg Brandl.' % __version__)
return 0
# handle ``pygmentize -L``
L_opt = opts.pop('-L', None)
if L_opt is not None:
if opts:
- print >>sys.stderr, usage
+ print(usage, file=sys.stderr)
return 2
# print version
@@ -249,16 +244,15 @@ def main(args=sys.argv):
H_opt = opts.pop('-H', None)
if H_opt is not None:
if opts or len(args) != 2:
- print >>sys.stderr, usage
+ print(usage, file=sys.stderr)
return 2
what, name = args
if what not in ('lexer', 'formatter', 'filter'):
- print >>sys.stderr, usage
+ print(usage, file=sys.stderr)
return 2
- _print_help(what, name)
- return 0
+ return _print_help(what, name)
# parse -O options
parsed_opts = _parse_options(O_opts)
@@ -274,18 +268,18 @@ def main(args=sys.argv):
parsed_opts[name] = value
opts.pop('-P', None)
+ # encodings
+ inencoding = parsed_opts.get('inencoding', parsed_opts.get('encoding'))
+ outencoding = parsed_opts.get('outencoding', parsed_opts.get('encoding'))
+
# handle ``pygmentize -N``
infn = opts.pop('-N', None)
if infn is not None:
- try:
- lexer = get_lexer_for_filename(infn, **parsed_opts)
- except ClassNotFound, err:
- lexer = TextLexer()
- except OptionError, err:
- print >>sys.stderr, 'Error:', err
- return 1
+ lexer = find_lexer_class_for_filename(infn)
+ if lexer is None:
+ lexer = TextLexer
- print lexer.aliases[0]
+ print(lexer.aliases[0])
return 0
# handle ``pygmentize -S``
@@ -294,148 +288,241 @@ def main(args=sys.argv):
if S_opt is not None:
f_opt = opts.pop('-f', None)
if not f_opt:
- print >>sys.stderr, usage
+ print(usage, file=sys.stderr)
return 2
if opts or args:
- print >>sys.stderr, usage
+ print(usage, file=sys.stderr)
return 2
try:
parsed_opts['style'] = S_opt
fmter = get_formatter_by_name(f_opt, **parsed_opts)
- except ClassNotFound, err:
- print >>sys.stderr, err
+ except ClassNotFound as err:
+ print(err, file=sys.stderr)
return 1
- arg = a_opt or ''
- try:
- print fmter.get_style_defs(arg)
- except Exception, err:
- print >>sys.stderr, 'Error:', err
- return 1
+ print(fmter.get_style_defs(a_opt or ''))
return 0
# if no -S is given, -a is not allowed
if a_opt is not None:
- print >>sys.stderr, usage
+ print(usage, file=sys.stderr)
return 2
# parse -F options
F_opts = _parse_filters(F_opts)
opts.pop('-F', None)
- # select formatter
- outfn = opts.pop('-o', None)
- fmter = opts.pop('-f', None)
- if fmter:
- try:
- fmter = get_formatter_by_name(fmter, **parsed_opts)
- except (OptionError, ClassNotFound), err:
- print >>sys.stderr, 'Error:', err
- return 1
+ # select lexer
+ lexer = None
- if outfn:
- if not fmter:
- try:
- fmter = get_formatter_for_filename(outfn, **parsed_opts)
- except (OptionError, ClassNotFound), err:
- print >>sys.stderr, 'Error:', err
- return 1
+ # given by name?
+ lexername = opts.pop('-l', None)
+ if lexername:
try:
- outfile = open(outfn, 'wb')
- except Exception, err:
- print >>sys.stderr, 'Error: cannot open outfile:', err
+ lexer = get_lexer_by_name(lexername, **parsed_opts)
+ except (OptionError, ClassNotFound) as err:
+ print('Error:', err, file=sys.stderr)
return 1
- else:
- if not fmter:
- fmter = TerminalFormatter(**parsed_opts)
- outfile = sys.stdout
- # select lexer
- lexer = opts.pop('-l', None)
- if lexer:
- try:
- lexer = get_lexer_by_name(lexer, **parsed_opts)
- except (OptionError, ClassNotFound), err:
- print >>sys.stderr, 'Error:', err
- return 1
+ # read input code
+ code = None
if args:
if len(args) > 1:
- print >>sys.stderr, usage
+ print(usage, file=sys.stderr)
+ return 2
+
+ if '-s' in opts:
+ print('Error: -s option not usable when input file specified',
+ file=sys.stderr)
return 2
infn = args[0]
try:
- code = open(infn, 'rb').read()
- except Exception, err:
- print >>sys.stderr, 'Error: cannot read infile:', err
+ with open(infn, 'rb') as infp:
+ code = infp.read()
+ except Exception as err:
+ print('Error: cannot read infile:', err, file=sys.stderr)
return 1
+ if not inencoding:
+ code, inencoding = guess_decode(code)
+ # do we have to guess the lexer?
if not lexer:
try:
lexer = get_lexer_for_filename(infn, code, **parsed_opts)
- except ClassNotFound, err:
+ except ClassNotFound as err:
if '-g' in opts:
try:
lexer = guess_lexer(code, **parsed_opts)
except ClassNotFound:
lexer = TextLexer(**parsed_opts)
else:
- print >>sys.stderr, 'Error:', err
+ print('Error:', err, file=sys.stderr)
return 1
- except OptionError, err:
- print >>sys.stderr, 'Error:', err
+ except OptionError as err:
+ print('Error:', err, file=sys.stderr)
return 1
- else:
- if '-g' in opts:
+ elif '-s' not in opts: # treat stdin as full file (-s support is later)
+ # read code from terminal, always in binary mode since we want to
+ # decode ourselves and be tolerant with it
+ if sys.version_info > (3,):
+ # Python 3: we have to use .buffer to get a binary stream
+ code = sys.stdin.buffer.read()
+ else:
code = sys.stdin.read()
+ if not inencoding:
+ code, inencoding = guess_decode_from_terminal(code, sys.stdin)
+ # else the lexer will do the decoding
+ if not lexer:
try:
lexer = guess_lexer(code, **parsed_opts)
except ClassNotFound:
lexer = TextLexer(**parsed_opts)
- elif not lexer:
- print >>sys.stderr, 'Error: no lexer name given and reading ' + \
- 'from stdin (try using -g or -l <lexer>)'
+
+ else: # -s option needs a lexer with -l
+ if not lexer:
+ print('Error: when using -s a lexer has to be selected with -l',
+ file=sys.stderr)
return 2
+
+ # process filters
+ for fname, fopts in F_opts:
+ try:
+ lexer.add_filter(fname, **fopts)
+ except ClassNotFound as err:
+ print('Error:', err, file=sys.stderr)
+ return 1
+
+ # select formatter
+ outfn = opts.pop('-o', None)
+ fmter = opts.pop('-f', None)
+ if fmter:
+ try:
+ fmter = get_formatter_by_name(fmter, **parsed_opts)
+ except (OptionError, ClassNotFound) as err:
+ print('Error:', err, file=sys.stderr)
+ return 1
+
+ if outfn:
+ if not fmter:
+ try:
+ fmter = get_formatter_for_filename(outfn, **parsed_opts)
+ except (OptionError, ClassNotFound) as err:
+ print('Error:', err, file=sys.stderr)
+ return 1
+ try:
+ outfile = open(outfn, 'wb')
+ except Exception as err:
+ print('Error: cannot open outfile:', err, file=sys.stderr)
+ return 1
+ else:
+ if not fmter:
+ fmter = TerminalFormatter(**parsed_opts)
+ if sys.version_info > (3,):
+ # Python 3: we have to use .buffer to get a binary stream
+ outfile = sys.stdout.buffer
else:
- code = sys.stdin.read()
+ outfile = sys.stdout
- # No encoding given? Use latin1 if output file given,
- # stdin/stdout encoding otherwise.
- # (This is a compromise, I'm not too happy with it...)
- if 'encoding' not in parsed_opts and 'outencoding' not in parsed_opts:
+ # determine output encoding if not explicitly selected
+ if not outencoding:
if outfn:
- # encoding pass-through
- fmter.encoding = 'latin1'
+ # output file? use lexer encoding for now (can still be None)
+ fmter.encoding = inencoding
else:
- if sys.version_info < (3,):
- # use terminal encoding; Python 3's terminals already do that
- lexer.encoding = getattr(sys.stdin, 'encoding',
- None) or 'ascii'
- fmter.encoding = getattr(sys.stdout, 'encoding',
- None) or 'ascii'
- elif not outfn and sys.version_info > (3,):
- # output to terminal with encoding -> use .buffer
- outfile = sys.stdout.buffer
+ # else use terminal encoding
+ fmter.encoding = terminal_encoding(sys.stdout)
+
+ # provide coloring under Windows, if possible
+ if not outfn and sys.platform in ('win32', 'cygwin') and \
+ fmter.name in ('Terminal', 'Terminal256'): # pragma: no cover
+ # unfortunately colorama doesn't support binary streams on Py3
+ if sys.version_info > (3,):
+ from pygments.util import UnclosingTextIOWrapper
+ outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding)
+ fmter.encoding = None
+ try:
+ import colorama.initialise
+ except ImportError:
+ pass
+ else:
+ outfile = colorama.initialise.wrap_stream(
+ outfile, convert=None, strip=None, autoreset=False, wrap=True)
+
+ # When using the LaTeX formatter and the option `escapeinside` is
+ # specified, we need a special lexer which collects escaped text
+ # before running the chosen language lexer.
+ escapeinside = parsed_opts.get('escapeinside', '')
+ if len(escapeinside) == 2 and isinstance(fmter, LatexFormatter):
+ left = escapeinside[0]
+ right = escapeinside[1]
+ lexer = LatexEmbeddedLexer(left, right, lexer)
# ... and do it!
- try:
- # process filters
- for fname, fopts in F_opts:
- lexer.add_filter(fname, **fopts)
+ if '-s' not in opts:
+ # process whole input as per normal...
highlight(code, lexer, fmter, outfile)
- except Exception, err:
+ return 0
+ else:
+ # line by line processing of stdin (eg: for 'tail -f')...
+ try:
+ while 1:
+ if sys.version_info > (3,):
+ # Python 3: we have to use .buffer to get a binary stream
+ line = sys.stdin.buffer.readline()
+ else:
+ line = sys.stdin.readline()
+ if not line:
+ break
+ if not inencoding:
+ line = guess_decode_from_terminal(line, sys.stdin)[0]
+ highlight(line, lexer, fmter, outfile)
+ if hasattr(outfile, 'flush'):
+ outfile.flush()
+ return 0
+ except KeyboardInterrupt: # pragma: no cover
+ return 0
+
+
+def main(args=sys.argv):
+ """
+ Main command line entry point.
+ """
+ usage = USAGE % ((args[0],) * 6)
+
+ try:
+ popts, args = getopt.getopt(args[1:], "l:f:F:o:O:P:LS:a:N:vhVHgs")
+ except getopt.GetoptError:
+ print(usage, file=sys.stderr)
+ return 2
+
+ try:
+ return main_inner(popts, args, usage)
+ except Exception:
+ if '-v' in dict(popts):
+ print(file=sys.stderr)
+ print('*' * 65, file=sys.stderr)
+ print('An unhandled exception occurred while highlighting.',
+ file=sys.stderr)
+ print('Please report the whole traceback to the issue tracker at',
+ file=sys.stderr)
+ print('<https://bitbucket.org/birkenfeld/pygments-main/issues>.',
+ file=sys.stderr)
+ print('*' * 65, file=sys.stderr)
+ print(file=sys.stderr)
+ raise
import traceback
info = traceback.format_exception(*sys.exc_info())
msg = info[-1].strip()
if len(info) >= 3:
# extract relevant file and position info
msg += '\n (f%s)' % info[-2].split('\n')[0].strip()[1:]
- print >>sys.stderr
- print >>sys.stderr, '*** Error while highlighting:'
- print >>sys.stderr, msg
+ print(file=sys.stderr)
+ print('*** Error while highlighting:', file=sys.stderr)
+ print(msg, file=sys.stderr)
+ print('*** If this is a bug you want to report, please rerun with -v.',
+ file=sys.stderr)
return 1
-
- return 0
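
The new ``-s`` mode boils down to a highlight-and-flush loop over stdin. Roughly the same thing can be done with the library directly (a hedged sketch; unlike ``-s`` it reads text rather than bytes and does no encoding guessing)::

    import sys

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    lexer = get_lexer_by_name('sql')
    formatter = TerminalFormatter()

    # e.g. tail -f sql.log | python this_script.py
    for line in sys.stdin:
        sys.stdout.write(highlight(line, lexer, formatter))
        sys.stdout.flush()
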
diff --git a/pygments/console.py b/pygments/console.py
index c8dfbd1f..4a2c9acb 100644
--- a/pygments/console.py
+++ b/pygments/console.py
@@ -5,7 +5,7 @@
Format colored console output.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/filter.py b/pygments/filter.py
index 0b9224f2..529d4f54 100644
--- a/pygments/filter.py
+++ b/pygments/filter.py
@@ -5,7 +5,7 @@
Module that implements the default filter.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -29,7 +29,7 @@ def simplefilter(f):
Decorator that converts a function into a filter::
@simplefilter
- def lowercase(lexer, stream, options):
+ def lowercase(self, lexer, stream, options):
for ttype, value in stream:
yield ttype, value.lower()
"""
diff --git a/pygments/filters/__init__.py b/pygments/filters/__init__.py
index 84c0193d..45bd49d5 100644
--- a/pygments/filters/__init__.py
+++ b/pygments/filters/__init__.py
@@ -6,7 +6,7 @@
Module containing filter lookup functions and default
filters.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -16,14 +16,12 @@ from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
string_to_tokentype
from pygments.filter import Filter
from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
- get_choice_opt, ClassNotFound, OptionError
+ get_choice_opt, ClassNotFound, OptionError, text_type, string_types
from pygments.plugin import find_plugin_filters
def find_filter_class(filtername):
- """
- Lookup a filter by name. Return None if not found.
- """
+ """Lookup a filter by name. Return None if not found."""
if filtername in FILTERS:
return FILTERS[filtername]
for name, cls in find_plugin_filters():
@@ -33,9 +31,10 @@ def find_filter_class(filtername):
def get_filter_by_name(filtername, **options):
- """
- Return an instantiated filter. Options are passed to the filter
- initializer if wanted. Raise a ClassNotFound if not found.
+ """Return an instantiated filter.
+
+ Options are passed to the filter initializer if wanted.
+ Raise a ClassNotFound if not found.
"""
cls = find_filter_class(filtername)
if cls:
@@ -45,9 +44,7 @@ def get_filter_by_name(filtername, **options):
def get_all_filters():
- """
- Return a generator of all filter names.
- """
+ """Return a generator of all filter names."""
for name in FILTERS:
yield name
for name, _ in find_plugin_filters():
@@ -68,8 +65,7 @@ def _replace_special(ttype, value, regex, specialttype,
class CodeTagFilter(Filter):
- """
- Highlight special code tags in comments and docstrings.
+ """Highlight special code tags in comments and docstrings.
Options accepted:
@@ -100,8 +96,7 @@ class CodeTagFilter(Filter):
class KeywordCaseFilter(Filter):
- """
- Convert keywords to lowercase or uppercase or capitalize them, which
+ """Convert keywords to lowercase or uppercase or capitalize them, which
means first letter uppercase, rest lowercase.
This can be useful e.g. if you highlight Pascal code and want to adapt the
@@ -116,8 +111,9 @@ class KeywordCaseFilter(Filter):
def __init__(self, **options):
Filter.__init__(self, **options)
- case = get_choice_opt(options, 'case', ['lower', 'upper', 'capitalize'], 'lower')
- self.convert = getattr(unicode, case)
+ case = get_choice_opt(options, 'case',
+ ['lower', 'upper', 'capitalize'], 'lower')
+ self.convert = getattr(text_type, case)
def filter(self, lexer, stream):
for ttype, value in stream:
@@ -128,8 +124,7 @@ class KeywordCaseFilter(Filter):
class NameHighlightFilter(Filter):
- """
- Highlight a normal Name (and Name.*) token with a different token type.
+ """Highlight a normal Name (and Name.*) token with a different token type.
Example::
@@ -172,9 +167,9 @@ class NameHighlightFilter(Filter):
class ErrorToken(Exception):
pass
+
class RaiseOnErrorTokenFilter(Filter):
- """
- Raise an exception when the lexer generates an error token.
+ """Raise an exception when the lexer generates an error token.
Options accepted:
@@ -182,7 +177,7 @@ class RaiseOnErrorTokenFilter(Filter):
The exception class to raise.
The default is `pygments.filters.ErrorToken`.
- *New in Pygments 0.8.*
+ .. versionadded:: 0.8
"""
def __init__(self, **options):
@@ -203,8 +198,7 @@ class RaiseOnErrorTokenFilter(Filter):
class VisibleWhitespaceFilter(Filter):
- """
- Convert tabs, newlines and/or spaces to visible characters.
+ """Convert tabs, newlines and/or spaces to visible characters.
Options accepted:
@@ -230,29 +224,31 @@ class VisibleWhitespaceFilter(Filter):
styling the visible whitespace differently (e.g. greyed out), but it can
disrupt background colors. The default is ``True``.
- *New in Pygments 0.8.*
+ .. versionadded:: 0.8
"""
def __init__(self, **options):
Filter.__init__(self, **options)
- for name, default in {'spaces': u'·', 'tabs': u'»', 'newlines': u'¶'}.items():
+ for name, default in [('spaces', u'·'),
+ ('tabs', u'»'),
+ ('newlines', u'¶')]:
opt = options.get(name, False)
- if isinstance(opt, basestring) and len(opt) == 1:
+ if isinstance(opt, string_types) and len(opt) == 1:
setattr(self, name, opt)
else:
setattr(self, name, (opt and default or ''))
tabsize = get_int_opt(options, 'tabsize', 8)
if self.tabs:
- self.tabs += ' '*(tabsize-1)
+ self.tabs += ' ' * (tabsize - 1)
if self.newlines:
self.newlines += '\n'
self.wstt = get_bool_opt(options, 'wstokentype', True)
def filter(self, lexer, stream):
if self.wstt:
- spaces = self.spaces or ' '
- tabs = self.tabs or '\t'
- newlines = self.newlines or '\n'
+ spaces = self.spaces or u' '
+ tabs = self.tabs or u'\t'
+ newlines = self.newlines or u'\n'
regex = re.compile(r'\s')
def replacefunc(wschar):
if wschar == ' ':
@@ -281,8 +277,7 @@ class VisibleWhitespaceFilter(Filter):
class GobbleFilter(Filter):
- """
- Gobbles source code lines (eats initial characters).
+ """Gobbles source code lines (eats initial characters).
This filter drops the first ``n`` characters off every line of code. This
may be useful when the source code fed to the lexer is indented by a fixed
@@ -293,7 +288,7 @@ class GobbleFilter(Filter):
`n` : int
The number of characters to gobble.
- *New in Pygments 1.2.*
+ .. versionadded:: 1.2
"""
def __init__(self, **options):
Filter.__init__(self, **options)
@@ -303,7 +298,7 @@ class GobbleFilter(Filter):
if left < len(value):
return value[left:], 0
else:
- return '', left - len(value)
+ return u'', left - len(value)
def filter(self, lexer, stream):
n = self.n
@@ -314,18 +309,17 @@ class GobbleFilter(Filter):
(parts[0], left) = self.gobble(parts[0], left)
for i in range(1, len(parts)):
(parts[i], left) = self.gobble(parts[i], n)
- value = '\n'.join(parts)
+ value = u'\n'.join(parts)
if value != '':
yield ttype, value
class TokenMergeFilter(Filter):
- """
- Merges consecutive tokens with the same token type in the output stream of a
- lexer.
+ """Merges consecutive tokens with the same token type in the output
+ stream of a lexer.
- *New in Pygments 1.2.*
+ .. versionadded:: 1.2
"""
def __init__(self, **options):
Filter.__init__(self, **options)
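
In everyday use these built-in filters are attached by name through ``Lexer.add_filter``, with options forwarded to the filter constructor; for instance::

    from pygments import highlight
    from pygments.formatters import NullFormatter
    from pygments.lexers import PythonLexer

    lexer = PythonLexer()
    lexer.add_filter('keywordcase', case='upper')   # KeywordCaseFilter
    lexer.add_filter('whitespace', spaces=True)     # VisibleWhitespaceFilter
    print(highlight('def f(x): return x', lexer, NullFormatter()))
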
diff --git a/pygments/formatter.py b/pygments/formatter.py
index c1d6f2e1..addd07d7 100644
--- a/pygments/formatter.py
+++ b/pygments/formatter.py
@@ -5,20 +5,20 @@
Base formatter class.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import codecs
-from pygments.util import get_bool_opt
+from pygments.util import get_bool_opt, string_types
from pygments.styles import get_style_by_name
__all__ = ['Formatter']
def _lookup_style(style):
- if isinstance(style, basestring):
+ if isinstance(style, string_types):
return get_style_by_name(style)
return style
@@ -68,10 +68,10 @@ class Formatter(object):
self.full = get_bool_opt(options, 'full', False)
self.title = options.get('title', '')
self.encoding = options.get('encoding', None) or None
- if self.encoding == 'guess':
- # can happen for pygmentize -O encoding=guess
+ if self.encoding in ('guess', 'chardet'):
+ # can happen for e.g. pygmentize -O encoding=guess
self.encoding = 'utf-8'
- self.encoding = options.get('outencoding', None) or self.encoding
+ self.encoding = options.get('outencoding') or self.encoding
self.options = options
def get_style_defs(self, arg=''):
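
The practical effect of the encoding handling above: ``guess``/``chardet`` (which only make sense for input) degrade to UTF-8 on the output side, and ``outencoding`` always overrides ``encoding``. For example::

    from pygments.formatters import HtmlFormatter

    print(HtmlFormatter(encoding='guess').encoding)                        # utf-8
    print(HtmlFormatter(encoding='latin1', outencoding='utf-8').encoding)  # utf-8
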
diff --git a/pygments/formatters/__init__.py b/pygments/formatters/__init__.py
index d842b96b..8ddf0ee3 100644
--- a/pygments/formatters/__init__.py
+++ b/pygments/formatters/__init__.py
@@ -5,64 +5,114 @@
Pygments formatters.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import os.path
+
+import re
+import sys
+import types
import fnmatch
+from os.path import basename
from pygments.formatters._mapping import FORMATTERS
from pygments.plugin import find_plugin_formatters
-from pygments.util import ClassNotFound
-
-ns = globals()
-for fcls in FORMATTERS:
- ns[fcls.__name__] = fcls
-del fcls
+from pygments.util import ClassNotFound, itervalues
__all__ = ['get_formatter_by_name', 'get_formatter_for_filename',
- 'get_all_formatters'] + [cls.__name__ for cls in FORMATTERS]
+ 'get_all_formatters'] + list(FORMATTERS)
+_formatter_cache = {} # classes by name
+_pattern_cache = {}
-_formatter_alias_cache = {}
-_formatter_filename_cache = []
-def _init_formatter_cache():
- if _formatter_alias_cache:
- return
- for cls in get_all_formatters():
- for alias in cls.aliases:
- _formatter_alias_cache[alias] = cls
- for fn in cls.filenames:
- _formatter_filename_cache.append((fn, cls))
+def _fn_matches(fn, glob):
+ """Return whether the supplied file name fn matches pattern filename."""
+ if glob not in _pattern_cache:
+ pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
+ return pattern.match(fn)
+ return _pattern_cache[glob].match(fn)
-def find_formatter_class(name):
- _init_formatter_cache()
- cls = _formatter_alias_cache.get(name, None)
- return cls
+def _load_formatters(module_name):
+ """Load a formatter (and all others in the module too)."""
+ mod = __import__(module_name, None, None, ['__all__'])
+ for formatter_name in mod.__all__:
+ cls = getattr(mod, formatter_name)
+ _formatter_cache[cls.name] = cls
-def get_formatter_by_name(name, **options):
- _init_formatter_cache()
- cls = _formatter_alias_cache.get(name, None)
- if not cls:
- raise ClassNotFound("No formatter found for name %r" % name)
- return cls(**options)
+def get_all_formatters():
+ """Return a generator for all formatter classes."""
+ # NB: this returns formatter classes, not info like get_all_lexers().
+ for info in itervalues(FORMATTERS):
+ if info[1] not in _formatter_cache:
+ _load_formatters(info[0])
+ yield _formatter_cache[info[1]]
+ for _, formatter in find_plugin_formatters():
+ yield formatter
-def get_formatter_for_filename(fn, **options):
- _init_formatter_cache()
- fn = os.path.basename(fn)
- for pattern, cls in _formatter_filename_cache:
- if fnmatch.fnmatch(fn, pattern):
- return cls(**options)
- raise ClassNotFound("No formatter found for file name %r" % fn)
+def find_formatter_class(alias):
+ """Lookup a formatter by alias.
+ Returns None if not found.
+ """
+ for module_name, name, aliases, _, _ in itervalues(FORMATTERS):
+ if alias in aliases:
+ if name not in _formatter_cache:
+ _load_formatters(module_name)
+ return _formatter_cache[name]
+ for _, cls in find_plugin_formatters():
+ if alias in cls.aliases:
+ return cls
-def get_all_formatters():
- """Return a generator for all formatters."""
- for formatter in FORMATTERS:
- yield formatter
- for _, formatter in find_plugin_formatters():
- yield formatter
+
+def get_formatter_by_name(_alias, **options):
+ """Lookup and instantiate a formatter by alias.
+
+ Raises ClassNotFound if not found.
+ """
+ cls = find_formatter_class(_alias)
+ if cls is None:
+ raise ClassNotFound("no formatter found for name %r" % _alias)
+ return cls(**options)
+
+
+def get_formatter_for_filename(fn, **options):
+ """Lookup and instantiate a formatter by filename pattern.
+
+ Raises ClassNotFound if not found.
+ """
+ fn = basename(fn)
+ for modname, name, _, filenames, _ in itervalues(FORMATTERS):
+ for filename in filenames:
+ if _fn_matches(fn, filename):
+ if name not in _formatter_cache:
+ _load_formatters(modname)
+ return _formatter_cache[name](**options)
+ for cls in find_plugin_formatters():
+ for filename in cls.filenames:
+ if _fn_matches(fn, filename):
+ return cls(**options)
+ raise ClassNotFound("no formatter found for file name %r" % fn)
+
+
+class _automodule(types.ModuleType):
+ """Automatically import formatters."""
+
+ def __getattr__(self, name):
+ info = FORMATTERS.get(name)
+ if info:
+ _load_formatters(info[0])
+ cls = _formatter_cache[info[1]]
+ setattr(self, name, cls)
+ return cls
+ raise AttributeError(name)
+
+
+oldmod = sys.modules[__name__]
+newmod = _automodule(__name__)
+newmod.__dict__.update(oldmod.__dict__)
+sys.modules[__name__] = newmod
+del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
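
After this rewrite the formatter modules are no longer imported up front: lookups go through the name-keyed FORMATTERS mapping, classes are loaded on first use, and attribute access on the package is routed through the _automodule shim. The public helpers behave as before; an illustrative sketch:

from pygments.formatters import (HtmlFormatter, get_all_formatters,
                                 get_formatter_by_name,
                                 get_formatter_for_filename)

html = get_formatter_by_name('html', linenos=True)  # imports ...formatters.html lazily
tex = get_formatter_for_filename('listing.tex')     # matched via the '*.tex' pattern
print(isinstance(html, HtmlFormatter))              # True
print(type(tex).__name__)                           # 'LatexFormatter'

# Still yields classes (not instances), including plugin formatters.
print(sorted(cls.name for cls in get_all_formatters())[:3])
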
diff --git a/pygments/formatters/_mapping.py b/pygments/formatters/_mapping.py
index a423ba50..bfc82253 100755
--- a/pygments/formatters/_mapping.py
+++ b/pygments/formatters/_mapping.py
@@ -3,48 +3,36 @@
pygments.formatters._mapping
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- Formatter mapping defintions. This file is generated by itself. Everytime
- you change something on a builtin formatter defintion, run this script from
+    Formatter mapping definitions. This file is generated by itself. Every time
+ you change something on a builtin formatter definition, run this script from
the formatters folder to update it.
Do not alter the FORMATTERS dictionary by hand.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-# start
-from pygments.formatters.bbcode import BBCodeFormatter
-from pygments.formatters.html import HtmlFormatter
-from pygments.formatters.img import BmpImageFormatter
-from pygments.formatters.img import GifImageFormatter
-from pygments.formatters.img import ImageFormatter
-from pygments.formatters.img import JpgImageFormatter
-from pygments.formatters.latex import LatexFormatter
-from pygments.formatters.other import NullFormatter
-from pygments.formatters.other import RawTokenFormatter
-from pygments.formatters.rtf import RtfFormatter
-from pygments.formatters.svg import SvgFormatter
-from pygments.formatters.terminal import TerminalFormatter
-from pygments.formatters.terminal256 import Terminal256Formatter
+from __future__ import print_function
FORMATTERS = {
- BBCodeFormatter: ('BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'),
- BmpImageFormatter: ('img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- GifImageFormatter: ('img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- HtmlFormatter: ('HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass` option."),
- ImageFormatter: ('img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- JpgImageFormatter: ('img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- LatexFormatter: ('LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'),
- NullFormatter: ('Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'),
- RawTokenFormatter: ('Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'),
- RtfFormatter: ('RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft\xc2\xae Word\xc2\xae documents.'),
- SvgFormatter: ('SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ``<text>`` element with explicit ``x`` and ``y`` coordinates containing ``<tspan>`` elements with the individual token styles.'),
- Terminal256Formatter: ('Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
- TerminalFormatter: ('Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.')
+ 'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'),
+ 'BmpImageFormatter': ('pygments.formatters.img', 'img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+ 'GifImageFormatter': ('pygments.formatters.img', 'img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+ 'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass` option."),
+ 'ImageFormatter': ('pygments.formatters.img', 'img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+ 'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+ 'LatexFormatter': ('pygments.formatters.latex', 'LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'),
+ 'NullFormatter': ('pygments.formatters.other', 'Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'),
+ 'RawTokenFormatter': ('pygments.formatters.other', 'Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'),
+ 'RtfFormatter': ('pygments.formatters.rtf', 'RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft(R) Word(R) documents.'),
+ 'SvgFormatter': ('pygments.formatters.svg', 'SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ``<text>`` element with explicit ``x`` and ``y`` coordinates containing ``<tspan>`` elements with the individual token styles.'),
+ 'Terminal256Formatter': ('pygments.formatters.terminal256', 'Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
+ 'TerminalFormatter': ('pygments.formatters.terminal', 'Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'),
+ 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.')
}
-if __name__ == '__main__':
+if __name__ == '__main__': # pragma: no cover
import sys
import os
@@ -54,39 +42,35 @@ if __name__ == '__main__':
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
from pygments.util import docstring_headline
- for filename in os.listdir('.'):
- if filename.endswith('.py') and not filename.startswith('_'):
- module_name = 'pygments.formatters.%s' % filename[:-3]
- print module_name
- module = __import__(module_name, None, None, [''])
- for formatter_name in module.__all__:
- imports.append((module_name, formatter_name))
- formatter = getattr(module, formatter_name)
- found_formatters.append(
- '%s: %r' % (formatter_name,
- (formatter.name,
- tuple(formatter.aliases),
- tuple(formatter.filenames),
- docstring_headline(formatter))))
- # sort them, that should make the diff files for svn smaller
+ for root, dirs, files in os.walk('.'):
+ for filename in files:
+ if filename.endswith('.py') and not filename.startswith('_'):
+ module_name = 'pygments.formatters%s.%s' % (
+ root[1:].replace('/', '.'), filename[:-3])
+ print(module_name)
+ module = __import__(module_name, None, None, [''])
+ for formatter_name in module.__all__:
+ formatter = getattr(module, formatter_name)
+ found_formatters.append(
+ '%r: %r' % (formatter_name,
+ (module_name,
+ formatter.name,
+ tuple(formatter.aliases),
+ tuple(formatter.filenames),
+ docstring_headline(formatter))))
+ # sort them to make the diff minimal
found_formatters.sort()
- imports.sort()
# extract useful sourcecode from this file
- f = open(__file__)
- try:
- content = f.read()
- finally:
- f.close()
- header = content[:content.find('# start')]
+ with open(__file__) as fp:
+ content = fp.read()
+ header = content[:content.find('FORMATTERS = {')]
footer = content[content.find("if __name__ == '__main__':"):]
# write new file
- f = open(__file__, 'w')
- f.write(header)
- f.write('# start\n')
- f.write('\n'.join(['from %s import %s' % imp for imp in imports]))
- f.write('\n\n')
- f.write('FORMATTERS = {\n %s\n}\n\n' % ',\n '.join(found_formatters))
- f.write(footer)
- f.close()
+ with open(__file__, 'w') as fp:
+ fp.write(header)
+ fp.write('FORMATTERS = {\n %s\n}\n\n' % ',\n '.join(found_formatters))
+ fp.write(footer)
+
+ print ('=== %d formatters processed.' % len(found_formatters))
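
Each regenerated FORMATTERS entry is now keyed by class name and stores (module, pretty name, aliases, filename patterns, docstring headline) rather than an already-imported class, which is what the lazy lookups above rely on. Reading the mapping directly, purely for illustration:

from pygments.formatters._mapping import FORMATTERS

module, name, aliases, patterns, doc = FORMATTERS['HtmlFormatter']
print(module)     # 'pygments.formatters.html'
print(name)       # 'HTML'
print(aliases)    # ('html',)
print(patterns)   # ('*.html', '*.htm')
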
diff --git a/pygments/formatters/bbcode.py b/pygments/formatters/bbcode.py
index 15faff6c..580989f0 100644
--- a/pygments/formatters/bbcode.py
+++ b/pygments/formatters/bbcode.py
@@ -5,7 +5,7 @@
BBcode formatter.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/formatters/html.py b/pygments/formatters/html.py
index 06096930..67ad685f 100644
--- a/pygments/formatters/html.py
+++ b/pygments/formatters/html.py
@@ -5,18 +5,20 @@
Formatter for HTML output.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+from __future__ import print_function
+
import os
import sys
import os.path
-import StringIO
from pygments.formatter import Formatter
from pygments.token import Token, Text, STANDARD_TYPES
-from pygments.util import get_bool_opt, get_int_opt, get_list_opt, bytes
+from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
+ StringIO, string_types, iteritems
try:
import ctags
@@ -34,21 +36,11 @@ _escape_html_table = {
ord("'"): u'&#39;',
}
+
def escape_html(text, table=_escape_html_table):
"""Escape &, <, > as well as single and double quotes for HTML."""
return text.translate(table)
-def get_random_id():
- """Return a random id for javascript fields."""
- from random import random
- from time import time
- try:
- from hashlib import sha1 as sha
- except ImportError:
- import sha
- sha = sha.new
- return sha('%s|%s' % (random(), time())).hexdigest()
-
def _get_ttype_class(ttype):
fname = STANDARD_TYPES.get(ttype)
@@ -218,29 +210,34 @@ class HtmlFormatter(Formatter):
If you set this option, the default selector for `get_style_defs()`
will be this class.
- *New in Pygments 0.9:* If you select the ``'table'`` line numbers, the
- wrapping table will have a CSS class of this string plus ``'table'``,
- the default is accordingly ``'highlighttable'``.
+ .. versionadded:: 0.9
+ If you select the ``'table'`` line numbers, the wrapping table will
+ have a CSS class of this string plus ``'table'``, the default is
+ accordingly ``'highlighttable'``.
`cssstyles`
Inline CSS styles for the wrapping ``<div>`` tag (default: ``''``).
`prestyles`
- Inline CSS styles for the ``<pre>`` tag (default: ``''``). *New in
- Pygments 0.11.*
+ Inline CSS styles for the ``<pre>`` tag (default: ``''``).
+
+ .. versionadded:: 0.11
`cssfile`
If the `full` option is true and this option is given, it must be the
name of an external file. If the filename does not include an absolute
path, the file's path will be assumed to be relative to the main output
file's path, if the latter can be found. The stylesheet is then written
- to this file instead of the HTML file. *New in Pygments 0.6.*
+ to this file instead of the HTML file.
+
+ .. versionadded:: 0.6
`noclobber_cssfile`
If `cssfile` is given and the specified file exists, the css file will
not be overwritten. This allows the use of the `full` option in
combination with a user specified css file. Default is ``False``.
- *New in Pygments 1.1.*
+
+ .. versionadded:: 1.1
`linenos`
If set to ``'table'``, output line numbers as a table with two cells,
@@ -263,7 +260,9 @@ class HtmlFormatter(Formatter):
125%``).
`hl_lines`
- Specify a list of lines to be highlighted. *New in Pygments 0.11.*
+ Specify a list of lines to be highlighted.
+
+ .. versionadded:: 0.11
`linenostart`
The line number for the first line (default: ``1``).
@@ -279,24 +278,30 @@ class HtmlFormatter(Formatter):
If set to ``True``, the formatter won't output the background color
for the wrapping element (this automatically defaults to ``False``
when there is no wrapping element [eg: no argument for the
- `get_syntax_defs` method given]) (default: ``False``). *New in
- Pygments 0.6.*
+ `get_syntax_defs` method given]) (default: ``False``).
+
+ .. versionadded:: 0.6
`lineseparator`
This string is output between lines of code. It defaults to ``"\n"``,
which is enough to break a line inside ``<pre>`` tags, but you can
- e.g. set it to ``"<br>"`` to get HTML line breaks. *New in Pygments
- 0.7.*
+ e.g. set it to ``"<br>"`` to get HTML line breaks.
+
+ .. versionadded:: 0.7
`lineanchors`
If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
output line in an anchor tag with a ``name`` of ``foo-linenumber``.
- This allows easy linking to certain lines. *New in Pygments 0.9.*
+ This allows easy linking to certain lines.
+
+ .. versionadded:: 0.9
`linespans`
If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
output line in a span tag with an ``id`` of ``foo-linenumber``.
- This allows easy access to lines via javascript. *New in Pygments 1.6.*
+ This allows easy access to lines via javascript.
+
+ .. versionadded:: 1.6
`anchorlinenos`
If set to `True`, will wrap line numbers in <a> tags. Used in
@@ -306,18 +311,20 @@ class HtmlFormatter(Formatter):
If set to the path of a ctags file, wrap names in anchor tags that
link to their definitions. `lineanchors` should be used, and the
tags file should specify line numbers (see the `-n` option to ctags).
- *New in Pygments 1.6.*
+
+ .. versionadded:: 1.6
`tagurlformat`
A string formatting pattern used to generate links to ctags definitions.
Available variables are `%(path)s`, `%(fname)s` and `%(fext)s`.
Defaults to an empty string, resulting in just `#prefix-number` links.
- *New in Pygments 1.6.*
+
+ .. versionadded:: 1.6
**Subclassing the HTML formatter**
- *New in Pygments 0.7.*
+ .. versionadded:: 0.7
The HTML formatter is now built in a way that allows easy subclassing, thus
customizing the output HTML code. The `format()` method calls
@@ -421,6 +428,15 @@ class HtmlFormatter(Formatter):
return self.classprefix + ttypeclass
return ''
+ def _get_css_classes(self, ttype):
+ """Return the css classes of this token type prefixed with
+ the classprefix option."""
+ cls = self._get_css_class(ttype)
+ while ttype not in STANDARD_TYPES:
+ ttype = ttype.parent
+ cls = self._get_css_class(ttype) + ' ' + cls
+ return cls
+
def _create_stylesheet(self):
t2c = self.ttype2class = {Token: ''}
c2s = self.class2style = {}
@@ -453,7 +469,7 @@ class HtmlFormatter(Formatter):
"""
if arg is None:
arg = ('cssclass' in self.options and '.'+self.cssclass or '')
- if isinstance(arg, basestring):
+ if isinstance(arg, string_types):
args = [arg]
else:
args = list(arg)
@@ -467,7 +483,7 @@ class HtmlFormatter(Formatter):
return ', '.join(tmp)
styles = [(level, ttype, cls, style)
- for cls, (style, ttype, level) in self.class2style.iteritems()
+ for cls, (style, ttype, level) in iteritems(self.class2style)
if cls and style]
styles.sort()
lines = ['%s { %s } /* %s */' % (prefix(cls), style, repr(ttype)[6:])
@@ -505,8 +521,9 @@ class HtmlFormatter(Formatter):
cssfilename = os.path.join(os.path.dirname(filename),
self.cssfile)
except AttributeError:
- print >>sys.stderr, 'Note: Cannot determine output file name, ' \
- 'using current directory as base for the CSS file name'
+ print('Note: Cannot determine output file name, ' \
+ 'using current directory as base for the CSS file name',
+ file=sys.stderr)
cssfilename = self.cssfile
# write CSS file only if noclobber_cssfile isn't given as an option.
try:
@@ -515,7 +532,7 @@ class HtmlFormatter(Formatter):
cf.write(CSSFILE_TEMPLATE %
{'styledefs': self.get_style_defs('body')})
cf.close()
- except IOError, err:
+ except IOError as err:
err.strerror = 'Error writing CSS file: ' + err.strerror
raise
@@ -534,7 +551,7 @@ class HtmlFormatter(Formatter):
yield 0, DOC_FOOTER
def _wrap_tablelinenos(self, inner):
- dummyoutfile = StringIO.StringIO()
+ dummyoutfile = StringIO()
lncount = 0
for t, line in inner:
if t:
@@ -610,24 +627,24 @@ class HtmlFormatter(Formatter):
style = 'background-color: #ffffc0; padding: 0 5px 0 5px'
else:
style = 'background-color: #f0f0f0; padding: 0 5px 0 5px'
- yield 1, '<span style="%s">%*s</span> ' % (
+ yield 1, '<span style="%s">%*s </span>' % (
style, mw, (num%st and ' ' or num)) + line
num += 1
else:
for t, line in lines:
yield 1, ('<span style="background-color: #f0f0f0; '
- 'padding: 0 5px 0 5px">%*s</span> ' % (
+ 'padding: 0 5px 0 5px">%*s </span>' % (
mw, (num%st and ' ' or num)) + line)
num += 1
elif sp:
for t, line in lines:
- yield 1, '<span class="lineno%s">%*s</span> ' % (
+ yield 1, '<span class="lineno%s">%*s </span>' % (
num%sp == 0 and ' special' or '', mw,
(num%st and ' ' or num)) + line
num += 1
else:
for t, line in lines:
- yield 1, '<span class="lineno">%*s</span> ' % (
+ yield 1, '<span class="lineno">%*s </span>' % (
mw, (num%st and ' ' or num)) + line
num += 1
@@ -703,7 +720,7 @@ class HtmlFormatter(Formatter):
cclass = getcls(ttype)
cspan = cclass and '<span style="%s">' % c2s[cclass][0] or ''
else:
- cls = self._get_css_class(ttype)
+ cls = self._get_css_classes(ttype)
cspan = cls and '<span class="%s">' % cls or ''
parts = value.translate(escape_table).split('\n')
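
The new `_get_css_classes()` used at the end of this hunk walks a token type up to its nearest standard ancestor, so a subtype with no entry in STANDARD_TYPES is emitted with its parent's class in front of its own and existing stylesheets keep applying. A small sketch of the effect; `Token.Name.Function.Special` is made up for illustration, and the underscore-prefixed method is called directly only to show the computed classes:

from pygments.formatters import HtmlFormatter
from pygments.token import Token

fmt = HtmlFormatter(classprefix='pyg-')
custom = Token.Name.Function.Special   # has no entry of its own in STANDARD_TYPES
print(fmt._get_css_classes(custom))    # e.g. 'pyg-nf pyg-nf-Special'
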
diff --git a/pygments/formatters/img.py b/pygments/formatters/img.py
index 394c3b6a..9d1365a4 100644
--- a/pygments/formatters/img.py
+++ b/pygments/formatters/img.py
@@ -5,15 +5,15 @@
Formatter for Pixmap output.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
from pygments.formatter import Formatter
-from pygments.util import get_bool_opt, get_int_opt, \
- get_list_opt, get_choice_opt
+from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
+ get_choice_opt, xrange
# Import this carefully
try:
@@ -25,7 +25,10 @@ except ImportError:
try:
import _winreg
except ImportError:
- _winreg = None
+ try:
+ import winreg as _winreg
+ except ImportError:
+ _winreg = None
__all__ = ['ImageFormatter', 'GifImageFormatter', 'JpgImageFormatter',
'BmpImageFormatter']
@@ -72,7 +75,10 @@ class FontManager(object):
self._create_nix()
def _get_nix_font_path(self, name, style):
- from commands import getstatusoutput
+ try:
+ from commands import getstatusoutput
+ except ImportError:
+ from subprocess import getstatusoutput
exit, out = getstatusoutput('fc-list "%s:style=%s" file' %
(name, style))
if not exit:
@@ -169,7 +175,7 @@ class ImageFormatter(Formatter):
Create a PNG image from source code. This uses the Python Imaging Library to
generate a pixmap from the source code.
- *New in Pygments 0.10.*
+ .. versionadded:: 0.10
Additional options accepted:
@@ -258,12 +264,16 @@ class ImageFormatter(Formatter):
Default: 6
`hl_lines`
- Specify a list of lines to be highlighted. *New in Pygments 1.2.*
+ Specify a list of lines to be highlighted.
+
+ .. versionadded:: 1.2
Default: empty list
`hl_color`
- Specify the color for highlighting lines. *New in Pygments 1.2.*
+ Specify the color for highlighting lines.
+
+ .. versionadded:: 1.2
Default: highlight color of the selected style
"""
@@ -285,6 +295,7 @@ class ImageFormatter(Formatter):
raise PilNotAvailable(
'Python Imaging Library is required for this formatter')
Formatter.__init__(self, **options)
+ self.encoding = 'latin1' # let pygments.format() do the right thing
# Read the style
self.styles = dict(self.style)
if self.style.background_color is None:
@@ -305,20 +316,20 @@ class ImageFormatter(Formatter):
self.line_number_fg = options.get('line_number_fg', '#886')
self.line_number_bg = options.get('line_number_bg', '#eed')
self.line_number_chars = get_int_opt(options,
- 'line_number_chars', 2)
+ 'line_number_chars', 2)
self.line_number_bold = get_bool_opt(options,
- 'line_number_bold', False)
+ 'line_number_bold', False)
self.line_number_italic = get_bool_opt(options,
- 'line_number_italic', False)
+ 'line_number_italic', False)
self.line_number_pad = get_int_opt(options, 'line_number_pad', 6)
self.line_numbers = get_bool_opt(options, 'line_numbers', True)
self.line_number_separator = get_bool_opt(options,
- 'line_number_separator', True)
+ 'line_number_separator', True)
self.line_number_step = get_int_opt(options, 'line_number_step', 1)
self.line_number_start = get_int_opt(options, 'line_number_start', 1)
if self.line_numbers:
self.line_number_width = (self.fontw * self.line_number_chars +
- self.line_number_pad * 2)
+ self.line_number_pad * 2)
else:
self.line_number_width = 0
self.hl_lines = []
@@ -427,7 +438,7 @@ class ImageFormatter(Formatter):
# quite complex.
value = value.expandtabs(4)
lines = value.splitlines(True)
- #print lines
+ # print lines
for i, line in enumerate(lines):
temp = line.rstrip('\n')
if temp:
@@ -468,9 +479,8 @@ class ImageFormatter(Formatter):
draw = ImageDraw.Draw(im)
recth = im.size[-1]
rectw = self.image_pad + self.line_number_width - self.line_number_pad
- draw.rectangle([(0, 0),
- (rectw, recth)],
- fill=self.line_number_bg)
+ draw.rectangle([(0, 0), (rectw, recth)],
+ fill=self.line_number_bg)
draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg)
del draw
@@ -513,8 +523,7 @@ class GifImageFormatter(ImageFormatter):
Create a GIF image from source code. This uses the Python Imaging Library to
generate a pixmap from the source code.
- *New in Pygments 1.0.* (You could create GIF images before by passing a
- suitable `image_format` option to the `ImageFormatter`.)
+ .. versionadded:: 1.0
"""
name = 'img_gif'
@@ -528,8 +537,7 @@ class JpgImageFormatter(ImageFormatter):
Create a JPEG image from source code. This uses the Python Imaging Library to
generate a pixmap from the source code.
- *New in Pygments 1.0.* (You could create JPEG images before by passing a
- suitable `image_format` option to the `ImageFormatter`.)
+ .. versionadded:: 1.0
"""
name = 'img_jpg'
@@ -543,8 +551,7 @@ class BmpImageFormatter(ImageFormatter):
Create a bitmap image from source code. This uses the Python Imaging Library to
generate a pixmap from the source code.
- *New in Pygments 1.0.* (You could create bitmap images before by passing a
- suitable `image_format` option to the `ImageFormatter`.)
+ .. versionadded:: 1.0
"""
name = 'img_bmp'
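
Besides the Python 3 fallbacks for `winreg` and `getstatusoutput`, the hunks in this file touch the line-number and highlight options of ImageFormatter. A usage sketch combining them; it needs the Python Imaging Library, and the output filename is arbitrary:

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import ImageFormatter

code = "def foo(bar):\n    return bar * 2\n"
png = highlight(code, PythonLexer(),
                ImageFormatter(line_numbers=True,
                               line_number_start=10,   # first printed number
                               hl_lines=[2],           # highlight the 2nd line
                               hl_color='#ffffcc'))
with open('example.png', 'wb') as fp:   # the formatter output is bytes
    fp.write(png)
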
diff --git a/pygments/formatters/latex.py b/pygments/formatters/latex.py
index 47fd1239..15e68e37 100644
--- a/pygments/formatters/latex.py
+++ b/pygments/formatters/latex.py
@@ -5,13 +5,17 @@
Formatter for LaTeX fancyvrb output.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+from __future__ import division
+
from pygments.formatter import Formatter
+from pygments.lexer import Lexer
from pygments.token import Token, STANDARD_TYPES
-from pygments.util import get_bool_opt, get_int_opt, StringIO
+from pygments.util import get_bool_opt, get_int_opt, StringIO, xrange, \
+ iteritems
__all__ = ['LatexFormatter']
@@ -152,7 +156,7 @@ class LatexFormatter(Formatter):
.. sourcecode:: latex
- \begin{Verbatim}[commandchars=\\{\}]
+ \begin{Verbatim}[commandchars=\\\{\}]
\PY{k}{def }\PY{n+nf}{foo}(\PY{n}{bar}):
\PY{k}{pass}
\end{Verbatim}
@@ -205,19 +209,40 @@ class LatexFormatter(Formatter):
`commandprefix`
The LaTeX commands used to produce colored output are constructed
using this prefix and some letters (default: ``'PY'``).
- *New in Pygments 0.7.*
- *New in Pygments 0.10:* the default is now ``'PY'`` instead of ``'C'``.
+ .. versionadded:: 0.7
+ .. versionchanged:: 0.10
+ The default is now ``'PY'`` instead of ``'C'``.
`texcomments`
If set to ``True``, enables LaTeX comment lines. That is, LaTex markup
in comment tokens is not escaped so that LaTeX can render it (default:
- ``False``). *New in Pygments 1.2.*
+ ``False``).
+
+ .. versionadded:: 1.2
`mathescape`
If set to ``True``, enables LaTeX math mode escape in comments. That
is, ``'$...$'`` inside a comment will trigger math mode (default:
- ``False``). *New in Pygments 1.2.*
+ ``False``).
+
+ .. versionadded:: 1.2
+
+ `escapeinside`
+ If set to a string of length 2, enables escaping to LaTeX. Text
+ delimited by these 2 characters is read as LaTeX code and
+ typeset accordingly. It has no effect in string literals. It has
+ no effect in comments if `texcomments` or `mathescape` is
+ set. (default: ``''``).
+
+ .. versionadded:: 2.0
+
+ `envname`
+ Allows you to pick an alternative environment name replacing Verbatim.
+ The alternate environment still has to support Verbatim's option syntax.
+ (default: ``'Verbatim'``).
+
+ .. versionadded:: 2.0
"""
name = 'LaTeX'
aliases = ['latex', 'tex']
@@ -235,10 +260,16 @@ class LatexFormatter(Formatter):
self.commandprefix = options.get('commandprefix', 'PY')
self.texcomments = get_bool_opt(options, 'texcomments', False)
self.mathescape = get_bool_opt(options, 'mathescape', False)
+ self.escapeinside = options.get('escapeinside', '')
+ if len(self.escapeinside) == 2:
+ self.left = self.escapeinside[0]
+ self.right = self.escapeinside[1]
+ else:
+ self.escapeinside = ''
+ self.envname = options.get('envname', u'Verbatim')
self._create_stylesheet()
-
def _create_stylesheet(self):
t2n = self.ttype2name = {Token: ''}
c2d = self.cmd2def = {}
@@ -246,7 +277,7 @@ class LatexFormatter(Formatter):
def rgbcolor(col):
if col:
- return ','.join(['%.2f' %(int(col[i] + col[i + 1], 16) / 255.0)
+ return ','.join(['%.2f' % (int(col[i] + col[i + 1], 16) / 255.0)
for i in (0, 2, 4)])
else:
return '1,1,1'
@@ -291,7 +322,7 @@ class LatexFormatter(Formatter):
"""
cp = self.commandprefix
styles = []
- for name, definition in self.cmd2def.iteritems():
+ for name, definition in iteritems(self.cmd2def):
styles.append(r'\expandafter\def\csname %s@tok@%s\endcsname{%s}' %
(cp, name, definition))
return STYLE_TEMPLATE % {'cp': self.commandprefix,
@@ -306,14 +337,14 @@ class LatexFormatter(Formatter):
realoutfile = outfile
outfile = StringIO()
- outfile.write(ur'\begin{Verbatim}[commandchars=\\\{\}')
+ outfile.write(u'\\begin{' + self.envname + u'}[commandchars=\\\\\\{\\}')
if self.linenos:
start, step = self.linenostart, self.linenostep
outfile.write(u',numbers=left' +
(start and u',firstnumber=%d' % start or u'') +
(step and u',stepnumber=%d' % step or u''))
- if self.mathescape or self.texcomments:
- outfile.write(ur',codes={\catcode`\$=3\catcode`\^=7\catcode`\_=8}')
+ if self.mathescape or self.texcomments or self.escapeinside:
+ outfile.write(u',codes={\\catcode`\\$=3\\catcode`\\^=7\\catcode`\\_=8}')
if self.verboptions:
outfile.write(u',' + self.verboptions)
outfile.write(u']\n')
@@ -329,7 +360,7 @@ class LatexFormatter(Formatter):
start += value[i]
value = value[len(start):]
- start = escape_tex(start, self.commandprefix)
+ start = escape_tex(start, cp)
# ... but do not escape inside comment.
value = start + value
@@ -339,13 +370,26 @@ class LatexFormatter(Formatter):
in_math = False
for i, part in enumerate(parts):
if not in_math:
- parts[i] = escape_tex(part, self.commandprefix)
+ parts[i] = escape_tex(part, cp)
in_math = not in_math
value = '$'.join(parts)
+ elif self.escapeinside:
+ text = value
+ value = ''
+ while text:
+ a, sep1, text = text.partition(self.left)
+ if sep1:
+ b, sep2, text = text.partition(self.right)
+ if sep2:
+ value += escape_tex(a, cp) + b
+ else:
+ value += escape_tex(a + sep1 + b, cp)
+ else:
+ value += escape_tex(a, cp)
else:
- value = escape_tex(value, self.commandprefix)
- else:
- value = escape_tex(value, self.commandprefix)
+ value = escape_tex(value, cp)
+ elif ttype not in Token.Escape:
+ value = escape_tex(value, cp)
styles = []
while ttype is not Token:
try:
@@ -366,13 +410,66 @@ class LatexFormatter(Formatter):
else:
outfile.write(value)
- outfile.write(u'\\end{Verbatim}\n')
+ outfile.write(u'\\end{' + self.envname + u'}\n')
if self.full:
realoutfile.write(DOC_TEMPLATE %
dict(docclass = self.docclass,
preamble = self.preamble,
title = self.title,
- encoding = self.encoding or 'latin1',
+ encoding = self.encoding or 'utf8',
styledefs = self.get_style_defs(),
code = outfile.getvalue()))
+
+
+class LatexEmbeddedLexer(Lexer):
+ """
+ This lexer takes one lexer as argument, the lexer for the language
+ being formatted, and the left and right delimiters for escaped text.
+
+ First everything is scanned using the language lexer to obtain
+ strings and comments. All other consecutive tokens are merged and
+ the resulting text is scanned for escaped segments, which are given
+ the Token.Escape type. Finally text that is not escaped is scanned
+ again with the language lexer.
+ """
+ def __init__(self, left, right, lang, **options):
+ self.left = left
+ self.right = right
+ self.lang = lang
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ buf = ''
+ idx = 0
+ for i, t, v in self.lang.get_tokens_unprocessed(text):
+ if t in Token.Comment or t in Token.String:
+ if buf:
+ for x in self.get_tokens_aux(idx, buf):
+ yield x
+ buf = ''
+ yield i, t, v
+ else:
+ if not buf:
+ idx = i
+ buf += v
+ if buf:
+ for x in self.get_tokens_aux(idx, buf):
+ yield x
+
+ def get_tokens_aux(self, index, text):
+ while text:
+ a, sep1, text = text.partition(self.left)
+ if a:
+ for i, t, v in self.lang.get_tokens_unprocessed(a):
+ yield index + i, t, v
+ index += len(a)
+ if sep1:
+ b, sep2, text = text.partition(self.right)
+ if sep2:
+ yield index + len(sep1), Token.Escape, b
+ index += len(sep1) + len(b) + len(sep2)
+ else:
+ yield index, Token.Error, sep1
+ index += len(sep1)
+ text = b
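
The `escapeinside` option added above works together with the new LatexEmbeddedLexer: the wrapper re-scans everything outside strings and comments and emits text between the two delimiter characters as Token.Escape, which the formatter then passes through to LaTeX unescaped. A usage sketch; the `|` delimiters and the snippet are arbitrary:

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import LatexFormatter
from pygments.formatters.latex import LatexEmbeddedLexer

fmt = LatexFormatter(escapeinside='||')              # left and right delimiter
lexer = LatexEmbeddedLexer('|', '|', PythonLexer())

code = "x = 1  # |$x \\leftarrow 1$| increment\n"
print(highlight(code, lexer, fmt))                   # the math is kept as raw LaTeX
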
diff --git a/pygments/formatters/other.py b/pygments/formatters/other.py
index 1029a7a7..4945d763 100644
--- a/pygments/formatters/other.py
+++ b/pygments/formatters/other.py
@@ -5,16 +5,16 @@
Other formatters: NullFormatter, RawTokenFormatter.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.formatter import Formatter
-from pygments.util import OptionError, get_choice_opt, b
+from pygments.util import OptionError, get_choice_opt
from pygments.token import Token
from pygments.console import colorize
-__all__ = ['NullFormatter', 'RawTokenFormatter']
+__all__ = ['NullFormatter', 'RawTokenFormatter', 'TestcaseFormatter']
class NullFormatter(Formatter):
@@ -40,7 +40,7 @@ class RawTokenFormatter(Formatter):
The format is ``tokentype<TAB>repr(tokenstring)\n``. The output can later
be converted to a token stream with the `RawTokenLexer`, described in the
- `lexer list <lexers.txt>`_.
+ :doc:`lexer list <lexers>`.
Only two options are accepted:
@@ -50,7 +50,8 @@ class RawTokenFormatter(Formatter):
`error_color`
If set to a color name, highlight error tokens using that color. If
set but with no value, defaults to ``'red'``.
- *New in Pygments 0.11.*
+
+ .. versionadded:: 0.11
"""
name = 'Raw tokens'
@@ -61,9 +62,9 @@ class RawTokenFormatter(Formatter):
def __init__(self, **options):
Formatter.__init__(self, **options)
- if self.encoding:
- raise OptionError('the raw formatter does not support the '
- 'encoding option')
+ # We ignore self.encoding if it is set, since it gets set for lexer
+ # and formatter if given with -Oencoding on the command line.
+ # The RawTokenFormatter outputs only ASCII. Override here.
self.encoding = 'ascii' # let pygments.format() do the right thing
self.compress = get_choice_opt(options, 'compress',
['', 'none', 'gz', 'bz2'], '')
@@ -79,7 +80,7 @@ class RawTokenFormatter(Formatter):
def format(self, tokensource, outfile):
try:
- outfile.write(b(''))
+ outfile.write(b'')
except TypeError:
raise TypeError('The raw tokens formatter needs a binary '
'output file')
@@ -113,3 +114,47 @@ class RawTokenFormatter(Formatter):
for ttype, value in tokensource:
write("%s\t%r\n" % (ttype, value))
flush()
+
+TESTCASE_BEFORE = u'''\
+ def testNeedsName(self):
+ fragment = %r
+ tokens = [
+'''
+TESTCASE_AFTER = u'''\
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+'''
+
+
+class TestcaseFormatter(Formatter):
+ """
+ Format tokens as appropriate for a new testcase.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Testcase'
+ aliases = ['testcase']
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ if self.encoding is not None and self.encoding != 'utf-8':
+ raise ValueError("Only None and utf-8 are allowed encodings.")
+
+ def format(self, tokensource, outfile):
+ indentation = ' ' * 12
+ rawbuf = []
+ outbuf = []
+ for ttype, value in tokensource:
+ rawbuf.append(value)
+ outbuf.append('%s(%s, %r),\n' % (indentation, ttype, value))
+
+ before = TESTCASE_BEFORE % (u''.join(rawbuf),)
+ during = u''.join(outbuf)
+ after = TESTCASE_AFTER
+ if self.encoding is None:
+ outfile.write(before + during + after)
+ else:
+ outfile.write(before.encode('utf-8'))
+ outfile.write(during.encode('utf-8'))
+ outfile.write(after.encode('utf-8'))
+ outfile.flush()
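
The new TestcaseFormatter dumps a token stream as a ready-to-paste unittest method (the `testNeedsName` placeholder is meant to be renamed by hand). A minimal usage sketch:

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import TestcaseFormatter

# Prints a "def testNeedsName(self): ... assertEqual(...)" skeleton listing
# every (token type, value) pair produced for the fragment.
print(highlight('x = 1\n', PythonLexer(), TestcaseFormatter()))
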
diff --git a/pygments/formatters/rtf.py b/pygments/formatters/rtf.py
index 3efda284..27be225a 100644
--- a/pygments/formatters/rtf.py
+++ b/pygments/formatters/rtf.py
@@ -5,11 +5,12 @@
A formatter that generates RTF files.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.formatter import Formatter
+from pygments.util import get_int_opt, _surrogatepair
__all__ = ['RtfFormatter']
@@ -19,9 +20,13 @@ class RtfFormatter(Formatter):
"""
Format tokens as RTF markup. This formatter automatically outputs full RTF
documents with color information and other useful stuff. Perfect for Copy and
- Paste into Microsoft® Word® documents.
+ Paste into Microsoft(R) Word(R) documents.
- *New in Pygments 0.6.*
+ Please note that ``encoding`` and ``outencoding`` options are ignored.
+ The RTF format is ASCII natively, but handles unicode characters correctly
+ thanks to escape sequences.
+
+ .. versionadded:: 0.6
Additional options accepted:
@@ -32,15 +37,19 @@ class RtfFormatter(Formatter):
`fontface`
         The used font family, for example ``Bitstream Vera Sans``. Defaults to
some generic font which is supposed to have fixed width.
+
+ `fontsize`
+ Size of the font used. Size is specified in half points. The
+ default is 24 half-points, giving a size 12 font.
+
+ .. versionadded:: 2.0
"""
name = 'RTF'
aliases = ['rtf']
filenames = ['*.rtf']
- unicodeoutput = False
-
def __init__(self, **options):
- """
+ r"""
Additional options accepted:
``fontface``
@@ -49,48 +58,48 @@ class RtfFormatter(Formatter):
specification claims that ``\fmodern`` are "Fixed-pitch serif
and sans serif fonts". Hope every RTF implementation thinks
the same about modern...
+
"""
Formatter.__init__(self, **options)
self.fontface = options.get('fontface') or ''
+ self.fontsize = get_int_opt(options, 'fontsize', 0)
def _escape(self, text):
- return text.replace('\\', '\\\\') \
- .replace('{', '\\{') \
- .replace('}', '\\}')
+ return text.replace(u'\\', u'\\\\') \
+ .replace(u'{', u'\\{') \
+ .replace(u'}', u'\\}')
def _escape_text(self, text):
         # empty strings, should give a small performance improvement
if not text:
- return ''
+ return u''
# escape text
text = self._escape(text)
- if self.encoding in ('utf-8', 'utf-16', 'utf-32'):
- encoding = 'iso-8859-15'
- else:
- encoding = self.encoding or 'iso-8859-15'
buf = []
for c in text:
- if ord(c) > 128:
- ansic = c.encode(encoding, 'ignore') or '?'
- if ord(ansic) > 128:
- ansic = '\\\'%x' % ord(ansic)
- else:
- ansic = c
- buf.append(r'\ud{\u%d%s}' % (ord(c), ansic))
- else:
+ cn = ord(c)
+ if cn < (2**7):
+ # ASCII character
buf.append(str(c))
+ elif (2**7) <= cn < (2**16):
+ # single unicode escape sequence
+ buf.append(u'{\\u%d}' % cn)
+ elif (2**16) <= cn:
+ # RTF limits unicode to 16 bits.
+ # Force surrogate pairs
+ buf.append(u'{\\u%d}{\\u%d}' % _surrogatepair(cn))
- return ''.join(buf).replace('\n', '\\par\n')
+ return u''.join(buf).replace(u'\n', u'\\par\n')
def format_unencoded(self, tokensource, outfile):
# rtf 1.8 header
- outfile.write(r'{\rtf1\ansi\deff0'
- r'{\fonttbl{\f0\fmodern\fprq1\fcharset0%s;}}'
- r'{\colortbl;' % (self.fontface and
- ' ' + self._escape(self.fontface) or
- ''))
+ outfile.write(u'{\\rtf1\\ansi\\uc0\\deff0'
+ u'{\\fonttbl{\\f0\\fmodern\\fprq1\\fcharset0%s;}}'
+ u'{\\colortbl;' % (self.fontface and
+ u' ' + self._escape(self.fontface) or
+ u''))
# convert colors and save them in a mapping to access them later.
color_mapping = {}
@@ -99,13 +108,15 @@ class RtfFormatter(Formatter):
for color in style['color'], style['bgcolor'], style['border']:
if color and color not in color_mapping:
color_mapping[color] = offset
- outfile.write(r'\red%d\green%d\blue%d;' % (
+ outfile.write(u'\\red%d\\green%d\\blue%d;' % (
int(color[0:2], 16),
int(color[2:4], 16),
int(color[4:6], 16)
))
offset += 1
- outfile.write(r'}\f0')
+ outfile.write(u'}\\f0 ')
+ if self.fontsize:
+ outfile.write(u'\\fs%d' % (self.fontsize))
# highlight stream
for ttype, value in tokensource:
@@ -114,23 +125,23 @@ class RtfFormatter(Formatter):
style = self.style.style_for_token(ttype)
buf = []
if style['bgcolor']:
- buf.append(r'\cb%d' % color_mapping[style['bgcolor']])
+ buf.append(u'\\cb%d' % color_mapping[style['bgcolor']])
if style['color']:
- buf.append(r'\cf%d' % color_mapping[style['color']])
+ buf.append(u'\\cf%d' % color_mapping[style['color']])
if style['bold']:
- buf.append(r'\b')
+ buf.append(u'\\b')
if style['italic']:
- buf.append(r'\i')
+ buf.append(u'\\i')
if style['underline']:
- buf.append(r'\ul')
+ buf.append(u'\\ul')
if style['border']:
- buf.append(r'\chbrdr\chcfpat%d' %
+ buf.append(u'\\chbrdr\\chcfpat%d' %
color_mapping[style['border']])
- start = ''.join(buf)
+ start = u''.join(buf)
if start:
- outfile.write('{%s ' % start)
+ outfile.write(u'{%s ' % start)
outfile.write(self._escape_text(value))
if start:
- outfile.write('}')
+ outfile.write(u'}')
- outfile.write('}')
+ outfile.write(u'}')
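
The rewritten `_escape_text()` drops the old codepage handling: ASCII passes through, BMP characters become a single `\uN` escape, and code points above U+FFFF are split into a UTF-16 surrogate pair because RTF's `\uN` only carries 16-bit values. A worked sketch of that split using the standard UTF-16 formulas (the exact convention of `pygments.util._surrogatepair`, for example signedness, may differ):

def surrogate_pair(cp):
    """Split a supplementary-plane code point into UTF-16 surrogates."""
    assert cp > 0xFFFF
    cp -= 0x10000
    return 0xD800 + (cp >> 10), 0xDC00 + (cp & 0x3FF)

hi, lo = surrogate_pair(0x1F600)      # U+1F600, outside the BMP
print(r'{\u%d}{\u%d}' % (hi, lo))     # two consecutive RTF unicode escapes
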
diff --git a/pygments/formatters/svg.py b/pygments/formatters/svg.py
index 271f22a7..0efe9eea 100644
--- a/pygments/formatters/svg.py
+++ b/pygments/formatters/svg.py
@@ -5,7 +5,7 @@
Formatter for SVG output.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -35,7 +35,7 @@ class SvgFormatter(Formatter):
By default, this formatter outputs a full SVG document including doctype
declaration and the ``<svg>`` root element.
- *New in Pygments 0.9.*
+ .. versionadded:: 0.9
Additional options accepted:
@@ -78,7 +78,6 @@ class SvgFormatter(Formatter):
filenames = ['*.svg']
def __init__(self, **options):
- # XXX outencoding
Formatter.__init__(self, **options)
self.nowrap = get_bool_opt(options, 'nowrap', False)
self.fontfamily = options.get('fontfamily', 'monospace')
diff --git a/pygments/formatters/terminal.py b/pygments/formatters/terminal.py
index 94e078f2..3c4b025f 100644
--- a/pygments/formatters/terminal.py
+++ b/pygments/formatters/terminal.py
@@ -5,7 +5,7 @@
Formatter for terminal output with ANSI sequences.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -13,7 +13,7 @@ import sys
from pygments.formatter import Formatter
from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Token, Whitespace
+ Number, Operator, Generic, Token, Whitespace
from pygments.console import ansiformat
from pygments.util import get_choice_opt
@@ -73,6 +73,10 @@ class TerminalFormatter(Formatter):
`colorscheme`
A dictionary mapping token types to (lightbg, darkbg) color names or
``None`` (default: ``None`` = use builtin colorscheme).
+
+ `linenos`
+ Set to ``True`` to have line numbers on the terminal output as well
+ (default: ``False`` = no line numbers).
"""
name = 'Terminal'
aliases = ['terminal', 'console']
@@ -83,6 +87,8 @@ class TerminalFormatter(Formatter):
self.darkbg = get_choice_opt(options, 'bg',
['light', 'dark'], 'light') == 'dark'
self.colorscheme = options.get('colorscheme', None) or TERMINAL_COLORS
+ self.linenos = options.get('linenos', False)
+ self._lineno = 0
def format(self, tokensource, outfile):
# hack: if the output is a terminal and has an encoding set,
@@ -93,7 +99,40 @@ class TerminalFormatter(Formatter):
self.encoding = outfile.encoding
return Formatter.format(self, tokensource, outfile)
+ def _write_lineno(self, outfile):
+ self._lineno += 1
+ outfile.write("\n%04d: " % self._lineno)
+
+ def _format_unencoded_with_lineno(self, tokensource, outfile):
+ self._write_lineno(outfile)
+
+ for ttype, value in tokensource:
+ if value.endswith("\n"):
+ self._write_lineno(outfile)
+ value = value[:-1]
+ color = self.colorscheme.get(ttype)
+ while color is None:
+ ttype = ttype[:-1]
+ color = self.colorscheme.get(ttype)
+ if color:
+ color = color[self.darkbg]
+ spl = value.split('\n')
+ for line in spl[:-1]:
+ self._write_lineno(outfile)
+ if line:
+ outfile.write(ansiformat(color, line[:-1]))
+ if spl[-1]:
+ outfile.write(ansiformat(color, spl[-1]))
+ else:
+ outfile.write(value)
+
+ outfile.write("\n")
+
def format_unencoded(self, tokensource, outfile):
+ if self.linenos:
+ self._format_unencoded_with_lineno(tokensource, outfile)
+ return
+
for ttype, value in tokensource:
color = self.colorscheme.get(ttype)
while color is None:
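
The plain terminal formatter gains a `linenos` switch that prefixes every output line with a zero-padded counter via the `_write_lineno()` helper above. A usage sketch:

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter

code = "def foo(bar):\n    return bar\n"
# Each line of the colored output is prefixed with a counter such as '0001: '.
print(highlight(code, PythonLexer(), TerminalFormatter(linenos=True)))
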
diff --git a/pygments/formatters/terminal256.py b/pygments/formatters/terminal256.py
index 772ed423..5d794f4e 100644
--- a/pygments/formatters/terminal256.py
+++ b/pygments/formatters/terminal256.py
@@ -11,7 +11,7 @@
Formatter version 1.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -66,6 +66,7 @@ class EscapeSequence:
attrs.append("00")
return self.escape(attrs)
+
class Terminal256Formatter(Formatter):
r"""
Format tokens with ANSI color sequences, for output in a 256-color
@@ -76,7 +77,7 @@ class Terminal256Formatter(Formatter):
and converts them to nearest ANSI 256-color escape sequences. Bold and
underline attributes from the style are preserved (and displayed).
- *New in Pygments 0.9.*
+ .. versionadded:: 0.9
Options accepted:
@@ -98,28 +99,28 @@ class Terminal256Formatter(Formatter):
self.usebold = 'nobold' not in options
self.useunderline = 'nounderline' not in options
- self._build_color_table() # build an RGB-to-256 color conversion table
- self._setup_styles() # convert selected style's colors to term. colors
+ self._build_color_table() # build an RGB-to-256 color conversion table
+ self._setup_styles() # convert selected style's colors to term. colors
def _build_color_table(self):
# colors 0..15: 16 basic colors
- self.xterm_colors.append((0x00, 0x00, 0x00)) # 0
- self.xterm_colors.append((0xcd, 0x00, 0x00)) # 1
- self.xterm_colors.append((0x00, 0xcd, 0x00)) # 2
- self.xterm_colors.append((0xcd, 0xcd, 0x00)) # 3
- self.xterm_colors.append((0x00, 0x00, 0xee)) # 4
- self.xterm_colors.append((0xcd, 0x00, 0xcd)) # 5
- self.xterm_colors.append((0x00, 0xcd, 0xcd)) # 6
- self.xterm_colors.append((0xe5, 0xe5, 0xe5)) # 7
- self.xterm_colors.append((0x7f, 0x7f, 0x7f)) # 8
- self.xterm_colors.append((0xff, 0x00, 0x00)) # 9
- self.xterm_colors.append((0x00, 0xff, 0x00)) # 10
- self.xterm_colors.append((0xff, 0xff, 0x00)) # 11
- self.xterm_colors.append((0x5c, 0x5c, 0xff)) # 12
- self.xterm_colors.append((0xff, 0x00, 0xff)) # 13
- self.xterm_colors.append((0x00, 0xff, 0xff)) # 14
- self.xterm_colors.append((0xff, 0xff, 0xff)) # 15
+ self.xterm_colors.append((0x00, 0x00, 0x00)) # 0
+ self.xterm_colors.append((0xcd, 0x00, 0x00)) # 1
+ self.xterm_colors.append((0x00, 0xcd, 0x00)) # 2
+ self.xterm_colors.append((0xcd, 0xcd, 0x00)) # 3
+ self.xterm_colors.append((0x00, 0x00, 0xee)) # 4
+ self.xterm_colors.append((0xcd, 0x00, 0xcd)) # 5
+ self.xterm_colors.append((0x00, 0xcd, 0xcd)) # 6
+ self.xterm_colors.append((0xe5, 0xe5, 0xe5)) # 7
+ self.xterm_colors.append((0x7f, 0x7f, 0x7f)) # 8
+ self.xterm_colors.append((0xff, 0x00, 0x00)) # 9
+ self.xterm_colors.append((0x00, 0xff, 0x00)) # 10
+ self.xterm_colors.append((0xff, 0xff, 0x00)) # 11
+ self.xterm_colors.append((0x5c, 0x5c, 0xff)) # 12
+ self.xterm_colors.append((0xff, 0x00, 0xff)) # 13
+ self.xterm_colors.append((0x00, 0xff, 0xff)) # 14
+ self.xterm_colors.append((0xff, 0xff, 0xff)) # 15
# colors 16..232: the 6x6x6 color cube
@@ -138,7 +139,7 @@ class Terminal256Formatter(Formatter):
self.xterm_colors.append((v, v, v))
def _closest_color(self, r, g, b):
- distance = 257*257*3 # "infinity" (>distance from #000000 to #ffffff)
+ distance = 257*257*3 # "infinity" (>distance from #000000 to #ffffff)
match = 0
for i in range(0, 254):
@@ -197,7 +198,7 @@ class Terminal256Formatter(Formatter):
not_found = True
while ttype and not_found:
try:
- #outfile.write( "<" + str(ttype) + ">" )
+ # outfile.write( "<" + str(ttype) + ">" )
on, off = self.style_string[str(ttype)]
# Like TerminalFormatter, add "reset colors" escape sequence
@@ -211,12 +212,12 @@ class Terminal256Formatter(Formatter):
outfile.write(on + spl[-1] + off)
not_found = False
- #outfile.write( '#' + str(ttype) + '#' )
+ # outfile.write( '#' + str(ttype) + '#' )
except KeyError:
- #ottype = ttype
+ # ottype = ttype
ttype = ttype[:-1]
- #outfile.write( '!' + str(ottype) + '->' + str(ttype) + '!' )
+ # outfile.write( '!' + str(ottype) + '->' + str(ttype) + '!' )
if not_found:
outfile.write(value)
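
Terminal256Formatter maps each style color to the nearest entry of the xterm 256-color palette by minimizing squared RGB distance over the table built in `_build_color_table()`. A simplified standalone sketch of that search, restricted to the 6x6x6 cube (the real table also holds the 16 basic colors and the grayscale ramp):

# Build the 6x6x6 color cube (palette indices 16..231) and pick the entry
# with the smallest squared RGB distance.
levels = [0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff]
cube = [(r, g, b) for r in levels for g in levels for b in levels]

def closest_cube_index(r, g, b):
    best, best_dist = 0, 3 * 257 * 257           # larger than any real distance
    for i, (cr, cg, cb) in enumerate(cube):
        dist = (r - cr) ** 2 + (g - cg) ** 2 + (b - cb) ** 2
        if dist < best_dist:
            best, best_dist = i, dist
    return 16 + best                             # the cube starts at palette index 16

print(closest_cube_index(0x40, 0x80, 0xc0))      # some palette index in 16..231
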
diff --git a/pygments/lexer.py b/pygments/lexer.py
index 8f88dfda..07e81033 100644
--- a/pygments/lexer.py
+++ b/pygments/lexer.py
@@ -5,27 +5,34 @@
Base lexer classes.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re, itertools
+
+from __future__ import print_function
+
+import re
+import sys
+import time
+import itertools
from pygments.filter import apply_filters, Filter
from pygments.filters import get_filter_by_name
from pygments.token import Error, Text, Other, _TokenType
from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
- make_analysator
-
+ make_analysator, text_type, add_metaclass, iteritems, Future, guess_decode
+from pygments.regexopt import regex_opt
__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer',
- 'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this']
+ 'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this',
+ 'default', 'words']
-_encoding_map = [('\xef\xbb\xbf', 'utf-8'),
- ('\xff\xfe\0\0', 'utf-32'),
- ('\0\0\xfe\xff', 'utf-32be'),
- ('\xff\xfe', 'utf-16'),
- ('\xfe\xff', 'utf-16be')]
+_encoding_map = [(b'\xef\xbb\xbf', 'utf-8'),
+ (b'\xff\xfe\0\0', 'utf-32'),
+ (b'\0\0\xfe\xff', 'utf-32be'),
+ (b'\xff\xfe', 'utf-16'),
+ (b'\xfe\xff', 'utf-16be')]
_default_analyse = staticmethod(lambda x: 0.0)
@@ -42,6 +49,7 @@ class LexerMeta(type):
return type.__new__(cls, name, bases, d)
+@add_metaclass(LexerMeta)
class Lexer(object):
"""
Lexer for a specific language.
@@ -55,15 +63,19 @@ class Lexer(object):
``ensurenl``
Make sure that the input ends with a newline (default: True). This
is required for some lexers that consume input linewise.
- *New in Pygments 1.3.*
+
+ .. versionadded:: 1.3
+
``tabsize``
If given and greater than 0, expand tabs in the input (default: 0).
``encoding``
If given, must be an encoding name. This encoding will be used to
convert the input string to Unicode, if it is not already a Unicode
- string (default: ``'latin1'``).
- Can also be ``'guess'`` to use a simple UTF-8 / Latin1 detection, or
- ``'chardet'`` to use the chardet library, if it is installed.
+ string (default: ``'guess'``, which uses a simple UTF-8 / Locale /
+        Latin1 detection). Can also be ``'chardet'`` to use the chardet
+ library, if it is installed.
+ ``inencoding``
+ Overrides the ``encoding`` if given.
"""
#: Name of the lexer
@@ -84,16 +96,14 @@ class Lexer(object):
#: Priority, should multiple lexers match and no content is provided
priority = 0
- __metaclass__ = LexerMeta
-
def __init__(self, **options):
self.options = options
self.stripnl = get_bool_opt(options, 'stripnl', True)
self.stripall = get_bool_opt(options, 'stripall', False)
self.ensurenl = get_bool_opt(options, 'ensurenl', True)
self.tabsize = get_int_opt(options, 'tabsize', 0)
- self.encoding = options.get('encoding', 'latin1')
- # self.encoding = options.get('inencoding', None) or self.encoding
+ self.encoding = options.get('encoding', 'guess')
+ self.encoding = options.get('inencoding') or self.encoding
self.filters = []
for filter_ in get_list_opt(options, 'filters', ()):
self.add_filter(filter_)
@@ -136,14 +146,9 @@ class Lexer(object):
Also preprocess the text, i.e. expand tabs and strip it if
wanted and applies registered filters.
"""
- if not isinstance(text, unicode):
+ if not isinstance(text, text_type):
if self.encoding == 'guess':
- try:
- text = text.decode('utf-8')
- if text.startswith(u'\ufeff'):
- text = text[len(u'\ufeff'):]
- except UnicodeDecodeError:
- text = text.decode('latin1')
+ text, _ = guess_decode(text)
elif self.encoding == 'chardet':
try:
import chardet
@@ -155,17 +160,18 @@ class Lexer(object):
decoded = None
for bom, encoding in _encoding_map:
if text.startswith(bom):
- decoded = unicode(text[len(bom):], encoding,
- errors='replace')
+ decoded = text[len(bom):].decode(encoding, 'replace')
break
# no BOM found, so use chardet
if decoded is None:
- enc = chardet.detect(text[:1024]) # Guess using first 1KB
- decoded = unicode(text, enc.get('encoding') or 'utf-8',
- errors='replace')
+ enc = chardet.detect(text[:1024]) # Guess using first 1KB
+ decoded = text.decode(enc.get('encoding') or 'utf-8',
+ 'replace')
text = decoded
else:
text = text.decode(self.encoding)
+ if text.startswith(u'\ufeff'):
+ text = text[len(u'\ufeff'):]
else:
if text.startswith(u'\ufeff'):
text = text[len(u'\ufeff'):]
@@ -192,7 +198,9 @@ class Lexer(object):
def get_tokens_unprocessed(self, text):
"""
- Return an iterable of (tokentype, value) pairs.
+ Return an iterable of (index, tokentype, value) pairs where "index"
+ is the starting position of the token within the input text.
+
In subclasses, implement this method as a generator to
maximize effectiveness.
"""
@@ -233,7 +241,7 @@ class DelegatingLexer(Lexer):
self.root_lexer.get_tokens_unprocessed(buffered))
-#-------------------------------------------------------------------------------
+# ------------------------------------------------------------------------------
# RegexLexer and ExtendedRegexLexer
#
@@ -379,20 +387,50 @@ def using(_other, **kwargs):
return callback
+class default:
+ """
+ Indicates a state or state action (e.g. #pop) to apply.
+ For example default('#pop') is equivalent to ('', Token, '#pop')
+ Note that state tuples may be used as well.
+
+ .. versionadded:: 2.0
+ """
+ def __init__(self, state):
+ self.state = state
+
+
+class words(Future):
+ """
+ Indicates a list of literal words that is transformed into an optimized
+ regex that matches any of the words.
+
+ .. versionadded:: 2.0
+ """
+ def __init__(self, words, prefix='', suffix=''):
+ self.words = words
+ self.prefix = prefix
+ self.suffix = suffix
+
+ def get(self):
+ return regex_opt(self.words, prefix=self.prefix, suffix=self.suffix)
+
+
class RegexLexerMeta(LexerMeta):
"""
Metaclass for RegexLexer, creates the self._tokens attribute from
self.tokens on the first instantiation.
"""
- def _process_regex(cls, regex, rflags):
+ def _process_regex(cls, regex, rflags, state):
"""Preprocess the regular expression component of a token definition."""
+ if isinstance(regex, Future):
+ regex = regex.get()
return re.compile(regex, rflags).match
def _process_token(cls, token):
"""Preprocess the token component of a token definition."""
assert type(token) is _TokenType or callable(token), \
- 'token type must be simple type or callable, not %r' % (token,)
+ 'token type must be simple type or callable, not %r' % (token,)
return token
def _process_new_state(cls, new_state, unprocessed, processed):
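
Two helpers for lexer authors appear in this hunk: `words()` is a deferred (`Future`) rule element that `regex_opt()` expands into a single optimized alternation when the token table is compiled, and `default(state)` stands in for a rule that matches nothing and only switches state. A sketch of both in a made-up mini lexer:

from pygments.lexer import RegexLexer, words, default
from pygments.token import Keyword, Name, Text

class ToyLexer(RegexLexer):
    """Made-up mini language, purely to exercise the new helpers."""
    name = 'Toy'
    tokens = {
        'root': [
            (words(('if', 'else', 'while'), suffix=r'\b'), Keyword),
            (r'class\b', Keyword, 'classname'),
            (r'\s+', Text),
            (r'\w+', Name),
        ],
        'classname': [
            (r'\s+', Text),
            (r'\w+', Name.Class, '#pop'),
            # nothing usable after 'class': pop back without consuming input
            default('#pop'),
        ],
    }

print(list(ToyLexer().get_tokens('class Foo\nif x')))
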
@@ -425,7 +463,7 @@ class RegexLexerMeta(LexerMeta):
for istate in new_state:
assert (istate in unprocessed or
istate in ('#pop', '#push')), \
- 'unknown new state ' + istate
+ 'unknown new state ' + istate
return new_state
else:
assert False, 'unknown new state def %r' % new_state
@@ -446,14 +484,20 @@ class RegexLexerMeta(LexerMeta):
str(tdef)))
continue
if isinstance(tdef, _inherit):
- # processed already
+                # should be processed already, but may not be in the case of:
+ # 1. the state has no counterpart in any parent
+ # 2. the state includes more than one 'inherit'
+ continue
+ if isinstance(tdef, default):
+ new_state = cls._process_new_state(tdef.state, unprocessed, processed)
+ tokens.append((re.compile('').match, None, new_state))
continue
assert type(tdef) is tuple, "wrong rule def %r" % tdef
try:
- rex = cls._process_regex(tdef[0], rflags)
- except Exception, err:
+ rex = cls._process_regex(tdef[0], rflags, state)
+ except Exception as err:
raise ValueError("uncompilable regex %r in state %r of %r: %s" %
(tdef[0], state, cls, err))
@@ -472,7 +516,7 @@ class RegexLexerMeta(LexerMeta):
"""Preprocess a dictionary of token definitions."""
processed = cls._all_tokens[name] = {}
tokendefs = tokendefs or cls.tokens[name]
- for state in tokendefs.keys():
+ for state in list(tokendefs):
cls._process_state(tokendefs, processed, state)
return processed
@@ -490,12 +534,16 @@ class RegexLexerMeta(LexerMeta):
"""
tokens = {}
inheritable = {}
- for c in itertools.chain((cls,), cls.__mro__):
+ for c in cls.__mro__:
toks = c.__dict__.get('tokens', {})
- for state, items in toks.iteritems():
+ for state, items in iteritems(toks):
curitems = tokens.get(state)
if curitems is None:
+ # N.b. because this is assigned by reference, sufficiently
+ # deep hierarchies are processed incrementally (e.g. for
+ # A(B), B(C), C(RegexLexer), B will be premodified so X(B)
+ # will not see any inherits in B).
tokens[state] = items
try:
inherit_ndx = items.index(inherit)
@@ -511,6 +559,8 @@ class RegexLexerMeta(LexerMeta):
# Replace the "inherit" value with the items
curitems[inherit_ndx:inherit_ndx+1] = items
try:
+ # N.b. this is the index in items (that is, the superclass
+ # copy), so offset required when storing below.
new_inh_ndx = items.index(inherit)
except ValueError:
pass
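
For reference, a brief sketch of the token inheritance these notes describe (both lexer classes are invented):

from pygments.lexer import RegexLexer, inherit
from pygments.token import Comment, Keyword, Text

class BaseLexer(RegexLexer):
    """Invented base lexer."""
    tokens = {
        'root': [
            (r'\b(?:if|else)\b', Keyword),
            (r'\s+', Text),
        ],
    }

class DerivedLexer(BaseLexer):
    """Adds comments; `inherit` splices BaseLexer's 'root' rules in place."""
    tokens = {
        'root': [
            (r'//[^\n]*', Comment.Single),
            inherit,
        ],
    }
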
@@ -533,13 +583,13 @@ class RegexLexerMeta(LexerMeta):
return type.__call__(cls, *args, **kwds)
+@add_metaclass(RegexLexerMeta)
class RegexLexer(Lexer):
"""
Base for simple stateful regular expression-based lexers.
Simplifies the lexing process so that you need only
provide a list of states and regular expressions.
"""
- __metaclass__ = RegexLexerMeta
#: Flags for compiling the regular expressions.
#: Defaults to MULTILINE.
@@ -578,11 +628,12 @@ class RegexLexer(Lexer):
for rexmatch, action, new_state in statetokens:
m = rexmatch(text, pos)
if m:
- if type(action) is _TokenType:
- yield pos, action, m.group()
- else:
- for item in action(self, m):
- yield item
+ if action is not None:
+ if type(action) is _TokenType:
+ yield pos, action, m.group()
+ else:
+ for item in action(self, m):
+ yield item
pos = m.end()
if new_state is not None:
# state transition
@@ -626,7 +677,7 @@ class LexerContext(object):
def __init__(self, text, pos, stack=None, end=None):
self.text = text
self.pos = pos
- self.end = end or len(text) # end=0 not supported ;-)
+ self.end = end or len(text) # end=0 not supported ;-)
self.stack = stack or ['root']
def __repr__(self):
@@ -656,15 +707,16 @@ class ExtendedRegexLexer(RegexLexer):
for rexmatch, action, new_state in statetokens:
m = rexmatch(text, ctx.pos, ctx.end)
if m:
- if type(action) is _TokenType:
- yield ctx.pos, action, m.group()
- ctx.pos = m.end()
- else:
- for item in action(self, m, ctx):
- yield item
- if not new_state:
- # altered the state stack?
- statetokens = tokendefs[ctx.stack[-1]]
+ if action is not None:
+ if type(action) is _TokenType:
+ yield ctx.pos, action, m.group()
+ ctx.pos = m.end()
+ else:
+ for item in action(self, m, ctx):
+ yield item
+ if not new_state:
+ # altered the state stack?
+ statetokens = tokendefs[ctx.stack[-1]]
# CAUTION: callback must set ctx.pos!
if new_state is not None:
# state transition
@@ -673,7 +725,7 @@ class ExtendedRegexLexer(RegexLexer):
if state == '#pop':
ctx.stack.pop()
elif state == '#push':
- ctx.stack.append(statestack[-1])
+ ctx.stack.append(ctx.stack[-1])
else:
ctx.stack.append(state)
elif isinstance(new_state, int):
@@ -718,7 +770,7 @@ def do_insertions(insertions, tokens):
"""
insertions = iter(insertions)
try:
- index, itokens = insertions.next()
+ index, itokens = next(insertions)
except StopIteration:
# no insertions
for item in tokens:
@@ -744,7 +796,7 @@ def do_insertions(insertions, tokens):
realpos += len(it_value)
oldi = index - i
try:
- index, itokens = insertions.next()
+ index, itokens = next(insertions)
except StopIteration:
insleft = False
break # not strictly necessary
@@ -759,7 +811,60 @@ def do_insertions(insertions, tokens):
yield realpos, t, v
realpos += len(v)
try:
- index, itokens = insertions.next()
+ index, itokens = next(insertions)
except StopIteration:
insleft = False
break # not strictly necessary
+
+
+class ProfilingRegexLexerMeta(RegexLexerMeta):
+ """Metaclass for ProfilingRegexLexer, collects regex timing info."""
+
+ def _process_regex(cls, regex, rflags, state):
+ if isinstance(regex, words):
+ rex = regex_opt(regex.words, prefix=regex.prefix,
+ suffix=regex.suffix)
+ else:
+ rex = regex
+ compiled = re.compile(rex, rflags)
+
+ def match_func(text, pos, endpos=sys.maxsize):
+ info = cls._prof_data[-1].setdefault((state, rex), [0, 0.0])
+ t0 = time.time()
+ res = compiled.match(text, pos, endpos)
+ t1 = time.time()
+ info[0] += 1
+ info[1] += t1 - t0
+ return res
+ return match_func
+
+
+@add_metaclass(ProfilingRegexLexerMeta)
+class ProfilingRegexLexer(RegexLexer):
+ """Drop-in replacement for RegexLexer that does profiling of its regexes."""
+
+ _prof_data = []
+ _prof_sort_index = 4 # defaults to time per call
+
+ def get_tokens_unprocessed(self, text, stack=('root',)):
+ # this needs to be a stack, since using(this) will produce nested calls
+ self.__class__._prof_data.append({})
+ for tok in RegexLexer.get_tokens_unprocessed(self, text, stack):
+ yield tok
+ rawdata = self.__class__._prof_data.pop()
+ data = sorted(((s, repr(r).strip('u\'').replace('\\\\', '\\')[:65],
+ n, 1000 * t, 1000 * t / n)
+ for ((s, r), (n, t)) in rawdata.items()),
+ key=lambda x: x[self._prof_sort_index],
+ reverse=True)
+ sum_total = sum(x[3] for x in data)
+
+ print()
+ print('Profiling result for %s lexing %d chars in %.3f ms' %
+ (self.__class__.__name__, len(text), sum_total))
+ print('=' * 110)
+ print('%-20s %-64s ncalls tottime percall' % ('state', 'regex'))
+ print('-' * 110)
+ for d in data:
+ print('%-20s %-65s %5d %8.4f %8.4f' % d)
+ print('=' * 110)
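
A usage sketch for the new profiling support: mixing ProfilingRegexLexer into an existing lexer class (the combination shown is illustrative, not part of the patch):

from pygments.lexer import ProfilingRegexLexer
from pygments.lexers import PythonLexer

class ProfilingPythonLexer(PythonLexer, ProfilingRegexLexer):
    """PythonLexer with per-regex timing output."""

# Exhausting the generator triggers the timing report implemented above.
list(ProfilingPythonLexer().get_tokens_unprocessed(u'def f():\n    return 42\n'))
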
diff --git a/pygments/lexers/__init__.py b/pygments/lexers/__init__.py
index dbfe4351..7d0b89d4 100644
--- a/pygments/lexers/__init__.py
+++ b/pygments/lexers/__init__.py
@@ -5,10 +5,11 @@
Pygments lexers.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+import re
import sys
import types
import fnmatch
@@ -17,19 +18,26 @@ from os.path import basename
from pygments.lexers._mapping import LEXERS
from pygments.modeline import get_filetype_from_buffer
from pygments.plugin import find_plugin_lexers
-from pygments.util import ClassNotFound, bytes
+from pygments.util import ClassNotFound, itervalues, guess_decode
__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
- 'guess_lexer'] + LEXERS.keys()
+ 'guess_lexer'] + list(LEXERS)
_lexer_cache = {}
+_pattern_cache = {}
+
+
+def _fn_matches(fn, glob):
+ """Return whether the supplied file name fn matches pattern filename."""
+ if glob not in _pattern_cache:
+ pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
+ return pattern.match(fn)
+ return _pattern_cache[glob].match(fn)
def _load_lexers(module_name):
- """
- Load a lexer (and all others in the module too).
- """
+ """Load a lexer (and all others in the module too)."""
mod = __import__(module_name, None, None, ['__all__'])
for lexer_name in mod.__all__:
cls = getattr(mod, lexer_name)
@@ -37,24 +45,24 @@ def _load_lexers(module_name):
def get_all_lexers():
- """
- Return a generator of tuples in the form ``(name, aliases,
+ """Return a generator of tuples in the form ``(name, aliases,
filenames, mimetypes)`` of all known lexers.
"""
- for item in LEXERS.itervalues():
+ for item in itervalues(LEXERS):
yield item[1:]
for lexer in find_plugin_lexers():
yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes
def find_lexer_class(name):
- """
- Lookup a lexer class by name. Return None if not found.
+ """Lookup a lexer class by name.
+
+ Return None if not found.
"""
if name in _lexer_cache:
return _lexer_cache[name]
# lookup builtin lexers
- for module_name, lname, aliases, _, _ in LEXERS.itervalues():
+ for module_name, lname, aliases, _, _ in itervalues(LEXERS):
if name == lname:
_load_lexers(module_name)
return _lexer_cache[name]
@@ -65,44 +73,50 @@ def find_lexer_class(name):
def get_lexer_by_name(_alias, **options):
+ """Get a lexer by an alias.
+
+ Raises ClassNotFound if not found.
"""
- Get a lexer by an alias.
- """
+ if not _alias:
+ raise ClassNotFound('no lexer for alias %r found' % _alias)
+
# lookup builtin lexers
- for module_name, name, aliases, _, _ in LEXERS.itervalues():
- if _alias in aliases:
+ for module_name, name, aliases, _, _ in itervalues(LEXERS):
+ if _alias.lower() in aliases:
if name not in _lexer_cache:
_load_lexers(module_name)
return _lexer_cache[name](**options)
# continue with lexers from setuptools entrypoints
for cls in find_plugin_lexers():
- if _alias in cls.aliases:
+ if _alias.lower() in cls.aliases:
return cls(**options)
raise ClassNotFound('no lexer for alias %r found' % _alias)
-def get_lexer_for_filename(_fn, code=None, **options):
- """
- Get a lexer for a filename. If multiple lexers match the filename
- pattern, use ``analyze_text()`` to figure out which one is more
- appropriate.
+def find_lexer_class_for_filename(_fn, code=None):
+ """Get a lexer for a filename.
+
+ If multiple lexers match the filename pattern, use ``analyse_text()`` to
+ figure out which one is more appropriate.
+
+ Returns None if not found.
"""
matches = []
fn = basename(_fn)
- for modname, name, _, filenames, _ in LEXERS.itervalues():
+ for modname, name, _, filenames, _ in itervalues(LEXERS):
for filename in filenames:
- if fnmatch.fnmatch(fn, filename):
+ if _fn_matches(fn, filename):
if name not in _lexer_cache:
_load_lexers(modname)
matches.append((_lexer_cache[name], filename))
for cls in find_plugin_lexers():
for filename in cls.filenames:
- if fnmatch.fnmatch(fn, filename):
+ if _fn_matches(fn, filename):
matches.append((cls, filename))
if sys.version_info > (3,) and isinstance(code, bytes):
# decode it, since all analyse_text functions expect unicode
- code = code.decode('latin1')
+ code = guess_decode(code)
def get_rating(info):
cls, filename = info
@@ -118,16 +132,30 @@ def get_lexer_for_filename(_fn, code=None, **options):
if matches:
matches.sort(key=get_rating)
- #print "Possible lexers, after sort:", matches
- return matches[-1][0](**options)
- raise ClassNotFound('no lexer for filename %r found' % _fn)
+ # print "Possible lexers, after sort:", matches
+ return matches[-1][0]
-def get_lexer_for_mimetype(_mime, **options):
+def get_lexer_for_filename(_fn, code=None, **options):
+ """Get a lexer for a filename.
+
+ If multiple lexers match the filename pattern, use ``analyse_text()`` to
+ figure out which one is more appropriate.
+
+ Raises ClassNotFound if not found.
"""
- Get a lexer for a mimetype.
+ res = find_lexer_class_for_filename(_fn, code)
+ if not res:
+ raise ClassNotFound('no lexer for filename %r found' % _fn)
+ return res(**options)
+
+
+def get_lexer_for_mimetype(_mime, **options):
+ """Get a lexer for a mimetype.
+
+ Raises ClassNotFound if not found.
"""
- for modname, name, _, _, mimetypes in LEXERS.itervalues():
+ for modname, name, _, _, mimetypes in itervalues(LEXERS):
if _mime in mimetypes:
if name not in _lexer_cache:
_load_lexers(modname)
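
A short usage sketch for the lookup helpers revised above (the alias, filename, and option values are examples only):

from pygments.lexers import get_lexer_by_name, get_lexer_for_filename
from pygments.util import ClassNotFound

try:
    lexer = get_lexer_by_name('python', stripall=True)
except ClassNotFound:
    lexer = None

# Filename-based lookup; passing `code` lets analyse_text() break ties
# between lexers whose filename patterns both match.
lexer = get_lexer_for_filename('setup.py', code=u'import os\n')
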
@@ -138,17 +166,16 @@ def get_lexer_for_mimetype(_mime, **options):
raise ClassNotFound('no lexer for mimetype %r found' % _mime)
-def _iter_lexerclasses():
- """
- Return an iterator over all lexer classes.
- """
+def _iter_lexerclasses(plugins=True):
+ """Return an iterator over all lexer classes."""
for key in sorted(LEXERS):
module_name, name = LEXERS[key][:2]
if name not in _lexer_cache:
_load_lexers(module_name)
yield _lexer_cache[name]
- for lexer in find_plugin_lexers():
- yield lexer
+ if plugins:
+ for lexer in find_plugin_lexers():
+ yield lexer
def guess_lexer_for_filename(_fn, _text, **options):
@@ -168,16 +195,17 @@ def guess_lexer_for_filename(_fn, _text, **options):
<pygments.lexers.templates.CssPhpLexer object at 0xb7ba518c>
"""
fn = basename(_fn)
- primary = None
+ primary = {}
matching_lexers = set()
for lexer in _iter_lexerclasses():
for filename in lexer.filenames:
- if fnmatch.fnmatch(fn, filename):
+ if _fn_matches(fn, filename):
matching_lexers.add(lexer)
- primary = lexer
+ primary[lexer] = True
for filename in lexer.alias_filenames:
- if fnmatch.fnmatch(fn, filename):
+ if _fn_matches(fn, filename):
matching_lexers.add(lexer)
+ primary[lexer] = False
if not matching_lexers:
raise ClassNotFound('no lexer for filename %r found' % fn)
if len(matching_lexers) == 1:
@@ -188,16 +216,21 @@ def guess_lexer_for_filename(_fn, _text, **options):
if rv == 1.0:
return lexer(**options)
result.append((rv, lexer))
- result.sort()
- if not result[-1][0] and primary is not None:
- return primary(**options)
+
+ def type_sort(t):
+ # sort by:
+ # - analyse score
+ # - is primary filename pattern?
+ # - priority
+ # - last resort: class name
+ return (t[0], primary[t[1]], t[1].priority, t[1].__name__)
+ result.sort(key=type_sort)
+
return result[-1][1](**options)
def guess_lexer(_text, **options):
- """
- Guess a lexer by strong distinctions in the text (eg, shebang).
- """
+ """Guess a lexer by strong distinctions in the text (eg, shebang)."""
# try to get a vim modeline first
ft = get_filetype_from_buffer(_text)
@@ -233,8 +266,8 @@ class _automodule(types.ModuleType):
raise AttributeError(name)
-oldmod = sys.modules['pygments.lexers']
-newmod = _automodule('pygments.lexers')
+oldmod = sys.modules[__name__]
+newmod = _automodule(__name__)
newmod.__dict__.update(oldmod.__dict__)
-sys.modules['pygments.lexers'] = newmod
+sys.modules[__name__] = newmod
del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
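
Finally, a sketch of the guessing entry points whose tie-breaking was reworked above (the inputs are illustrative):

from pygments.lexers import guess_lexer, guess_lexer_for_filename

# Several lexers claim *.h; the analyse_text() scores plus the new
# (score, primary pattern, priority, class name) sort pick one.
lexer = guess_lexer_for_filename('config.h', u'#include <stdio.h>\nint i;\n')

# Content-only guess, e.g. via shebang or vim modeline.
lexer = guess_lexer(u'#!/usr/bin/env python\nprint("hello")\n')
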
diff --git a/pygments/lexers/_asybuiltins.py b/pygments/lexers/_asy_builtins.py
index 108fa199..51716866 100644
--- a/pygments/lexers/_asybuiltins.py
+++ b/pygments/lexers/_asy_builtins.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""
- pygments.lexers._asybuiltins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ pygments.lexers._asy_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file contains the asy-function names and asy-variable names of
Asymptote.
@@ -10,11 +10,11 @@
TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
for function and variable names.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-ASYFUNCNAME = set([
+ASYFUNCNAME = set((
'AND',
'Arc',
'ArcArrow',
@@ -1038,9 +1038,9 @@ ASYFUNCNAME = set([
'ztick',
'ztick3',
'ztrans'
-])
+))
-ASYVARNAME = set([
+ASYVARNAME = set((
'AliceBlue',
'Align',
'Allow',
@@ -1642,4 +1642,4 @@ ASYVARNAME = set([
'ylabelwidth',
'zerotickfuzz',
'zerowinding'
-])
+))
diff --git a/pygments/lexers/_clbuiltins.py b/pygments/lexers/_cl_builtins.py
index 59f948ba..a2243647 100644
--- a/pygments/lexers/_clbuiltins.py
+++ b/pygments/lexers/_cl_builtins.py
@@ -1,15 +1,15 @@
# -*- coding: utf-8 -*-
"""
- pygments.lexers._clbuiltins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ pygments.lexers._cl_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
ANSI Common Lisp builtins.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-BUILTIN_FUNCTIONS = [ # 638 functions
+BUILTIN_FUNCTIONS = set(( # 638 functions
'<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+',
'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin',
'adjustable-array-p', 'adjust-array', 'allocate-instance',
@@ -157,17 +157,17 @@ BUILTIN_FUNCTIONS = [ # 638 functions
'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line',
'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p',
'y-or-n-p', 'zerop',
-]
+))
-SPECIAL_FORMS = [
+SPECIAL_FORMS = set((
'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if',
'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet',
'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote',
'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw',
'unwind-protect',
-]
+))
-MACROS = [
+MACROS = set((
'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond',
'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric',
'define-compiler-macro', 'define-condition', 'define-method-combination',
@@ -188,19 +188,19 @@ MACROS = [
'with-input-from-string', 'with-open-file', 'with-open-stream',
'with-output-to-string', 'with-package-iterator', 'with-simple-restart',
'with-slots', 'with-standard-io-syntax',
-]
+))
-LAMBDA_LIST_KEYWORDS = [
+LAMBDA_LIST_KEYWORDS = set((
'&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
'&rest', '&whole',
-]
+))
-DECLARATIONS = [
+DECLARATIONS = set((
'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special',
'ignorable', 'notinline', 'type',
-]
+))
-BUILTIN_TYPES = [
+BUILTIN_TYPES = set((
'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit',
'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil',
'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float',
@@ -217,9 +217,9 @@ BUILTIN_TYPES = [
'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition',
'style-warning', 'type-error', 'unbound-variable', 'unbound-slot',
'undefined-function', 'warning',
-]
+))
-BUILTIN_CLASSES = [
+BUILTIN_CLASSES = set((
'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character',
'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream',
'file-stream', 'float', 'function', 'generic-function', 'hash-table',
@@ -229,4 +229,4 @@ BUILTIN_CLASSES = [
'standard-generic-function', 'standard-method', 'standard-object',
'string-stream', 'stream', 'string', 'structure-class', 'structure-object',
'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector',
-]
+))
diff --git a/pygments/lexers/_cocoa_builtins.py b/pygments/lexers/_cocoa_builtins.py
new file mode 100644
index 00000000..b97860b3
--- /dev/null
+++ b/pygments/lexers/_cocoa_builtins.py
@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._cocoa_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file defines a set of types used across Apple's Cocoa frameworks.
+ It contains lists of @interfaces, @protocols and some other types
+ (structs, unions).
+
+ The file may also be used as a standalone generator for the lists above.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+COCOA_INTERFACES = set(['UITableViewCell', 'HKCorrelationQuery', 'NSURLSessionDataTask', 'PHFetchOptions', 'NSLinguisticTagger', 'NSStream', 'AVAudioUnitDelay', 'GCMotion', 'SKPhysicsWorld', 'NSString', 'CMAttitude', 'AVAudioEnvironmentDistanceAttenuationParameters', 'HKStatisticsCollection', 'SCNPlane', 'CBPeer', 'JSContext', 'SCNTransaction', 'SCNTorus', 'AVAudioUnitEffect', 'UICollectionReusableView', 'MTLSamplerDescriptor', 'AVAssetReaderSampleReferenceOutput', 'AVMutableCompositionTrack', 'GKLeaderboard', 'NSFetchedResultsController', 'SKRange', 'MKTileOverlayRenderer', 'MIDINetworkSession', 'UIVisualEffectView', 'CIWarpKernel', 'PKObject', 'MKRoute', 'MPVolumeView', 'UIPrintInfo', 'SCNText', 'ADClient', 'UIKeyCommand', 'AVMutableAudioMix', 'GLKEffectPropertyLight', 'WKScriptMessage', 'AVMIDIPlayer', 'PHCollectionListChangeRequest', 'UICollectionViewLayout', 'NSMutableCharacterSet', 'SKPaymentTransaction', 'NEOnDemandRuleConnect', 'NSShadow', 'SCNView', 'NSURLSessionConfiguration', 'MTLVertexAttributeDescriptor', 'CBCharacteristic', 'HKQuantityType', 'CKLocationSortDescriptor', 'NEVPNIKEv2SecurityAssociationParameters', 'CMStepCounter', 'NSNetService', 'AVAssetWriterInputMetadataAdaptor', 'UICollectionView', 'UIViewPrintFormatter', 'SCNLevelOfDetail', 'CAShapeLayer', 'MCPeerID', 'MPRatingCommand', 'WKNavigation', 'NSDictionary', 'NSFileVersion', 'CMGyroData', 'AVAudioUnitDistortion', 'CKFetchRecordsOperation', 'SKPhysicsJointSpring', 'SCNHitTestResult', 'AVAudioTime', 'CIFilter', 'UIView', 'SCNConstraint', 'CAPropertyAnimation', 'MKMapItem', 'MPRemoteCommandCenter', 'UICollectionViewFlowLayoutInvalidationContext', 'UIInputViewController', 'PKPass', 'SCNPhysicsBehavior', 'MTLRenderPassColorAttachmentDescriptor', 'MKPolygonRenderer', 'CKNotification', 'JSValue', 'PHCollectionList', 'CLGeocoder', 'NSByteCountFormatter', 'AVCaptureScreenInput', 'MPFeedbackCommand', 'CAAnimation', 'MKOverlayPathView', 'UIActionSheet', 'UIMotionEffectGroup', 'NSLengthFormatter', 'UIBarItem', 'SKProduct', 'AVAssetExportSession', 'NSKeyedUnarchiver', 'NSMutableSet', 'SCNPyramid', 'PHAssetCollection', 'MKMapView', 'HMHomeManager', 'CATransition', 'MTLCompileOptions', 'UIVibrancyEffect', 'CLCircularRegion', 'MKTileOverlay', 'SCNShape', 'ACAccountCredential', 'SKPhysicsJointLimit', 'MKMapSnapshotter', 'AVMediaSelectionGroup', 'NSIndexSet', 'CBPeripheralManager', 'CKRecordZone', 'AVAudioRecorder', 'NSURL', 'CBCentral', 'NSNumber', 'AVAudioOutputNode', 'MTLVertexAttributeDescriptorArray', 'MKETAResponse', 'SKTransition', 'SSReadingList', 'HKSourceQuery', 'UITableViewRowAction', 'UITableView', 'SCNParticlePropertyController', 'AVCaptureStillImageOutput', 'GCController', 'AVAudioPlayerNode', 'AVAudioSessionPortDescription', 'NSHTTPURLResponse', 'NEOnDemandRuleEvaluateConnection', 'SKEffectNode', 'HKQuantity', 'GCControllerElement', 'AVPlayerItemAccessLogEvent', 'SCNBox', 'NSExtensionContext', 'MKOverlayRenderer', 'SCNPhysicsVehicle', 'NSDecimalNumber', 'EKReminder', 'MKPolylineView', 'CKQuery', 'AVAudioMixerNode', 'GKAchievementDescription', 'EKParticipant', 'NSBlockOperation', 'UIActivityItemProvider', 'CLLocation', 'NSBatchUpdateRequest', 'PHContentEditingOutput', 'PHObjectChangeDetails', 'MPMoviePlayerController', 'AVAudioFormat', 'HMTrigger', 'MTLRenderPassDepthAttachmentDescriptor', 'SCNRenderer', 'GKScore', 'UISplitViewController', 'HKSource', 'NSURLConnection', 'ABUnknownPersonViewController', 'SCNTechnique', 'UIMenuController', 'NSEvent', 'SKTextureAtlas', 'NSKeyedArchiver', 'GKLeaderboardSet', 
'NSSimpleCString', 'AVAudioPCMBuffer', 'CBATTRequest', 'GKMatchRequest', 'AVMetadataObject', 'SKProductsRequest', 'UIAlertView', 'NSIncrementalStore', 'MFMailComposeViewController', 'SCNFloor', 'NSSortDescriptor', 'CKFetchNotificationChangesOperation', 'MPMovieAccessLog', 'NSManagedObjectContext', 'AVAudioUnitGenerator', 'WKBackForwardList', 'SKMutableTexture', 'AVCaptureAudioDataOutput', 'ACAccount', 'AVMetadataItem', 'MPRatingCommandEvent', 'AVCaptureDeviceInputSource', 'CLLocationManager', 'MPRemoteCommand', 'AVCaptureSession', 'UIStepper', 'UIRefreshControl', 'NEEvaluateConnectionRule', 'CKModifyRecordsOperation', 'UICollectionViewTransitionLayout', 'CBCentralManager', 'NSPurgeableData', 'SLComposeViewController', 'NSHashTable', 'MKUserTrackingBarButtonItem', 'UILexiconEntry', 'CMMotionActivity', 'SKAction', 'SKShader', 'AVPlayerItemOutput', 'MTLRenderPassAttachmentDescriptor', 'UIDocumentInteractionController', 'UIDynamicItemBehavior', 'NSMutableDictionary', 'UILabel', 'AVCaptureInputPort', 'NSExpression', 'CAInterAppAudioTransportView', 'SKMutablePayment', 'UIImage', 'PHCachingImageManager', 'SCNTransformConstraint', 'UIColor', 'SCNGeometrySource', 'AVCaptureAutoExposureBracketedStillImageSettings', 'UIPopoverBackgroundView', 'UIToolbar', 'NSNotificationCenter', 'AVAssetReaderOutputMetadataAdaptor', 'NSEntityMigrationPolicy', 'NSLocale', 'NSURLSession', 'SCNCamera', 'NSTimeZone', 'UIManagedDocument', 'AVMutableVideoCompositionLayerInstruction', 'AVAssetTrackGroup', 'NSInvocationOperation', 'ALAssetRepresentation', 'AVQueuePlayer', 'HMServiceGroup', 'UIPasteboard', 'PHContentEditingInput', 'NSLayoutManager', 'EKCalendarChooser', 'EKObject', 'CATiledLayer', 'GLKReflectionMapEffect', 'NSManagedObjectID', 'NSEnergyFormatter', 'SLRequest', 'HMCharacteristic', 'AVPlayerLayer', 'MTLRenderPassDescriptor', 'SKPayment', 'NSPointerArray', 'AVAudioMix', 'SCNLight', 'MCAdvertiserAssistant', 'MKMapSnapshotOptions', 'HKCategorySample', 'AVAudioEnvironmentReverbParameters', 'SCNMorpher', 'AVTimedMetadataGroup', 'CBMutableCharacteristic', 'NSFetchRequest', 'UIDevice', 'NSManagedObject', 'NKAssetDownload', 'AVOutputSettingsAssistant', 'SKPhysicsJointPin', 'UITabBar', 'UITextInputMode', 'NSFetchRequestExpression', 'HMActionSet', 'CTSubscriber', 'PHAssetChangeRequest', 'NSPersistentStoreRequest', 'UITabBarController', 'HKQuantitySample', 'AVPlayerItem', 'AVSynchronizedLayer', 'MKDirectionsRequest', 'NSMetadataItem', 'UIPresentationController', 'UINavigationItem', 'PHFetchResultChangeDetails', 'PHImageManager', 'AVCaptureManualExposureBracketedStillImageSettings', 'UIStoryboardPopoverSegue', 'SCNLookAtConstraint', 'UIGravityBehavior', 'UIWindow', 'CBMutableDescriptor', 'NEOnDemandRuleDisconnect', 'UIBezierPath', 'UINavigationController', 'ABPeoplePickerNavigationController', 'EKSource', 'AVAssetWriterInput', 'AVPlayerItemTrack', 'GLKEffectPropertyTexture', 'NSURLResponse', 'SKPaymentQueue', 'NSAssertionHandler', 'MKReverseGeocoder', 'GCControllerAxisInput', 'NSArray', 'NSOrthography', 'NSURLSessionUploadTask', 'NSCharacterSet', 'AVMutableVideoCompositionInstruction', 'AVAssetReaderOutput', 'EAGLContext', 'WKFrameInfo', 'CMPedometer', 'MyClass', 'CKModifyBadgeOperation', 'AVCaptureAudioFileOutput', 'SKEmitterNode', 'NSMachPort', 'AVVideoCompositionCoreAnimationTool', 'PHCollection', 'SCNPhysicsWorld', 'NSURLRequest', 'CMAccelerometerData', 'NSNetServiceBrowser', 'CLFloor', 'AVAsynchronousVideoCompositionRequest', 'SCNGeometry', 'SCNIKConstraint', 'CIKernel', 'CAGradientLayer', 'HKCharacteristicType', 
'NSFormatter', 'SCNAction', 'CATransaction', 'CBUUID', 'UIStoryboard', 'MPMediaLibrary', 'UITapGestureRecognizer', 'MPMediaItemArtwork', 'NSURLSessionTask', 'AVAudioUnit', 'MCBrowserViewController', 'NSRelationshipDescription', 'HKSample', 'WKWebView', 'NSMutableAttributedString', 'NSPersistentStoreAsynchronousResult', 'MPNowPlayingInfoCenter', 'MKLocalSearch', 'EAAccessory', 'HKCorrelation', 'CATextLayer', 'NSNotificationQueue', 'UINib', 'GLKTextureLoader', 'HKObjectType', 'NSValue', 'NSMutableIndexSet', 'SKPhysicsContact', 'NSProgress', 'AVPlayerViewController', 'CAScrollLayer', 'GKSavedGame', 'NSTextCheckingResult', 'PHObjectPlaceholder', 'SKConstraint', 'EKEventEditViewController', 'NSEntityDescription', 'NSURLCredentialStorage', 'UIApplication', 'SKDownload', 'SCNNode', 'MKLocalSearchRequest', 'SKScene', 'UISearchDisplayController', 'NEOnDemandRule', 'MTLRenderPassStencilAttachmentDescriptor', 'CAReplicatorLayer', 'UIPrintPageRenderer', 'EKCalendarItem', 'NSUUID', 'EAAccessoryManager', 'NEOnDemandRuleIgnore', 'SKRegion', 'AVAssetResourceLoader', 'EAWiFiUnconfiguredAccessoryBrowser', 'NSUserActivity', 'CTCall', 'UIPrinterPickerController', 'CIVector', 'UINavigationBar', 'UIPanGestureRecognizer', 'MPMediaQuery', 'ABNewPersonViewController', 'CKRecordZoneID', 'HKAnchoredObjectQuery', 'CKFetchRecordZonesOperation', 'UIStoryboardSegue', 'ACAccountType', 'GKSession', 'SKVideoNode', 'PHChange', 'SKReceiptRefreshRequest', 'GCExtendedGamepadSnapshot', 'MPSeekCommandEvent', 'GCExtendedGamepad', 'CAValueFunction', 'SCNCylinder', 'NSNotification', 'NSBatchUpdateResult', 'PKPushCredentials', 'SCNPhysicsSliderJoint', 'AVCaptureDeviceFormat', 'AVPlayerItemErrorLog', 'NSMapTable', 'NSSet', 'CMMotionManager', 'GKVoiceChatService', 'UIPageControl', 'UILexicon', 'MTLArrayType', 'AVAudioUnitReverb', 'MKGeodesicPolyline', 'AVMutableComposition', 'NSLayoutConstraint', 'UIPrinter', 'NSOrderedSet', 'CBAttribute', 'PKPushPayload', 'NSIncrementalStoreNode', 'EKEventStore', 'MPRemoteCommandEvent', 'UISlider', 'UIBlurEffect', 'CKAsset', 'AVCaptureInput', 'AVAudioEngine', 'MTLVertexDescriptor', 'SKPhysicsBody', 'NSOperation', 'UIImageAsset', 'MKMapCamera', 'SKProductsResponse', 'GLKEffectPropertyMaterial', 'AVCaptureDevice', 'CTCallCenter', 'CABTMIDILocalPeripheralViewController', 'NEVPNManager', 'HKQuery', 'SCNPhysicsContact', 'CBMutableService', 'AVSampleBufferDisplayLayer', 'SCNSceneSource', 'SKLightNode', 'CKDiscoveredUserInfo', 'NSMutableArray', 'MTLDepthStencilDescriptor', 'MTLArgument', 'NSMassFormatter', 'CIRectangleFeature', 'PKPushRegistry', 'NEVPNConnection', 'MCNearbyServiceBrowser', 'NSOperationQueue', 'MKPolylineRenderer', 'UICollectionViewLayoutAttributes', 'NSValueTransformer', 'UICollectionViewFlowLayout', 'CIBarcodeFeature', 'MPChangePlaybackRateCommandEvent', 'NSEntityMapping', 'SKTexture', 'NSMergePolicy', 'UITextInputStringTokenizer', 'NSRecursiveLock', 'AVAsset', 'NSUndoManager', 'AVAudioUnitSampler', 'NSItemProvider', 'SKUniform', 'MPMediaPickerController', 'CKOperation', 'MTLRenderPipelineDescriptor', 'EAWiFiUnconfiguredAccessory', 'NSFileCoordinator', 'SKRequest', 'NSFileHandle', 'NSConditionLock', 'UISegmentedControl', 'NSManagedObjectModel', 'UITabBarItem', 'SCNCone', 'MPMediaItem', 'SCNMaterial', 'EKRecurrenceRule', 'UIEvent', 'UITouch', 'UIPrintInteractionController', 'CMDeviceMotion', 'NEVPNProtocol', 'NSCompoundPredicate', 'HKHealthStore', 'MKMultiPoint', 'HKSampleType', 'UIPrintFormatter', 'AVAudioUnitEQFilterParameters', 'SKView', 'NSConstantString', 'UIPopoverController', 
'CKDatabase', 'AVMetadataFaceObject', 'UIAccelerometer', 'EKEventViewController', 'CMAltitudeData', 'MTLStencilDescriptor', 'UISwipeGestureRecognizer', 'NSPort', 'MKCircleRenderer', 'AVCompositionTrack', 'NSAsynchronousFetchRequest', 'NSUbiquitousKeyValueStore', 'NSMetadataQueryResultGroup', 'AVAssetResourceLoadingDataRequest', 'UITableViewHeaderFooterView', 'CKNotificationID', 'AVAudioSession', 'HKUnit', 'NSNull', 'NSPersistentStoreResult', 'MKCircleView', 'AVAudioChannelLayout', 'NEVPNProtocolIKEv2', 'WKProcessPool', 'UIAttachmentBehavior', 'CLBeacon', 'NSInputStream', 'NSURLCache', 'GKPlayer', 'NSMappingModel', 'NSHTTPCookie', 'AVMutableVideoComposition', 'PHFetchResult', 'NSAttributeDescription', 'AVPlayer', 'MKAnnotationView', 'UIFontDescriptor', 'NSTimer', 'CBDescriptor', 'MKOverlayView', 'AVAudioUnitTimePitch', 'NSSaveChangesRequest', 'UIReferenceLibraryViewController', 'SKPhysicsJointFixed', 'UILocalizedIndexedCollation', 'UIInterpolatingMotionEffect', 'UIDocumentPickerViewController', 'AVAssetWriter', 'NSBundle', 'SKStoreProductViewController', 'GLKViewController', 'NSMetadataQueryAttributeValueTuple', 'GKTurnBasedMatch', 'AVAudioFile', 'UIActivity', 'NSPipe', 'MKShape', 'NSMergeConflict', 'CIImage', 'HKObject', 'UIRotationGestureRecognizer', 'AVPlayerItemLegibleOutput', 'AVAssetImageGenerator', 'GCControllerButtonInput', 'CKMarkNotificationsReadOperation', 'CKSubscription', 'MPTimedMetadata', 'NKIssue', 'UIScreenMode', 'HMAccessoryBrowser', 'GKTurnBasedEventHandler', 'UIWebView', 'MKPolyline', 'JSVirtualMachine', 'AVAssetReader', 'NSAttributedString', 'GKMatchmakerViewController', 'NSCountedSet', 'UIButton', 'WKNavigationResponse', 'GKLocalPlayer', 'MPMovieErrorLog', 'AVSpeechUtterance', 'HKStatistics', 'UILocalNotification', 'HKBiologicalSexObject', 'AVURLAsset', 'CBPeripheral', 'NSDateComponentsFormatter', 'SKSpriteNode', 'UIAccessibilityElement', 'AVAssetWriterInputGroup', 'HMZone', 'AVAssetReaderAudioMixOutput', 'NSEnumerator', 'UIDocument', 'MKLocalSearchResponse', 'UISimpleTextPrintFormatter', 'PHPhotoLibrary', 'CBService', 'UIDocumentMenuViewController', 'MCSession', 'QLPreviewController', 'CAMediaTimingFunction', 'UITextPosition', 'ASIdentifierManager', 'AVAssetResourceLoadingRequest', 'SLComposeServiceViewController', 'UIPinchGestureRecognizer', 'PHObject', 'NSExtensionItem', 'HKSampleQuery', 'MTLRenderPipelineColorAttachmentDescriptorArray', 'MKRouteStep', 'SCNCapsule', 'NSMetadataQuery', 'AVAssetResourceLoadingContentInformationRequest', 'UITraitCollection', 'CTCarrier', 'NSFileSecurity', 'UIAcceleration', 'UIMotionEffect', 'MTLRenderPipelineReflection', 'CLHeading', 'CLVisit', 'MKDirectionsResponse', 'HMAccessory', 'MTLStructType', 'UITextView', 'CMMagnetometerData', 'UICollisionBehavior', 'UIProgressView', 'CKServerChangeToken', 'UISearchBar', 'MKPlacemark', 'AVCaptureConnection', 'NSPropertyMapping', 'ALAssetsFilter', 'SK3DNode', 'AVPlayerItemErrorLogEvent', 'NSJSONSerialization', 'AVAssetReaderVideoCompositionOutput', 'ABPersonViewController', 'CIDetector', 'GKTurnBasedMatchmakerViewController', 'MPMediaItemCollection', 'SCNSphere', 'NSCondition', 'NSURLCredential', 'MIDINetworkConnection', 'NSFileProviderExtension', 'NSDecimalNumberHandler', 'NSAtomicStoreCacheNode', 'NSAtomicStore', 'EKAlarm', 'CKNotificationInfo', 'AVAudioUnitEQ', 'UIPercentDrivenInteractiveTransition', 'MKPolygon', 'AVAssetTrackSegment', 'MTLVertexAttribute', 'NSExpressionDescription', 'HKStatisticsCollectionQuery', 'NSURLAuthenticationChallenge', 'NSDirectoryEnumerator', 'MKDistanceFormatter', 
'UIAlertAction', 'NSPropertyListSerialization', 'GKPeerPickerController', 'UIUserNotificationSettings', 'UITableViewController', 'GKNotificationBanner', 'MKPointAnnotation', 'MTLRenderPassColorAttachmentDescriptorArray', 'NSCache', 'SKPhysicsJoint', 'NSXMLParser', 'UIViewController', 'MFMessageComposeViewController', 'AVAudioInputNode', 'NSDataDetector', 'CABTMIDICentralViewController', 'AVAudioUnitMIDIInstrument', 'AVCaptureVideoPreviewLayer', 'AVAssetWriterInputPassDescription', 'MPChangePlaybackRateCommand', 'NSURLComponents', 'CAMetalLayer', 'UISnapBehavior', 'AVMetadataMachineReadableCodeObject', 'CKDiscoverUserInfosOperation', 'NSTextAttachment', 'NSException', 'UIMenuItem', 'CMMotionActivityManager', 'SCNGeometryElement', 'NCWidgetController', 'CAEmitterLayer', 'MKUserLocation', 'UIImagePickerController', 'CIFeature', 'AVCaptureDeviceInput', 'ALAsset', 'NSURLSessionDownloadTask', 'SCNPhysicsHingeJoint', 'MPMoviePlayerViewController', 'NSMutableOrderedSet', 'SCNMaterialProperty', 'UIFont', 'AVCaptureVideoDataOutput', 'NSCachedURLResponse', 'ALAssetsLibrary', 'NSInvocation', 'UILongPressGestureRecognizer', 'NSTextStorage', 'WKWebViewConfiguration', 'CIFaceFeature', 'MKMapSnapshot', 'GLKEffectPropertyFog', 'AVComposition', 'CKDiscoverAllContactsOperation', 'AVAudioMixInputParameters', 'CAEmitterBehavior', 'PKPassLibrary', 'UIMutableUserNotificationCategory', 'NSLock', 'NEVPNProtocolIPSec', 'ADBannerView', 'UIDocumentPickerExtensionViewController', 'UIActivityIndicatorView', 'AVPlayerMediaSelectionCriteria', 'CALayer', 'UIAccessibilityCustomAction', 'UIBarButtonItem', 'AVAudioSessionRouteDescription', 'CLBeaconRegion', 'HKBloodTypeObject', 'MTLVertexBufferLayoutDescriptorArray', 'CABasicAnimation', 'AVVideoCompositionInstruction', 'AVMutableTimedMetadataGroup', 'EKRecurrenceEnd', 'NSTextContainer', 'TWTweetComposeViewController', 'UIScrollView', 'WKNavigationAction', 'AVPlayerItemMetadataOutput', 'EKRecurrenceDayOfWeek', 'NSNumberFormatter', 'MTLComputePipelineReflection', 'UIScreen', 'CLRegion', 'NSProcessInfo', 'GLKTextureInfo', 'SCNSkinner', 'AVCaptureMetadataOutput', 'SCNAnimationEvent', 'NSTextTab', 'JSManagedValue', 'NSDate', 'UITextChecker', 'WKBackForwardListItem', 'NSData', 'NSParagraphStyle', 'AVMutableMetadataItem', 'EKCalendar', 'NSMutableURLRequest', 'UIVideoEditorController', 'HMTimerTrigger', 'AVAudioUnitVarispeed', 'UIDynamicAnimator', 'AVCompositionTrackSegment', 'GCGamepadSnapshot', 'MPMediaEntity', 'GLKSkyboxEffect', 'UISwitch', 'EKStructuredLocation', 'UIGestureRecognizer', 'NSProxy', 'GLKBaseEffect', 'UIPushBehavior', 'GKScoreChallenge', 'NSCoder', 'MPMediaPlaylist', 'NSDateComponents', 'WKUserScript', 'EKEvent', 'NSDateFormatter', 'NSAsynchronousFetchResult', 'AVAssetWriterInputPixelBufferAdaptor', 'UIVisualEffect', 'UICollectionViewCell', 'UITextField', 'CLPlacemark', 'MPPlayableContentManager', 'AVCaptureOutput', 'HMCharacteristicWriteAction', 'CKModifySubscriptionsOperation', 'NSPropertyDescription', 'GCGamepad', 'UIMarkupTextPrintFormatter', 'SCNTube', 'NSPersistentStoreCoordinator', 'AVAudioEnvironmentNode', 'GKMatchmaker', 'CIContext', 'NSThread', 'SLComposeSheetConfigurationItem', 'SKPhysicsJointSliding', 'NSPredicate', 'GKVoiceChat', 'SKCropNode', 'AVCaptureAudioPreviewOutput', 'NSStringDrawingContext', 'GKGameCenterViewController', 'UIPrintPaper', 'SCNPhysicsBallSocketJoint', 'UICollectionViewLayoutInvalidationContext', 'GLKEffectPropertyTransform', 'AVAudioIONode', 'UIDatePicker', 'MKDirections', 'ALAssetsGroup', 'CKRecordZoneNotification', 'SCNScene', 
'MPMovieAccessLogEvent', 'CKFetchSubscriptionsOperation', 'CAEmitterCell', 'AVAudioUnitTimeEffect', 'HMCharacteristicMetadata', 'MKPinAnnotationView', 'UIPickerView', 'UIImageView', 'UIUserNotificationCategory', 'SCNPhysicsVehicleWheel', 'HKCategoryType', 'MPMediaQuerySection', 'GKFriendRequestComposeViewController', 'NSError', 'MTLRenderPipelineColorAttachmentDescriptor', 'SCNPhysicsShape', 'UISearchController', 'SCNPhysicsBody', 'CTSubscriberInfo', 'AVPlayerItemAccessLog', 'MPMediaPropertyPredicate', 'CMLogItem', 'NSAutoreleasePool', 'NSSocketPort', 'AVAssetReaderTrackOutput', 'SKNode', 'UIMutableUserNotificationAction', 'SCNProgram', 'AVSpeechSynthesisVoice', 'CMAltimeter', 'AVCaptureAudioChannel', 'GKTurnBasedExchangeReply', 'AVVideoCompositionLayerInstruction', 'AVSpeechSynthesizer', 'GKChallengeEventHandler', 'AVCaptureFileOutput', 'UIControl', 'SCNPhysicsField', 'CKReference', 'LAContext', 'CKRecordID', 'ADInterstitialAd', 'AVAudioSessionDataSourceDescription', 'AVAudioBuffer', 'CIColorKernel', 'GCControllerDirectionPad', 'NSFileManager', 'AVMutableAudioMixInputParameters', 'UIScreenEdgePanGestureRecognizer', 'CAKeyframeAnimation', 'CKQueryNotification', 'PHAdjustmentData', 'EASession', 'AVAssetResourceRenewalRequest', 'UIInputView', 'NSFileWrapper', 'UIResponder', 'NSPointerFunctions', 'NSHTTPCookieStorage', 'AVMediaSelectionOption', 'NSRunLoop', 'NSFileAccessIntent', 'CAAnimationGroup', 'MKCircle', 'UIAlertController', 'NSMigrationManager', 'NSDateIntervalFormatter', 'UICollectionViewUpdateItem', 'CKDatabaseOperation', 'PHImageRequestOptions', 'SKReachConstraints', 'CKRecord', 'CAInterAppAudioSwitcherView', 'WKWindowFeatures', 'GKInvite', 'NSMutableData', 'PHAssetCollectionChangeRequest', 'NSMutableParagraphStyle', 'UIDynamicBehavior', 'GLKEffectProperty', 'CKFetchRecordChangesOperation', 'SKShapeNode', 'MPMovieErrorLogEvent', 'MKPolygonView', 'MPContentItem', 'HMAction', 'NSScanner', 'GKAchievementChallenge', 'AVAudioPlayer', 'CKContainer', 'AVVideoComposition', 'NKLibrary', 'NSPersistentStore', 'AVCaptureMovieFileOutput', 'HMRoom', 'GKChallenge', 'UITextRange', 'NSURLProtectionSpace', 'ACAccountStore', 'MPSkipIntervalCommand', 'NSComparisonPredicate', 'HMHome', 'PHVideoRequestOptions', 'NSOutputStream', 'MPSkipIntervalCommandEvent', 'PKAddPassesViewController', 'UITextSelectionRect', 'CTTelephonyNetworkInfo', 'AVTextStyleRule', 'NSFetchedPropertyDescription', 'UIPageViewController', 'CATransformLayer', 'UICollectionViewController', 'AVAudioNode', 'MCNearbyServiceAdvertiser', 'NSObject', 'PHAsset', 'GKLeaderboardViewController', 'CKQueryCursor', 'MPMusicPlayerController', 'MKOverlayPathRenderer', 'CMPedometerData', 'HMService', 'SKFieldNode', 'GKAchievement', 'WKUserContentController', 'AVAssetTrack', 'TWRequest', 'SKLabelNode', 'AVCaptureBracketedStillImageSettings', 'MIDINetworkHost', 'MPMediaPredicate', 'AVFrameRateRange', 'MTLTextureDescriptor', 'MTLVertexBufferLayoutDescriptor', 'MPFeedbackCommandEvent', 'UIUserNotificationAction', 'HKStatisticsQuery', 'SCNParticleSystem', 'NSIndexPath', 'AVVideoCompositionRenderContext', 'CADisplayLink', 'HKObserverQuery', 'UIPopoverPresentationController', 'CKQueryOperation', 'CAEAGLLayer', 'NSMutableString', 'NSMessagePort', 'NSURLQueryItem', 'MTLStructMember', 'AVAudioSessionChannelDescription', 'GLKView', 'UIActivityViewController', 'GKAchievementViewController', 'GKTurnBasedParticipant', 'NSURLProtocol', 'NSUserDefaults', 'NSCalendar', 'SKKeyframeSequence', 'AVMetadataItemFilter', 'CKModifyRecordZonesOperation', 'WKPreferences', 
'NSMethodSignature', 'NSRegularExpression', 'EAGLSharegroup', 'AVPlayerItemVideoOutput', 'PHContentEditingInputRequestOptions', 'GKMatch', 'CIColor', 'UIDictationPhrase'])
+COCOA_PROTOCOLS = set(['SKStoreProductViewControllerDelegate', 'AVVideoCompositionInstruction', 'AVAudioSessionDelegate', 'GKMatchDelegate', 'NSFileManagerDelegate', 'UILayoutSupport', 'NSCopying', 'UIPrintInteractionControllerDelegate', 'QLPreviewControllerDataSource', 'SKProductsRequestDelegate', 'NSTextStorageDelegate', 'MCBrowserViewControllerDelegate', 'MTLComputeCommandEncoder', 'SCNSceneExportDelegate', 'UISearchResultsUpdating', 'MFMailComposeViewControllerDelegate', 'MTLBlitCommandEncoder', 'NSDecimalNumberBehaviors', 'PHContentEditingController', 'NSMutableCopying', 'UIActionSheetDelegate', 'UIViewControllerTransitioningDelegate', 'UIAlertViewDelegate', 'AVAudioPlayerDelegate', 'MKReverseGeocoderDelegate', 'NSCoding', 'UITextInputTokenizer', 'GKFriendRequestComposeViewControllerDelegate', 'UIActivityItemSource', 'NSCacheDelegate', 'UIAdaptivePresentationControllerDelegate', 'GKAchievementViewControllerDelegate', 'UIViewControllerTransitionCoordinator', 'EKEventEditViewDelegate', 'NSURLConnectionDelegate', 'UITableViewDelegate', 'GKPeerPickerControllerDelegate', 'UIGuidedAccessRestrictionDelegate', 'AVSpeechSynthesizerDelegate', 'AVAudio3DMixing', 'AVPlayerItemLegibleOutputPushDelegate', 'ADInterstitialAdDelegate', 'HMAccessoryBrowserDelegate', 'AVAssetResourceLoaderDelegate', 'UITabBarControllerDelegate', 'CKRecordValue', 'SKPaymentTransactionObserver', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'UIInputViewAudioFeedback', 'GKChallengeListener', 'SKSceneDelegate', 'UIPickerViewDelegate', 'UIWebViewDelegate', 'UIApplicationDelegate', 'GKInviteEventListener', 'MPMediaPlayback', 'MyClassJavaScriptMethods', 'AVAsynchronousKeyValueLoading', 'QLPreviewItem', 'SCNBoundingVolume', 'NSPortDelegate', 'UIContentContainer', 'SCNNodeRendererDelegate', 'SKRequestDelegate', 'SKPhysicsContactDelegate', 'HMAccessoryDelegate', 'UIPageViewControllerDataSource', 'SCNSceneRendererDelegate', 'SCNPhysicsContactDelegate', 'MKMapViewDelegate', 'AVPlayerItemOutputPushDelegate', 'UICollectionViewDelegate', 'UIImagePickerControllerDelegate', 'MTLRenderCommandEncoder', 'UIToolbarDelegate', 'WKUIDelegate', 'SCNActionable', 'NSURLConnectionDataDelegate', 'MKOverlay', 'CBCentralManagerDelegate', 'JSExport', 'NSTextLayoutOrientationProvider', 'UIPickerViewDataSource', 'PKPushRegistryDelegate', 'UIViewControllerTransitionCoordinatorContext', 'NSLayoutManagerDelegate', 'MTLLibrary', 'NSFetchedResultsControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'MTLResource', 'NSDiscardableContent', 'UITextFieldDelegate', 'MTLBuffer', 'MTLSamplerState', 'GKGameCenterControllerDelegate', 'MPMediaPickerControllerDelegate', 'UISplitViewControllerDelegate', 'UIAppearance', 'UIPickerViewAccessibilityDelegate', 'UITraitEnvironment', 'UIScrollViewAccessibilityDelegate', 'ADBannerViewDelegate', 'MPPlayableContentDataSource', 'MTLComputePipelineState', 'NSURLSessionDelegate', 'MTLCommandBuffer', 'NSXMLParserDelegate', 'UIViewControllerRestoration', 'UISearchBarDelegate', 'UIBarPositioning', 'CBPeripheralDelegate', 'UISearchDisplayDelegate', 'CAAction', 'PKAddPassesViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'MTLDepthStencilState', 'GKTurnBasedMatchmakerViewControllerDelegate', 'MPPlayableContentDelegate', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'UIAppearanceContainer', 'UIStateRestoring', 'UITextDocumentProxy', 'MTLDrawable', 'NSURLSessionTaskDelegate', 'NSFilePresenter', 'AVAudioStereoMixing', 'UIViewControllerContextTransitioning', 'UITextInput', 'CBPeripheralManagerDelegate', 
'UITextInputDelegate', 'NSFastEnumeration', 'NSURLAuthenticationChallengeSender', 'SCNProgramDelegate', 'AVVideoCompositing', 'SCNAnimatable', 'NSSecureCoding', 'MCAdvertiserAssistantDelegate', 'GKLocalPlayerListener', 'GLKNamedEffect', 'UIPopoverControllerDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'NSExtensionRequestHandling', 'UITextSelecting', 'UIPrinterPickerControllerDelegate', 'NCWidgetProviding', 'MTLCommandEncoder', 'NSURLProtocolClient', 'MFMessageComposeViewControllerDelegate', 'UIVideoEditorControllerDelegate', 'WKNavigationDelegate', 'GKSavedGameListener', 'UITableViewDataSource', 'MTLFunction', 'EKCalendarChooserDelegate', 'NSUserActivityDelegate', 'UICollisionBehaviorDelegate', 'NSStreamDelegate', 'MCNearbyServiceBrowserDelegate', 'HMHomeDelegate', 'UINavigationControllerDelegate', 'MCSessionDelegate', 'UIDocumentPickerDelegate', 'UIViewControllerInteractiveTransitioning', 'GKTurnBasedEventListener', 'SCNSceneRenderer', 'MTLTexture', 'GLKViewDelegate', 'EAAccessoryDelegate', 'WKScriptMessageHandler', 'PHPhotoLibraryChangeObserver', 'NSKeyedUnarchiverDelegate', 'AVPlayerItemMetadataOutputPushDelegate', 'NSMachPortDelegate', 'SCNShadable', 'UIPopoverBackgroundViewMethods', 'UIDocumentMenuDelegate', 'UIBarPositioningDelegate', 'ABPersonViewControllerDelegate', 'NSNetServiceBrowserDelegate', 'EKEventViewDelegate', 'UIScrollViewDelegate', 'NSURLConnectionDownloadDelegate', 'UIGestureRecognizerDelegate', 'UINavigationBarDelegate', 'AVAudioMixing', 'NSFetchedResultsSectionInfo', 'UIDocumentInteractionControllerDelegate', 'MTLParallelRenderCommandEncoder', 'QLPreviewControllerDelegate', 'UIAccessibilityReadingContent', 'ABUnknownPersonViewControllerDelegate', 'GLKViewControllerDelegate', 'UICollectionViewDelegateFlowLayout', 'UIPopoverPresentationControllerDelegate', 'UIDynamicAnimatorDelegate', 'NSTextAttachmentContainer', 'MKAnnotation', 'UIAccessibilityIdentification', 'UICoordinateSpace', 'ABNewPersonViewControllerDelegate', 'MTLDevice', 'CAMediaTiming', 'AVCaptureFileOutputRecordingDelegate', 'HMHomeManagerDelegate', 'UITextViewDelegate', 'UITabBarDelegate', 'GKLeaderboardViewControllerDelegate', 'UISearchControllerDelegate', 'EAWiFiUnconfiguredAccessoryBrowserDelegate', 'UITextInputTraits', 'MTLRenderPipelineState', 'GKVoiceChatClient', 'UIKeyInput', 'UICollectionViewDataSource', 'SCNTechniqueSupport', 'NSLocking', 'AVCaptureFileOutputDelegate', 'GKChallengeEventHandlerDelegate', 'UIObjectRestoration', 'CIFilterConstructor', 'AVPlayerItemOutputPullDelegate', 'EAGLDrawable', 'AVVideoCompositionValidationHandling', 'UIViewControllerAnimatedTransitioning', 'NSURLSessionDownloadDelegate', 'UIAccelerometerDelegate', 'UIPageViewControllerDelegate', 'MTLCommandQueue', 'UIDataSourceModelAssociation', 'AVAudioRecorderDelegate', 'GKSessionDelegate', 'NSKeyedArchiverDelegate', 'CAMetalDrawable', 'UIDynamicItem', 'CLLocationManagerDelegate', 'NSMetadataQueryDelegate', 'NSNetServiceDelegate', 'GKMatchmakerViewControllerDelegate', 'NSURLSessionDataDelegate'])
+COCOA_PRIMITIVES = set(['ROTAHeader', '__CFBundle', 'MortSubtable', 'AudioFilePacketTableInfo', 'CGPDFOperatorTable', 'KerxStateEntry', 'ExtendedTempoEvent', 'CTParagraphStyleSetting', 'OpaqueMIDIPort', '_GLKMatrix3', '_GLKMatrix2', '_GLKMatrix4', 'ExtendedControlEvent', 'CAFAudioDescription', 'OpaqueCMBlockBuffer', 'CGTextDrawingMode', 'EKErrorCode', 'GCAcceleration', 'AudioUnitParameterInfo', '__SCPreferences', '__CTFrame', '__CTLine', 'AudioFile_SMPTE_Time', 'gss_krb5_lucid_context_v1', 'OpaqueJSValue', 'TrakTableEntry', 'AudioFramePacketTranslation', 'CGImageSource', 'OpaqueJSPropertyNameAccumulator', 'JustPCGlyphRepeatAddAction', '__CFBinaryHeap', 'OpaqueMIDIThruConnection', 'opaqueCMBufferQueue', 'OpaqueMusicSequence', 'MortRearrangementSubtable', 'MixerDistanceParams', 'MorxSubtable', 'MIDIObjectPropertyChangeNotification', 'SFNTLookupSegment', 'CGImageMetadataErrors', 'CGPath', 'OpaqueMIDIEndpoint', 'AudioComponentPlugInInterface', 'gss_ctx_id_t_desc_struct', 'sfntFontFeatureSetting', 'OpaqueJSContextGroup', '__SCNetworkConnection', 'AudioUnitParameterValueTranslation', 'CGImageMetadataType', 'CGPattern', 'AudioFileTypeAndFormatID', 'CGContext', 'AUNodeInteraction', 'SFNTLookupTable', 'JustPCDecompositionAction', 'KerxControlPointHeader', 'AudioStreamPacketDescription', 'KernSubtableHeader', '__SecCertificate', 'AUMIDIOutputCallbackStruct', 'MIDIMetaEvent', 'AudioQueueChannelAssignment', 'AnchorPoint', 'JustTable', '__CFNetService', 'CF_BRIDGED_TYPE', 'gss_krb5_lucid_key', 'CGPDFDictionary', 'KerxSubtableHeader', 'CAF_UUID_ChunkHeader', 'gss_krb5_cfx_keydata', 'OpaqueJSClass', 'CGGradient', 'OpaqueMIDISetup', 'JustPostcompTable', '__CTParagraphStyle', 'AudioUnitParameterHistoryInfo', 'OpaqueJSContext', 'CGShading', 'MIDIThruConnectionParams', 'BslnFormat0Part', 'SFNTLookupSingle', '__CFHost', '__SecRandom', '__CTFontDescriptor', '_NSRange', 'sfntDirectory', 'AudioQueueLevelMeterState', 'CAFPositionPeak', 'PropLookupSegment', '__CVOpenGLESTextureCache', 'sfntInstance', '_GLKQuaternion', 'AnkrTable', '__SCNetworkProtocol', 'gss_buffer_desc_struct', 'CAFFileHeader', 'KerxOrderedListHeader', 'CGBlendMode', 'STXEntryOne', 'CAFRegion', 'SFNTLookupTrimmedArrayHeader', 'SCNMatrix4', 'KerxControlPointEntry', 'OpaqueMusicTrack', '_GLKVector4', 'gss_OID_set_desc_struct', 'OpaqueMusicPlayer', '_CFHTTPAuthentication', 'CGAffineTransform', 'CAFMarkerChunk', 'AUHostIdentifier', 'ROTAGlyphEntry', 'BslnTable', 'gss_krb5_lucid_context_version', '_GLKMatrixStack', 'CGImage', 'KernStateEntry', 'SFNTLookupSingleHeader', 'MortLigatureSubtable', 'CAFUMIDChunk', 'SMPTETime', 'CAFDataChunk', 'CGPDFStream', 'AudioFileRegionList', 'STEntryTwo', 'SFNTLookupBinarySearchHeader', 'OpbdTable', '__CTGlyphInfo', 'BslnFormat2Part', 'KerxIndexArrayHeader', 'TrakTable', 'KerxKerningPair', '__CFBitVector', 'KernVersion0SubtableHeader', 'OpaqueAudioComponentInstance', 'AudioChannelLayout', '__CFUUID', 'MIDISysexSendRequest', '__CFNumberFormatter', 'CGImageSourceStatus', 'AudioFileMarkerList', 'AUSamplerBankPresetData', 'CGDataProvider', 'AudioFormatInfo', '__SecIdentity', 'sfntCMapExtendedSubHeader', 'MIDIChannelMessage', 'KernOffsetTable', 'CGColorSpaceModel', 'MFMailComposeErrorCode', 'CGFunction', '__SecTrust', 'AVAudio3DAngularOrientation', 'CGFontPostScriptFormat', 'KernStateHeader', 'AudioUnitCocoaViewInfo', 'CGDataConsumer', 'OpaqueMIDIDevice', 'KernVersion0Header', 'AnchorPointTable', 'CGImageDestination', 'CAFInstrumentChunk', 'AudioUnitMeterClipping', 'MorxChain', '__CTFontCollection', 'STEntryOne', 
'STXEntryTwo', 'ExtendedNoteOnEvent', 'CGColorRenderingIntent', 'KerxSimpleArrayHeader', 'MorxTable', '_GLKVector3', '_GLKVector2', 'MortTable', 'CGPDFBox', 'AudioUnitParameterValueFromString', '__CFSocket', 'ALCdevice_struct', 'MIDINoteMessage', 'sfntFeatureHeader', 'CGRect', '__SCNetworkInterface', '__CFTree', 'MusicEventUserData', 'TrakTableData', 'GCQuaternion', 'MortContextualSubtable', '__CTRun', 'AudioUnitFrequencyResponseBin', 'MortChain', 'MorxInsertionSubtable', 'CGImageMetadata', 'gss_auth_identity', 'AudioUnitMIDIControlMapping', 'CAFChunkHeader', 'CGImagePropertyOrientation', 'CGPDFScanner', 'OpaqueMusicEventIterator', 'sfntDescriptorHeader', 'AudioUnitNodeConnection', 'OpaqueMIDIDeviceList', 'ExtendedAudioFormatInfo', 'BslnFormat1Part', 'sfntFontDescriptor', 'KernSimpleArrayHeader', '__CFRunLoopObserver', 'CGPatternTiling', 'MIDINotification', 'MorxLigatureSubtable', 'MessageComposeResult', 'MIDIThruConnectionEndpoint', 'MusicDeviceStdNoteParams', 'opaqueCMSimpleQueue', 'ALCcontext_struct', 'OpaqueAudioQueue', 'PropLookupSingle', 'CGInterpolationQuality', 'CGColor', 'AudioOutputUnitStartAtTimeParams', 'gss_name_t_desc_struct', 'CGFunctionCallbacks', 'CAFPacketTableHeader', 'AudioChannelDescription', 'sfntFeatureName', 'MorxContextualSubtable', 'CVSMPTETime', 'AudioValueRange', 'CGTextEncoding', 'AudioStreamBasicDescription', 'AUNodeRenderCallback', 'AudioPanningInfo', 'KerxOrderedListEntry', '__CFAllocator', 'OpaqueJSPropertyNameArray', '__SCDynamicStore', 'OpaqueMIDIEntity', '__CTRubyAnnotation', 'SCNVector4', 'CFHostClientContext', 'CFNetServiceClientContext', 'AudioUnitPresetMAS_SettingData', 'opaqueCMBufferQueueTriggerToken', 'AudioUnitProperty', 'CAFRegionChunk', 'CGPDFString', '__GLsync', '__CFStringTokenizer', 'JustWidthDeltaEntry', 'sfntVariationAxis', '__CFNetDiagnostic', 'CAFOverviewSample', 'sfntCMapEncoding', 'CGVector', '__SCNetworkService', 'opaqueCMSampleBuffer', 'AUHostVersionIdentifier', 'AudioBalanceFade', 'sfntFontRunFeature', 'KerxCoordinateAction', 'sfntCMapSubHeader', 'CVPlanarPixelBufferInfo', 'AUNumVersion', 'AUSamplerInstrumentData', 'AUPreset', '__CTRunDelegate', 'OpaqueAudioQueueProcessingTap', 'KerxTableHeader', '_NSZone', 'OpaqueExtAudioFile', '__CFRunLoopSource', '__CVMetalTextureCache', 'KerxAnchorPointAction', 'OpaqueJSString', 'AudioQueueParameterEvent', '__CFHTTPMessage', 'OpaqueCMClock', 'ScheduledAudioFileRegion', 'STEntryZero', 'AVAudio3DPoint', 'gss_channel_bindings_struct', 'sfntVariationHeader', 'AUChannelInfo', 'UIOffset', 'GLKEffectPropertyPrv', 'KerxStateHeader', 'CGLineJoin', 'CGPDFDocument', '__CFBag', 'KernOrderedListHeader', '__SCNetworkSet', '__SecKey', 'MIDIObjectAddRemoveNotification', 'AudioUnitParameter', 'JustPCActionSubrecord', 'AudioComponentDescription', 'AudioUnitParameterValueName', 'AudioUnitParameterEvent', 'KerxControlPointAction', 'AudioTimeStamp', 'KernKerningPair', 'gss_buffer_set_desc_struct', 'MortFeatureEntry', 'FontVariation', 'CAFStringID', 'LcarCaretClassEntry', 'AudioUnitParameterStringFromValue', 'ACErrorCode', 'ALMXGlyphEntry', 'LtagTable', '__CTTypesetter', 'AuthorizationOpaqueRef', 'UIEdgeInsets', 'CGPathElement', 'CAFMarker', 'KernTableHeader', 'NoteParamsControlValue', 'SSLContext', 'gss_cred_id_t_desc_struct', 'AudioUnitParameterNameInfo', 'CGDataConsumerCallbacks', 'ALMXHeader', 'CGLineCap', 'MIDIControlTransform', 'CGPDFArray', '__SecPolicy', 'AudioConverterPrimeInfo', '__CTTextTab', '__CFNetServiceMonitor', 'AUInputSamplesInOutputCallbackStruct', '__CTFramesetter', 'CGPDFDataFormat', 'STHeader', 
'CVPlanarPixelBufferInfo_YCbCrPlanar', 'MIDIValueMap', 'JustDirectionTable', '__SCBondStatus', 'SFNTLookupSegmentHeader', 'OpaqueCMMemoryPool', 'CGPathDrawingMode', 'CGFont', '__SCNetworkReachability', 'AudioClassDescription', 'CGPoint', 'AVAudio3DVectorOrientation', 'CAFStrings', '__CFNetServiceBrowser', 'opaqueMTAudioProcessingTap', 'sfntNameRecord', 'CGPDFPage', 'CGLayer', 'ComponentInstanceRecord', 'CAFInfoStrings', 'HostCallbackInfo', 'MusicDeviceNoteParams', 'OpaqueVTCompressionSession', 'KernIndexArrayHeader', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'MusicTrackLoopInfo', 'opaqueCMFormatDescription', 'STClassTable', 'sfntDirectoryEntry', 'OpaqueCMTimebase', 'CGDataProviderDirectCallbacks', 'MIDIPacketList', 'CAFOverviewChunk', 'MIDIPacket', 'ScheduledAudioSlice', 'CGDataProviderSequentialCallbacks', 'AudioBuffer', 'MorxRearrangementSubtable', 'CGPatternCallbacks', 'AUDistanceAttenuationData', 'MIDIIOErrorNotification', 'CGPDFContentStream', 'IUnknownVTbl', 'MIDITransform', 'MortInsertionSubtable', 'CABarBeatTime', 'AudioBufferList', '__CVBuffer', 'AURenderCallbackStruct', 'STXEntryZero', 'JustPCDuctilityAction', 'OpaqueAudioQueueTimeline', 'VTDecompressionOutputCallbackRecord', 'OpaqueMIDIClient', '__CFPlugInInstance', 'AudioQueueBuffer', '__CFFileDescriptor', 'AudioUnitConnection', '_GKTurnBasedExchangeStatus', 'LcarCaretTable', 'CVPlanarComponentInfo', 'JustWidthDeltaGroup', 'OpaqueAudioComponent', 'ParameterEvent', '__CVPixelBufferPool', '__CTFont', 'CGColorSpace', 'CGSize', 'AUDependentParameter', 'MIDIDriverInterface', 'gss_krb5_rfc1964_keydata', '__CFDateFormatter', 'LtagStringRange', 'OpaqueVTDecompressionSession', 'gss_iov_buffer_desc_struct', 'AUPresetEvent', 'PropTable', 'KernOrderedListEntry', 'CF_BRIDGED_MUTABLE_TYPE', 'gss_OID_desc_struct', 'AudioUnitPresetMAS_Settings', 'AudioFileMarker', 'JustPCConditionalAddAction', 'BslnFormat3Part', '__CFNotificationCenter', 'MortSwashSubtable', 'AUParameterMIDIMapping', 'SCNVector3', 'OpaqueAudioConverter', 'MIDIRawData', 'sfntNameHeader', '__CFRunLoop', 'MFMailComposeResult', 'CATransform3D', 'OpbdSideValues', 'CAF_SMPTE_Time', '__SecAccessControl', 'JustPCAction', 'OpaqueVTFrameSilo', 'OpaqueVTMultiPassStorage', 'CGPathElementType', 'AudioFormatListItem', 'AudioUnitExternalBuffer', 'AudioFileRegion', 'AudioValueTranslation', 'CGImageMetadataTag', 'CAFPeakChunk', 'AudioBytePacketTranslation', 'sfntCMapHeader', '__CFURLEnumerator', 'STXHeader', 'CGPDFObjectType', 'SFNTLookupArrayHeader'])
+
+
+if __name__ == '__main__':  # pragma: no cover
+    import os
+    import re
+
+    FRAMEWORKS_PATH = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.0.sdk/System/Library/Frameworks/'
+    frameworks = os.listdir(FRAMEWORKS_PATH)
+
+    all_interfaces = set()
+    all_protocols = set()
+    all_primitives = set()
+    for framework in frameworks:
+        frameworkHeadersDir = FRAMEWORKS_PATH + framework + '/Headers/'
+        if not os.path.exists(frameworkHeadersDir):
+            continue
+
+        headerFilenames = os.listdir(frameworkHeadersDir)
+
+        for f in headerFilenames:
+            if not f.endswith('.h'):
+                continue
+
+            headerFilePath = frameworkHeadersDir + f
+            content = open(headerFilePath).read()
+            res = re.findall('(?<=@interface )\w+', content)
+            for r in res:
+                all_interfaces.add(r)
+
+            res = re.findall('(?<=@protocol )\w+', content)
+            for r in res:
+                all_protocols.add(r)
+
+            res = re.findall('(?<=typedef enum )\w+', content)
+            for r in res:
+                all_primitives.add(r)
+
+            res = re.findall('(?<=typedef struct )\w+', content)
+            for r in res:
+                all_primitives.add(r)
+
+            res = re.findall('(?<=typedef const struct )\w+', content)
+            for r in res:
+                all_primitives.add(r)
+
+
+    print("ALL interfaces: \n")
+    print(all_interfaces)
+
+    print("\nALL protocols: \n")
+    print(all_protocols)
+
+    print("\nALL primitives: \n")
+    print(all_primitives)
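
The generator above rebuilds these sets by walking every framework bundle in the iOS 8.0 SDK and pulling identifiers out of the headers with three look-behind patterns. As a minimal standalone sketch (not part of the commit), here is what those patterns capture when pointed at an invented header fragment; raw strings are used only to avoid escape-sequence warnings on newer Pythons, the harvesting logic is unchanged:

    import re

    # Invented header fragment, for illustration only.
    HEADER_SNIPPET = '''
    @interface UIView : UIResponder
    @end
    @protocol UITableViewDelegate <NSObject>
    @end
    typedef struct CGPoint CGPoint;
    typedef enum UIStatusBarStyle UIStatusBarStyle;
    '''

    # The same look-behind patterns used by the generator above.
    interfaces = set(re.findall(r'(?<=@interface )\w+', HEADER_SNIPPET))
    protocols = set(re.findall(r'(?<=@protocol )\w+', HEADER_SNIPPET))
    primitives = set(re.findall(r'(?<=typedef struct )\w+', HEADER_SNIPPET) +
                     re.findall(r'(?<=typedef enum )\w+', HEADER_SNIPPET))

    print(interfaces)   # {'UIView'}
    print(protocols)    # {'UITableViewDelegate'}
    print(primitives)   # {'CGPoint', 'UIStatusBarStyle'} (order may vary)
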
diff --git a/pygments/lexers/_lassobuiltins.py b/pygments/lexers/_lasso_builtins.py
index f3e5147e..6c442800 100644
--- a/pygments/lexers/_lassobuiltins.py
+++ b/pygments/lexers/_lasso_builtins.py
@@ -1,16 +1,16 @@
# -*- coding: utf-8 -*-
"""
- pygments.lexers._lassobuiltins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ pygments.lexers._lasso_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Built-in Lasso types, traits, methods, and members.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
BUILTINS = {
- 'Types': [
+ 'Types': (
'null',
'void',
'tag',
@@ -136,8 +136,10 @@ BUILTINS = {
'timeonly',
'net_tcp',
'net_tcpssl',
+ 'net_tcp_ssl',
'net_named_pipe',
'net_udppacket',
+ 'net_udp_packet',
'net_udp',
'pdf_typebase',
'pdf_doc',
@@ -268,8 +270,8 @@ BUILTINS = {
'web_error_atend',
'web_response_impl',
'web_router'
- ],
- 'Traits': [
+ ),
+ 'Traits': (
'trait_asstring',
'any',
'trait_generator',
@@ -342,8 +344,8 @@ BUILTINS = {
'web_node_content_html_specialized',
'web_node_content_css_specialized',
'web_node_content_js_specialized'
- ],
- 'Unbound Methods': [
+ ),
+ 'Unbound Methods': (
'fail_now',
'register',
'register_thread',
@@ -1275,6 +1277,7 @@ BUILTINS = {
'lcapi_datasourcesortascending',
'lcapi_datasourcesortdescending',
'lcapi_datasourcesortcustom',
+ 'lcapi_updatedatasourceslist',
'lcapi_loadmodules',
'lasso_version',
'lasso_uniqueid',
@@ -1842,8 +1845,8 @@ BUILTINS = {
'web_response',
'web_router_database',
'web_router_initialize'
- ],
- 'Lasso 8 Tags': [
+ ),
+ 'Lasso 8 Tags': (
'__char',
'__sync_timestamp__',
'_admin_addgroup',
@@ -3027,10 +3030,10 @@ BUILTINS = {
'xsd_processsimpletype',
'xsd_ref',
'xsd_type'
- ]
+ )
}
MEMBERS = {
- 'Member Methods': [
+ 'Member Methods': (
'escape_member',
'oncompare',
'sameas',
@@ -4024,6 +4027,10 @@ MEMBERS = {
'statuscode',
'raw',
'version',
+ 'download',
+ 'upload',
+ 'ftpdeletefile',
+ 'ftpgetlisting',
'perform',
'performonce',
's',
@@ -4114,8 +4121,11 @@ MEMBERS = {
'foreachaccept',
'writeobjecttcp',
'readobjecttcp',
+ 'beginssl',
+ 'endssl',
'begintls',
'endtls',
+ 'acceptnossl',
'loadcerts',
'sslerrfail',
'fromname',
@@ -4710,8 +4720,8 @@ MEMBERS = {
'acceptpost',
'csscontent',
'jscontent'
- ],
- 'Lasso 8 Member Tags': [
+ ),
+ 'Lasso 8 Member Tags': (
'accept',
'add',
'addattachment',
@@ -5168,5 +5178,5 @@ MEMBERS = {
'xmllang',
'xmlschematype',
'year'
- ]
+ )
}
diff --git a/pygments/lexers/_luabuiltins.py b/pygments/lexers/_lua_builtins.py
index 069c44fd..6d2929b6 100644
--- a/pygments/lexers/_luabuiltins.py
+++ b/pygments/lexers/_lua_builtins.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""
- pygments.lexers._luabuiltins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ pygments.lexers._lua_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file contains the names and modules of lua functions
It is able to re-generate itself, but for adding new functions you
@@ -9,11 +9,14 @@
Do not edit the MODULES dict by hand.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-MODULES = {'basic': ['_G',
+from __future__ import print_function
+
+
+MODULES = {'basic': ('_G',
'_VERSION',
'assert',
'collectgarbage',
@@ -39,14 +42,14 @@ MODULES = {'basic': ['_G',
'tostring',
'type',
'unpack',
- 'xpcall'],
- 'coroutine': ['coroutine.create',
+ 'xpcall'),
+ 'coroutine': ('coroutine.create',
'coroutine.resume',
'coroutine.running',
'coroutine.status',
'coroutine.wrap',
- 'coroutine.yield'],
- 'debug': ['debug.debug',
+ 'coroutine.yield'),
+ 'debug': ('debug.debug',
'debug.getfenv',
'debug.gethook',
'debug.getinfo',
@@ -59,8 +62,8 @@ MODULES = {'basic': ['_G',
'debug.setlocal',
'debug.setmetatable',
'debug.setupvalue',
- 'debug.traceback'],
- 'io': ['io.close',
+ 'debug.traceback'),
+ 'io': ('io.close',
'io.flush',
'io.input',
'io.lines',
@@ -70,8 +73,8 @@ MODULES = {'basic': ['_G',
'io.read',
'io.tmpfile',
'io.type',
- 'io.write'],
- 'math': ['math.abs',
+ 'io.write'),
+ 'math': ('math.abs',
'math.acos',
'math.asin',
'math.atan2',
@@ -100,16 +103,16 @@ MODULES = {'basic': ['_G',
'math.sin',
'math.sqrt',
'math.tanh',
- 'math.tan'],
- 'modules': ['module',
+ 'math.tan'),
+ 'modules': ('module',
'require',
'package.cpath',
'package.loaded',
'package.loadlib',
'package.path',
'package.preload',
- 'package.seeall'],
- 'os': ['os.clock',
+ 'package.seeall'),
+ 'os': ('os.clock',
'os.date',
'os.difftime',
'os.execute',
@@ -119,8 +122,8 @@ MODULES = {'basic': ['_G',
'os.rename',
'os.setlocale',
'os.time',
- 'os.tmpname'],
- 'string': ['string.byte',
+ 'os.tmpname'),
+ 'string': ('string.byte',
'string.char',
'string.dump',
'string.find',
@@ -133,16 +136,20 @@ MODULES = {'basic': ['_G',
'string.rep',
'string.reverse',
'string.sub',
- 'string.upper'],
- 'table': ['table.concat',
+ 'string.upper'),
+ 'table': ('table.concat',
'table.insert',
'table.maxn',
'table.remove',
- 'table.sort']}
+ 'table.sort')}
-if __name__ == '__main__':
+
+if __name__ == '__main__':  # pragma: no cover
    import re
-    import urllib
+    try:
+        from urllib import urlopen
+    except ImportError:
+        from urllib.request import urlopen
    import pprint
    # you can't generally find out what module a function belongs to if you
@@ -188,7 +195,7 @@ if __name__ == '__main__':
    def get_newest_version():
-        f = urllib.urlopen('http://www.lua.org/manual/')
+        f = urlopen('http://www.lua.org/manual/')
        r = re.compile(r'^<A HREF="(\d\.\d)/">Lua \1</A>')
        for line in f:
            m = r.match(line)
@@ -196,7 +203,7 @@ if __name__ == '__main__':
                return m.groups()[0]
    def get_lua_functions(version):
-        f = urllib.urlopen('http://www.lua.org/manual/%s/' % version)
+        f = urlopen('http://www.lua.org/manual/%s/' % version)
        r = re.compile(r'^<A HREF="manual.html#pdf-(.+)">\1</A>')
        functions = []
        for line in f:
@@ -206,7 +213,7 @@ if __name__ == '__main__':
        return functions
    def get_function_module(name):
-        for mod, cb in module_callbacks().iteritems():
+        for mod, cb in module_callbacks().items():
            if cb(name):
                return mod
        if '.' in name:
@@ -215,35 +222,30 @@ if __name__ == '__main__':
            return 'basic'
    def regenerate(filename, modules):
-        f = open(filename)
-        try:
-            content = f.read()
-        finally:
-            f.close()
+        with open(filename) as fp:
+            content = fp.read()
        header = content[:content.find('MODULES = {')]
        footer = content[content.find("if __name__ == '__main__':"):]
-        f = open(filename, 'w')
-        f.write(header)
-        f.write('MODULES = %s\n\n' % pprint.pformat(modules))
-        f.write(footer)
-        f.close()
+        with open(filename, 'w') as fp:
+            fp.write(header)
+            fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
+            fp.write(footer)
    def run():
        version = get_newest_version()
-        print '> Downloading function index for Lua %s' % version
+        print('> Downloading function index for Lua %s' % version)
        functions = get_lua_functions(version)
-        print '> %d functions found:' % len(functions)
+        print('> %d functions found:' % len(functions))
        modules = {}
        for full_function_name in functions:
-            print '>> %s' % full_function_name
+            print('>> %s' % full_function_name)
            m = get_function_module(full_function_name)
            modules.setdefault(m, []).append(full_function_name)
        regenerate(__file__, modules)
-
    run()
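
The regeneration helpers above rewrite only the MODULES literal, splitting the existing file into a header and a footer around it. For a consumer, the useful operation is the reverse of get_function_module: asking which table a known Lua builtin lives in. A small sketch under stated assumptions (MODULES_SUBSET copies a handful of entries from the dict in this diff; find_module is an invented helper, not Pygments API):

    # Entries copied from the MODULES dict shown in this diff.
    MODULES_SUBSET = {
        'coroutine': ('coroutine.create', 'coroutine.resume', 'coroutine.yield'),
        'string': ('string.byte', 'string.find', 'string.rep'),
        'basic': ('_G', 'assert', 'type', 'unpack'),
    }

    def find_module(name):
        # Linear scan is enough; the full dict only holds a few hundred names.
        for module, names in MODULES_SUBSET.items():
            if name in names:
                return module
        return None

    print(find_module('string.find'))   # string
    print(find_module('assert'))        # basic
    print(find_module('os.clock'))      # None (os entries are not in this subset)
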
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index aeefb444..091b9b9a 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -3,22 +3,27 @@
pygments.lexers._mapping
~~~~~~~~~~~~~~~~~~~~~~~~
- Lexer mapping defintions. This file is generated by itself. Everytime
- you change something on a builtin lexer defintion, run this script from
+ Lexer mapping definitions. This file is generated by itself. Everytime
+ you change something on a builtin lexer definition, run this script from
the lexers folder to update it.
Do not alter the LEXERS dictionary by hand.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+from __future__ import print_function
+
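
Each value in the regenerated LEXERS dict below is a five-tuple: the topical module that now hosts the lexer class, its display name, its aliases, its filename globs, and its mimetypes. A hedged sketch of consuming one entry, reusing the CLexer row from this diff; the importlib shortcut is illustrative only (it needs a Pygments install that already has the split modules) and is not Pygments' own lazy loader:

    from fnmatch import fnmatch
    from importlib import import_module

    # The CLexer row, copied verbatim from the diff below.
    ENTRY = ('pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'),
             ('text/x-chdr', 'text/x-csrc'))
    module, name, aliases, globs, mimetypes = ENTRY

    # Match a candidate filename against the registered globs.
    print(any(fnmatch('main.c', pattern) for pattern in globs))   # True

    # Resolve the class from the topical module named in the tuple.
    lexer_cls = getattr(import_module(module), 'CLexer')
    print(lexer_cls.name)                                         # 'C'
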
LEXERS = {
- 'ABAPLexer': ('pygments.lexers.other', 'ABAP', ('abap',), ('*.abap',), ('text/x-abap',)),
- 'ActionScript3Lexer': ('pygments.lexers.web', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
- 'ActionScriptLexer': ('pygments.lexers.web', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
- 'AdaLexer': ('pygments.lexers.compiled', 'Ada', ('ada', 'ada95ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
- 'AgdaLexer': ('pygments.lexers.functional', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
+ 'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap',), ('text/x-abap',)),
+ 'APLLexer': ('pygments.lexers.apl', 'APL', ('apl',), ('*.apl',), ()),
+ 'ActionScript3Lexer': ('pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
+ 'ActionScriptLexer': ('pygments.lexers.actionscript', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
+ 'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
+ 'AgdaLexer': ('pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
+ 'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)),
+ 'AmbientTalkLexer': ('pygments.lexers.ambient', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)),
'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()),
'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
@@ -28,217 +33,263 @@ LEXERS = {
'AntlrPerlLexer': ('pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()),
'AntlrPythonLexer': ('pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()),
'AntlrRubyLexer': ('pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()),
- 'ApacheConfLexer': ('pygments.lexers.text', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
- 'AppleScriptLexer': ('pygments.lexers.other', 'AppleScript', ('applescript',), ('*.applescript',), ()),
+ 'ApacheConfLexer': ('pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
+ 'AppleScriptLexer': ('pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()),
+ 'ArduinoLexer': ('pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)),
'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
- 'AsymptoteLexer': ('pygments.lexers.other', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)),
- 'AutoItLexer': ('pygments.lexers.other', 'AutoIt', ('autoit', 'Autoit'), ('*.au3',), ('text/x-autoit',)),
- 'AutohotkeyLexer': ('pygments.lexers.other', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
- 'AwkLexer': ('pygments.lexers.other', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
- 'BBCodeLexer': ('pygments.lexers.text', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
- 'BaseMakefileLexer': ('pygments.lexers.text', 'Base Makefile', ('basemake',), (), ()),
- 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '.bashrc', 'bashrc', '.bash_*', 'bash_*'), ('application/x-sh', 'application/x-shellscript')),
+ 'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)),
+ 'AutoItLexer': ('pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)),
+ 'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
+ 'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
+ 'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
+ 'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
+ 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')),
'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)),
- 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
- 'BefungeLexer': ('pygments.lexers.other', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
- 'BlitzBasicLexer': ('pygments.lexers.compiled', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
- 'BlitzMaxLexer': ('pygments.lexers.compiled', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
+ 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
+ 'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
+ 'BlitzBasicLexer': ('pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
+ 'BlitzMaxLexer': ('pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
- 'BrainfuckLexer': ('pygments.lexers.other', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
- 'BroLexer': ('pygments.lexers.other', 'Bro', ('bro',), ('*.bro',), ()),
- 'BugsLexer': ('pygments.lexers.math', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
- 'CLexer': ('pygments.lexers.compiled', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')),
- 'CMakeLexer': ('pygments.lexers.text', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
+ 'BrainfuckLexer': ('pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
+ 'BroLexer': ('pygments.lexers.dsls', 'Bro', ('bro',), ('*.bro',), ()),
+ 'BugsLexer': ('pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
+ 'CLexer': ('pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')),
+ 'CMakeLexer': ('pygments.lexers.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)),
- 'Ca65Lexer': ('pygments.lexers.asm', 'ca65', ('ca65',), ('*.s',), ()),
- 'CbmBasicV2Lexer': ('pygments.lexers.other', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
+ 'Ca65Lexer': ('pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()),
+ 'CbmBasicV2Lexer': ('pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
- 'Cfengine3Lexer': ('pygments.lexers.other', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
+ 'Cfengine3Lexer': ('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
+ 'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
+ 'ChapelLexer': ('pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()),
'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
- 'ClayLexer': ('pygments.lexers.compiled', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)),
+ 'CirruLexer': ('pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)),
+ 'ClayLexer': ('pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)),
'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
- 'CobolFreeformatLexer': ('pygments.lexers.compiled', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
- 'CobolLexer': ('pygments.lexers.compiled', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
- 'CoffeeScriptLexer': ('pygments.lexers.web', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
- 'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml', '*.cfc'), ('application/x-coldfusion',)),
+ 'ClojureScriptLexer': ('pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')),
+ 'CobolFreeformatLexer': ('pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
+ 'CobolLexer': ('pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
+ 'CoffeeScriptLexer': ('pygments.lexers.javascript', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
+ 'ColdfusionCFCLexer': ('pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()),
+ 'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)),
'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
- 'CommonLispLexer': ('pygments.lexers.functional', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp', '*.el'), ('text/x-common-lisp',)),
- 'CoqLexer': ('pygments.lexers.functional', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)),
- 'CppLexer': ('pygments.lexers.compiled', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')),
+ 'CommonLispLexer': ('pygments.lexers.lisp', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp'), ('text/x-common-lisp',)),
+ 'CoqLexer': ('pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)),
+ 'CppLexer': ('pygments.lexers.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')),
'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)),
- 'CrocLexer': ('pygments.lexers.agile', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)),
+ 'CrocLexer': ('pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)),
+ 'CryptolLexer': ('pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)),
'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')),
'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)),
'CssGenshiLexer': ('pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)),
- 'CssLexer': ('pygments.lexers.web', 'CSS', ('css',), ('*.css',), ('text/css',)),
+ 'CssLexer': ('pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)),
'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)),
- 'CudaLexer': ('pygments.lexers.compiled', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
- 'CythonLexer': ('pygments.lexers.compiled', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
- 'DLexer': ('pygments.lexers.compiled', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
+ 'CudaLexer': ('pygments.lexers.c_like', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
+ 'CypherLexer': ('pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()),
+ 'CythonLexer': ('pygments.lexers.python', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
+ 'DLexer': ('pygments.lexers.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
- 'DarcsPatchLexer': ('pygments.lexers.text', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
- 'DartLexer': ('pygments.lexers.web', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
- 'DebianControlLexer': ('pygments.lexers.text', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()),
- 'DelphiLexer': ('pygments.lexers.compiled', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)),
- 'DgLexer': ('pygments.lexers.agile', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
- 'DiffLexer': ('pygments.lexers.text', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
+ 'DarcsPatchLexer': ('pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
+ 'DartLexer': ('pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
+ 'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()),
+ 'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)),
+ 'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
+ 'DiffLexer': ('pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
- 'DtdLexer': ('pygments.lexers.web', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
- 'DuelLexer': ('pygments.lexers.web', 'Duel', ('duel', 'Duel Engine', 'Duel View', 'JBST', 'jbst', 'JsonML+BST'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
- 'DylanConsoleLexer': ('pygments.lexers.compiled', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)),
- 'DylanLexer': ('pygments.lexers.compiled', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)),
- 'DylanLidLexer': ('pygments.lexers.compiled', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)),
- 'ECLLexer': ('pygments.lexers.other', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)),
- 'ECLexer': ('pygments.lexers.compiled', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')),
- 'EbnfLexer': ('pygments.lexers.text', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)),
- 'ElixirConsoleLexer': ('pygments.lexers.functional', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
- 'ElixirLexer': ('pygments.lexers.functional', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)),
+ 'DockerLexer': ('pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)),
+ 'DtdLexer': ('pygments.lexers.html', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
+ 'DuelLexer': ('pygments.lexers.webmisc', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
+ 'DylanConsoleLexer': ('pygments.lexers.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)),
+ 'DylanLexer': ('pygments.lexers.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)),
+ 'DylanLidLexer': ('pygments.lexers.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)),
+ 'ECLLexer': ('pygments.lexers.ecl', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)),
+ 'ECLexer': ('pygments.lexers.c_like', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')),
+ 'EbnfLexer': ('pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)),
+ 'EiffelLexer': ('pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)),
+ 'ElixirConsoleLexer': ('pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
+ 'ElixirLexer': ('pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)),
+ 'EmacsLispLexer': ('pygments.lexers.lisp', 'EmacsLisp', ('emacs', 'elisp'), ('*.el',), ('text/x-elisp', 'application/x-elisp')),
'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
- 'ErlangLexer': ('pygments.lexers.functional', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
- 'ErlangShellLexer': ('pygments.lexers.functional', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
+ 'ErlangLexer': ('pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
+ 'ErlangShellLexer': ('pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
'FSharpLexer': ('pygments.lexers.dotnet', 'FSharp', ('fsharp',), ('*.fs', '*.fsi'), ('text/x-fsharp',)),
- 'FactorLexer': ('pygments.lexers.agile', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
- 'FancyLexer': ('pygments.lexers.agile', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
- 'FantomLexer': ('pygments.lexers.compiled', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
- 'FelixLexer': ('pygments.lexers.compiled', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
- 'FortranFixedLexer': ('pygments.lexers.compiled', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()),
- 'FortranLexer': ('pygments.lexers.compiled', 'Fortran', ('fortran',), ('*.f90', '*.F90', '*.f03', '*.F03'), ('text/x-fortran',)),
- 'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('Clipper', 'XBase'), ('*.PRG', '*.prg'), ()),
- 'GLShaderLexer': ('pygments.lexers.compiled', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
+ 'FactorLexer': ('pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
+ 'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
+ 'FantomLexer': ('pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
+ 'FelixLexer': ('pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
+ 'FortranFixedLexer': ('pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()),
+ 'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)),
+ 'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()),
+ 'GAPLexer': ('pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()),
+ 'GLShaderLexer': ('pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)),
'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')),
'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
- 'GettextLexer': ('pygments.lexers.text', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
- 'GherkinLexer': ('pygments.lexers.other', 'Gherkin', ('Cucumber', 'cucumber', 'Gherkin', 'gherkin'), ('*.feature',), ('text/x-gherkin',)),
- 'GnuplotLexer': ('pygments.lexers.other', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
- 'GoLexer': ('pygments.lexers.compiled', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)),
- 'GoodDataCLLexer': ('pygments.lexers.other', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
+ 'GettextLexer': ('pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
+ 'GherkinLexer': ('pygments.lexers.testing', 'Gherkin', ('cucumber', 'gherkin'), ('*.feature',), ('text/x-gherkin',)),
+ 'GnuplotLexer': ('pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
+ 'GoLexer': ('pygments.lexers.go', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)),
+ 'GoloLexer': ('pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()),
+ 'GoodDataCLLexer': ('pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
- 'GroffLexer': ('pygments.lexers.text', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')),
- 'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy',), ('text/x-groovy',)),
- 'HamlLexer': ('pygments.lexers.web', 'Haml', ('haml', 'HAML'), ('*.haml',), ('text/x-haml',)),
- 'HaskellLexer': ('pygments.lexers.functional', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
- 'HaxeLexer': ('pygments.lexers.web', 'Haxe', ('hx', 'Haxe', 'haxe', 'haXe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
+ 'GroffLexer': ('pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')),
+ 'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)),
+ 'HamlLexer': ('pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)),
+ 'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')),
+ 'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()),
+ 'HaskellLexer': ('pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
+ 'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')),
'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)),
- 'HtmlLexer': ('pygments.lexers.web', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
+ 'HtmlLexer': ('pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
- 'HttpLexer': ('pygments.lexers.text', 'HTTP', ('http',), (), ()),
- 'HxmlLexer': ('pygments.lexers.text', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
- 'HybrisLexer': ('pygments.lexers.other', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
- 'IDLLexer': ('pygments.lexers.math', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
- 'IgorLexer': ('pygments.lexers.math', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)),
- 'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg'), ('text/x-ini',)),
- 'IoLexer': ('pygments.lexers.agile', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
+ 'HttpLexer': ('pygments.lexers.textfmts', 'HTTP', ('http',), (), ()),
+ 'HxmlLexer': ('pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
+ 'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')),
+ 'HybrisLexer': ('pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
+ 'IDLLexer': ('pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
+ 'IdrisLexer': ('pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)),
+ 'IgorLexer': ('pygments.lexers.igor', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)),
+ 'Inform6Lexer': ('pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()),
+ 'Inform6TemplateLexer': ('pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()),
+ 'Inform7Lexer': ('pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()),
+ 'IniLexer': ('pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg'), ('text/x-ini',)),
+ 'IoLexer': ('pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
- 'IrcLogsLexer': ('pygments.lexers.text', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
- 'JadeLexer': ('pygments.lexers.web', 'Jade', ('jade', 'JADE'), ('*.jade',), ('text/x-jade',)),
- 'JagsLexer': ('pygments.lexers.math', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
+ 'IrcLogsLexer': ('pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
+ 'IsabelleLexer': ('pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)),
+ 'JadeLexer': ('pygments.lexers.html', 'Jade', ('jade',), ('*.jade',), ('text/x-jade',)),
+ 'JagsLexer': ('pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
+ 'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()),
'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
- 'JavascriptLexer': ('pygments.lexers.web', 'JavaScript', ('js', 'javascript'), ('*.js',), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
+ 'JavascriptLexer': ('pygments.lexers.javascript', 'JavaScript', ('js', 'javascript'), ('*.js', '*.jsm'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
- 'JsonLexer': ('pygments.lexers.web', 'JSON', ('json',), ('*.json',), ('application/json',)),
+ 'JsonLdLexer': ('pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)),
+ 'JsonLexer': ('pygments.lexers.data', 'JSON', ('json',), ('*.json',), ('application/json',)),
'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
- 'JuliaConsoleLexer': ('pygments.lexers.math', 'Julia console', ('jlcon',), (), ()),
- 'JuliaLexer': ('pygments.lexers.math', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
- 'KconfigLexer': ('pygments.lexers.other', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
- 'KokaLexer': ('pygments.lexers.functional', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
+ 'JuliaConsoleLexer': ('pygments.lexers.julia', 'Julia console', ('jlcon',), (), ()),
+ 'JuliaLexer': ('pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
+ 'KalLexer': ('pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')),
+ 'KconfigLexer': ('pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
+ 'KokaLexer': ('pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt',), ('text/x-kotlin',)),
+ 'LSLLexer': ('pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)),
'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)),
'LassoHtmlLexer': ('pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')),
'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('js+lasso', 'javascript+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')),
- 'LassoLexer': ('pygments.lexers.web', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)),
+ 'LassoLexer': ('pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)),
'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)),
- 'LighttpdConfLexer': ('pygments.lexers.text', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)),
- 'LiterateAgdaLexer': ('pygments.lexers.functional', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)),
- 'LiterateHaskellLexer': ('pygments.lexers.functional', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), ('text/x-literate-haskell',)),
- 'LiveScriptLexer': ('pygments.lexers.web', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)),
+ 'LeanLexer': ('pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)),
+ 'LighttpdConfLexer': ('pygments.lexers.configs', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)),
+ 'LimboLexer': ('pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)),
+ 'LiquidLexer': ('pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()),
+ 'LiterateAgdaLexer': ('pygments.lexers.haskell', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)),
+ 'LiterateCryptolLexer': ('pygments.lexers.haskell', 'Literate Cryptol', ('lcry', 'literate-cryptol', 'lcryptol'), ('*.lcry',), ('text/x-literate-cryptol',)),
+ 'LiterateHaskellLexer': ('pygments.lexers.haskell', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), ('text/x-literate-haskell',)),
+ 'LiterateIdrisLexer': ('pygments.lexers.haskell', 'Literate Idris', ('lidr', 'literate-idris', 'lidris'), ('*.lidr',), ('text/x-literate-idris',)),
+ 'LiveScriptLexer': ('pygments.lexers.javascript', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)),
'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
- 'LogosLexer': ('pygments.lexers.compiled', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
- 'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt',), ('text/x-logtalk',)),
- 'LuaLexer': ('pygments.lexers.agile', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
- 'MOOCodeLexer': ('pygments.lexers.other', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
- 'MakefileLexer': ('pygments.lexers.text', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
+ 'LogosLexer': ('pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
+ 'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)),
+ 'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
+ 'MOOCodeLexer': ('pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
+ 'MakefileLexer': ('pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
'MakoHtmlLexer': ('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)),
'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
- 'MaqlLexer': ('pygments.lexers.other', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
+ 'MaqlLexer': ('pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
+ 'MaskLexer': ('pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)),
'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
- 'MatlabLexer': ('pygments.lexers.math', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)),
- 'MatlabSessionLexer': ('pygments.lexers.math', 'Matlab session', ('matlabsession',), (), ()),
- 'MiniDLexer': ('pygments.lexers.agile', 'MiniD', ('minid',), ('*.md',), ('text/x-minidsrc',)),
- 'ModelicaLexer': ('pygments.lexers.other', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
- 'Modula2Lexer': ('pygments.lexers.compiled', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
- 'MoinWikiLexer': ('pygments.lexers.text', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
- 'MonkeyLexer': ('pygments.lexers.compiled', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
- 'MoonScriptLexer': ('pygments.lexers.agile', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
- 'MscgenLexer': ('pygments.lexers.other', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()),
- 'MuPADLexer': ('pygments.lexers.math', 'MuPAD', ('mupad',), ('*.mu',), ()),
- 'MxmlLexer': ('pygments.lexers.web', 'MXML', ('mxml',), ('*.mxml',), ()),
+ 'MathematicaLexer': ('pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')),
+ 'MatlabLexer': ('pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)),
+ 'MatlabSessionLexer': ('pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()),
+ 'MiniDLexer': ('pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)),
+ 'ModelicaLexer': ('pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
+ 'Modula2Lexer': ('pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
+ 'MoinWikiLexer': ('pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
+ 'MonkeyLexer': ('pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
+ 'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
+ 'MozPreprocCssLexer': ('pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()),
+ 'MozPreprocHashLexer': ('pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()),
+ 'MozPreprocJavascriptLexer': ('pygments.lexers.markup', 'Javascript+mozpreproc', ('javascript+mozpreproc',), ('*.js.in',), ()),
+ 'MozPreprocPercentLexer': ('pygments.lexers.markup', 'mozpercentpreproc', ('mozpercentpreproc',), (), ()),
+ 'MozPreprocXulLexer': ('pygments.lexers.markup', 'XUL+mozpreproc', ('xul+mozpreproc',), ('*.xul.in',), ()),
+ 'MqlLexer': ('pygments.lexers.c_like', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)),
+ 'MscgenLexer': ('pygments.lexers.dsls', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()),
+ 'MuPADLexer': ('pygments.lexers.algebra', 'MuPAD', ('mupad',), ('*.mu',), ()),
+ 'MxmlLexer': ('pygments.lexers.actionscript', 'MXML', ('mxml',), ('*.mxml',), ()),
'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
'MyghtyCssLexer': ('pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)),
'MyghtyHtmlLexer': ('pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)),
'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
'MyghtyLexer': ('pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)),
'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
- 'NSISLexer': ('pygments.lexers.other', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)),
+ 'NSISLexer': ('pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)),
'NasmLexer': ('pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)),
+ 'NasmObjdumpLexer': ('pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)),
'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
- 'NesCLexer': ('pygments.lexers.compiled', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
- 'NewLispLexer': ('pygments.lexers.functional', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl'), ('text/x-newlisp', 'application/x-newlisp')),
- 'NewspeakLexer': ('pygments.lexers.other', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
- 'NginxConfLexer': ('pygments.lexers.text', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)),
- 'NimrodLexer': ('pygments.lexers.compiled', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)),
- 'NumPyLexer': ('pygments.lexers.math', 'NumPy', ('numpy',), (), ()),
+ 'NesCLexer': ('pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
+ 'NewLispLexer': ('pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl'), ('text/x-newlisp', 'application/x-newlisp')),
+ 'NewspeakLexer': ('pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
+ 'NginxConfLexer': ('pygments.lexers.configs', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)),
+ 'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)),
+ 'NitLexer': ('pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
+ 'NixLexer': ('pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
+ 'NumPyLexer': ('pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
- 'ObjectiveCLexer': ('pygments.lexers.compiled', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
- 'ObjectiveCppLexer': ('pygments.lexers.compiled', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)),
- 'ObjectiveJLexer': ('pygments.lexers.web', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)),
- 'OcamlLexer': ('pygments.lexers.functional', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
- 'OctaveLexer': ('pygments.lexers.math', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
- 'OocLexer': ('pygments.lexers.compiled', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
- 'OpaLexer': ('pygments.lexers.functional', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
- 'OpenEdgeLexer': ('pygments.lexers.other', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
- 'Perl6Lexer': ('pygments.lexers.agile', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6'), ('text/x-perl6', 'application/x-perl6')),
- 'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm'), ('text/x-perl', 'application/x-perl')),
- 'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
+ 'ObjectiveCLexer': ('pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
+ 'ObjectiveCppLexer': ('pygments.lexers.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)),
+ 'ObjectiveJLexer': ('pygments.lexers.javascript', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)),
+ 'OcamlLexer': ('pygments.lexers.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
+ 'OctaveLexer': ('pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
+ 'OocLexer': ('pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
+ 'OpaLexer': ('pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
+ 'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
+ 'PanLexer': ('pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()),
+ 'PawnLexer': ('pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)),
+ 'Perl6Lexer': ('pygments.lexers.perl', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t'), ('text/x-perl6', 'application/x-perl6')),
+ 'PerlLexer': ('pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t'), ('text/x-perl', 'application/x-perl')),
+ 'PhpLexer': ('pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
+ 'PigLexer': ('pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)),
+ 'PikeLexer': ('pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)),
'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
- 'PostScriptLexer': ('pygments.lexers.other', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)),
+ 'PostScriptLexer': ('pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)),
'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
- 'PovrayLexer': ('pygments.lexers.other', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
+ 'PovrayLexer': ('pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)),
- 'PrologLexer': ('pygments.lexers.compiled', 'Prolog', ('prolog',), ('*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
- 'PropertiesLexer': ('pygments.lexers.text', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
- 'ProtoBufLexer': ('pygments.lexers.other', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
- 'PuppetLexer': ('pygments.lexers.other', 'Puppet', ('puppet',), ('*.pp',), ()),
- 'PyPyLogLexer': ('pygments.lexers.text', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
- 'Python3Lexer': ('pygments.lexers.agile', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')),
- 'Python3TracebackLexer': ('pygments.lexers.agile', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)),
- 'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
- 'PythonLexer': ('pygments.lexers.agile', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')),
- 'PythonTracebackLexer': ('pygments.lexers.agile', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)),
- 'QmlLexer': ('pygments.lexers.web', 'QML', ('qml', 'Qt Meta Language', 'Qt modeling Language'), ('*.qml',), ('application/x-qml',)),
- 'RConsoleLexer': ('pygments.lexers.math', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
- 'RPMSpecLexer': ('pygments.lexers.other', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
- 'RacketLexer': ('pygments.lexers.functional', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktl'), ('text/x-racket', 'application/x-racket')),
+ 'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
+ 'PropertiesLexer': ('pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
+ 'ProtoBufLexer': ('pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
+ 'PuppetLexer': ('pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()),
+ 'PyPyLogLexer': ('pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
+ 'Python3Lexer': ('pygments.lexers.python', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')),
+ 'Python3TracebackLexer': ('pygments.lexers.python', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)),
+ 'PythonConsoleLexer': ('pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
+ 'PythonLexer': ('pygments.lexers.python', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')),
+ 'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)),
+ 'QBasicLexer': ('pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
+ 'QmlLexer': ('pygments.lexers.webmisc', 'QML', ('qml',), ('*.qml',), ('application/x-qml',)),
+ 'RConsoleLexer': ('pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
+ 'RPMSpecLexer': ('pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
+ 'RacketLexer': ('pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')),
'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()),
'RagelCppLexer': ('pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()),
'RagelDLexer': ('pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()),
@@ -248,49 +299,61 @@ LEXERS = {
'RagelObjectiveCLexer': ('pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()),
'RagelRubyLexer': ('pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()),
'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', ('raw',), (), ('application/x-pygments-tokens',)),
- 'RdLexer': ('pygments.lexers.math', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
- 'RebolLexer': ('pygments.lexers.other', 'REBOL', ('rebol',), ('*.r', '*.r3'), ('text/x-rebol',)),
- 'RedcodeLexer': ('pygments.lexers.other', 'Redcode', ('redcode',), ('*.cw',), ()),
- 'RegeditLexer': ('pygments.lexers.text', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
- 'RexxLexer': ('pygments.lexers.other', 'Rexx', ('rexx', 'ARexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
+ 'RdLexer': ('pygments.lexers.r', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
+ 'RebolLexer': ('pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)),
+ 'RedLexer': ('pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')),
+ 'RedcodeLexer': ('pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()),
+ 'RegeditLexer': ('pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
+ 'ResourceLexer': ('pygments.lexers.resource', 'ResourceBundle', ('resource', 'resourcebundle'), ('*.txt',), ()),
+ 'RexxLexer': ('pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
- 'RobotFrameworkLexer': ('pygments.lexers.other', 'RobotFramework', ('RobotFramework', 'robotframework'), ('*.txt', '*.robot'), ('text/x-robotframework',)),
- 'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
- 'RubyConsoleLexer': ('pygments.lexers.agile', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
- 'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')),
- 'RustLexer': ('pygments.lexers.compiled', 'Rust', ('rust',), ('*.rs', '*.rc'), ('text/x-rustsrc',)),
- 'SLexer': ('pygments.lexers.math', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
- 'SMLLexer': ('pygments.lexers.functional', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
- 'SassLexer': ('pygments.lexers.web', 'Sass', ('sass', 'SASS'), ('*.sass',), ('text/x-sass',)),
+ 'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.txt', '*.robot'), ('text/x-robotframework',)),
+ 'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)),
+ 'RslLexer': ('pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)),
+ 'RstLexer': ('pygments.lexers.markup', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
+ 'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
+ 'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')),
+ 'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust',), ('*.rs',), ('text/rust',)),
+ 'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
+ 'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
+ 'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
- 'ScamlLexer': ('pygments.lexers.web', 'Scaml', ('scaml', 'SCAML'), ('*.scaml',), ('text/x-scaml',)),
- 'SchemeLexer': ('pygments.lexers.functional', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
- 'ScilabLexer': ('pygments.lexers.math', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
- 'ScssLexer': ('pygments.lexers.web', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
+ 'ScamlLexer': ('pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)),
+ 'SchemeLexer': ('pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
+ 'ScilabLexer': ('pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
+ 'ScssLexer': ('pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
'ShellSessionLexer': ('pygments.lexers.shell', 'Shell Session', ('shell-session',), ('*.shell-session',), ('application/x-sh-session',)),
+ 'SlimLexer': ('pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)),
'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
- 'SmalltalkLexer': ('pygments.lexers.other', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
+ 'SmalltalkLexer': ('pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
- 'SnobolLexer': ('pygments.lexers.other', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
- 'SourcePawnLexer': ('pygments.lexers.other', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
- 'SourcesListLexer': ('pygments.lexers.text', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()),
+ 'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
+ 'SourcePawnLexer': ('pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
+ 'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()),
+ 'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)),
'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
- 'SquidConfLexer': ('pygments.lexers.text', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
+ 'SquidConfLexer': ('pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
- 'StanLexer': ('pygments.lexers.math', 'Stan', ('stan',), ('*.stan',), ()),
- 'SwigLexer': ('pygments.lexers.compiled', 'SWIG', ('Swig', 'swig'), ('*.swg', '*.i'), ('text/swig',)),
+ 'StanLexer': ('pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()),
+ 'SwiftLexer': ('pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)),
+ 'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)),
'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
- 'TclLexer': ('pygments.lexers.agile', 'Tcl', ('tcl',), ('*.tcl',), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
+ 'Tads3Lexer': ('pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()),
+ 'TclLexer': ('pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)),
- 'TexLexer': ('pygments.lexers.text', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
+ 'TexLexer': ('pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
+ 'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
- 'TypeScriptLexer': ('pygments.lexers.web', 'TypeScript', ('ts',), ('*.ts',), ('text/x-typescript',)),
- 'UrbiscriptLexer': ('pygments.lexers.other', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
- 'VGLLexer': ('pygments.lexers.other', 'VGL', ('vgl',), ('*.rpf',), ()),
- 'ValaLexer': ('pygments.lexers.compiled', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
+ 'TwigHtmlLexer': ('pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)),
+ 'TwigLexer': ('pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)),
+ 'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('ts',), ('*.ts',), ('text/x-typescript',)),
+ 'UrbiscriptLexer': ('pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
+ 'VCTreeStatusLexer': ('pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()),
+ 'VGLLexer': ('pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()),
+ 'ValaLexer': ('pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
'VelocityHtmlLexer': ('pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
@@ -298,54 +361,56 @@ LEXERS = {
'VelocityXmlLexer': ('pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)),
'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)),
- 'VimLexer': ('pygments.lexers.text', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
- 'XQueryLexer': ('pygments.lexers.web', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
+ 'VimLexer': ('pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
+ 'XQueryLexer': ('pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)),
- 'XmlLexer': ('pygments.lexers.web', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
+ 'XmlLexer': ('pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
- 'XsltLexer': ('pygments.lexers.web', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
+ 'XsltLexer': ('pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
'XtendLexer': ('pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)),
- 'YamlLexer': ('pygments.lexers.text', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
+ 'YamlJinjaLexer': ('pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')),
+ 'YamlLexer': ('pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
+ 'ZephirLexer': ('pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()),
}
-if __name__ == '__main__':
+if __name__ == '__main__': # pragma: no cover
import sys
import os
# lookup lexers
found_lexers = []
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
- for filename in os.listdir('.'):
- if filename.endswith('.py') and not filename.startswith('_'):
- module_name = 'pygments.lexers.%s' % filename[:-3]
- print module_name
- module = __import__(module_name, None, None, [''])
- for lexer_name in module.__all__:
- lexer = getattr(module, lexer_name)
- found_lexers.append(
- '%r: %r' % (lexer_name,
- (module_name,
- lexer.name,
- tuple(lexer.aliases),
- tuple(lexer.filenames),
- tuple(lexer.mimetypes))))
- # sort them, that should make the diff files for svn smaller
+ for root, dirs, files in os.walk('.'):
+ for filename in files:
+ if filename.endswith('.py') and not filename.startswith('_'):
+ module_name = 'pygments.lexers%s.%s' % (
+ root[1:].replace('/', '.'), filename[:-3])
+ print(module_name)
+ module = __import__(module_name, None, None, [''])
+ for lexer_name in module.__all__:
+ lexer = getattr(module, lexer_name)
+ found_lexers.append(
+ '%r: %r' % (lexer_name,
+ (module_name,
+ lexer.name,
+ tuple(lexer.aliases),
+ tuple(lexer.filenames),
+ tuple(lexer.mimetypes))))
+ # sort them to make the diff minimal
found_lexers.sort()
# extract useful sourcecode from this file
- f = open(__file__)
- try:
- content = f.read()
- finally:
- f.close()
+ with open(__file__) as fp:
+ content = fp.read()
header = content[:content.find('LEXERS = {')]
footer = content[content.find("if __name__ == '__main__':"):]
# write new file
- f = open(__file__, 'wb')
- f.write(header)
- f.write('LEXERS = {\n %s,\n}\n\n' % ',\n '.join(found_lexers))
- f.write(footer)
- f.close()
+ with open(__file__, 'w') as fp:
+ fp.write(header)
+ fp.write('LEXERS = {\n %s,\n}\n\n' % ',\n '.join(found_lexers))
+ fp.write(footer)
+
+    print('=== %d lexers processed.' % len(found_lexers))
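
The regenerated LEXERS table above maps each alias to its new per-language module, so alias lookups keep working unchanged after the module split: the module named in the first tuple element is imported lazily on demand. A minimal consumer-side sketch, assuming a Pygments checkout that includes this mapping:

    from pygments.lexers import get_lexer_by_name

    # The alias 'python' now resolves through pygments.lexers.python
    # (previously pygments.lexers.agile); callers never see the move.
    lexer = get_lexer_by_name('python')
    print(type(lexer).__module__, type(lexer).__name__)
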
diff --git a/pygments/lexers/_mql_builtins.py b/pygments/lexers/_mql_builtins.py
new file mode 100644
index 00000000..524a2ea2
--- /dev/null
+++ b/pygments/lexers/_mql_builtins.py
@@ -0,0 +1,1172 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._mql_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Builtins for the MqlLexer.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+types = (
+ 'AccountBalance',
+ 'AccountCompany',
+ 'AccountCredit',
+ 'AccountCurrency',
+ 'AccountEquity',
+ 'AccountFreeMarginCheck',
+ 'AccountFreeMarginMode',
+ 'AccountFreeMargin',
+ 'AccountInfoDouble',
+ 'AccountInfoInteger',
+ 'AccountInfoString',
+ 'AccountLeverage',
+ 'AccountMargin',
+ 'AccountName',
+ 'AccountNumber',
+ 'AccountProfit',
+ 'AccountServer',
+ 'AccountStopoutLevel',
+ 'AccountStopoutMode',
+ 'Alert',
+ 'ArrayBsearch',
+ 'ArrayCompare',
+ 'ArrayCopyRates',
+ 'ArrayCopySeries',
+ 'ArrayCopy',
+ 'ArrayDimension',
+ 'ArrayFill',
+ 'ArrayFree',
+ 'ArrayGetAsSeries',
+ 'ArrayInitialize',
+ 'ArrayIsDynamic',
+ 'ArrayIsSeries',
+ 'ArrayMaximum',
+ 'ArrayMinimum',
+ 'ArrayRange',
+ 'ArrayResize',
+ 'ArraySetAsSeries',
+ 'ArraySize',
+ 'ArraySort',
+ 'CharArrayToString',
+ 'CharToString',
+ 'CharToStr',
+ 'CheckPointer',
+ 'ColorToARGB',
+ 'ColorToString',
+ 'Comment',
+ 'CopyClose',
+ 'CopyHigh',
+ 'CopyLow',
+ 'CopyOpen',
+ 'CopyRates',
+ 'CopyRealVolume',
+ 'CopySpread',
+ 'CopyTickVolume',
+ 'CopyTime',
+ 'DayOfWeek',
+ 'DayOfYear',
+ 'Day',
+ 'DebugBreak',
+ 'Digits',
+ 'DoubleToString',
+ 'DoubleToStr',
+ 'EnumToString',
+ 'EventChartCustom',
+ 'EventKillTimer',
+ 'EventSetMillisecondTimer',
+ 'EventSetTimer',
+ 'ExpertRemove',
+ 'FileClose',
+ 'FileCopy',
+ 'FileDelete',
+ 'FileFindClose',
+ 'FileFindFirst',
+ 'FileFindNext',
+ 'FileFlush',
+ 'FileGetInteger',
+ 'FileIsEnding',
+ 'FileIsExist',
+ 'FileIsLineEnding',
+ 'FileMove',
+ 'FileOpenHistory',
+ 'FileOpen',
+ 'FileReadArray',
+ 'FileReadBool',
+ 'FileReadDatetime',
+ 'FileReadDouble',
+ 'FileReadFloat',
+ 'FileReadInteger',
+ 'FileReadLong',
+ 'FileReadNumber',
+ 'FileReadString',
+ 'FileReadStruct',
+ 'FileSeek',
+ 'FileSize',
+ 'FileTell',
+ 'FileWriteArray',
+ 'FileWriteDouble',
+ 'FileWriteFloat',
+ 'FileWriteInteger',
+ 'FileWriteLong',
+ 'FileWriteString',
+ 'FileWriteStruct',
+ 'FileWrite',
+ 'FolderClean',
+ 'FolderCreate',
+ 'FolderDelete',
+ 'GetLastError',
+ 'GetPointer',
+ 'GetTickCount',
+ 'GlobalVariableCheck',
+ 'GlobalVariableDel',
+ 'GlobalVariableGet',
+ 'GlobalVariableName',
+ 'GlobalVariableSetOnCondition',
+ 'GlobalVariableSet',
+ 'GlobalVariableTemp',
+ 'GlobalVariableTime',
+ 'GlobalVariablesDeleteAll',
+ 'GlobalVariablesFlush',
+ 'GlobalVariablesTotal',
+ 'HideTestIndicators',
+ 'Hour',
+ 'IndicatorBuffers',
+ 'IndicatorCounted',
+ 'IndicatorDigits',
+ 'IndicatorSetDouble',
+ 'IndicatorSetInteger',
+ 'IndicatorSetString',
+ 'IndicatorShortName',
+ 'IntegerToString',
+ 'IsConnected',
+ 'IsDemo',
+ 'IsDllsAllowed',
+ 'IsExpertEnabled',
+ 'IsLibrariesAllowed',
+ 'IsOptimization',
+ 'IsStopped',
+ 'IsTesting',
+ 'IsTradeAllowed',
+ 'IsTradeContextBusy',
+ 'IsVisualMode',
+ 'MQLInfoInteger',
+ 'MQLInfoString',
+ 'MarketInfo',
+ 'MathAbs',
+ 'MathArccos',
+ 'MathArcsin',
+ 'MathArctan',
+ 'MathCeil',
+ 'MathCos',
+ 'MathExp',
+ 'MathFloor',
+ 'MathIsValidNumber',
+ 'MathLog',
+ 'MathMax',
+ 'MathMin',
+ 'MathMod',
+ 'MathPow',
+ 'MathRand',
+ 'MathRound',
+ 'MathSin',
+ 'MathSqrt',
+ 'MathSrand',
+ 'MathTan',
+ 'MessageBox',
+ 'Minute',
+ 'Month',
+ 'NormalizeDouble',
+ 'ObjectCreate',
+ 'ObjectDelete',
+ 'ObjectDescription',
+ 'ObjectFind',
+ 'ObjectGetDouble',
+ 'ObjectGetFiboDescription',
+ 'ObjectGetInteger',
+ 'ObjectGetShiftByValue',
+ 'ObjectGetString',
+ 'ObjectGetTimeByValue',
+ 'ObjectGetValueByShift',
+ 'ObjectGetValueByTime',
+ 'ObjectGet',
+ 'ObjectMove',
+ 'ObjectName',
+ 'ObjectSetDouble',
+ 'ObjectSetFiboDescription',
+ 'ObjectSetInteger',
+ 'ObjectSetString',
+ 'ObjectSetText',
+ 'ObjectSet',
+ 'ObjectType',
+ 'ObjectsDeleteAll',
+ 'ObjectsTotal',
+ 'OrderCloseBy',
+ 'OrderClosePrice',
+ 'OrderCloseTime',
+ 'OrderClose',
+ 'OrderComment',
+ 'OrderCommission',
+ 'OrderDelete',
+ 'OrderExpiration',
+ 'OrderLots',
+ 'OrderMagicNumber',
+ 'OrderModify',
+ 'OrderOpenPrice',
+ 'OrderOpenTime',
+ 'OrderPrint',
+ 'OrderProfit',
+ 'OrderSelect',
+ 'OrderSend',
+ 'OrderStopLoss',
+ 'OrderSwap',
+ 'OrderSymbol',
+ 'OrderTakeProfit',
+ 'OrderTicket',
+ 'OrderType',
+ 'OrdersHistoryTotal',
+ 'OrdersTotal',
+ 'PeriodSeconds',
+ 'Period',
+ 'PlaySound',
+ 'Point',
+ 'PrintFormat',
+ 'Print',
+ 'RefreshRates',
+ 'ResetLastError',
+ 'ResourceCreate',
+ 'ResourceFree',
+ 'ResourceReadImage',
+ 'ResourceSave',
+ 'Seconds',
+ 'SendFTP',
+ 'SendMail',
+ 'SendNotification',
+ 'SeriesInfoInteger',
+ 'SetIndexArrow',
+ 'SetIndexBuffer',
+ 'SetIndexDrawBegin',
+ 'SetIndexEmptyValue',
+ 'SetIndexLabel',
+ 'SetIndexShift',
+ 'SetIndexStyle',
+ 'SetLevelStyle',
+ 'SetLevelValue',
+ 'ShortArrayToString',
+ 'ShortToString',
+ 'Sleep',
+ 'StrToDouble',
+ 'StrToInteger',
+ 'StrToTime',
+ 'StringAdd',
+ 'StringBufferLen',
+ 'StringCompare',
+ 'StringConcatenate',
+ 'StringFill',
+ 'StringFind',
+ 'StringFormat',
+ 'StringGetCharacter',
+ 'StringGetChar',
+ 'StringInit',
+ 'StringLen',
+ 'StringReplace',
+ 'StringSetCharacter',
+ 'StringSetChar',
+ 'StringSplit',
+ 'StringSubstr',
+ 'StringToCharArray',
+ 'StringToColor',
+ 'StringToDouble',
+ 'StringToInteger',
+ 'StringToLower',
+ 'StringToShortArray',
+ 'StringToTime',
+ 'StringToUpper',
+ 'StringTrimLeft',
+ 'StringTrimRight',
+ 'StructToTime',
+ 'SymbolInfoDouble',
+ 'SymbolInfoInteger',
+ 'SymbolInfoSessionQuote',
+ 'SymbolInfoSessionTrade',
+ 'SymbolInfoString',
+ 'SymbolInfoTick',
+ 'SymbolIsSynchronized',
+ 'SymbolName',
+ 'SymbolSelect',
+ 'SymbolsTotal',
+ 'Symbol',
+ 'TerminalClose',
+ 'TerminalCompany',
+ 'TerminalName',
+ 'TerminalPath',
+ 'TesterStatistics',
+ 'TextGetSize',
+ 'TextOut',
+ 'TextSetFont',
+ 'TimeCurrent',
+ 'TimeDayOfWeek',
+ 'TimeDayOfYear',
+ 'TimeDaylightSavings',
+ 'TimeDay',
+ 'TimeGMTOffset',
+ 'TimeGMT',
+ 'TimeHour',
+ 'TimeLocal',
+ 'TimeMinute',
+ 'TimeMonth',
+ 'TimeSeconds',
+ 'TimeToString',
+ 'TimeToStruct',
+ 'TimeToStr',
+ 'TimeTradeServer',
+ 'TimeYear',
+ 'UninitializeReason',
+ 'WindowBarsPerChart',
+ 'WindowExpertName',
+ 'WindowFind',
+ 'WindowFirstVisibleBar',
+ 'WindowHandle',
+ 'WindowIsVisible',
+ 'WindowOnDropped',
+ 'WindowPriceMax',
+ 'WindowPriceMin',
+ 'WindowPriceOnDropped',
+ 'WindowRedraw',
+ 'WindowScreenShot',
+ 'WindowTimeOnDropped',
+ 'WindowXOnDropped',
+ 'WindowYOnDropped',
+ 'WindowsTotal',
+ 'Year',
+ 'ZeroMemory',
+ 'iAC',
+ 'iADX',
+ 'iAD',
+ 'iAO',
+ 'iATR',
+ 'iAlligator',
+ 'iBWMFI',
+ 'iBandsOnArray',
+ 'iBands',
+ 'iBarShift',
+ 'iBars',
+ 'iBearsPower',
+ 'iBullsPower',
+ 'iCCIOnArray',
+ 'iCCI',
+ 'iClose',
+ 'iCustom',
+ 'iDeMarker',
+ 'iEnvelopesOnArray',
+ 'iEnvelopes',
+ 'iForce',
+ 'iFractals',
+ 'iGator',
+ 'iHighest',
+ 'iHigh',
+ 'iIchimoku',
+ 'iLowest',
+ 'iLow',
+ 'iMACD',
+ 'iMAOnArray',
+ 'iMA',
+ 'iMFI',
+ 'iMomentumOnArray',
+ 'iMomentum',
+ 'iOBV',
+ 'iOpen',
+ 'iOsMA',
+ 'iRSIOnArray',
+ 'iRSI',
+ 'iRVI',
+ 'iSAR',
+ 'iStdDevOnArray',
+ 'iStdDev',
+ 'iStochastic',
+ 'iTime',
+ 'iVolume',
+ 'iWPR',
+)
+
+constants = (
+ 'ACCOUNT_BALANCE',
+ 'ACCOUNT_COMPANY',
+ 'ACCOUNT_CREDIT',
+ 'ACCOUNT_CURRENCY',
+ 'ACCOUNT_EQUITY',
+ 'ACCOUNT_FREEMARGIN',
+ 'ACCOUNT_LEVERAGE',
+ 'ACCOUNT_LIMIT_ORDERS',
+ 'ACCOUNT_LOGIN',
+ 'ACCOUNT_MARGIN',
+ 'ACCOUNT_MARGIN_LEVEL',
+ 'ACCOUNT_MARGIN_SO_CALL',
+ 'ACCOUNT_MARGIN_SO_MODE',
+ 'ACCOUNT_MARGIN_SO_SO',
+ 'ACCOUNT_NAME',
+ 'ACCOUNT_PROFIT',
+ 'ACCOUNT_SERVER',
+ 'ACCOUNT_STOPOUT_MODE_MONEY',
+ 'ACCOUNT_STOPOUT_MODE_PERCENT',
+ 'ACCOUNT_TRADE_ALLOWED',
+ 'ACCOUNT_TRADE_EXPERT',
+ 'ACCOUNT_TRADE_MODE',
+ 'ACCOUNT_TRADE_MODE_CONTEST',
+ 'ACCOUNT_TRADE_MODE_DEMO',
+ 'ACCOUNT_TRADE_MODE_REAL',
+ 'ALIGN_CENTER',
+ 'ALIGN_LEFT',
+ 'ALIGN_RIGHT',
+ 'ANCHOR_BOTTOM',
+ 'ANCHOR_CENTER',
+ 'ANCHOR_LEFT',
+ 'ANCHOR_LEFT_LOWER',
+ 'ANCHOR_LEFT_UPPER',
+ 'ANCHOR_LOWER',
+ 'ANCHOR_RIGHT',
+ 'ANCHOR_RIGHT_LOWER',
+ 'ANCHOR_RIGHT_UPPER',
+ 'ANCHOR_TOP',
+ 'ANCHOR_UPPER',
+ 'BORDER_FLAT',
+ 'BORDER_RAISED',
+ 'BORDER_SUNKEN',
+ 'CHARTEVENT_CHART_CHANGE',
+ 'CHARTEVENT_CLICK',
+ 'CHARTEVENT_CUSTOM',
+ 'CHARTEVENT_CUSTOM_LAST',
+ 'CHARTEVENT_KEYDOWN',
+ 'CHARTEVENT_MOUSE_MOVE',
+ 'CHARTEVENT_OBJECT_CHANGE',
+ 'CHARTEVENT_OBJECT_CLICK',
+ 'CHARTEVENT_OBJECT_CREATE',
+ 'CHARTEVENT_OBJECT_DELETE',
+ 'CHARTEVENT_OBJECT_DRAG',
+ 'CHARTEVENT_OBJECT_ENDEDIT',
+ 'CHARTS_MAX',
+ 'CHART_AUTOSCROLL',
+ 'CHART_BARS',
+ 'CHART_BEGIN',
+ 'CHART_BRING_TO_TOP',
+ 'CHART_CANDLES',
+ 'CHART_COLOR_ASK',
+ 'CHART_COLOR_BACKGROUND',
+ 'CHART_COLOR_BID',
+ 'CHART_COLOR_CANDLE_BEAR',
+ 'CHART_COLOR_CANDLE_BULL',
+ 'CHART_COLOR_CHART_DOWN',
+ 'CHART_COLOR_CHART_LINE',
+ 'CHART_COLOR_CHART_UP',
+ 'CHART_COLOR_FOREGROUND',
+ 'CHART_COLOR_GRID',
+ 'CHART_COLOR_LAST',
+ 'CHART_COLOR_STOP_LEVEL',
+ 'CHART_COLOR_VOLUME',
+ 'CHART_COMMENT',
+ 'CHART_CURRENT_POS',
+ 'CHART_DRAG_TRADE_LEVELS',
+ 'CHART_END',
+ 'CHART_EVENT_MOUSE_MOVE',
+ 'CHART_EVENT_OBJECT_CREATE',
+ 'CHART_EVENT_OBJECT_DELETE',
+ 'CHART_FIRST_VISIBLE_BAR',
+ 'CHART_FIXED_MAX',
+ 'CHART_FIXED_MIN',
+ 'CHART_FIXED_POSITION',
+ 'CHART_FOREGROUND',
+ 'CHART_HEIGHT_IN_PIXELS',
+ 'CHART_IS_OBJECT',
+ 'CHART_LINE',
+ 'CHART_MODE',
+ 'CHART_MOUSE_SCROLL',
+ 'CHART_POINTS_PER_BAR',
+ 'CHART_PRICE_MAX',
+ 'CHART_PRICE_MIN',
+ 'CHART_SCALEFIX',
+ 'CHART_SCALEFIX_11',
+ 'CHART_SCALE',
+ 'CHART_SCALE_PT_PER_BAR',
+ 'CHART_SHIFT',
+ 'CHART_SHIFT_SIZE',
+ 'CHART_SHOW_ASK_LINE',
+ 'CHART_SHOW_BID_LINE',
+ 'CHART_SHOW_DATE_SCALE',
+ 'CHART_SHOW_GRID',
+ 'CHART_SHOW_LAST_LINE',
+ 'CHART_SHOW_OBJECT_DESCR',
+ 'CHART_SHOW_OHLC',
+ 'CHART_SHOW_PERIOD_SEP',
+ 'CHART_SHOW_PRICE_SCALE',
+ 'CHART_SHOW_TRADE_LEVELS',
+ 'CHART_SHOW_VOLUMES',
+ 'CHART_VISIBLE_BARS',
+ 'CHART_VOLUME_HIDE',
+ 'CHART_VOLUME_REAL',
+ 'CHART_VOLUME_TICK',
+ 'CHART_WIDTH_IN_BARS',
+ 'CHART_WIDTH_IN_PIXELS',
+ 'CHART_WINDOWS_TOTAL',
+ 'CHART_WINDOW_HANDLE',
+ 'CHART_WINDOW_IS_VISIBLE',
+ 'CHART_WINDOW_YDISTANCE',
+ 'CHAR_MAX',
+ 'CHAR_MIN',
+ 'CLR_NONE',
+ 'CORNER_LEFT_LOWER',
+ 'CORNER_LEFT_UPPER',
+ 'CORNER_RIGHT_LOWER',
+ 'CORNER_RIGHT_UPPER',
+ 'CP_ACP',
+ 'CP_MACCP',
+ 'CP_OEMCP',
+ 'CP_SYMBOL',
+ 'CP_THREAD_ACP',
+ 'CP_UTF7',
+ 'CP_UTF8',
+ 'DBL_DIG',
+ 'DBL_EPSILON',
+ 'DBL_MANT_DIG',
+ 'DBL_MAX',
+ 'DBL_MAX_10_EXP',
+ 'DBL_MAX_EXP',
+ 'DBL_MIN',
+ 'DBL_MIN_10_EXP',
+ 'DBL_MIN_EXP',
+ 'DRAW_ARROW',
+ 'DRAW_FILLING',
+ 'DRAW_HISTOGRAM',
+ 'DRAW_LINE',
+ 'DRAW_NONE',
+ 'DRAW_SECTION',
+ 'DRAW_ZIGZAG',
+ 'EMPTY',
+ 'EMPTY_VALUE',
+ 'ERR_ACCOUNT_DISABLED',
+ 'ERR_BROKER_BUSY',
+ 'ERR_COMMON_ERROR',
+ 'ERR_INVALID_ACCOUNT',
+ 'ERR_INVALID_PRICE',
+ 'ERR_INVALID_STOPS',
+ 'ERR_INVALID_TRADE_PARAMETERS',
+ 'ERR_INVALID_TRADE_VOLUME',
+ 'ERR_LONG_POSITIONS_ONLY_ALLOWED',
+ 'ERR_MALFUNCTIONAL_TRADE',
+ 'ERR_MARKET_CLOSED',
+ 'ERR_NOT_ENOUGH_MONEY',
+ 'ERR_NOT_ENOUGH_RIGHTS',
+ 'ERR_NO_CONNECTION',
+ 'ERR_NO_ERROR',
+ 'ERR_NO_RESULT',
+ 'ERR_OFF_QUOTES',
+ 'ERR_OLD_VERSION',
+ 'ERR_ORDER_LOCKED',
+ 'ERR_PRICE_CHANGED',
+ 'ERR_REQUOTE',
+ 'ERR_SERVER_BUSY',
+ 'ERR_TOO_FREQUENT_REQUESTS',
+ 'ERR_TOO_MANY_REQUESTS',
+ 'ERR_TRADE_CONTEXT_BUSY',
+ 'ERR_TRADE_DISABLED',
+ 'ERR_TRADE_EXPIRATION_DENIED',
+ 'ERR_TRADE_HEDGE_PROHIBITED',
+ 'ERR_TRADE_MODIFY_DENIED',
+ 'ERR_TRADE_PROHIBITED_BY_FIFO',
+ 'ERR_TRADE_TIMEOUT',
+ 'ERR_TRADE_TOO_MANY_ORDERS',
+ 'FILE_ACCESS_DATE',
+ 'FILE_ANSI',
+ 'FILE_BIN',
+ 'FILE_COMMON',
+ 'FILE_CREATE_DATE',
+ 'FILE_CSV',
+ 'FILE_END',
+ 'FILE_EXISTS',
+ 'FILE_IS_ANSI',
+ 'FILE_IS_BINARY',
+ 'FILE_IS_COMMON',
+ 'FILE_IS_CSV',
+ 'FILE_IS_READABLE',
+ 'FILE_IS_TEXT',
+ 'FILE_IS_WRITABLE',
+ 'FILE_LINE_END',
+ 'FILE_MODIFY_DATE',
+ 'FILE_POSITION',
+ 'FILE_READ',
+ 'FILE_REWRITE',
+ 'FILE_SHARE_READ',
+ 'FILE_SHARE_WRITE',
+ 'FILE_SIZE',
+ 'FILE_TXT',
+ 'FILE_UNICODE',
+ 'FILE_WRITE',
+ 'FLT_DIG',
+ 'FLT_EPSILON',
+ 'FLT_MANT_DIG',
+ 'FLT_MAX',
+ 'FLT_MAX_10_EXP',
+ 'FLT_MAX_EXP',
+ 'FLT_MIN',
+ 'FLT_MIN_10_EXP',
+ 'FLT_MIN_EXP',
+ 'FRIDAY',
+ 'GANN_DOWN_TREND',
+ 'GANN_UP_TREND',
+ 'IDABORT',
+ 'IDCANCEL',
+ 'IDCONTINUE',
+ 'IDIGNORE',
+ 'IDNO',
+ 'IDOK',
+ 'IDRETRY',
+ 'IDTRYAGAIN',
+ 'IDYES',
+ 'INDICATOR_CALCULATIONS',
+ 'INDICATOR_COLOR_INDEX',
+ 'INDICATOR_DATA',
+ 'INDICATOR_DIGITS',
+ 'INDICATOR_HEIGHT',
+ 'INDICATOR_LEVELCOLOR',
+ 'INDICATOR_LEVELSTYLE',
+ 'INDICATOR_LEVELS',
+ 'INDICATOR_LEVELTEXT',
+ 'INDICATOR_LEVELVALUE',
+ 'INDICATOR_LEVELWIDTH',
+ 'INDICATOR_MAXIMUM',
+ 'INDICATOR_MINIMUM',
+ 'INDICATOR_SHORTNAME',
+ 'INT_MAX',
+ 'INT_MIN',
+ 'INVALID_HANDLE',
+ 'IS_DEBUG_MODE',
+ 'IS_PROFILE_MODE',
+ 'LICENSE_DEMO',
+ 'LICENSE_FREE',
+ 'LICENSE_FULL',
+ 'LICENSE_TIME',
+ 'LONG_MAX',
+ 'LONG_MIN',
+ 'MB_ABORTRETRYIGNORE',
+ 'MB_CANCELTRYCONTINUE',
+ 'MB_DEFBUTTON1',
+ 'MB_DEFBUTTON2',
+ 'MB_DEFBUTTON3',
+ 'MB_DEFBUTTON4',
+ 'MB_ICONASTERISK',
+ 'MB_ICONERROR',
+ 'MB_ICONEXCLAMATION',
+ 'MB_ICONHAND',
+ 'MB_ICONINFORMATION',
+ 'MB_ICONQUESTION',
+ 'MB_ICONSTOP',
+ 'MB_ICONWARNING',
+ 'MB_OKCANCEL',
+ 'MB_OK',
+ 'MB_RETRYCANCEL',
+ 'MB_YESNOCANCEL',
+ 'MB_YESNO',
+ 'MODE_ASK',
+ 'MODE_BID',
+ 'MODE_CHINKOUSPAN',
+ 'MODE_CLOSE',
+ 'MODE_DIGITS',
+ 'MODE_EMA',
+ 'MODE_EXPIRATION',
+ 'MODE_FREEZELEVEL',
+ 'MODE_GATORJAW',
+ 'MODE_GATORLIPS',
+ 'MODE_GATORTEETH',
+ 'MODE_HIGH',
+ 'MODE_KIJUNSEN',
+ 'MODE_LOTSIZE',
+ 'MODE_LOTSTEP',
+ 'MODE_LOWER',
+ 'MODE_LOW',
+ 'MODE_LWMA',
+ 'MODE_MAIN',
+ 'MODE_MARGINCALCMODE',
+ 'MODE_MARGINHEDGED',
+ 'MODE_MARGININIT',
+ 'MODE_MARGINMAINTENANCE',
+ 'MODE_MARGINREQUIRED',
+ 'MODE_MAXLOT',
+ 'MODE_MINLOT',
+ 'MODE_MINUSDI',
+ 'MODE_OPEN',
+ 'MODE_PLUSDI',
+ 'MODE_POINT',
+ 'MODE_PROFITCALCMODE',
+ 'MODE_SENKOUSPANA',
+ 'MODE_SENKOUSPANB',
+ 'MODE_SIGNAL',
+ 'MODE_SMA',
+ 'MODE_SMMA',
+ 'MODE_SPREAD',
+ 'MODE_STARTING',
+ 'MODE_STOPLEVEL',
+ 'MODE_SWAPLONG',
+ 'MODE_SWAPSHORT',
+ 'MODE_SWAPTYPE',
+ 'MODE_TENKANSEN',
+ 'MODE_TICKSIZE',
+ 'MODE_TICKVALUE',
+ 'MODE_TIME',
+ 'MODE_TRADEALLOWED',
+ 'MODE_UPPER',
+ 'MODE_VOLUME',
+ 'MONDAY',
+ 'MQL_DEBUG',
+ 'MQL_DLLS_ALLOWED',
+ 'MQL_FRAME_MODE',
+ 'MQL_LICENSE_TYPE',
+ 'MQL_OPTIMIZATION',
+ 'MQL_PROFILER',
+ 'MQL_PROGRAM_NAME',
+ 'MQL_PROGRAM_PATH',
+ 'MQL_PROGRAM_TYPE',
+ 'MQL_TESTER',
+ 'MQL_TRADE_ALLOWED',
+ 'MQL_VISUAL_MODE',
+ 'M_1_PI',
+ 'M_2_PI',
+ 'M_2_SQRTPI',
+ 'M_E',
+ 'M_LN2',
+ 'M_LN10',
+ 'M_LOG2E',
+ 'M_LOG10E',
+ 'M_PI',
+ 'M_PI_2',
+ 'M_PI_4',
+ 'M_SQRT1_2',
+ 'M_SQRT2',
+ 'NULL',
+ 'OBJPROP_ALIGN',
+ 'OBJPROP_ANCHOR',
+ 'OBJPROP_ANGLE',
+ 'OBJPROP_ARROWCODE',
+ 'OBJPROP_BACK',
+ 'OBJPROP_BGCOLOR',
+ 'OBJPROP_BMPFILE',
+ 'OBJPROP_BORDER_COLOR',
+ 'OBJPROP_BORDER_TYPE',
+ 'OBJPROP_CHART_ID',
+ 'OBJPROP_CHART_SCALE',
+ 'OBJPROP_COLOR',
+ 'OBJPROP_CORNER',
+ 'OBJPROP_CREATETIME',
+ 'OBJPROP_DATE_SCALE',
+ 'OBJPROP_DEVIATION',
+ 'OBJPROP_DRAWLINES',
+ 'OBJPROP_ELLIPSE',
+ 'OBJPROP_FIBOLEVELS',
+ 'OBJPROP_FILL',
+ 'OBJPROP_FIRSTLEVEL',
+ 'OBJPROP_FONTSIZE',
+ 'OBJPROP_FONT',
+ 'OBJPROP_HIDDEN',
+ 'OBJPROP_LEVELCOLOR',
+ 'OBJPROP_LEVELSTYLE',
+ 'OBJPROP_LEVELS',
+ 'OBJPROP_LEVELTEXT',
+ 'OBJPROP_LEVELVALUE',
+ 'OBJPROP_LEVELWIDTH',
+ 'OBJPROP_NAME',
+ 'OBJPROP_PERIOD',
+ 'OBJPROP_PRICE1',
+ 'OBJPROP_PRICE2',
+ 'OBJPROP_PRICE3',
+ 'OBJPROP_PRICE',
+ 'OBJPROP_PRICE_SCALE',
+ 'OBJPROP_RAY',
+ 'OBJPROP_RAY_RIGHT',
+ 'OBJPROP_READONLY',
+ 'OBJPROP_SCALE',
+ 'OBJPROP_SELECTABLE',
+ 'OBJPROP_SELECTED',
+ 'OBJPROP_STATE',
+ 'OBJPROP_STYLE',
+ 'OBJPROP_SYMBOL',
+ 'OBJPROP_TEXT',
+ 'OBJPROP_TIME1',
+ 'OBJPROP_TIME2',
+ 'OBJPROP_TIME3',
+ 'OBJPROP_TIMEFRAMES',
+ 'OBJPROP_TIME',
+ 'OBJPROP_TOOLTIP',
+ 'OBJPROP_TYPE',
+ 'OBJPROP_WIDTH',
+ 'OBJPROP_XDISTANCE',
+ 'OBJPROP_XOFFSET',
+ 'OBJPROP_XSIZE',
+ 'OBJPROP_YDISTANCE',
+ 'OBJPROP_YOFFSET',
+ 'OBJPROP_YSIZE',
+ 'OBJPROP_ZORDER',
+ 'OBJ_ALL_PERIODS',
+ 'OBJ_ARROW',
+ 'OBJ_ARROW_BUY',
+ 'OBJ_ARROW_CHECK',
+ 'OBJ_ARROW_DOWN',
+ 'OBJ_ARROW_LEFT_PRICE',
+ 'OBJ_ARROW_RIGHT_PRICE',
+ 'OBJ_ARROW_SELL',
+ 'OBJ_ARROW_STOP',
+ 'OBJ_ARROW_THUMB_DOWN',
+ 'OBJ_ARROW_THUMB_UP',
+ 'OBJ_ARROW_UP',
+ 'OBJ_BITMAP',
+ 'OBJ_BITMAP_LABEL',
+ 'OBJ_BUTTON',
+ 'OBJ_CHANNEL',
+ 'OBJ_CYCLES',
+ 'OBJ_EDIT',
+ 'OBJ_ELLIPSE',
+ 'OBJ_EVENT',
+ 'OBJ_EXPANSION',
+ 'OBJ_FIBOARC',
+ 'OBJ_FIBOCHANNEL',
+ 'OBJ_FIBOFAN',
+ 'OBJ_FIBOTIMES',
+ 'OBJ_FIBO',
+ 'OBJ_GANNFAN',
+ 'OBJ_GANNGRID',
+ 'OBJ_GANNLINE',
+ 'OBJ_HLINE',
+ 'OBJ_LABEL',
+ 'OBJ_NO_PERIODS',
+ 'OBJ_PERIOD_D1',
+ 'OBJ_PERIOD_H1',
+ 'OBJ_PERIOD_H4',
+ 'OBJ_PERIOD_M1',
+ 'OBJ_PERIOD_M5',
+ 'OBJ_PERIOD_M15',
+ 'OBJ_PERIOD_M30',
+ 'OBJ_PERIOD_MN1',
+ 'OBJ_PERIOD_W1',
+ 'OBJ_PITCHFORK',
+ 'OBJ_RECTANGLE',
+ 'OBJ_RECTANGLE_LABEL',
+ 'OBJ_REGRESSION',
+ 'OBJ_STDDEVCHANNEL',
+ 'OBJ_TEXT',
+ 'OBJ_TRENDBYANGLE',
+ 'OBJ_TREND',
+ 'OBJ_TRIANGLE',
+ 'OBJ_VLINE',
+ 'OP_BUYLIMIT',
+ 'OP_BUYSTOP',
+ 'OP_BUY',
+ 'OP_SELLLIMIT',
+ 'OP_SELLSTOP',
+ 'OP_SELL',
+ 'PERIOD_CURRENT',
+ 'PERIOD_D1',
+ 'PERIOD_H1',
+ 'PERIOD_H2',
+ 'PERIOD_H3',
+ 'PERIOD_H4',
+ 'PERIOD_H6',
+ 'PERIOD_H8',
+ 'PERIOD_H12',
+ 'PERIOD_M1',
+ 'PERIOD_M2',
+ 'PERIOD_M3',
+ 'PERIOD_M4',
+ 'PERIOD_M5',
+ 'PERIOD_M6',
+ 'PERIOD_M10',
+ 'PERIOD_M12',
+ 'PERIOD_M15',
+ 'PERIOD_M20',
+ 'PERIOD_M30',
+ 'PERIOD_MN1',
+ 'PERIOD_W1',
+ 'POINTER_AUTOMATIC',
+ 'POINTER_DYNAMIC',
+    'POINTER_INVALID',
+ 'PRICE_CLOSE',
+ 'PRICE_HIGH',
+ 'PRICE_LOW',
+ 'PRICE_MEDIAN',
+ 'PRICE_OPEN',
+ 'PRICE_TYPICAL',
+ 'PRICE_WEIGHTED',
+ 'PROGRAM_EXPERT',
+ 'PROGRAM_INDICATOR',
+ 'PROGRAM_SCRIPT',
+ 'REASON_ACCOUNT',
+ 'REASON_CHARTCHANGE',
+ 'REASON_CHARTCLOSE',
+ 'REASON_CLOSE',
+ 'REASON_INITFAILED',
+ 'REASON_PARAMETERS',
+    'REASON_PROGRAM',
+ 'REASON_RECOMPILE',
+ 'REASON_REMOVE',
+ 'REASON_TEMPLATE',
+ 'SATURDAY',
+ 'SEEK_CUR',
+ 'SEEK_END',
+ 'SEEK_SET',
+ 'SERIES_BARS_COUNT',
+ 'SERIES_FIRSTDATE',
+ 'SERIES_LASTBAR_DATE',
+ 'SERIES_SERVER_FIRSTDATE',
+ 'SERIES_SYNCHRONIZED',
+ 'SERIES_TERMINAL_FIRSTDATE',
+ 'SHORT_MAX',
+ 'SHORT_MIN',
+ 'STAT_BALANCEDD_PERCENT',
+ 'STAT_BALANCEMIN',
+ 'STAT_BALANCE_DDREL_PERCENT',
+ 'STAT_BALANCE_DD',
+ 'STAT_BALANCE_DD_RELATIVE',
+ 'STAT_CONLOSSMAX',
+ 'STAT_CONLOSSMAX_TRADES',
+ 'STAT_CONPROFITMAX',
+ 'STAT_CONPROFITMAX_TRADES',
+ 'STAT_CUSTOM_ONTESTER',
+ 'STAT_DEALS',
+ 'STAT_EQUITYDD_PERCENT',
+ 'STAT_EQUITYMIN',
+ 'STAT_EQUITY_DDREL_PERCENT',
+ 'STAT_EQUITY_DD',
+ 'STAT_EQUITY_DD_RELATIVE',
+ 'STAT_EXPECTED_PAYOFF',
+ 'STAT_GROSS_LOSS',
+ 'STAT_GROSS_PROFIT',
+ 'STAT_INITIAL_DEPOSIT',
+ 'STAT_LONG_TRADES',
+ 'STAT_LOSSTRADES_AVGCON',
+ 'STAT_LOSS_TRADES',
+ 'STAT_MAX_CONLOSSES',
+ 'STAT_MAX_CONLOSS_TRADES',
+ 'STAT_MAX_CONPROFIT_TRADES',
+ 'STAT_MAX_CONWINS',
+ 'STAT_MAX_LOSSTRADE',
+ 'STAT_MAX_PROFITTRADE',
+ 'STAT_MIN_MARGINLEVEL',
+ 'STAT_PROFITTRADES_AVGCON',
+ 'STAT_PROFIT',
+ 'STAT_PROFIT_FACTOR',
+ 'STAT_PROFIT_LONGTRADES',
+ 'STAT_PROFIT_SHORTTRADES',
+ 'STAT_PROFIT_TRADES',
+ 'STAT_RECOVERY_FACTOR',
+ 'STAT_SHARPE_RATIO',
+ 'STAT_SHORT_TRADES',
+ 'STAT_TRADES',
+ 'STAT_WITHDRAWAL',
+ 'STO_CLOSECLOSE',
+ 'STO_LOWHIGH',
+ 'STYLE_DASHDOTDOT',
+ 'STYLE_DASHDOT',
+ 'STYLE_DASH',
+ 'STYLE_DOT',
+ 'STYLE_SOLID',
+ 'SUNDAY',
+ 'SYMBOL_ARROWDOWN',
+ 'SYMBOL_ARROWUP',
+ 'SYMBOL_CHECKSIGN',
+ 'SYMBOL_LEFTPRICE',
+ 'SYMBOL_RIGHTPRICE',
+ 'SYMBOL_STOPSIGN',
+ 'SYMBOL_THUMBSDOWN',
+ 'SYMBOL_THUMBSUP',
+ 'TERMINAL_BUILD',
+ 'TERMINAL_CODEPAGE',
+ 'TERMINAL_COMMONDATA_PATH',
+ 'TERMINAL_COMPANY',
+ 'TERMINAL_CONNECTED',
+ 'TERMINAL_CPU_CORES',
+ 'TERMINAL_DATA_PATH',
+ 'TERMINAL_DISK_SPACE',
+ 'TERMINAL_DLLS_ALLOWED',
+ 'TERMINAL_EMAIL_ENABLED',
+ 'TERMINAL_FTP_ENABLED',
+ 'TERMINAL_LANGUAGE',
+ 'TERMINAL_MAXBARS',
+ 'TERMINAL_MEMORY_AVAILABLE',
+ 'TERMINAL_MEMORY_PHYSICAL',
+ 'TERMINAL_MEMORY_TOTAL',
+ 'TERMINAL_MEMORY_USED',
+ 'TERMINAL_NAME',
+ 'TERMINAL_OPENCL_SUPPORT',
+ 'TERMINAL_PATH',
+ 'TERMINAL_TRADE_ALLOWED',
+ 'TERMINAL_X64',
+ 'THURSDAY',
+ 'TRADE_ACTION_DEAL',
+ 'TRADE_ACTION_MODIFY',
+ 'TRADE_ACTION_PENDING',
+ 'TRADE_ACTION_REMOVE',
+ 'TRADE_ACTION_SLTP',
+ 'TUESDAY',
+ 'UCHAR_MAX',
+ 'UINT_MAX',
+ 'ULONG_MAX',
+ 'USHORT_MAX',
+ 'VOLUME_REAL',
+ 'VOLUME_TICK',
+ 'WEDNESDAY',
+ 'WHOLE_ARRAY',
+ 'WRONG_VALUE',
+ 'clrNONE',
+ '__DATETIME__',
+ '__DATE__',
+ '__FILE__',
+ '__FUNCSIG__',
+ '__FUNCTION__',
+ '__LINE__',
+ '__MQL4BUILD__',
+ '__MQLBUILD__',
+ '__PATH__',
+)
+
+colors = (
+ 'AliceBlue',
+ 'AntiqueWhite',
+ 'Aquamarine',
+ 'Aqua',
+ 'Beige',
+ 'Bisque',
+ 'Black',
+ 'BlanchedAlmond',
+ 'BlueViolet',
+ 'Blue',
+ 'Brown',
+ 'BurlyWood',
+ 'CadetBlue',
+ 'Chartreuse',
+ 'Chocolate',
+ 'Coral',
+ 'CornflowerBlue',
+ 'Cornsilk',
+ 'Crimson',
+ 'DarkBlue',
+ 'DarkGoldenrod',
+ 'DarkGray',
+ 'DarkGreen',
+ 'DarkKhaki',
+ 'DarkOliveGreen',
+ 'DarkOrange',
+ 'DarkOrchid',
+ 'DarkSalmon',
+ 'DarkSeaGreen',
+ 'DarkSlateBlue',
+ 'DarkSlateGray',
+ 'DarkTurquoise',
+ 'DarkViolet',
+ 'DeepPink',
+ 'DeepSkyBlue',
+ 'DimGray',
+ 'DodgerBlue',
+ 'FireBrick',
+ 'ForestGreen',
+ 'Gainsboro',
+ 'Goldenrod',
+ 'Gold',
+ 'Gray',
+ 'GreenYellow',
+ 'Green',
+ 'Honeydew',
+ 'HotPink',
+ 'IndianRed',
+ 'Indigo',
+ 'Ivory',
+ 'Khaki',
+ 'LavenderBlush',
+ 'Lavender',
+ 'LawnGreen',
+ 'LemonChiffon',
+ 'LightBlue',
+ 'LightCoral',
+ 'LightCyan',
+ 'LightGoldenrod',
+ 'LightGray',
+ 'LightGreen',
+ 'LightPink',
+ 'LightSalmon',
+ 'LightSeaGreen',
+ 'LightSkyBlue',
+ 'LightSlateGray',
+ 'LightSteelBlue',
+ 'LightYellow',
+ 'LimeGreen',
+ 'Lime',
+ 'Linen',
+ 'Magenta',
+ 'Maroon',
+ 'MediumAquamarine',
+ 'MediumBlue',
+ 'MediumOrchid',
+ 'MediumPurple',
+ 'MediumSeaGreen',
+ 'MediumSlateBlue',
+ 'MediumSpringGreen',
+ 'MediumTurquoise',
+ 'MediumVioletRed',
+ 'MidnightBlue',
+ 'MintCream',
+ 'MistyRose',
+ 'Moccasin',
+ 'NavajoWhite',
+ 'Navy',
+ 'OldLace',
+ 'OliveDrab',
+ 'Olive',
+ 'OrangeRed',
+ 'Orange',
+ 'Orchid',
+ 'PaleGoldenrod',
+ 'PaleGreen',
+ 'PaleTurquoise',
+ 'PaleVioletRed',
+ 'PapayaWhip',
+ 'PeachPuff',
+ 'Peru',
+ 'Pink',
+ 'Plum',
+ 'PowderBlue',
+ 'Purple',
+ 'Red',
+ 'RosyBrown',
+ 'RoyalBlue',
+ 'SaddleBrown',
+ 'Salmon',
+ 'SandyBrown',
+ 'SeaGreen',
+ 'Seashell',
+ 'Sienna',
+ 'Silver',
+ 'SkyBlue',
+ 'SlateBlue',
+ 'SlateGray',
+ 'Snow',
+ 'SpringGreen',
+ 'SteelBlue',
+ 'Tan',
+ 'Teal',
+ 'Thistle',
+ 'Tomato',
+ 'Turquoise',
+ 'Violet',
+ 'Wheat',
+ 'WhiteSmoke',
+ 'White',
+ 'YellowGreen',
+ 'Yellow',
+)
+
+keywords = (
+ 'input', '_Digits', '_Point', '_LastError', '_Period', '_RandomSeed',
+ '_StopFlag', '_Symbol', '_UninitReason', 'Ask', 'Bars', 'Bid',
+ 'Close', 'Digits', 'High', 'Low', 'Open', 'Point', 'Time',
+ 'Volume',
+)
+c_types = (
+ 'void', 'char', 'uchar', 'bool', 'short', 'ushort', 'int', 'uint',
+ 'color', 'long', 'ulong', 'datetime', 'float', 'double',
+ 'string',
+)
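
The builtin tuples in _mql_builtins.py are plain data; a lexer normally folds them into its token rules with pygments.lexer.words(), which compiles the names into a single alternation. A rough sketch of that pattern, assuming the file added above is importable (ToyMqlLexer is illustrative only, not the MqlLexer shipped by this patch):

    from pygments.lexer import RegexLexer, words
    from pygments.token import Keyword, Name, Text

    from pygments.lexers._mql_builtins import keywords, types

    class ToyMqlLexer(RegexLexer):
        """Illustrative only: highlights MQL builtins and nothing else."""
        name = 'ToyMQL'
        tokens = {
            'root': [
                (words(types, suffix=r'\b'), Name.Function),  # builtin functions
                (words(keywords, suffix=r'\b'), Keyword),     # predefined variables
                (r'\s+', Text),
                (r'.', Text),                                 # fall-through
            ],
        }

The constants and colors tuples would be wired up the same way; the sketch only demonstrates the words() idiom.
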
diff --git a/pygments/lexers/_openedge_builtins.py b/pygments/lexers/_openedge_builtins.py
new file mode 100644
index 00000000..46b6cc42
--- /dev/null
+++ b/pygments/lexers/_openedge_builtins.py
@@ -0,0 +1,2547 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._openedge_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Builtin list for the OpenEdgeLexer.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+OPENEDGEKEYWORDS = (
+ 'ABSOLUTE',
+ 'ABS',
+ 'ABSO',
+ 'ABSOL',
+ 'ABSOLU',
+ 'ABSOLUT',
+ 'ACCELERATOR',
+ 'ACCUMULATE',
+ 'ACCUM',
+ 'ACCUMU',
+ 'ACCUMUL',
+ 'ACCUMULA',
+ 'ACCUMULAT',
+ 'ACTIVE-FORM',
+ 'ACTIVE-WINDOW',
+ 'ADD',
+ 'ADD-BUFFER',
+ 'ADD-CALC-COLUMN',
+ 'ADD-COLUMNS-FROM',
+ 'ADD-EVENTS-PROCEDURE',
+ 'ADD-FIELDS-FROM',
+ 'ADD-FIRST',
+ 'ADD-INDEX-FIELD',
+ 'ADD-LAST',
+ 'ADD-LIKE-COLUMN',
+ 'ADD-LIKE-FIELD',
+ 'ADD-LIKE-INDEX',
+ 'ADD-NEW-FIELD',
+ 'ADD-NEW-INDEX',
+ 'ADD-SCHEMA-LOCATION',
+ 'ADD-SUPER-PROCEDURE',
+ 'ADM-DATA',
+ 'ADVISE',
+ 'ALERT-BOX',
+ 'ALIAS',
+ 'ALL',
+ 'ALLOW-COLUMN-SEARCHING',
+ 'ALLOW-REPLICATION',
+ 'ALTER',
+ 'ALWAYS-ON-TOP',
+ 'AMBIGUOUS',
+ 'AMBIG',
+ 'AMBIGU',
+ 'AMBIGUO',
+ 'AMBIGUOU',
+ 'ANALYZE',
+ 'ANALYZ',
+ 'AND',
+ 'ANSI-ONLY',
+ 'ANY',
+ 'ANYWHERE',
+ 'APPEND',
+ 'APPL-ALERT-BOXES',
+ 'APPL-ALERT',
+ 'APPL-ALERT-',
+ 'APPL-ALERT-B',
+ 'APPL-ALERT-BO',
+ 'APPL-ALERT-BOX',
+ 'APPL-ALERT-BOXE',
+ 'APPL-CONTEXT-ID',
+ 'APPLICATION',
+ 'APPLY',
+ 'APPSERVER-INFO',
+ 'APPSERVER-PASSWORD',
+ 'APPSERVER-USERID',
+ 'ARRAY-MESSAGE',
+ 'AS',
+ 'ASC',
+ 'ASCENDING',
+ 'ASCE',
+ 'ASCEN',
+ 'ASCEND',
+ 'ASCENDI',
+ 'ASCENDIN',
+ 'ASK-OVERWRITE',
+ 'ASSEMBLY',
+ 'ASSIGN',
+ 'ASYNCHRONOUS',
+ 'ASYNC-REQUEST-COUNT',
+ 'ASYNC-REQUEST-HANDLE',
+ 'AT',
+ 'ATTACHED-PAIRLIST',
+ 'ATTR-SPACE',
+ 'ATTR',
+ 'ATTRI',
+ 'ATTRIB',
+ 'ATTRIBU',
+ 'ATTRIBUT',
+ 'AUDIT-CONTROL',
+ 'AUDIT-ENABLED',
+ 'AUDIT-EVENT-CONTEXT',
+ 'AUDIT-POLICY',
+ 'AUTHENTICATION-FAILED',
+ 'AUTHORIZATION',
+ 'AUTO-COMPLETION',
+ 'AUTO-COMP',
+ 'AUTO-COMPL',
+ 'AUTO-COMPLE',
+ 'AUTO-COMPLET',
+ 'AUTO-COMPLETI',
+ 'AUTO-COMPLETIO',
+ 'AUTO-ENDKEY',
+ 'AUTO-END-KEY',
+ 'AUTO-GO',
+ 'AUTO-INDENT',
+ 'AUTO-IND',
+ 'AUTO-INDE',
+ 'AUTO-INDEN',
+ 'AUTOMATIC',
+ 'AUTO-RESIZE',
+ 'AUTO-RETURN',
+ 'AUTO-RET',
+ 'AUTO-RETU',
+ 'AUTO-RETUR',
+ 'AUTO-SYNCHRONIZE',
+ 'AUTO-ZAP',
+ 'AUTO-Z',
+ 'AUTO-ZA',
+ 'AVAILABLE',
+ 'AVAIL',
+ 'AVAILA',
+ 'AVAILAB',
+ 'AVAILABL',
+ 'AVAILABLE-FORMATS',
+ 'AVERAGE',
+ 'AVE',
+ 'AVER',
+ 'AVERA',
+ 'AVERAG',
+ 'AVG',
+ 'BACKGROUND',
+ 'BACK',
+ 'BACKG',
+ 'BACKGR',
+ 'BACKGRO',
+ 'BACKGROU',
+ 'BACKGROUN',
+ 'BACKWARDS',
+ 'BACKWARD',
+ 'BASE64-DECODE',
+ 'BASE64-ENCODE',
+ 'BASE-ADE',
+ 'BASE-KEY',
+ 'BATCH-MODE',
+ 'BATCH',
+ 'BATCH-',
+ 'BATCH-M',
+ 'BATCH-MO',
+ 'BATCH-MOD',
+ 'BATCH-SIZE',
+ 'BEFORE-HIDE',
+ 'BEFORE-H',
+ 'BEFORE-HI',
+ 'BEFORE-HID',
+ 'BEGIN-EVENT-GROUP',
+ 'BEGINS',
+ 'BELL',
+ 'BETWEEN',
+ 'BGCOLOR',
+ 'BGC',
+ 'BGCO',
+ 'BGCOL',
+ 'BGCOLO',
+ 'BIG-ENDIAN',
+ 'BINARY',
+ 'BIND',
+ 'BIND-WHERE',
+ 'BLANK',
+ 'BLOCK-ITERATION-DISPLAY',
+ 'BORDER-BOTTOM-CHARS',
+ 'BORDER-B',
+ 'BORDER-BO',
+ 'BORDER-BOT',
+ 'BORDER-BOTT',
+ 'BORDER-BOTTO',
+ 'BORDER-BOTTOM-PIXELS',
+ 'BORDER-BOTTOM-P',
+ 'BORDER-BOTTOM-PI',
+ 'BORDER-BOTTOM-PIX',
+ 'BORDER-BOTTOM-PIXE',
+ 'BORDER-BOTTOM-PIXEL',
+ 'BORDER-LEFT-CHARS',
+ 'BORDER-L',
+ 'BORDER-LE',
+ 'BORDER-LEF',
+ 'BORDER-LEFT',
+ 'BORDER-LEFT-',
+ 'BORDER-LEFT-C',
+ 'BORDER-LEFT-CH',
+ 'BORDER-LEFT-CHA',
+ 'BORDER-LEFT-CHAR',
+ 'BORDER-LEFT-PIXELS',
+ 'BORDER-LEFT-P',
+ 'BORDER-LEFT-PI',
+ 'BORDER-LEFT-PIX',
+ 'BORDER-LEFT-PIXE',
+ 'BORDER-LEFT-PIXEL',
+ 'BORDER-RIGHT-CHARS',
+ 'BORDER-R',
+ 'BORDER-RI',
+ 'BORDER-RIG',
+ 'BORDER-RIGH',
+ 'BORDER-RIGHT',
+ 'BORDER-RIGHT-',
+ 'BORDER-RIGHT-C',
+ 'BORDER-RIGHT-CH',
+ 'BORDER-RIGHT-CHA',
+ 'BORDER-RIGHT-CHAR',
+ 'BORDER-RIGHT-PIXELS',
+ 'BORDER-RIGHT-P',
+ 'BORDER-RIGHT-PI',
+ 'BORDER-RIGHT-PIX',
+ 'BORDER-RIGHT-PIXE',
+ 'BORDER-RIGHT-PIXEL',
+ 'BORDER-TOP-CHARS',
+ 'BORDER-T',
+ 'BORDER-TO',
+ 'BORDER-TOP',
+ 'BORDER-TOP-',
+ 'BORDER-TOP-C',
+ 'BORDER-TOP-CH',
+ 'BORDER-TOP-CHA',
+ 'BORDER-TOP-CHAR',
+ 'BORDER-TOP-PIXELS',
+ 'BORDER-TOP-P',
+ 'BORDER-TOP-PI',
+ 'BORDER-TOP-PIX',
+ 'BORDER-TOP-PIXE',
+ 'BORDER-TOP-PIXEL',
+ 'BOX',
+ 'BOX-SELECTABLE',
+ 'BOX-SELECT',
+ 'BOX-SELECTA',
+ 'BOX-SELECTAB',
+ 'BOX-SELECTABL',
+ 'BREAK',
+ 'BROWSE',
+ 'BUFFER',
+ 'BUFFER-CHARS',
+ 'BUFFER-COMPARE',
+ 'BUFFER-COPY',
+ 'BUFFER-CREATE',
+ 'BUFFER-DELETE',
+ 'BUFFER-FIELD',
+ 'BUFFER-HANDLE',
+ 'BUFFER-LINES',
+ 'BUFFER-NAME',
+ 'BUFFER-RELEASE',
+ 'BUFFER-VALUE',
+ 'BUTTON',
+ 'BUTTONS',
+ 'BY',
+ 'BY-POINTER',
+ 'BY-VARIANT-POINTER',
+ 'CACHE',
+ 'CACHE-SIZE',
+ 'CALL',
+ 'CALL-NAME',
+ 'CALL-TYPE',
+ 'CANCEL-BREAK',
+ 'CANCEL-BUTTON',
+ 'CAN-CREATE',
+ 'CAN-DELETE',
+ 'CAN-DO',
+ 'CAN-FIND',
+ 'CAN-QUERY',
+ 'CAN-READ',
+ 'CAN-SET',
+ 'CAN-WRITE',
+ 'CAPS',
+ 'CAREFUL-PAINT',
+ 'CASE',
+ 'CASE-SENSITIVE',
+ 'CASE-SEN',
+ 'CASE-SENS',
+ 'CASE-SENSI',
+ 'CASE-SENSIT',
+ 'CASE-SENSITI',
+ 'CASE-SENSITIV',
+ 'CAST',
+ 'CATCH',
+ 'CDECL',
+ 'CENTERED',
+ 'CENTER',
+ 'CENTERE',
+ 'CHAINED',
+ 'CHARACTER_LENGTH',
+ 'CHARSET',
+ 'CHECK',
+ 'CHECKED',
+ 'CHOOSE',
+ 'CHR',
+ 'CLASS',
+ 'CLASS-TYPE',
+ 'CLEAR',
+ 'CLEAR-APPL-CONTEXT',
+ 'CLEAR-LOG',
+ 'CLEAR-SELECTION',
+ 'CLEAR-SELECT',
+ 'CLEAR-SELECTI',
+ 'CLEAR-SELECTIO',
+ 'CLEAR-SORT-ARROWS',
+ 'CLEAR-SORT-ARROW',
+ 'CLIENT-CONNECTION-ID',
+ 'CLIENT-PRINCIPAL',
+ 'CLIENT-TTY',
+ 'CLIENT-TYPE',
+ 'CLIENT-WORKSTATION',
+ 'CLIPBOARD',
+ 'CLOSE',
+ 'CLOSE-LOG',
+ 'CODE',
+ 'CODEBASE-LOCATOR',
+ 'CODEPAGE',
+ 'CODEPAGE-CONVERT',
+ 'COLLATE',
+ 'COL-OF',
+ 'COLON',
+ 'COLON-ALIGNED',
+ 'COLON-ALIGN',
+ 'COLON-ALIGNE',
+ 'COLOR',
+ 'COLOR-TABLE',
+ 'COLUMN',
+ 'COL',
+ 'COLU',
+ 'COLUM',
+ 'COLUMN-BGCOLOR',
+ 'COLUMN-DCOLOR',
+ 'COLUMN-FGCOLOR',
+ 'COLUMN-FONT',
+ 'COLUMN-LABEL',
+ 'COLUMN-LAB',
+ 'COLUMN-LABE',
+ 'COLUMN-MOVABLE',
+ 'COLUMN-OF',
+ 'COLUMN-PFCOLOR',
+ 'COLUMN-READ-ONLY',
+ 'COLUMN-RESIZABLE',
+ 'COLUMNS',
+ 'COLUMN-SCROLLING',
+ 'COMBO-BOX',
+ 'COMMAND',
+ 'COMPARES',
+ 'COMPILE',
+ 'COMPILER',
+ 'COMPLETE',
+ 'COM-SELF',
+ 'CONFIG-NAME',
+ 'CONNECT',
+ 'CONNECTED',
+ 'CONSTRUCTOR',
+ 'CONTAINS',
+ 'CONTENTS',
+ 'CONTEXT',
+ 'CONTEXT-HELP',
+ 'CONTEXT-HELP-FILE',
+ 'CONTEXT-HELP-ID',
+ 'CONTEXT-POPUP',
+ 'CONTROL',
+ 'CONTROL-BOX',
+ 'CONTROL-FRAME',
+ 'CONVERT',
+ 'CONVERT-3D-COLORS',
+ 'CONVERT-TO-OFFSET',
+ 'CONVERT-TO-OFFS',
+ 'CONVERT-TO-OFFSE',
+ 'COPY-DATASET',
+ 'COPY-LOB',
+ 'COPY-SAX-ATTRIBUTES',
+ 'COPY-TEMP-TABLE',
+ 'COUNT',
+ 'COUNT-OF',
+ 'CPCASE',
+ 'CPCOLL',
+ 'CPINTERNAL',
+ 'CPLOG',
+ 'CPPRINT',
+ 'CPRCODEIN',
+ 'CPRCODEOUT',
+ 'CPSTREAM',
+ 'CPTERM',
+ 'CRC-VALUE',
+ 'CREATE',
+ 'CREATE-LIKE',
+ 'CREATE-LIKE-SEQUENTIAL',
+ 'CREATE-NODE-NAMESPACE',
+ 'CREATE-RESULT-LIST-ENTRY',
+ 'CREATE-TEST-FILE',
+ 'CURRENT',
+ 'CURRENT_DATE',
+ 'CURRENT-CHANGED',
+ 'CURRENT-COLUMN',
+ 'CURRENT-ENVIRONMENT',
+ 'CURRENT-ENV',
+ 'CURRENT-ENVI',
+ 'CURRENT-ENVIR',
+ 'CURRENT-ENVIRO',
+ 'CURRENT-ENVIRON',
+ 'CURRENT-ENVIRONM',
+ 'CURRENT-ENVIRONME',
+ 'CURRENT-ENVIRONMEN',
+ 'CURRENT-ITERATION',
+ 'CURRENT-LANGUAGE',
+ 'CURRENT-LANG',
+ 'CURRENT-LANGU',
+ 'CURRENT-LANGUA',
+ 'CURRENT-LANGUAG',
+ 'CURRENT-QUERY',
+ 'CURRENT-RESULT-ROW',
+ 'CURRENT-ROW-MODIFIED',
+ 'CURRENT-VALUE',
+ 'CURRENT-WINDOW',
+ 'CURSOR',
+ 'CURS',
+ 'CURSO',
+ 'CURSOR-CHAR',
+ 'CURSOR-LINE',
+ 'CURSOR-OFFSET',
+ 'DATABASE',
+ 'DATA-BIND',
+ 'DATA-ENTRY-RETURN',
+ 'DATA-ENTRY-RET',
+ 'DATA-ENTRY-RETU',
+ 'DATA-ENTRY-RETUR',
+ 'DATA-RELATION',
+ 'DATA-REL',
+ 'DATA-RELA',
+ 'DATA-RELAT',
+ 'DATA-RELATI',
+ 'DATA-RELATIO',
+ 'DATASERVERS',
+ 'DATASET',
+ 'DATASET-HANDLE',
+ 'DATA-SOURCE',
+ 'DATA-SOURCE-COMPLETE-MAP',
+ 'DATA-SOURCE-MODIFIED',
+ 'DATA-SOURCE-ROWID',
+ 'DATA-TYPE',
+ 'DATA-T',
+ 'DATA-TY',
+ 'DATA-TYP',
+ 'DATE-FORMAT',
+ 'DATE-F',
+ 'DATE-FO',
+ 'DATE-FOR',
+ 'DATE-FORM',
+ 'DATE-FORMA',
+ 'DAY',
+ 'DBCODEPAGE',
+ 'DBCOLLATION',
+ 'DBNAME',
+ 'DBPARAM',
+ 'DB-REFERENCES',
+ 'DBRESTRICTIONS',
+ 'DBREST',
+ 'DBRESTR',
+ 'DBRESTRI',
+ 'DBRESTRIC',
+ 'DBRESTRICT',
+ 'DBRESTRICTI',
+ 'DBRESTRICTIO',
+ 'DBRESTRICTION',
+ 'DBTASKID',
+ 'DBTYPE',
+ 'DBVERSION',
+ 'DBVERS',
+ 'DBVERSI',
+ 'DBVERSIO',
+ 'DCOLOR',
+ 'DDE',
+ 'DDE-ERROR',
+ 'DDE-ID',
+ 'DDE-I',
+ 'DDE-ITEM',
+ 'DDE-NAME',
+ 'DDE-TOPIC',
+ 'DEBLANK',
+ 'DEBUG',
+ 'DEBU',
+ 'DEBUG-ALERT',
+ 'DEBUGGER',
+ 'DEBUG-LIST',
+ 'DECIMALS',
+ 'DECLARE',
+ 'DECLARE-NAMESPACE',
+ 'DECRYPT',
+ 'DEFAULT',
+ 'DEFAULT-BUFFER-HANDLE',
+ 'DEFAULT-BUTTON',
+ 'DEFAUT-B',
+ 'DEFAUT-BU',
+ 'DEFAUT-BUT',
+ 'DEFAUT-BUTT',
+ 'DEFAUT-BUTTO',
+ 'DEFAULT-COMMIT',
+ 'DEFAULT-EXTENSION',
+ 'DEFAULT-EX',
+ 'DEFAULT-EXT',
+ 'DEFAULT-EXTE',
+ 'DEFAULT-EXTEN',
+ 'DEFAULT-EXTENS',
+ 'DEFAULT-EXTENSI',
+ 'DEFAULT-EXTENSIO',
+ 'DEFAULT-NOXLATE',
+ 'DEFAULT-NOXL',
+ 'DEFAULT-NOXLA',
+ 'DEFAULT-NOXLAT',
+ 'DEFAULT-VALUE',
+ 'DEFAULT-WINDOW',
+ 'DEFINED',
+ 'DEFINE-USER-EVENT-MANAGER',
+ 'DELETE',
+ 'DEL',
+ 'DELE',
+ 'DELET',
+ 'DELETE-CHARACTER',
+ 'DELETE-CHAR',
+ 'DELETE-CHARA',
+ 'DELETE-CHARAC',
+ 'DELETE-CHARACT',
+ 'DELETE-CHARACTE',
+ 'DELETE-CURRENT-ROW',
+ 'DELETE-LINE',
+ 'DELETE-RESULT-LIST-ENTRY',
+ 'DELETE-SELECTED-ROW',
+ 'DELETE-SELECTED-ROWS',
+ 'DELIMITER',
+ 'DESC',
+ 'DESCENDING',
+ 'DESCE',
+ 'DESCEN',
+ 'DESCEND',
+ 'DESCENDI',
+ 'DESCENDIN',
+ 'DESELECT-FOCUSED-ROW',
+ 'DESELECTION',
+ 'DESELECT-ROWS',
+ 'DESELECT-SELECTED-ROW',
+ 'DESTRUCTOR',
+ 'DIALOG-BOX',
+ 'DICTIONARY',
+ 'DICT',
+ 'DICTI',
+ 'DICTIO',
+ 'DICTION',
+ 'DICTIONA',
+ 'DICTIONAR',
+ 'DIR',
+ 'DISABLE',
+ 'DISABLE-AUTO-ZAP',
+ 'DISABLED',
+ 'DISABLE-DUMP-TRIGGERS',
+ 'DISABLE-LOAD-TRIGGERS',
+ 'DISCONNECT',
+ 'DISCON',
+ 'DISCONN',
+ 'DISCONNE',
+ 'DISCONNEC',
+ 'DISP',
+ 'DISPLAY',
+ 'DISPL',
+ 'DISPLA',
+ 'DISPLAY-MESSAGE',
+ 'DISPLAY-TYPE',
+ 'DISPLAY-T',
+ 'DISPLAY-TY',
+ 'DISPLAY-TYP',
+ 'DISTINCT',
+ 'DO',
+ 'DOMAIN-DESCRIPTION',
+ 'DOMAIN-NAME',
+ 'DOMAIN-TYPE',
+ 'DOS',
+ 'DOUBLE',
+ 'DOWN',
+ 'DRAG-ENABLED',
+ 'DROP',
+ 'DROP-DOWN',
+ 'DROP-DOWN-LIST',
+ 'DROP-FILE-NOTIFY',
+ 'DROP-TARGET',
+ 'DUMP',
+ 'DYNAMIC',
+ 'DYNAMIC-FUNCTION',
+ 'EACH',
+ 'ECHO',
+ 'EDGE-CHARS',
+ 'EDGE',
+ 'EDGE-',
+ 'EDGE-C',
+ 'EDGE-CH',
+ 'EDGE-CHA',
+ 'EDGE-CHAR',
+ 'EDGE-PIXELS',
+ 'EDGE-P',
+ 'EDGE-PI',
+ 'EDGE-PIX',
+ 'EDGE-PIXE',
+ 'EDGE-PIXEL',
+ 'EDIT-CAN-PASTE',
+ 'EDIT-CAN-UNDO',
+ 'EDIT-CLEAR',
+ 'EDIT-COPY',
+ 'EDIT-CUT',
+ 'EDITING',
+ 'EDITOR',
+ 'EDIT-PASTE',
+ 'EDIT-UNDO',
+ 'ELSE',
+ 'EMPTY',
+ 'EMPTY-TEMP-TABLE',
+ 'ENABLE',
+ 'ENABLED-FIELDS',
+ 'ENCODE',
+ 'ENCRYPT',
+ 'ENCRYPT-AUDIT-MAC-KEY',
+ 'ENCRYPTION-SALT',
+ 'END',
+ 'END-DOCUMENT',
+ 'END-ELEMENT',
+ 'END-EVENT-GROUP',
+ 'END-FILE-DROP',
+ 'ENDKEY',
+ 'END-KEY',
+ 'END-MOVE',
+ 'END-RESIZE',
+ 'END-ROW-RESIZE',
+ 'END-USER-PROMPT',
+ 'ENTERED',
+ 'ENTRY',
+ 'EQ',
+ 'ERROR',
+ 'ERROR-COLUMN',
+ 'ERROR-COL',
+ 'ERROR-COLU',
+ 'ERROR-COLUM',
+ 'ERROR-ROW',
+ 'ERROR-STACK-TRACE',
+ 'ERROR-STATUS',
+ 'ERROR-STAT',
+ 'ERROR-STATU',
+ 'ESCAPE',
+ 'ETIME',
+ 'EVENT-GROUP-ID',
+ 'EVENT-PROCEDURE',
+ 'EVENT-PROCEDURE-CONTEXT',
+ 'EVENTS',
+ 'EVENT',
+ 'EVENT-TYPE',
+ 'EVENT-T',
+ 'EVENT-TY',
+ 'EVENT-TYP',
+ 'EXCEPT',
+ 'EXCLUSIVE-ID',
+ 'EXCLUSIVE-LOCK',
+ 'EXCLUSIVE',
+ 'EXCLUSIVE-',
+ 'EXCLUSIVE-L',
+ 'EXCLUSIVE-LO',
+ 'EXCLUSIVE-LOC',
+ 'EXCLUSIVE-WEB-USER',
+ 'EXECUTE',
+ 'EXISTS',
+ 'EXP',
+ 'EXPAND',
+ 'EXPANDABLE',
+ 'EXPLICIT',
+ 'EXPORT',
+ 'EXPORT-PRINCIPAL',
+ 'EXTENDED',
+ 'EXTENT',
+ 'EXTERNAL',
+ 'FALSE',
+ 'FETCH',
+ 'FETCH-SELECTED-ROW',
+ 'FGCOLOR',
+ 'FGC',
+ 'FGCO',
+ 'FGCOL',
+ 'FGCOLO',
+ 'FIELD',
+ 'FIELDS',
+ 'FILE',
+ 'FILE-CREATE-DATE',
+ 'FILE-CREATE-TIME',
+ 'FILE-INFORMATION',
+ 'FILE-INFO',
+ 'FILE-INFOR',
+ 'FILE-INFORM',
+ 'FILE-INFORMA',
+ 'FILE-INFORMAT',
+ 'FILE-INFORMATI',
+ 'FILE-INFORMATIO',
+ 'FILE-MOD-DATE',
+ 'FILE-MOD-TIME',
+ 'FILENAME',
+ 'FILE-NAME',
+ 'FILE-OFFSET',
+ 'FILE-OFF',
+ 'FILE-OFFS',
+ 'FILE-OFFSE',
+ 'FILE-SIZE',
+ 'FILE-TYPE',
+ 'FILL',
+ 'FILLED',
+ 'FILL-IN',
+ 'FILTERS',
+ 'FINAL',
+ 'FINALLY',
+ 'FIND',
+ 'FIND-BY-ROWID',
+ 'FIND-CASE-SENSITIVE',
+ 'FIND-CURRENT',
+ 'FINDER',
+ 'FIND-FIRST',
+ 'FIND-GLOBAL',
+ 'FIND-LAST',
+ 'FIND-NEXT-OCCURRENCE',
+ 'FIND-PREV-OCCURRENCE',
+ 'FIND-SELECT',
+ 'FIND-UNIQUE',
+ 'FIND-WRAP-AROUND',
+ 'FIRST',
+ 'FIRST-ASYNCH-REQUEST',
+ 'FIRST-CHILD',
+ 'FIRST-COLUMN',
+ 'FIRST-FORM',
+ 'FIRST-OBJECT',
+ 'FIRST-OF',
+ 'FIRST-PROCEDURE',
+ 'FIRST-PROC',
+ 'FIRST-PROCE',
+ 'FIRST-PROCED',
+ 'FIRST-PROCEDU',
+ 'FIRST-PROCEDUR',
+ 'FIRST-SERVER',
+ 'FIRST-TAB-ITEM',
+ 'FIRST-TAB-I',
+ 'FIRST-TAB-IT',
+ 'FIRST-TAB-ITE',
+ 'FIT-LAST-COLUMN',
+ 'FIXED-ONLY',
+ 'FLAT-BUTTON',
+ 'FLOAT',
+ 'FOCUS',
+ 'FOCUSED-ROW',
+ 'FOCUSED-ROW-SELECTED',
+ 'FONT',
+ 'FONT-TABLE',
+ 'FOR',
+ 'FORCE-FILE',
+ 'FOREGROUND',
+ 'FORE',
+ 'FOREG',
+ 'FOREGR',
+ 'FOREGRO',
+ 'FOREGROU',
+ 'FOREGROUN',
+ 'FORM',
+ 'FORMAT',
+ 'FORMA',
+ 'FORMATTED',
+ 'FORMATTE',
+ 'FORM-LONG-INPUT',
+ 'FORWARD',
+ 'FORWARDS',
+ 'FRAGMENT',
+ 'FRAGMEN',
+ 'FRAME',
+ 'FRAM',
+ 'FRAME-COL',
+ 'FRAME-DB',
+ 'FRAME-DOWN',
+ 'FRAME-FIELD',
+ 'FRAME-FILE',
+ 'FRAME-INDEX',
+ 'FRAME-INDE',
+ 'FRAME-LINE',
+ 'FRAME-NAME',
+ 'FRAME-ROW',
+ 'FRAME-SPACING',
+ 'FRAME-SPA',
+ 'FRAME-SPAC',
+ 'FRAME-SPACI',
+ 'FRAME-SPACIN',
+ 'FRAME-VALUE',
+ 'FRAME-VAL',
+ 'FRAME-VALU',
+ 'FRAME-X',
+ 'FRAME-Y',
+ 'FREQUENCY',
+ 'FROM',
+ 'FROM-CHARS',
+ 'FROM-C',
+ 'FROM-CH',
+ 'FROM-CHA',
+ 'FROM-CHAR',
+ 'FROM-CURRENT',
+ 'FROM-CUR',
+ 'FROM-CURR',
+ 'FROM-CURRE',
+ 'FROM-CURREN',
+ 'FROM-PIXELS',
+ 'FROM-P',
+ 'FROM-PI',
+ 'FROM-PIX',
+ 'FROM-PIXE',
+ 'FROM-PIXEL',
+ 'FULL-HEIGHT-CHARS',
+ 'FULL-HEIGHT',
+ 'FULL-HEIGHT-',
+ 'FULL-HEIGHT-C',
+ 'FULL-HEIGHT-CH',
+ 'FULL-HEIGHT-CHA',
+ 'FULL-HEIGHT-CHAR',
+ 'FULL-HEIGHT-PIXELS',
+ 'FULL-HEIGHT-P',
+ 'FULL-HEIGHT-PI',
+ 'FULL-HEIGHT-PIX',
+ 'FULL-HEIGHT-PIXE',
+ 'FULL-HEIGHT-PIXEL',
+ 'FULL-PATHNAME',
+ 'FULL-PATHN',
+ 'FULL-PATHNA',
+ 'FULL-PATHNAM',
+ 'FULL-WIDTH-CHARS',
+ 'FULL-WIDTH',
+ 'FULL-WIDTH-',
+ 'FULL-WIDTH-C',
+ 'FULL-WIDTH-CH',
+ 'FULL-WIDTH-CHA',
+ 'FULL-WIDTH-CHAR',
+ 'FULL-WIDTH-PIXELS',
+ 'FULL-WIDTH-P',
+ 'FULL-WIDTH-PI',
+ 'FULL-WIDTH-PIX',
+ 'FULL-WIDTH-PIXE',
+ 'FULL-WIDTH-PIXEL',
+ 'FUNCTION',
+ 'FUNCTION-CALL-TYPE',
+ 'GATEWAYS',
+ 'GATEWAY',
+ 'GE',
+ 'GENERATE-MD5',
+ 'GENERATE-PBE-KEY',
+ 'GENERATE-PBE-SALT',
+ 'GENERATE-RANDOM-KEY',
+ 'GENERATE-UUID',
+ 'GET',
+ 'GET-ATTR-CALL-TYPE',
+ 'GET-ATTRIBUTE-NODE',
+ 'GET-BINARY-DATA',
+ 'GET-BLUE-VALUE',
+ 'GET-BLUE',
+ 'GET-BLUE-',
+ 'GET-BLUE-V',
+ 'GET-BLUE-VA',
+ 'GET-BLUE-VAL',
+ 'GET-BLUE-VALU',
+ 'GET-BROWSE-COLUMN',
+ 'GET-BUFFER-HANDLEGETBYTE',
+ 'GET-BYTE',
+ 'GET-CALLBACK-PROC-CONTEXT',
+ 'GET-CALLBACK-PROC-NAME',
+ 'GET-CGI-LIST',
+ 'GET-CGI-LONG-VALUE',
+ 'GET-CGI-VALUE',
+ 'GET-CODEPAGES',
+ 'GET-COLLATIONS',
+ 'GET-CONFIG-VALUE',
+ 'GET-CURRENT',
+ 'GET-DOUBLE',
+ 'GET-DROPPED-FILE',
+ 'GET-DYNAMIC',
+ 'GET-ERROR-COLUMN',
+ 'GET-ERROR-ROW',
+ 'GET-FILE',
+ 'GET-FILE-NAME',
+ 'GET-FILE-OFFSET',
+ 'GET-FILE-OFFSE',
+ 'GET-FIRST',
+ 'GET-FLOAT',
+ 'GET-GREEN-VALUE',
+ 'GET-GREEN',
+ 'GET-GREEN-',
+ 'GET-GREEN-V',
+ 'GET-GREEN-VA',
+ 'GET-GREEN-VAL',
+ 'GET-GREEN-VALU',
+ 'GET-INDEX-BY-NAMESPACE-NAME',
+ 'GET-INDEX-BY-QNAME',
+ 'GET-INT64',
+ 'GET-ITERATION',
+ 'GET-KEY-VALUE',
+ 'GET-KEY-VAL',
+ 'GET-KEY-VALU',
+ 'GET-LAST',
+ 'GET-LOCALNAME-BY-INDEX',
+ 'GET-LONG',
+ 'GET-MESSAGE',
+ 'GET-NEXT',
+ 'GET-NUMBER',
+ 'GET-POINTER-VALUE',
+ 'GET-PREV',
+ 'GET-PRINTERS',
+ 'GET-PROPERTY',
+ 'GET-QNAME-BY-INDEX',
+ 'GET-RED-VALUE',
+ 'GET-RED',
+ 'GET-RED-',
+ 'GET-RED-V',
+ 'GET-RED-VA',
+ 'GET-RED-VAL',
+ 'GET-RED-VALU',
+ 'GET-REPOSITIONED-ROW',
+ 'GET-RGB-VALUE',
+ 'GET-SELECTED-WIDGET',
+ 'GET-SELECTED',
+ 'GET-SELECTED-',
+ 'GET-SELECTED-W',
+ 'GET-SELECTED-WI',
+ 'GET-SELECTED-WID',
+ 'GET-SELECTED-WIDG',
+ 'GET-SELECTED-WIDGE',
+ 'GET-SHORT',
+ 'GET-SIGNATURE',
+ 'GET-SIZE',
+ 'GET-STRING',
+ 'GET-TAB-ITEM',
+ 'GET-TEXT-HEIGHT-CHARS',
+ 'GET-TEXT-HEIGHT',
+ 'GET-TEXT-HEIGHT-',
+ 'GET-TEXT-HEIGHT-C',
+ 'GET-TEXT-HEIGHT-CH',
+ 'GET-TEXT-HEIGHT-CHA',
+ 'GET-TEXT-HEIGHT-CHAR',
+ 'GET-TEXT-HEIGHT-PIXELS',
+ 'GET-TEXT-HEIGHT-P',
+ 'GET-TEXT-HEIGHT-PI',
+ 'GET-TEXT-HEIGHT-PIX',
+ 'GET-TEXT-HEIGHT-PIXE',
+ 'GET-TEXT-HEIGHT-PIXEL',
+ 'GET-TEXT-WIDTH-CHARS',
+ 'GET-TEXT-WIDTH',
+ 'GET-TEXT-WIDTH-',
+ 'GET-TEXT-WIDTH-C',
+ 'GET-TEXT-WIDTH-CH',
+ 'GET-TEXT-WIDTH-CHA',
+ 'GET-TEXT-WIDTH-CHAR',
+ 'GET-TEXT-WIDTH-PIXELS',
+ 'GET-TEXT-WIDTH-P',
+ 'GET-TEXT-WIDTH-PI',
+ 'GET-TEXT-WIDTH-PIX',
+ 'GET-TEXT-WIDTH-PIXE',
+ 'GET-TEXT-WIDTH-PIXEL',
+ 'GET-TYPE-BY-INDEX',
+ 'GET-TYPE-BY-NAMESPACE-NAME',
+ 'GET-TYPE-BY-QNAME',
+ 'GET-UNSIGNED-LONG',
+ 'GET-UNSIGNED-SHORT',
+ 'GET-URI-BY-INDEX',
+ 'GET-VALUE-BY-INDEX',
+ 'GET-VALUE-BY-NAMESPACE-NAME',
+ 'GET-VALUE-BY-QNAME',
+ 'GET-WAIT-STATE',
+ 'GLOBAL',
+ 'GO-ON',
+ 'GO-PENDING',
+ 'GO-PEND',
+ 'GO-PENDI',
+ 'GO-PENDIN',
+ 'GRANT',
+ 'GRAPHIC-EDGE',
+ 'GRAPHIC-E',
+ 'GRAPHIC-ED',
+ 'GRAPHIC-EDG',
+ 'GRID-FACTOR-HORIZONTAL',
+ 'GRID-FACTOR-H',
+ 'GRID-FACTOR-HO',
+ 'GRID-FACTOR-HOR',
+ 'GRID-FACTOR-HORI',
+ 'GRID-FACTOR-HORIZ',
+ 'GRID-FACTOR-HORIZO',
+ 'GRID-FACTOR-HORIZON',
+ 'GRID-FACTOR-HORIZONT',
+ 'GRID-FACTOR-HORIZONTA',
+ 'GRID-FACTOR-VERTICAL',
+ 'GRID-FACTOR-V',
+ 'GRID-FACTOR-VE',
+ 'GRID-FACTOR-VER',
+ 'GRID-FACTOR-VERT',
+ 'GRID-FACTOR-VERTI',
+ 'GRID-FACTOR-VERTIC',
+ 'GRID-FACTOR-VERTICA',
+ 'GRID-SNAP',
+ 'GRID-UNIT-HEIGHT-CHARS',
+ 'GRID-UNIT-HEIGHT',
+ 'GRID-UNIT-HEIGHT-',
+ 'GRID-UNIT-HEIGHT-C',
+ 'GRID-UNIT-HEIGHT-CH',
+ 'GRID-UNIT-HEIGHT-CHA',
+ 'GRID-UNIT-HEIGHT-PIXELS',
+ 'GRID-UNIT-HEIGHT-P',
+ 'GRID-UNIT-HEIGHT-PI',
+ 'GRID-UNIT-HEIGHT-PIX',
+ 'GRID-UNIT-HEIGHT-PIXE',
+ 'GRID-UNIT-HEIGHT-PIXEL',
+ 'GRID-UNIT-WIDTH-CHARS',
+ 'GRID-UNIT-WIDTH',
+ 'GRID-UNIT-WIDTH-',
+ 'GRID-UNIT-WIDTH-C',
+ 'GRID-UNIT-WIDTH-CH',
+ 'GRID-UNIT-WIDTH-CHA',
+ 'GRID-UNIT-WIDTH-CHAR',
+ 'GRID-UNIT-WIDTH-PIXELS',
+ 'GRID-UNIT-WIDTH-P',
+ 'GRID-UNIT-WIDTH-PI',
+ 'GRID-UNIT-WIDTH-PIX',
+ 'GRID-UNIT-WIDTH-PIXE',
+ 'GRID-UNIT-WIDTH-PIXEL',
+ 'GRID-VISIBLE',
+ 'GROUP',
+ 'GT',
+ 'GUID',
+ 'HANDLER',
+ 'HAS-RECORDS',
+ 'HAVING',
+ 'HEADER',
+ 'HEIGHT-CHARS',
+ 'HEIGHT',
+ 'HEIGHT-',
+ 'HEIGHT-C',
+ 'HEIGHT-CH',
+ 'HEIGHT-CHA',
+ 'HEIGHT-CHAR',
+ 'HEIGHT-PIXELS',
+ 'HEIGHT-P',
+ 'HEIGHT-PI',
+ 'HEIGHT-PIX',
+ 'HEIGHT-PIXE',
+ 'HEIGHT-PIXEL',
+ 'HELP',
+ 'HEX-DECODE',
+ 'HEX-ENCODE',
+ 'HIDDEN',
+ 'HIDE',
+ 'HORIZONTAL',
+ 'HORI',
+ 'HORIZ',
+ 'HORIZO',
+ 'HORIZON',
+ 'HORIZONT',
+ 'HORIZONTA',
+ 'HOST-BYTE-ORDER',
+ 'HTML-CHARSET',
+ 'HTML-END-OF-LINE',
+ 'HTML-END-OF-PAGE',
+ 'HTML-FRAME-BEGIN',
+ 'HTML-FRAME-END',
+ 'HTML-HEADER-BEGIN',
+ 'HTML-HEADER-END',
+ 'HTML-TITLE-BEGIN',
+ 'HTML-TITLE-END',
+ 'HWND',
+ 'ICON',
+ 'IF',
+ 'IMAGE',
+ 'IMAGE-DOWN',
+ 'IMAGE-INSENSITIVE',
+ 'IMAGE-SIZE',
+ 'IMAGE-SIZE-CHARS',
+ 'IMAGE-SIZE-C',
+ 'IMAGE-SIZE-CH',
+ 'IMAGE-SIZE-CHA',
+ 'IMAGE-SIZE-CHAR',
+ 'IMAGE-SIZE-PIXELS',
+ 'IMAGE-SIZE-P',
+ 'IMAGE-SIZE-PI',
+ 'IMAGE-SIZE-PIX',
+ 'IMAGE-SIZE-PIXE',
+ 'IMAGE-SIZE-PIXEL',
+ 'IMAGE-UP',
+ 'IMMEDIATE-DISPLAY',
+ 'IMPLEMENTS',
+ 'IMPORT',
+ 'IMPORT-PRINCIPAL',
+ 'IN',
+ 'INCREMENT-EXCLUSIVE-ID',
+ 'INDEX',
+ 'INDEXED-REPOSITION',
+ 'INDEX-HINT',
+ 'INDEX-INFORMATION',
+ 'INDICATOR',
+ 'INFORMATION',
+ 'INFO',
+ 'INFOR',
+ 'INFORM',
+ 'INFORMA',
+ 'INFORMAT',
+ 'INFORMATI',
+ 'INFORMATIO',
+ 'IN-HANDLE',
+ 'INHERIT-BGCOLOR',
+ 'INHERIT-BGC',
+ 'INHERIT-BGCO',
+ 'INHERIT-BGCOL',
+ 'INHERIT-BGCOLO',
+ 'INHERIT-FGCOLOR',
+ 'INHERIT-FGC',
+ 'INHERIT-FGCO',
+ 'INHERIT-FGCOL',
+ 'INHERIT-FGCOLO',
+ 'INHERITS',
+ 'INITIAL',
+ 'INIT',
+ 'INITI',
+ 'INITIA',
+ 'INITIAL-DIR',
+ 'INITIAL-FILTER',
+ 'INITIALIZE-DOCUMENT-TYPE',
+ 'INITIATE',
+ 'INNER-CHARS',
+ 'INNER-LINES',
+ 'INPUT',
+ 'INPUT-OUTPUT',
+ 'INPUT-O',
+ 'INPUT-OU',
+ 'INPUT-OUT',
+ 'INPUT-OUTP',
+ 'INPUT-OUTPU',
+ 'INPUT-VALUE',
+ 'INSERT',
+ 'INSERT-ATTRIBUTE',
+ 'INSERT-BACKTAB',
+ 'INSERT-B',
+ 'INSERT-BA',
+ 'INSERT-BAC',
+ 'INSERT-BACK',
+ 'INSERT-BACKT',
+ 'INSERT-BACKTA',
+ 'INSERT-FILE',
+ 'INSERT-ROW',
+ 'INSERT-STRING',
+ 'INSERT-TAB',
+ 'INSERT-T',
+ 'INSERT-TA',
+ 'INTERFACE',
+ 'INTERNAL-ENTRIES',
+ 'INTO',
+ 'INVOKE',
+ 'IS',
+ 'IS-ATTR-SPACE',
+ 'IS-ATTR',
+ 'IS-ATTR-',
+ 'IS-ATTR-S',
+ 'IS-ATTR-SP',
+ 'IS-ATTR-SPA',
+ 'IS-ATTR-SPAC',
+ 'IS-CLASS',
+ 'IS-CLAS',
+ 'IS-LEAD-BYTE',
+ 'IS-OPEN',
+ 'IS-PARAMETER-SET',
+ 'IS-ROW-SELECTED',
+ 'IS-SELECTED',
+ 'ITEM',
+ 'ITEMS-PER-ROW',
+ 'JOIN',
+ 'JOIN-BY-SQLDB',
+ 'KBLABEL',
+ 'KEEP-CONNECTION-OPEN',
+ 'KEEP-FRAME-Z-ORDER',
+ 'KEEP-FRAME-Z',
+ 'KEEP-FRAME-Z-',
+ 'KEEP-FRAME-Z-O',
+ 'KEEP-FRAME-Z-OR',
+ 'KEEP-FRAME-Z-ORD',
+ 'KEEP-FRAME-Z-ORDE',
+ 'KEEP-MESSAGES',
+ 'KEEP-SECURITY-CACHE',
+ 'KEEP-TAB-ORDER',
+ 'KEY',
+ 'KEYCODE',
+ 'KEY-CODE',
+ 'KEYFUNCTION',
+ 'KEYFUNC',
+ 'KEYFUNCT',
+ 'KEYFUNCTI',
+ 'KEYFUNCTIO',
+ 'KEY-FUNCTION',
+ 'KEY-FUNC',
+ 'KEY-FUNCT',
+ 'KEY-FUNCTI',
+ 'KEY-FUNCTIO',
+ 'KEYLABEL',
+ 'KEY-LABEL',
+ 'KEYS',
+ 'KEYWORD',
+ 'KEYWORD-ALL',
+ 'LABEL',
+ 'LABEL-BGCOLOR',
+ 'LABEL-BGC',
+ 'LABEL-BGCO',
+ 'LABEL-BGCOL',
+ 'LABEL-BGCOLO',
+ 'LABEL-DCOLOR',
+ 'LABEL-DC',
+ 'LABEL-DCO',
+ 'LABEL-DCOL',
+ 'LABEL-DCOLO',
+ 'LABEL-FGCOLOR',
+ 'LABEL-FGC',
+ 'LABEL-FGCO',
+ 'LABEL-FGCOL',
+ 'LABEL-FGCOLO',
+ 'LABEL-FONT',
+ 'LABEL-PFCOLOR',
+ 'LABEL-PFC',
+ 'LABEL-PFCO',
+ 'LABEL-PFCOL',
+ 'LABEL-PFCOLO',
+ 'LABELS',
+ 'LANDSCAPE',
+ 'LANGUAGES',
+ 'LANGUAGE',
+ 'LARGE',
+ 'LARGE-TO-SMALL',
+ 'LAST',
+ 'LAST-ASYNCH-REQUEST',
+ 'LAST-BATCH',
+ 'LAST-CHILD',
+ 'LAST-EVENT',
+ 'LAST-EVEN',
+ 'LAST-FORM',
+ 'LASTKEY',
+ 'LAST-KEY',
+ 'LAST-OBJECT',
+ 'LAST-OF',
+ 'LAST-PROCEDURE',
+ 'LAST-PROCE',
+ 'LAST-PROCED',
+ 'LAST-PROCEDU',
+ 'LAST-PROCEDUR',
+ 'LAST-SERVER',
+ 'LAST-TAB-ITEM',
+ 'LAST-TAB-I',
+ 'LAST-TAB-IT',
+ 'LAST-TAB-ITE',
+ 'LC',
+ 'LDBNAME',
+ 'LE',
+ 'LEAVE',
+ 'LEFT-ALIGNED',
+ 'LEFT-ALIGN',
+ 'LEFT-ALIGNE',
+ 'LEFT-TRIM',
+ 'LENGTH',
+ 'LIBRARY',
+ 'LIKE',
+ 'LIKE-SEQUENTIAL',
+ 'LINE',
+ 'LINE-COUNTER',
+ 'LINE-COUNT',
+ 'LINE-COUNTE',
+ 'LIST-EVENTS',
+ 'LISTING',
+ 'LISTI',
+ 'LISTIN',
+ 'LIST-ITEM-PAIRS',
+ 'LIST-ITEMS',
+ 'LIST-PROPERTY-NAMES',
+ 'LIST-QUERY-ATTRS',
+ 'LIST-SET-ATTRS',
+ 'LIST-WIDGETS',
+ 'LITERAL-QUESTION',
+ 'LITTLE-ENDIAN',
+ 'LOAD',
+ 'LOAD-DOMAINS',
+ 'LOAD-ICON',
+ 'LOAD-IMAGE',
+ 'LOAD-IMAGE-DOWN',
+ 'LOAD-IMAGE-INSENSITIVE',
+ 'LOAD-IMAGE-UP',
+ 'LOAD-MOUSE-POINTER',
+ 'LOAD-MOUSE-P',
+ 'LOAD-MOUSE-PO',
+ 'LOAD-MOUSE-POI',
+ 'LOAD-MOUSE-POIN',
+ 'LOAD-MOUSE-POINT',
+ 'LOAD-MOUSE-POINTE',
+ 'LOAD-PICTURE',
+ 'LOAD-SMALL-ICON',
+ 'LOCAL-NAME',
+ 'LOCATOR-COLUMN-NUMBER',
+ 'LOCATOR-LINE-NUMBER',
+ 'LOCATOR-PUBLIC-ID',
+ 'LOCATOR-SYSTEM-ID',
+ 'LOCATOR-TYPE',
+ 'LOCKED',
+ 'LOCK-REGISTRATION',
+ 'LOG',
+ 'LOG-AUDIT-EVENT',
+ 'LOGIN-EXPIRATION-TIMESTAMP',
+ 'LOGIN-HOST',
+ 'LOGIN-STATE',
+ 'LOG-MANAGER',
+ 'LOGOUT',
+ 'LOOKAHEAD',
+ 'LOOKUP',
+ 'LT',
+ 'MACHINE-CLASS',
+ 'MANDATORY',
+ 'MANUAL-HIGHLIGHT',
+ 'MAP',
+ 'MARGIN-EXTRA',
+ 'MARGIN-HEIGHT-CHARS',
+ 'MARGIN-HEIGHT',
+ 'MARGIN-HEIGHT-',
+ 'MARGIN-HEIGHT-C',
+ 'MARGIN-HEIGHT-CH',
+ 'MARGIN-HEIGHT-CHA',
+ 'MARGIN-HEIGHT-CHAR',
+ 'MARGIN-HEIGHT-PIXELS',
+ 'MARGIN-HEIGHT-P',
+ 'MARGIN-HEIGHT-PI',
+ 'MARGIN-HEIGHT-PIX',
+ 'MARGIN-HEIGHT-PIXE',
+ 'MARGIN-HEIGHT-PIXEL',
+ 'MARGIN-WIDTH-CHARS',
+ 'MARGIN-WIDTH',
+ 'MARGIN-WIDTH-',
+ 'MARGIN-WIDTH-C',
+ 'MARGIN-WIDTH-CH',
+ 'MARGIN-WIDTH-CHA',
+ 'MARGIN-WIDTH-CHAR',
+ 'MARGIN-WIDTH-PIXELS',
+ 'MARGIN-WIDTH-P',
+ 'MARGIN-WIDTH-PI',
+ 'MARGIN-WIDTH-PIX',
+ 'MARGIN-WIDTH-PIXE',
+ 'MARGIN-WIDTH-PIXEL',
+ 'MARK-NEW',
+ 'MARK-ROW-STATE',
+ 'MATCHES',
+ 'MAX-BUTTON',
+ 'MAX-CHARS',
+ 'MAX-DATA-GUESS',
+ 'MAX-HEIGHT',
+ 'MAX-HEIGHT-CHARS',
+ 'MAX-HEIGHT-C',
+ 'MAX-HEIGHT-CH',
+ 'MAX-HEIGHT-CHA',
+ 'MAX-HEIGHT-CHAR',
+ 'MAX-HEIGHT-PIXELS',
+ 'MAX-HEIGHT-P',
+ 'MAX-HEIGHT-PI',
+ 'MAX-HEIGHT-PIX',
+ 'MAX-HEIGHT-PIXE',
+ 'MAX-HEIGHT-PIXEL',
+ 'MAXIMIZE',
+ 'MAXIMUM',
+ 'MAX',
+ 'MAXI',
+ 'MAXIM',
+ 'MAXIMU',
+ 'MAXIMUM-LEVEL',
+ 'MAX-ROWS',
+ 'MAX-SIZE',
+ 'MAX-VALUE',
+ 'MAX-VAL',
+ 'MAX-VALU',
+ 'MAX-WIDTH-CHARS',
+ 'MAX-WIDTH',
+ 'MAX-WIDTH-',
+ 'MAX-WIDTH-C',
+ 'MAX-WIDTH-CH',
+ 'MAX-WIDTH-CHA',
+ 'MAX-WIDTH-CHAR',
+ 'MAX-WIDTH-PIXELS',
+ 'MAX-WIDTH-P',
+ 'MAX-WIDTH-PI',
+ 'MAX-WIDTH-PIX',
+ 'MAX-WIDTH-PIXE',
+ 'MAX-WIDTH-PIXEL',
+ 'MD5-DIGEST',
+ 'MEMBER',
+ 'MEMPTR-TO-NODE-VALUE',
+ 'MENU',
+ 'MENUBAR',
+ 'MENU-BAR',
+ 'MENU-ITEM',
+ 'MENU-KEY',
+ 'MENU-K',
+ 'MENU-KE',
+ 'MENU-MOUSE',
+ 'MENU-M',
+ 'MENU-MO',
+ 'MENU-MOU',
+ 'MENU-MOUS',
+ 'MERGE-BY-FIELD',
+ 'MESSAGE',
+ 'MESSAGE-AREA',
+ 'MESSAGE-AREA-FONT',
+ 'MESSAGE-LINES',
+ 'METHOD',
+ 'MIN-BUTTON',
+ 'MIN-COLUMN-WIDTH-CHARS',
+ 'MIN-COLUMN-WIDTH-C',
+ 'MIN-COLUMN-WIDTH-CH',
+ 'MIN-COLUMN-WIDTH-CHA',
+ 'MIN-COLUMN-WIDTH-CHAR',
+ 'MIN-COLUMN-WIDTH-PIXELS',
+ 'MIN-COLUMN-WIDTH-P',
+ 'MIN-COLUMN-WIDTH-PI',
+ 'MIN-COLUMN-WIDTH-PIX',
+ 'MIN-COLUMN-WIDTH-PIXE',
+ 'MIN-COLUMN-WIDTH-PIXEL',
+ 'MIN-HEIGHT-CHARS',
+ 'MIN-HEIGHT',
+ 'MIN-HEIGHT-',
+ 'MIN-HEIGHT-C',
+ 'MIN-HEIGHT-CH',
+ 'MIN-HEIGHT-CHA',
+ 'MIN-HEIGHT-CHAR',
+ 'MIN-HEIGHT-PIXELS',
+ 'MIN-HEIGHT-P',
+ 'MIN-HEIGHT-PI',
+ 'MIN-HEIGHT-PIX',
+ 'MIN-HEIGHT-PIXE',
+ 'MIN-HEIGHT-PIXEL',
+ 'MINIMUM',
+ 'MIN',
+ 'MINI',
+ 'MINIM',
+ 'MINIMU',
+ 'MIN-SIZE',
+ 'MIN-VALUE',
+ 'MIN-VAL',
+ 'MIN-VALU',
+ 'MIN-WIDTH-CHARS',
+ 'MIN-WIDTH',
+ 'MIN-WIDTH-',
+ 'MIN-WIDTH-C',
+ 'MIN-WIDTH-CH',
+ 'MIN-WIDTH-CHA',
+ 'MIN-WIDTH-CHAR',
+ 'MIN-WIDTH-PIXELS',
+ 'MIN-WIDTH-P',
+ 'MIN-WIDTH-PI',
+ 'MIN-WIDTH-PIX',
+ 'MIN-WIDTH-PIXE',
+ 'MIN-WIDTH-PIXEL',
+ 'MODIFIED',
+ 'MODULO',
+ 'MOD',
+ 'MODU',
+ 'MODUL',
+ 'MONTH',
+ 'MOUSE',
+ 'MOUSE-POINTER',
+ 'MOUSE-P',
+ 'MOUSE-PO',
+ 'MOUSE-POI',
+ 'MOUSE-POIN',
+ 'MOUSE-POINT',
+ 'MOUSE-POINTE',
+ 'MOVABLE',
+ 'MOVE-AFTER-TAB-ITEM',
+ 'MOVE-AFTER',
+ 'MOVE-AFTER-',
+ 'MOVE-AFTER-T',
+ 'MOVE-AFTER-TA',
+ 'MOVE-AFTER-TAB',
+ 'MOVE-AFTER-TAB-',
+ 'MOVE-AFTER-TAB-I',
+ 'MOVE-AFTER-TAB-IT',
+ 'MOVE-AFTER-TAB-ITE',
+ 'MOVE-BEFORE-TAB-ITEM',
+ 'MOVE-BEFOR',
+ 'MOVE-BEFORE',
+ 'MOVE-BEFORE-',
+ 'MOVE-BEFORE-T',
+ 'MOVE-BEFORE-TA',
+ 'MOVE-BEFORE-TAB',
+ 'MOVE-BEFORE-TAB-',
+ 'MOVE-BEFORE-TAB-I',
+ 'MOVE-BEFORE-TAB-IT',
+ 'MOVE-BEFORE-TAB-ITE',
+ 'MOVE-COLUMN',
+ 'MOVE-COL',
+ 'MOVE-COLU',
+ 'MOVE-COLUM',
+ 'MOVE-TO-BOTTOM',
+ 'MOVE-TO-B',
+ 'MOVE-TO-BO',
+ 'MOVE-TO-BOT',
+ 'MOVE-TO-BOTT',
+ 'MOVE-TO-BOTTO',
+ 'MOVE-TO-EOF',
+ 'MOVE-TO-TOP',
+ 'MOVE-TO-T',
+ 'MOVE-TO-TO',
+ 'MPE',
+ 'MULTI-COMPILE',
+ 'MULTIPLE',
+ 'MULTIPLE-KEY',
+ 'MULTITASKING-INTERVAL',
+ 'MUST-EXIST',
+ 'NAME',
+ 'NAMESPACE-PREFIX',
+ 'NAMESPACE-URI',
+ 'NATIVE',
+ 'NE',
+ 'NEEDS-APPSERVER-PROMPT',
+ 'NEEDS-PROMPT',
+ 'NEW',
+ 'NEW-INSTANCE',
+ 'NEW-ROW',
+ 'NEXT',
+ 'NEXT-COLUMN',
+ 'NEXT-PROMPT',
+ 'NEXT-ROWID',
+ 'NEXT-SIBLING',
+ 'NEXT-TAB-ITEM',
+ 'NEXT-TAB-I',
+ 'NEXT-TAB-IT',
+ 'NEXT-TAB-ITE',
+ 'NEXT-VALUE',
+ 'NO',
+ 'NO-APPLY',
+ 'NO-ARRAY-MESSAGE',
+ 'NO-ASSIGN',
+ 'NO-ATTR-LIST',
+ 'NO-ATTR',
+ 'NO-ATTR-',
+ 'NO-ATTR-L',
+ 'NO-ATTR-LI',
+ 'NO-ATTR-LIS',
+ 'NO-ATTR-SPACE',
+ 'NO-ATTR-S',
+ 'NO-ATTR-SP',
+ 'NO-ATTR-SPA',
+ 'NO-ATTR-SPAC',
+ 'NO-AUTO-VALIDATE',
+ 'NO-BIND-WHERE',
+ 'NO-BOX',
+ 'NO-CONSOLE',
+ 'NO-CONVERT',
+ 'NO-CONVERT-3D-COLORS',
+ 'NO-CURRENT-VALUE',
+ 'NO-DEBUG',
+ 'NODE-VALUE-TO-MEMPTR',
+ 'NO-DRAG',
+ 'NO-ECHO',
+ 'NO-EMPTY-SPACE',
+ 'NO-ERROR',
+ 'NO-FILL',
+ 'NO-F',
+ 'NO-FI',
+ 'NO-FIL',
+ 'NO-FOCUS',
+ 'NO-HELP',
+ 'NO-HIDE',
+ 'NO-INDEX-HINT',
+ 'NO-INHERIT-BGCOLOR',
+ 'NO-INHERIT-BGC',
+ 'NO-INHERIT-BGCO',
+ 'NO-INHERIT-FGCOLOR',
+ 'NO-INHERIT-FGC',
+ 'NO-INHERIT-FGCO',
+ 'NO-INHERIT-FGCOL',
+ 'NO-INHERIT-FGCOLO',
+ 'NO-JOIN-BY-SQLDB',
+ 'NO-LABELS',
+ 'NO-LABE',
+ 'NO-LOBS',
+ 'NO-LOCK',
+ 'NO-LOOKAHEAD',
+ 'NO-MAP',
+ 'NO-MESSAGE',
+ 'NO-MES',
+ 'NO-MESS',
+ 'NO-MESSA',
+ 'NO-MESSAG',
+ 'NONAMESPACE-SCHEMA-LOCATION',
+ 'NONE',
+ 'NO-PAUSE',
+ 'NO-PREFETCH',
+ 'NO-PREFE',
+ 'NO-PREFET',
+ 'NO-PREFETC',
+ 'NORMALIZE',
+ 'NO-ROW-MARKERS',
+ 'NO-SCROLLBAR-VERTICAL',
+ 'NO-SEPARATE-CONNECTION',
+ 'NO-SEPARATORS',
+ 'NOT',
+ 'NO-TAB-STOP',
+ 'NOT-ACTIVE',
+ 'NO-UNDERLINE',
+ 'NO-UND',
+ 'NO-UNDE',
+ 'NO-UNDER',
+ 'NO-UNDERL',
+ 'NO-UNDERLI',
+ 'NO-UNDERLIN',
+ 'NO-UNDO',
+ 'NO-VALIDATE',
+ 'NO-VAL',
+ 'NO-VALI',
+ 'NO-VALID',
+ 'NO-VALIDA',
+ 'NO-VALIDAT',
+ 'NOW',
+ 'NO-WAIT',
+ 'NO-WORD-WRAP',
+ 'NULL',
+ 'NUM-ALIASES',
+ 'NUM-ALI',
+ 'NUM-ALIA',
+ 'NUM-ALIAS',
+ 'NUM-ALIASE',
+ 'NUM-BUFFERS',
+ 'NUM-BUTTONS',
+ 'NUM-BUT',
+ 'NUM-BUTT',
+ 'NUM-BUTTO',
+ 'NUM-BUTTON',
+ 'NUM-COLUMNS',
+ 'NUM-COL',
+ 'NUM-COLU',
+ 'NUM-COLUM',
+ 'NUM-COLUMN',
+ 'NUM-COPIES',
+ 'NUM-DBS',
+ 'NUM-DROPPED-FILES',
+ 'NUM-ENTRIES',
+ 'NUMERIC',
+ 'NUMERIC-FORMAT',
+ 'NUMERIC-F',
+ 'NUMERIC-FO',
+ 'NUMERIC-FOR',
+ 'NUMERIC-FORM',
+ 'NUMERIC-FORMA',
+ 'NUM-FIELDS',
+ 'NUM-FORMATS',
+ 'NUM-ITEMS',
+ 'NUM-ITERATIONS',
+ 'NUM-LINES',
+ 'NUM-LOCKED-COLUMNS',
+ 'NUM-LOCKED-COL',
+ 'NUM-LOCKED-COLU',
+ 'NUM-LOCKED-COLUM',
+ 'NUM-LOCKED-COLUMN',
+ 'NUM-MESSAGES',
+ 'NUM-PARAMETERS',
+ 'NUM-REFERENCES',
+ 'NUM-REPLACED',
+ 'NUM-RESULTS',
+ 'NUM-SELECTED-ROWS',
+ 'NUM-SELECTED-WIDGETS',
+ 'NUM-SELECTED',
+ 'NUM-SELECTED-',
+ 'NUM-SELECTED-W',
+ 'NUM-SELECTED-WI',
+ 'NUM-SELECTED-WID',
+ 'NUM-SELECTED-WIDG',
+ 'NUM-SELECTED-WIDGE',
+ 'NUM-SELECTED-WIDGET',
+ 'NUM-TABS',
+ 'NUM-TO-RETAIN',
+ 'NUM-VISIBLE-COLUMNS',
+ 'OCTET-LENGTH',
+ 'OF',
+ 'OFF',
+ 'OK',
+ 'OK-CANCEL',
+ 'OLD',
+ 'ON',
+ 'ON-FRAME-BORDER',
+ 'ON-FRAME',
+ 'ON-FRAME-',
+ 'ON-FRAME-B',
+ 'ON-FRAME-BO',
+ 'ON-FRAME-BOR',
+ 'ON-FRAME-BORD',
+ 'ON-FRAME-BORDE',
+ 'OPEN',
+ 'OPSYS',
+ 'OPTION',
+ 'OR',
+ 'ORDERED-JOIN',
+ 'ORDINAL',
+ 'OS-APPEND',
+ 'OS-COMMAND',
+ 'OS-COPY',
+ 'OS-CREATE-DIR',
+ 'OS-DELETE',
+ 'OS-DIR',
+ 'OS-DRIVES',
+ 'OS-DRIVE',
+ 'OS-ERROR',
+ 'OS-GETENV',
+ 'OS-RENAME',
+ 'OTHERWISE',
+ 'OUTPUT',
+ 'OVERLAY',
+ 'OVERRIDE',
+ 'OWNER',
+ 'PAGE',
+ 'PAGE-BOTTOM',
+ 'PAGE-BOT',
+ 'PAGE-BOTT',
+ 'PAGE-BOTTO',
+ 'PAGED',
+ 'PAGE-NUMBER',
+ 'PAGE-NUM',
+ 'PAGE-NUMB',
+ 'PAGE-NUMBE',
+ 'PAGE-SIZE',
+ 'PAGE-TOP',
+ 'PAGE-WIDTH',
+ 'PAGE-WID',
+ 'PAGE-WIDT',
+ 'PARAMETER',
+ 'PARAM',
+ 'PARAME',
+ 'PARAMET',
+ 'PARAMETE',
+ 'PARENT',
+ 'PARSE-STATUS',
+ 'PARTIAL-KEY',
+ 'PASCAL',
+ 'PASSWORD-FIELD',
+ 'PATHNAME',
+ 'PAUSE',
+ 'PBE-HASH-ALGORITHM',
+ 'PBE-HASH-ALG',
+ 'PBE-HASH-ALGO',
+ 'PBE-HASH-ALGOR',
+ 'PBE-HASH-ALGORI',
+ 'PBE-HASH-ALGORIT',
+ 'PBE-HASH-ALGORITH',
+ 'PBE-KEY-ROUNDS',
+ 'PDBNAME',
+ 'PERSISTENT',
+ 'PERSIST',
+ 'PERSISTE',
+ 'PERSISTEN',
+ 'PERSISTENT-CACHE-DISABLED',
+ 'PFCOLOR',
+ 'PFC',
+ 'PFCO',
+ 'PFCOL',
+ 'PFCOLO',
+ 'PIXELS',
+ 'PIXELS-PER-COLUMN',
+ 'PIXELS-PER-COL',
+ 'PIXELS-PER-COLU',
+ 'PIXELS-PER-COLUM',
+ 'PIXELS-PER-ROW',
+ 'POPUP-MENU',
+ 'POPUP-M',
+ 'POPUP-ME',
+ 'POPUP-MEN',
+ 'POPUP-ONLY',
+ 'POPUP-O',
+ 'POPUP-ON',
+ 'POPUP-ONL',
+ 'PORTRAIT',
+ 'POSITION',
+ 'PRECISION',
+ 'PREFER-DATASET',
+ 'PREPARED',
+ 'PREPARE-STRING',
+ 'PREPROCESS',
+ 'PREPROC',
+ 'PREPROCE',
+ 'PREPROCES',
+ 'PRESELECT',
+ 'PRESEL',
+ 'PRESELE',
+ 'PRESELEC',
+ 'PREV',
+ 'PREV-COLUMN',
+ 'PREV-SIBLING',
+ 'PREV-TAB-ITEM',
+ 'PREV-TAB-I',
+ 'PREV-TAB-IT',
+ 'PREV-TAB-ITE',
+ 'PRIMARY',
+ 'PRINTER',
+ 'PRINTER-CONTROL-HANDLE',
+ 'PRINTER-HDC',
+ 'PRINTER-NAME',
+ 'PRINTER-PORT',
+ 'PRINTER-SETUP',
+ 'PRIVATE',
+ 'PRIVATE-DATA',
+ 'PRIVATE-D',
+ 'PRIVATE-DA',
+ 'PRIVATE-DAT',
+ 'PRIVILEGES',
+ 'PROCEDURE',
+ 'PROCE',
+ 'PROCED',
+ 'PROCEDU',
+ 'PROCEDUR',
+ 'PROCEDURE-CALL-TYPE',
+ 'PROCESS',
+ 'PROC-HANDLE',
+ 'PROC-HA',
+ 'PROC-HAN',
+ 'PROC-HAND',
+ 'PROC-HANDL',
+ 'PROC-STATUS',
+ 'PROC-ST',
+ 'PROC-STA',
+ 'PROC-STAT',
+ 'PROC-STATU',
+ 'proc-text',
+ 'proc-text-buffe',
+ 'PROFILER',
+ 'PROGRAM-NAME',
+ 'PROGRESS',
+ 'PROGRESS-SOURCE',
+ 'PROGRESS-S',
+ 'PROGRESS-SO',
+ 'PROGRESS-SOU',
+ 'PROGRESS-SOUR',
+ 'PROGRESS-SOURC',
+ 'PROMPT',
+ 'PROMPT-FOR',
+ 'PROMPT-F',
+ 'PROMPT-FO',
+ 'PROMSGS',
+ 'PROPATH',
+ 'PROPERTY',
+ 'PROTECTED',
+ 'PROVERSION',
+ 'PROVERS',
+ 'PROVERSI',
+ 'PROVERSIO',
+ 'PROXY',
+ 'PROXY-PASSWORD',
+ 'PROXY-USERID',
+ 'PUBLIC',
+ 'PUBLIC-ID',
+ 'PUBLISH',
+ 'PUBLISHED-EVENTS',
+ 'PUT',
+ 'PUTBYTE',
+ 'PUT-BYTE',
+ 'PUT-DOUBLE',
+ 'PUT-FLOAT',
+ 'PUT-INT64',
+ 'PUT-KEY-VALUE',
+ 'PUT-KEY-VAL',
+ 'PUT-KEY-VALU',
+ 'PUT-LONG',
+ 'PUT-SHORT',
+ 'PUT-STRING',
+ 'PUT-UNSIGNED-LONG',
+ 'QUERY',
+ 'QUERY-CLOSE',
+ 'QUERY-OFF-END',
+ 'QUERY-OPEN',
+ 'QUERY-PREPARE',
+ 'QUERY-TUNING',
+ 'QUESTION',
+ 'QUIT',
+ 'QUOTER',
+ 'RADIO-BUTTONS',
+ 'RADIO-SET',
+ 'RANDOM',
+ 'RAW-TRANSFER',
+ 'RCODE-INFORMATION',
+ 'RCODE-INFO',
+ 'RCODE-INFOR',
+ 'RCODE-INFORM',
+ 'RCODE-INFORMA',
+ 'RCODE-INFORMAT',
+ 'RCODE-INFORMATI',
+ 'RCODE-INFORMATIO',
+ 'READ-AVAILABLE',
+ 'READ-EXACT-NUM',
+ 'READ-FILE',
+ 'READKEY',
+ 'READ-ONLY',
+ 'READ-XML',
+ 'READ-XMLSCHEMA',
+ 'REAL',
+ 'RECORD-LENGTH',
+ 'RECTANGLE',
+ 'RECT',
+ 'RECTA',
+ 'RECTAN',
+ 'RECTANG',
+ 'RECTANGL',
+ 'RECURSIVE',
+ 'REFERENCE-ONLY',
+ 'REFRESH',
+ 'REFRESHABLE',
+ 'REFRESH-AUDIT-POLICY',
+ 'REGISTER-DOMAIN',
+ 'RELEASE',
+ 'REMOTE',
+ 'REMOVE-EVENTS-PROCEDURE',
+ 'REMOVE-SUPER-PROCEDURE',
+ 'REPEAT',
+ 'REPLACE',
+ 'REPLACE-SELECTION-TEXT',
+ 'REPOSITION',
+ 'REPOSITION-BACKWARD',
+ 'REPOSITION-FORWARD',
+ 'REPOSITION-MODE',
+ 'REPOSITION-TO-ROW',
+ 'REPOSITION-TO-ROWID',
+ 'REQUEST',
+ 'RESET',
+ 'RESIZABLE',
+ 'RESIZA',
+ 'RESIZAB',
+ 'RESIZABL',
+ 'RESIZE',
+ 'RESTART-ROW',
+ 'RESTART-ROWID',
+ 'RETAIN',
+ 'RETAIN-SHAPE',
+ 'RETRY',
+ 'RETRY-CANCEL',
+ 'RETURN',
+ 'RETURN-INSERTED',
+ 'RETURN-INS',
+ 'RETURN-INSE',
+ 'RETURN-INSER',
+ 'RETURN-INSERT',
+ 'RETURN-INSERTE',
+ 'RETURNS',
+ 'RETURN-TO-START-DIR',
+ 'RETURN-TO-START-DI',
+ 'RETURN-VALUE',
+ 'RETURN-VAL',
+ 'RETURN-VALU',
+ 'RETURN-VALUE-DATA-TYPE',
+ 'REVERSE-FROM',
+ 'REVERT',
+ 'REVOKE',
+ 'RGB-VALUE',
+ 'RIGHT-ALIGNED',
+ 'RIGHT-ALIGN',
+ 'RIGHT-ALIGNE',
+ 'RIGHT-TRIM',
+ 'R-INDEX',
+ 'ROLES',
+ 'ROUND',
+ 'ROUTINE-LEVEL',
+ 'ROW',
+ 'ROW-HEIGHT-CHARS',
+ 'ROW-HEIGHT-PIXELS',
+ 'ROW-MARKERS',
+ 'ROW-OF',
+ 'ROW-RESIZABLE',
+ 'RULE',
+ 'RUN',
+ 'RUN-PROCEDURE',
+ 'SAVE',
+ 'SAVE-AS',
+ 'SAVE-FILE',
+ 'SAX-COMPLETE',
+ 'SAX-COMPLE',
+ 'SAX-COMPLET',
+ 'SAX-PARSE',
+ 'SAX-PARSE-FIRST',
+ 'SAX-PARSE-NEXT',
+ 'SAX-PARSER-ERROR',
+ 'SAX-RUNNING',
+ 'SAX-UNINITIALIZED',
+ 'SAX-WRITE-BEGIN',
+ 'SAX-WRITE-COMPLETE',
+ 'SAX-WRITE-CONTENT',
+ 'SAX-WRITE-ELEMENT',
+ 'SAX-WRITE-ERROR',
+ 'SAX-WRITE-IDLE',
+ 'SAX-WRITER',
+ 'SAX-WRITE-TAG',
+ 'SCHEMA',
+ 'SCHEMA-LOCATION',
+ 'SCHEMA-MARSHAL',
+ 'SCHEMA-PATH',
+ 'SCREEN',
+ 'SCREEN-IO',
+ 'SCREEN-LINES',
+ 'SCREEN-VALUE',
+ 'SCREEN-VAL',
+ 'SCREEN-VALU',
+ 'SCROLL',
+ 'SCROLLABLE',
+ 'SCROLLBAR-HORIZONTAL',
+ 'SCROLLBAR-H',
+ 'SCROLLBAR-HO',
+ 'SCROLLBAR-HOR',
+ 'SCROLLBAR-HORI',
+ 'SCROLLBAR-HORIZ',
+ 'SCROLLBAR-HORIZO',
+ 'SCROLLBAR-HORIZON',
+ 'SCROLLBAR-HORIZONT',
+ 'SCROLLBAR-HORIZONTA',
+ 'SCROLL-BARS',
+ 'SCROLLBAR-VERTICAL',
+ 'SCROLLBAR-V',
+ 'SCROLLBAR-VE',
+ 'SCROLLBAR-VER',
+ 'SCROLLBAR-VERT',
+ 'SCROLLBAR-VERTI',
+ 'SCROLLBAR-VERTIC',
+ 'SCROLLBAR-VERTICA',
+ 'SCROLL-DELTA',
+ 'SCROLLED-ROW-POSITION',
+ 'SCROLLED-ROW-POS',
+ 'SCROLLED-ROW-POSI',
+ 'SCROLLED-ROW-POSIT',
+ 'SCROLLED-ROW-POSITI',
+ 'SCROLLED-ROW-POSITIO',
+ 'SCROLLING',
+ 'SCROLL-OFFSET',
+ 'SCROLL-TO-CURRENT-ROW',
+ 'SCROLL-TO-ITEM',
+ 'SCROLL-TO-I',
+ 'SCROLL-TO-IT',
+ 'SCROLL-TO-ITE',
+ 'SCROLL-TO-SELECTED-ROW',
+ 'SDBNAME',
+ 'SEAL',
+ 'SEAL-TIMESTAMP',
+ 'SEARCH',
+ 'SEARCH-SELF',
+ 'SEARCH-TARGET',
+ 'SECTION',
+ 'SECURITY-POLICY',
+ 'SEEK',
+ 'SELECT',
+ 'SELECTABLE',
+ 'SELECT-ALL',
+ 'SELECTED',
+ 'SELECT-FOCUSED-ROW',
+ 'SELECTION',
+ 'SELECTION-END',
+ 'SELECTION-LIST',
+ 'SELECTION-START',
+ 'SELECTION-TEXT',
+ 'SELECT-NEXT-ROW',
+ 'SELECT-PREV-ROW',
+ 'SELECT-ROW',
+ 'SELF',
+ 'SEND',
+ 'send-sql-statement',
+ 'send-sql',
+ 'SENSITIVE',
+ 'SEPARATE-CONNECTION',
+ 'SEPARATOR-FGCOLOR',
+ 'SEPARATORS',
+ 'SERVER',
+ 'SERVER-CONNECTION-BOUND',
+ 'SERVER-CONNECTION-BOUND-REQUEST',
+ 'SERVER-CONNECTION-CONTEXT',
+ 'SERVER-CONNECTION-ID',
+ 'SERVER-OPERATING-MODE',
+ 'SESSION',
+ 'SESSION-ID',
+ 'SET',
+ 'SET-APPL-CONTEXT',
+ 'SET-ATTR-CALL-TYPE',
+ 'SET-ATTRIBUTE-NODE',
+ 'SET-BLUE-VALUE',
+ 'SET-BLUE',
+ 'SET-BLUE-',
+ 'SET-BLUE-V',
+ 'SET-BLUE-VA',
+ 'SET-BLUE-VAL',
+ 'SET-BLUE-VALU',
+ 'SET-BREAK',
+ 'SET-BUFFERS',
+ 'SET-CALLBACK',
+ 'SET-CLIENT',
+ 'SET-COMMIT',
+ 'SET-CONTENTS',
+ 'SET-CURRENT-VALUE',
+ 'SET-DB-CLIENT',
+ 'SET-DYNAMIC',
+ 'SET-EVENT-MANAGER-OPTION',
+ 'SET-GREEN-VALUE',
+ 'SET-GREEN',
+ 'SET-GREEN-',
+ 'SET-GREEN-V',
+ 'SET-GREEN-VA',
+ 'SET-GREEN-VAL',
+ 'SET-GREEN-VALU',
+ 'SET-INPUT-SOURCE',
+ 'SET-OPTION',
+ 'SET-OUTPUT-DESTINATION',
+ 'SET-PARAMETER',
+ 'SET-POINTER-VALUE',
+ 'SET-PROPERTY',
+ 'SET-RED-VALUE',
+ 'SET-RED',
+ 'SET-RED-',
+ 'SET-RED-V',
+ 'SET-RED-VA',
+ 'SET-RED-VAL',
+ 'SET-RED-VALU',
+ 'SET-REPOSITIONED-ROW',
+ 'SET-RGB-VALUE',
+ 'SET-ROLLBACK',
+ 'SET-SELECTION',
+ 'SET-SIZE',
+ 'SET-SORT-ARROW',
+ 'SETUSERID',
+ 'SETUSER',
+ 'SETUSERI',
+ 'SET-WAIT-STATE',
+ 'SHA1-DIGEST',
+ 'SHARED',
+ 'SHARE-LOCK',
+ 'SHARE',
+ 'SHARE-',
+ 'SHARE-L',
+ 'SHARE-LO',
+ 'SHARE-LOC',
+ 'SHOW-IN-TASKBAR',
+ 'SHOW-STATS',
+ 'SHOW-STAT',
+ 'SIDE-LABEL-HANDLE',
+ 'SIDE-LABEL-H',
+ 'SIDE-LABEL-HA',
+ 'SIDE-LABEL-HAN',
+ 'SIDE-LABEL-HAND',
+ 'SIDE-LABEL-HANDL',
+ 'SIDE-LABELS',
+ 'SIDE-LAB',
+ 'SIDE-LABE',
+ 'SIDE-LABEL',
+ 'SILENT',
+ 'SIMPLE',
+ 'SINGLE',
+ 'SIZE',
+ 'SIZE-CHARS',
+ 'SIZE-C',
+ 'SIZE-CH',
+ 'SIZE-CHA',
+ 'SIZE-CHAR',
+ 'SIZE-PIXELS',
+ 'SIZE-P',
+ 'SIZE-PI',
+ 'SIZE-PIX',
+ 'SIZE-PIXE',
+ 'SIZE-PIXEL',
+ 'SKIP',
+ 'SKIP-DELETED-RECORD',
+ 'SLIDER',
+ 'SMALL-ICON',
+ 'SMALLINT',
+ 'SMALL-TITLE',
+ 'SOME',
+ 'SORT',
+ 'SORT-ASCENDING',
+ 'SORT-NUMBER',
+ 'SOURCE',
+ 'SOURCE-PROCEDURE',
+ 'SPACE',
+ 'SQL',
+ 'SQRT',
+ 'SSL-SERVER-NAME',
+ 'STANDALONE',
+ 'START',
+ 'START-DOCUMENT',
+ 'START-ELEMENT',
+ 'START-MOVE',
+ 'START-RESIZE',
+ 'START-ROW-RESIZE',
+ 'STATE-DETAIL',
+ 'STATIC',
+ 'STATUS',
+ 'STATUS-AREA',
+ 'STATUS-AREA-FONT',
+ 'STDCALL',
+ 'STOP',
+ 'STOP-PARSING',
+ 'STOPPED',
+ 'STOPPE',
+ 'STORED-PROCEDURE',
+ 'STORED-PROC',
+ 'STORED-PROCE',
+ 'STORED-PROCED',
+ 'STORED-PROCEDU',
+ 'STORED-PROCEDUR',
+ 'STREAM',
+ 'STREAM-HANDLE',
+ 'STREAM-IO',
+ 'STRETCH-TO-FIT',
+ 'STRICT',
+ 'STRING',
+ 'STRING-VALUE',
+ 'STRING-XREF',
+ 'SUB-AVERAGE',
+ 'SUB-AVE',
+ 'SUB-AVER',
+ 'SUB-AVERA',
+ 'SUB-AVERAG',
+ 'SUB-COUNT',
+ 'SUB-MAXIMUM',
+ 'SUB-MAX',
+ 'SUB-MAXI',
+ 'SUB-MAXIM',
+ 'SUB-MAXIMU',
+ 'SUB-MENU',
+ 'SUBSUB-',
+ 'SUB-MIN',
+ 'SUBSCRIBE',
+ 'SUBSTITUTE',
+ 'SUBST',
+ 'SUBSTI',
+ 'SUBSTIT',
+ 'SUBSTITU',
+ 'SUBSTITUT',
+ 'SUBSTRING',
+ 'SUBSTR',
+ 'SUBSTRI',
+ 'SUBSTRIN',
+ 'SUB-TOTAL',
+ 'SUBTYPE',
+ 'SUM',
+ 'SUPER',
+ 'SUPER-PROCEDURES',
+ 'SUPPRESS-NAMESPACE-PROCESSING',
+ 'SUPPRESS-WARNINGS',
+ 'SUPPRESS-W',
+ 'SUPPRESS-WA',
+ 'SUPPRESS-WAR',
+ 'SUPPRESS-WARN',
+ 'SUPPRESS-WARNI',
+ 'SUPPRESS-WARNIN',
+ 'SUPPRESS-WARNING',
+ 'SYMMETRIC-ENCRYPTION-ALGORITHM',
+ 'SYMMETRIC-ENCRYPTION-IV',
+ 'SYMMETRIC-ENCRYPTION-KEY',
+ 'SYMMETRIC-SUPPORT',
+ 'SYSTEM-ALERT-BOXES',
+ 'SYSTEM-ALERT',
+ 'SYSTEM-ALERT-',
+ 'SYSTEM-ALERT-B',
+ 'SYSTEM-ALERT-BO',
+ 'SYSTEM-ALERT-BOX',
+ 'SYSTEM-ALERT-BOXE',
+ 'SYSTEM-DIALOG',
+ 'SYSTEM-HELP',
+ 'SYSTEM-ID',
+ 'TABLE',
+ 'TABLE-HANDLE',
+ 'TABLE-NUMBER',
+ 'TAB-POSITION',
+ 'TAB-STOP',
+ 'TARGET',
+ 'TARGET-PROCEDURE',
+ 'TEMP-DIRECTORY',
+ 'TEMP-DIR',
+ 'TEMP-DIRE',
+ 'TEMP-DIREC',
+ 'TEMP-DIRECT',
+ 'TEMP-DIRECTO',
+ 'TEMP-DIRECTOR',
+ 'TEMP-TABLE',
+ 'TEMP-TABLE-PREPARE',
+ 'TERM',
+ 'TERMINAL',
+ 'TERMI',
+ 'TERMIN',
+ 'TERMINA',
+ 'TERMINATE',
+ 'TEXT',
+ 'TEXT-CURSOR',
+ 'TEXT-SEG-GROW',
+ 'TEXT-SELECTED',
+ 'THEN',
+ 'THIS-OBJECT',
+ 'THIS-PROCEDURE',
+ 'THREE-D',
+ 'THROW',
+ 'THROUGH',
+ 'THRU',
+ 'TIC-MARKS',
+ 'TIME',
+ 'TIME-SOURCE',
+ 'TITLE',
+ 'TITLE-BGCOLOR',
+ 'TITLE-BGC',
+ 'TITLE-BGCO',
+ 'TITLE-BGCOL',
+ 'TITLE-BGCOLO',
+ 'TITLE-DCOLOR',
+ 'TITLE-DC',
+ 'TITLE-DCO',
+ 'TITLE-DCOL',
+ 'TITLE-DCOLO',
+ 'TITLE-FGCOLOR',
+ 'TITLE-FGC',
+ 'TITLE-FGCO',
+ 'TITLE-FGCOL',
+ 'TITLE-FGCOLO',
+ 'TITLE-FONT',
+ 'TITLE-FO',
+ 'TITLE-FON',
+ 'TO',
+ 'TODAY',
+ 'TOGGLE-BOX',
+ 'TOOLTIP',
+ 'TOOLTIPS',
+ 'TOPIC',
+ 'TOP-NAV-QUERY',
+ 'TOP-ONLY',
+ 'TO-ROWID',
+ 'TOTAL',
+ 'TRAILING',
+ 'TRANS',
+ 'TRANSACTION',
+ 'TRANSACTION-MODE',
+ 'TRANS-INIT-PROCEDURE',
+ 'TRANSPARENT',
+ 'TRIGGER',
+ 'TRIGGERS',
+ 'TRIM',
+ 'TRUE',
+ 'TRUNCATE',
+ 'TRUNC',
+ 'TRUNCA',
+ 'TRUNCAT',
+ 'TYPE',
+ 'TYPE-OF',
+ 'UNBOX',
+ 'UNBUFFERED',
+ 'UNBUFF',
+ 'UNBUFFE',
+ 'UNBUFFER',
+ 'UNBUFFERE',
+ 'UNDERLINE',
+ 'UNDERL',
+ 'UNDERLI',
+ 'UNDERLIN',
+ 'UNDO',
+ 'UNFORMATTED',
+ 'UNFORM',
+ 'UNFORMA',
+ 'UNFORMAT',
+ 'UNFORMATT',
+ 'UNFORMATTE',
+ 'UNION',
+ 'UNIQUE',
+ 'UNIQUE-ID',
+ 'UNIQUE-MATCH',
+ 'UNIX',
+ 'UNLESS-HIDDEN',
+ 'UNLOAD',
+ 'UNSIGNED-LONG',
+ 'UNSUBSCRIBE',
+ 'UP',
+ 'UPDATE',
+ 'UPDATE-ATTRIBUTE',
+ 'URL',
+ 'URL-DECODE',
+ 'URL-ENCODE',
+ 'URL-PASSWORD',
+ 'URL-USERID',
+ 'USE',
+ 'USE-DICT-EXPS',
+ 'USE-FILENAME',
+ 'USE-INDEX',
+ 'USER',
+ 'USE-REVVIDEO',
+ 'USERID',
+ 'USER-ID',
+ 'USE-TEXT',
+ 'USE-UNDERLINE',
+ 'USE-WIDGET-POOL',
+ 'USING',
+ 'V6DISPLAY',
+ 'V6FRAME',
+ 'VALIDATE',
+ 'VALIDATE-EXPRESSION',
+ 'VALIDATE-MESSAGE',
+ 'VALIDATE-SEAL',
+ 'VALIDATION-ENABLED',
+ 'VALID-EVENT',
+ 'VALID-HANDLE',
+ 'VALID-OBJECT',
+ 'VALUE',
+ 'VALUE-CHANGED',
+ 'VALUES',
+ 'VARIABLE',
+ 'VAR',
+ 'VARI',
+ 'VARIA',
+ 'VARIAB',
+ 'VARIABL',
+ 'VERBOSE',
+ 'VERSION',
+ 'VERTICAL',
+ 'VERT',
+ 'VERTI',
+ 'VERTIC',
+ 'VERTICA',
+ 'VIEW',
+ 'VIEW-AS',
+ 'VIEW-FIRST-COLUMN-ON-REOPEN',
+ 'VIRTUAL-HEIGHT-CHARS',
+ 'VIRTUAL-HEIGHT',
+ 'VIRTUAL-HEIGHT-',
+ 'VIRTUAL-HEIGHT-C',
+ 'VIRTUAL-HEIGHT-CH',
+ 'VIRTUAL-HEIGHT-CHA',
+ 'VIRTUAL-HEIGHT-CHAR',
+ 'VIRTUAL-HEIGHT-PIXELS',
+ 'VIRTUAL-HEIGHT-P',
+ 'VIRTUAL-HEIGHT-PI',
+ 'VIRTUAL-HEIGHT-PIX',
+ 'VIRTUAL-HEIGHT-PIXE',
+ 'VIRTUAL-HEIGHT-PIXEL',
+ 'VIRTUAL-WIDTH-CHARS',
+ 'VIRTUAL-WIDTH',
+ 'VIRTUAL-WIDTH-',
+ 'VIRTUAL-WIDTH-C',
+ 'VIRTUAL-WIDTH-CH',
+ 'VIRTUAL-WIDTH-CHA',
+ 'VIRTUAL-WIDTH-CHAR',
+ 'VIRTUAL-WIDTH-PIXELS',
+ 'VIRTUAL-WIDTH-P',
+ 'VIRTUAL-WIDTH-PI',
+ 'VIRTUAL-WIDTH-PIX',
+ 'VIRTUAL-WIDTH-PIXE',
+ 'VIRTUAL-WIDTH-PIXEL',
+ 'VISIBLE',
+ 'VOID',
+ 'WAIT',
+ 'WAIT-FOR',
+ 'WARNING',
+ 'WEB-CONTEXT',
+ 'WEEKDAY',
+ 'WHEN',
+ 'WHERE',
+ 'WHILE',
+ 'WIDGET',
+ 'WIDGET-ENTER',
+ 'WIDGET-E',
+ 'WIDGET-EN',
+ 'WIDGET-ENT',
+ 'WIDGET-ENTE',
+ 'WIDGET-ID',
+ 'WIDGET-LEAVE',
+ 'WIDGET-L',
+ 'WIDGET-LE',
+ 'WIDGET-LEA',
+ 'WIDGET-LEAV',
+ 'WIDGET-POOL',
+ 'WIDTH-CHARS',
+ 'WIDTH',
+ 'WIDTH-',
+ 'WIDTH-C',
+ 'WIDTH-CH',
+ 'WIDTH-CHA',
+ 'WIDTH-CHAR',
+ 'WIDTH-PIXELS',
+ 'WIDTH-P',
+ 'WIDTH-PI',
+ 'WIDTH-PIX',
+ 'WIDTH-PIXE',
+ 'WIDTH-PIXEL',
+ 'WINDOW',
+ 'WINDOW-MAXIMIZED',
+ 'WINDOW-MAXIM',
+ 'WINDOW-MAXIMI',
+ 'WINDOW-MAXIMIZ',
+ 'WINDOW-MAXIMIZE',
+ 'WINDOW-MINIMIZED',
+ 'WINDOW-MINIM',
+ 'WINDOW-MINIMI',
+ 'WINDOW-MINIMIZ',
+ 'WINDOW-MINIMIZE',
+ 'WINDOW-NAME',
+ 'WINDOW-NORMAL',
+ 'WINDOW-STATE',
+ 'WINDOW-STA',
+ 'WINDOW-STAT',
+ 'WINDOW-SYSTEM',
+ 'WITH',
+ 'WORD-INDEX',
+ 'WORD-WRAP',
+ 'WORK-AREA-HEIGHT-PIXELS',
+ 'WORK-AREA-WIDTH-PIXELS',
+ 'WORK-AREA-X',
+ 'WORK-AREA-Y',
+ 'WORKFILE',
+ 'WORK-TABLE',
+ 'WORK-TAB',
+ 'WORK-TABL',
+ 'WRITE',
+ 'WRITE-CDATA',
+ 'WRITE-CHARACTERS',
+ 'WRITE-COMMENT',
+ 'WRITE-DATA-ELEMENT',
+ 'WRITE-EMPTY-ELEMENT',
+ 'WRITE-ENTITY-REF',
+ 'WRITE-EXTERNAL-DTD',
+ 'WRITE-FRAGMENT',
+ 'WRITE-MESSAGE',
+ 'WRITE-PROCESSING-INSTRUCTION',
+ 'WRITE-STATUS',
+ 'WRITE-XML',
+ 'WRITE-XMLSCHEMA',
+ 'X',
+ 'XCODE',
+ 'XML-DATA-TYPE',
+ 'XML-NODE-TYPE',
+ 'XML-SCHEMA-PATH',
+ 'XML-SUPPRESS-NAMESPACE-PROCESSING',
+ 'X-OF',
+ 'XREF',
+ 'XREF-XML',
+ 'Y',
+ 'YEAR',
+ 'YEAR-OFFSET',
+ 'YES',
+ 'YES-NO',
+ 'YES-NO-CANCEL',
+ 'Y-OF'
+)
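
The tuple above is plain data; it only does something once a lexer turns it into a match pattern. Below is a minimal sketch of that step, assuming the standard pygments.lexer.words() helper and a small stand-in tuple; the class name and the stand-in keywords are hypothetical, and the actual OpenEdgeLexer has its own, much fuller rule set that is not reproduced here.

    # Illustrative sketch only -- a hypothetical keyword-only lexer, not the
    # OpenEdgeLexer itself.
    import re

    from pygments.lexer import RegexLexer, words
    from pygments.token import Keyword, Text

    # Stand-in for the full OPENEDGEKEYWORDS tuple added above.
    OPENEDGEKEYWORDS = ('FIND', 'FIND-FIRST', 'FORMAT', 'FORMATTED')

    class OpenEdgeKeywordSketchLexer(RegexLexer):
        """Highlights the stand-in OpenEdge keywords and nothing else."""
        name = 'OpenEdgeKeywordSketch'
        flags = re.IGNORECASE  # ABL keywords are not case sensitive

        tokens = {
            'root': [
                # words() compiles the whole tuple into one optimized regex;
                # suffix=r'\b' keeps a keyword from matching inside a longer
                # identifier.
                (words(OPENEDGEKEYWORDS, suffix=r'\b'), Keyword.Reserved),
                (r'\s+', Text),
                (r'.', Text),
            ],
        }

    # Usage:
    #   list(OpenEdgeKeywordSketchLexer().get_tokens('FIND FIRST customer.'))
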
diff --git a/pygments/lexers/_openedgebuiltins.py b/pygments/lexers/_openedgebuiltins.py
deleted file mode 100644
index 4561b07b..00000000
--- a/pygments/lexers/_openedgebuiltins.py
+++ /dev/null
@@ -1,562 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._openedgebuiltins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Builtin list for the OpenEdgeLexer.
-
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-OPENEDGEKEYWORDS = [
- 'ABSOLUTE', 'ABS', 'ABSO', 'ABSOL', 'ABSOLU', 'ABSOLUT', 'ACCELERATOR',
- 'ACCUM', 'ACCUMULATE', 'ACCUM', 'ACCUMU', 'ACCUMUL', 'ACCUMULA',
- 'ACCUMULAT', 'ACTIVE-FORM', 'ACTIVE-WINDOW', 'ADD', 'ADD-BUFFER',
- 'ADD-CALC-COLUMN', 'ADD-COLUMNS-FROM', 'ADD-EVENTS-PROCEDURE',
- 'ADD-FIELDS-FROM', 'ADD-FIRST', 'ADD-INDEX-FIELD', 'ADD-LAST',
- 'ADD-LIKE-COLUMN', 'ADD-LIKE-FIELD', 'ADD-LIKE-INDEX', 'ADD-NEW-FIELD',
- 'ADD-NEW-INDEX', 'ADD-SCHEMA-LOCATION', 'ADD-SUPER-PROCEDURE', 'ADM-DATA',
- 'ADVISE', 'ALERT-BOX', 'ALIAS', 'ALL', 'ALLOW-COLUMN-SEARCHING',
- 'ALLOW-REPLICATION', 'ALTER', 'ALWAYS-ON-TOP', 'AMBIGUOUS', 'AMBIG',
- 'AMBIGU', 'AMBIGUO', 'AMBIGUOU', 'ANALYZE', 'ANALYZ', 'AND', 'ANSI-ONLY',
- 'ANY', 'ANYWHERE', 'APPEND', 'APPL-ALERT-BOXES', 'APPL-ALERT',
- 'APPL-ALERT-', 'APPL-ALERT-B', 'APPL-ALERT-BO', 'APPL-ALERT-BOX',
- 'APPL-ALERT-BOXE', 'APPL-CONTEXT-ID', 'APPLICATION', 'APPLY',
- 'APPSERVER-INFO', 'APPSERVER-PASSWORD', 'APPSERVER-USERID', 'ARRAY-MESSAGE',
- 'AS', 'ASC', 'ASCENDING', 'ASCE', 'ASCEN', 'ASCEND', 'ASCENDI', 'ASCENDIN',
- 'ASK-OVERWRITE', 'ASSEMBLY', 'ASSIGN', 'ASYNCHRONOUS',
- 'ASYNC-REQUEST-COUNT', 'ASYNC-REQUEST-HANDLE', 'AT', 'ATTACHED-PAIRLIST',
- 'ATTR-SPACE', 'ATTR', 'ATTRI', 'ATTRIB', 'ATTRIBU', 'ATTRIBUT',
- 'AUDIT-CONTROL', 'AUDIT-ENABLED', 'AUDIT-EVENT-CONTEXT', 'AUDIT-POLICY',
- 'AUTHENTICATION-FAILED', 'AUTHORIZATION', 'AUTO-COMPLETION', 'AUTO-COMP',
- 'AUTO-COMPL', 'AUTO-COMPLE', 'AUTO-COMPLET', 'AUTO-COMPLETI',
- 'AUTO-COMPLETIO', 'AUTO-ENDKEY', 'AUTO-END-KEY', 'AUTO-GO', 'AUTO-INDENT',
- 'AUTO-IND', 'AUTO-INDE', 'AUTO-INDEN', 'AUTOMATIC', 'AUTO-RESIZE',
- 'AUTO-RETURN', 'AUTO-RET', 'AUTO-RETU', 'AUTO-RETUR', 'AUTO-SYNCHRONIZE',
- 'AUTO-ZAP', 'AUTO-Z', 'AUTO-ZA', 'AVAILABLE', 'AVAIL', 'AVAILA', 'AVAILAB',
- 'AVAILABL', 'AVAILABLE-FORMATS', 'AVERAGE', 'AVE', 'AVER', 'AVERA',
- 'AVERAG', 'AVG', 'BACKGROUND', 'BACK', 'BACKG', 'BACKGR', 'BACKGRO',
- 'BACKGROU', 'BACKGROUN', 'BACKWARDS', 'BACKWARD', 'BASE64-DECODE',
- 'BASE64-ENCODE', 'BASE-ADE', 'BASE-KEY', 'BATCH-MODE', 'BATCH', 'BATCH-',
- 'BATCH-M', 'BATCH-MO', 'BATCH-MOD', 'BATCH-SIZE', 'BEFORE-HIDE', 'BEFORE-H',
- 'BEFORE-HI', 'BEFORE-HID', 'BEGIN-EVENT-GROUP', 'BEGINS', 'BELL', 'BETWEEN',
- 'BGCOLOR', 'BGC', 'BGCO', 'BGCOL', 'BGCOLO', 'BIG-ENDIAN', 'BINARY', 'BIND',
- 'BIND-WHERE', 'BLANK', 'BLOCK-ITERATION-DISPLAY', 'BORDER-BOTTOM-CHARS',
- 'BORDER-B', 'BORDER-BO', 'BORDER-BOT', 'BORDER-BOTT', 'BORDER-BOTTO',
- 'BORDER-BOTTOM-PIXELS', 'BORDER-BOTTOM-P', 'BORDER-BOTTOM-PI',
- 'BORDER-BOTTOM-PIX', 'BORDER-BOTTOM-PIXE', 'BORDER-BOTTOM-PIXEL',
- 'BORDER-LEFT-CHARS', 'BORDER-L', 'BORDER-LE', 'BORDER-LEF', 'BORDER-LEFT',
- 'BORDER-LEFT-', 'BORDER-LEFT-C', 'BORDER-LEFT-CH', 'BORDER-LEFT-CHA',
- 'BORDER-LEFT-CHAR', 'BORDER-LEFT-PIXELS', 'BORDER-LEFT-P', 'BORDER-LEFT-PI',
- 'BORDER-LEFT-PIX', 'BORDER-LEFT-PIXE', 'BORDER-LEFT-PIXEL',
- 'BORDER-RIGHT-CHARS', 'BORDER-R', 'BORDER-RI', 'BORDER-RIG', 'BORDER-RIGH',
- 'BORDER-RIGHT', 'BORDER-RIGHT-', 'BORDER-RIGHT-C', 'BORDER-RIGHT-CH',
- 'BORDER-RIGHT-CHA', 'BORDER-RIGHT-CHAR', 'BORDER-RIGHT-PIXELS',
- 'BORDER-RIGHT-P', 'BORDER-RIGHT-PI', 'BORDER-RIGHT-PIX',
- 'BORDER-RIGHT-PIXE', 'BORDER-RIGHT-PIXEL', 'BORDER-TOP-CHARS', 'BORDER-T',
- 'BORDER-TO', 'BORDER-TOP', 'BORDER-TOP-', 'BORDER-TOP-C', 'BORDER-TOP-CH',
- 'BORDER-TOP-CHA', 'BORDER-TOP-CHAR', 'BORDER-TOP-PIXELS', 'BORDER-TOP-P',
- 'BORDER-TOP-PI', 'BORDER-TOP-PIX', 'BORDER-TOP-PIXE', 'BORDER-TOP-PIXEL',
- 'BOX', 'BOX-SELECTABLE', 'BOX-SELECT', 'BOX-SELECTA', 'BOX-SELECTAB',
- 'BOX-SELECTABL', 'BREAK', 'BROWSE', 'BUFFER', 'BUFFER-CHARS',
- 'BUFFER-COMPARE', 'BUFFER-COPY', 'BUFFER-CREATE', 'BUFFER-DELETE',
- 'BUFFER-FIELD', 'BUFFER-HANDLE', 'BUFFER-LINES', 'BUFFER-NAME',
- 'BUFFER-RELEASE', 'BUFFER-VALUE', 'BUTTON', 'BUTTONS', 'BUTTON', 'BY',
- 'BY-POINTER', 'BY-VARIANT-POINTER', 'CACHE', 'CACHE-SIZE', 'CALL',
- 'CALL-NAME', 'CALL-TYPE', 'CANCEL-BREAK', 'CANCEL-BUTTON', 'CAN-CREATE',
- 'CAN-DELETE', 'CAN-DO', 'CAN-FIND', 'CAN-QUERY', 'CAN-READ', 'CAN-SET',
- 'CAN-WRITE', 'CAPS', 'CAREFUL-PAINT', 'CASE', 'CASE-SENSITIVE', 'CASE-SEN',
- 'CASE-SENS', 'CASE-SENSI', 'CASE-SENSIT', 'CASE-SENSITI', 'CASE-SENSITIV',
- 'CAST', 'CATCH', 'CDECL', 'CENTERED', 'CENTER', 'CENTERE', 'CHAINED',
- 'CHARACTER_LENGTH', 'CHARSET', 'CHECK', 'CHECKED', 'CHOOSE', 'CHR', 'CLASS',
- 'CLASS-TYPE', 'CLEAR', 'CLEAR-APPL-CONTEXT', 'CLEAR-LOG', 'CLEAR-SELECTION',
- 'CLEAR-SELECT', 'CLEAR-SELECTI', 'CLEAR-SELECTIO', 'CLEAR-SORT-ARROWS',
- 'CLEAR-SORT-ARROW', 'CLIENT-CONNECTION-ID', 'CLIENT-PRINCIPAL',
- 'CLIENT-TTY', 'CLIENT-TYPE', 'CLIENT-WORKSTATION', 'CLIPBOARD', 'CLOSE',
- 'CLOSE-LOG', 'CODE', 'CODEBASE-LOCATOR', 'CODEPAGE', 'CODEPAGE-CONVERT',
- 'COLLATE', 'COL-OF', 'COLON', 'COLON-ALIGNED', 'COLON-ALIGN',
- 'COLON-ALIGNE', 'COLOR', 'COLOR-TABLE', 'COLUMN', 'COL', 'COLU', 'COLUM',
- 'COLUMN-BGCOLOR', 'COLUMN-DCOLOR', 'COLUMN-FGCOLOR', 'COLUMN-FONT',
- 'COLUMN-LABEL', 'COLUMN-LAB', 'COLUMN-LABE', 'COLUMN-MOVABLE', 'COLUMN-OF',
- 'COLUMN-PFCOLOR', 'COLUMN-READ-ONLY', 'COLUMN-RESIZABLE', 'COLUMNS',
- 'COLUMN-SCROLLING', 'COMBO-BOX', 'COMMAND', 'COMPARES', 'COMPILE',
- 'COMPILER', 'COMPLETE', 'COM-SELF', 'CONFIG-NAME', 'CONNECT', 'CONNECTED',
- 'CONSTRUCTOR', 'CONTAINS', 'CONTENTS', 'CONTEXT', 'CONTEXT-HELP',
- 'CONTEXT-HELP-FILE', 'CONTEXT-HELP-ID', 'CONTEXT-POPUP', 'CONTROL',
- 'CONTROL-BOX', 'CONTROL-FRAME', 'CONVERT', 'CONVERT-3D-COLORS',
- 'CONVERT-TO-OFFSET', 'CONVERT-TO-OFFS', 'CONVERT-TO-OFFSE', 'COPY-DATASET',
- 'COPY-LOB', 'COPY-SAX-ATTRIBUTES', 'COPY-TEMP-TABLE', 'COUNT', 'COUNT-OF',
- 'CPCASE', 'CPCOLL', 'CPINTERNAL', 'CPLOG', 'CPPRINT', 'CPRCODEIN',
- 'CPRCODEOUT', 'CPSTREAM', 'CPTERM', 'CRC-VALUE', 'CREATE', 'CREATE-LIKE',
- 'CREATE-LIKE-SEQUENTIAL', 'CREATE-NODE-NAMESPACE',
- 'CREATE-RESULT-LIST-ENTRY', 'CREATE-TEST-FILE', 'CURRENT', 'CURRENT_DATE',
- 'CURRENT_DATE', 'CURRENT-CHANGED', 'CURRENT-COLUMN', 'CURRENT-ENVIRONMENT',
- 'CURRENT-ENV', 'CURRENT-ENVI', 'CURRENT-ENVIR', 'CURRENT-ENVIRO',
- 'CURRENT-ENVIRON', 'CURRENT-ENVIRONM', 'CURRENT-ENVIRONME',
- 'CURRENT-ENVIRONMEN', 'CURRENT-ITERATION', 'CURRENT-LANGUAGE',
- 'CURRENT-LANG', 'CURRENT-LANGU', 'CURRENT-LANGUA', 'CURRENT-LANGUAG',
- 'CURRENT-QUERY', 'CURRENT-RESULT-ROW', 'CURRENT-ROW-MODIFIED',
- 'CURRENT-VALUE', 'CURRENT-WINDOW', 'CURSOR', 'CURS', 'CURSO', 'CURSOR-CHAR',
- 'CURSOR-LINE', 'CURSOR-OFFSET', 'DATABASE', 'DATA-BIND',
- 'DATA-ENTRY-RETURN', 'DATA-ENTRY-RET', 'DATA-ENTRY-RETU',
- 'DATA-ENTRY-RETUR', 'DATA-RELATION', 'DATA-REL', 'DATA-RELA', 'DATA-RELAT',
- 'DATA-RELATI', 'DATA-RELATIO', 'DATASERVERS', 'DATASET', 'DATASET-HANDLE',
- 'DATA-SOURCE', 'DATA-SOURCE-COMPLETE-MAP', 'DATA-SOURCE-MODIFIED',
- 'DATA-SOURCE-ROWID', 'DATA-TYPE', 'DATA-T', 'DATA-TY', 'DATA-TYP',
- 'DATE-FORMAT', 'DATE-F', 'DATE-FO', 'DATE-FOR', 'DATE-FORM', 'DATE-FORMA',
- 'DAY', 'DBCODEPAGE', 'DBCOLLATION', 'DBNAME', 'DBPARAM', 'DB-REFERENCES',
- 'DBRESTRICTIONS', 'DBREST', 'DBRESTR', 'DBRESTRI', 'DBRESTRIC',
- 'DBRESTRICT', 'DBRESTRICTI', 'DBRESTRICTIO', 'DBRESTRICTION', 'DBTASKID',
- 'DBTYPE', 'DBVERSION', 'DBVERS', 'DBVERSI', 'DBVERSIO', 'DCOLOR', 'DDE',
- 'DDE-ERROR', 'DDE-ID', 'DDE-I', 'DDE-ITEM', 'DDE-NAME', 'DDE-TOPIC',
- 'DEBLANK', 'DEBUG', 'DEBU', 'DEBUG-ALERT', 'DEBUGGER', 'DEBUG-LIST',
- 'DECIMALS', 'DECLARE', 'DECLARE-NAMESPACE', 'DECRYPT', 'DEFAULT',
- 'DEFAULT-BUFFER-HANDLE', 'DEFAULT-BUTTON', 'DEFAUT-B', 'DEFAUT-BU',
- 'DEFAUT-BUT', 'DEFAUT-BUTT', 'DEFAUT-BUTTO', 'DEFAULT-COMMIT',
- 'DEFAULT-EXTENSION', 'DEFAULT-EX', 'DEFAULT-EXT', 'DEFAULT-EXTE',
- 'DEFAULT-EXTEN', 'DEFAULT-EXTENS', 'DEFAULT-EXTENSI', 'DEFAULT-EXTENSIO',
- 'DEFAULT-NOXLATE', 'DEFAULT-NOXL', 'DEFAULT-NOXLA', 'DEFAULT-NOXLAT',
- 'DEFAULT-VALUE', 'DEFAULT-WINDOW', 'DEFINED', 'DEFINE-USER-EVENT-MANAGER',
- 'DELETE', 'DEL', 'DELE', 'DELET', 'DELETE-CHARACTER', 'DELETE-CHAR',
- 'DELETE-CHARA', 'DELETE-CHARAC', 'DELETE-CHARACT', 'DELETE-CHARACTE',
- 'DELETE-CURRENT-ROW', 'DELETE-LINE', 'DELETE-RESULT-LIST-ENTRY',
- 'DELETE-SELECTED-ROW', 'DELETE-SELECTED-ROWS', 'DELIMITER', 'DESC',
- 'DESCENDING', 'DESC', 'DESCE', 'DESCEN', 'DESCEND', 'DESCENDI', 'DESCENDIN',
- 'DESELECT-FOCUSED-ROW', 'DESELECTION', 'DESELECT-ROWS',
- 'DESELECT-SELECTED-ROW', 'DESTRUCTOR', 'DIALOG-BOX', 'DICTIONARY', 'DICT',
- 'DICTI', 'DICTIO', 'DICTION', 'DICTIONA', 'DICTIONAR', 'DIR', 'DISABLE',
- 'DISABLE-AUTO-ZAP', 'DISABLED', 'DISABLE-DUMP-TRIGGERS',
- 'DISABLE-LOAD-TRIGGERS', 'DISCONNECT', 'DISCON', 'DISCONN', 'DISCONNE',
- 'DISCONNEC', 'DISP', 'DISPLAY', 'DISP', 'DISPL', 'DISPLA',
- 'DISPLAY-MESSAGE', 'DISPLAY-TYPE', 'DISPLAY-T', 'DISPLAY-TY', 'DISPLAY-TYP',
- 'DISTINCT', 'DO', 'DOMAIN-DESCRIPTION', 'DOMAIN-NAME', 'DOMAIN-TYPE', 'DOS',
- 'DOUBLE', 'DOWN', 'DRAG-ENABLED', 'DROP', 'DROP-DOWN', 'DROP-DOWN-LIST',
- 'DROP-FILE-NOTIFY', 'DROP-TARGET', 'DUMP', 'DYNAMIC', 'DYNAMIC-FUNCTION',
- 'EACH', 'ECHO', 'EDGE-CHARS', 'EDGE', 'EDGE-', 'EDGE-C', 'EDGE-CH',
- 'EDGE-CHA', 'EDGE-CHAR', 'EDGE-PIXELS', 'EDGE-P', 'EDGE-PI', 'EDGE-PIX',
- 'EDGE-PIXE', 'EDGE-PIXEL', 'EDIT-CAN-PASTE', 'EDIT-CAN-UNDO', 'EDIT-CLEAR',
- 'EDIT-COPY', 'EDIT-CUT', 'EDITING', 'EDITOR', 'EDIT-PASTE', 'EDIT-UNDO',
- 'ELSE', 'EMPTY', 'EMPTY-TEMP-TABLE', 'ENABLE', 'ENABLED-FIELDS', 'ENCODE',
- 'ENCRYPT', 'ENCRYPT-AUDIT-MAC-KEY', 'ENCRYPTION-SALT', 'END',
- 'END-DOCUMENT', 'END-ELEMENT', 'END-EVENT-GROUP', 'END-FILE-DROP', 'ENDKEY',
- 'END-KEY', 'END-MOVE', 'END-RESIZE', 'END-ROW-RESIZE', 'END-USER-PROMPT',
- 'ENTERED', 'ENTRY', 'EQ', 'ERROR', 'ERROR-COLUMN', 'ERROR-COL',
- 'ERROR-COLU', 'ERROR-COLUM', 'ERROR-ROW', 'ERROR-STACK-TRACE',
- 'ERROR-STATUS', 'ERROR-STAT', 'ERROR-STATU', 'ESCAPE', 'ETIME',
- 'EVENT-GROUP-ID', 'EVENT-PROCEDURE', 'EVENT-PROCEDURE-CONTEXT', 'EVENTS',
- 'EVENT', 'EVENT-TYPE', 'EVENT-T', 'EVENT-TY', 'EVENT-TYP', 'EXCEPT',
- 'EXCLUSIVE-ID', 'EXCLUSIVE-LOCK', 'EXCLUSIVE', 'EXCLUSIVE-', 'EXCLUSIVE-L',
- 'EXCLUSIVE-LO', 'EXCLUSIVE-LOC', 'EXCLUSIVE-WEB-USER', 'EXECUTE', 'EXISTS',
- 'EXP', 'EXPAND', 'EXPANDABLE', 'EXPLICIT', 'EXPORT', 'EXPORT-PRINCIPAL',
- 'EXTENDED', 'EXTENT', 'EXTERNAL', 'FALSE', 'FETCH', 'FETCH-SELECTED-ROW',
- 'FGCOLOR', 'FGC', 'FGCO', 'FGCOL', 'FGCOLO', 'FIELD', 'FIELDS', 'FIELD',
- 'FILE', 'FILE-CREATE-DATE', 'FILE-CREATE-TIME', 'FILE-INFORMATION',
- 'FILE-INFO', 'FILE-INFOR', 'FILE-INFORM', 'FILE-INFORMA', 'FILE-INFORMAT',
- 'FILE-INFORMATI', 'FILE-INFORMATIO', 'FILE-MOD-DATE', 'FILE-MOD-TIME',
- 'FILENAME', 'FILE-NAME', 'FILE-OFFSET', 'FILE-OFF', 'FILE-OFFS',
- 'FILE-OFFSE', 'FILE-SIZE', 'FILE-TYPE', 'FILL', 'FILLED', 'FILL-IN',
- 'FILTERS', 'FINAL', 'FINALLY', 'FIND', 'FIND-BY-ROWID',
- 'FIND-CASE-SENSITIVE', 'FIND-CURRENT', 'FINDER', 'FIND-FIRST',
- 'FIND-GLOBAL', 'FIND-LAST', 'FIND-NEXT-OCCURRENCE', 'FIND-PREV-OCCURRENCE',
- 'FIND-SELECT', 'FIND-UNIQUE', 'FIND-WRAP-AROUND', 'FIRST',
- 'FIRST-ASYNCH-REQUEST', 'FIRST-CHILD', 'FIRST-COLUMN', 'FIRST-FORM',
- 'FIRST-OBJECT', 'FIRST-OF', 'FIRST-PROCEDURE', 'FIRST-PROC', 'FIRST-PROCE',
- 'FIRST-PROCED', 'FIRST-PROCEDU', 'FIRST-PROCEDUR', 'FIRST-SERVER',
- 'FIRST-TAB-ITEM', 'FIRST-TAB-I', 'FIRST-TAB-IT', 'FIRST-TAB-ITE',
- 'FIT-LAST-COLUMN', 'FIXED-ONLY', 'FLAT-BUTTON', 'FLOAT', 'FOCUS',
- 'FOCUSED-ROW', 'FOCUSED-ROW-SELECTED', 'FONT', 'FONT-TABLE', 'FOR',
- 'FORCE-FILE', 'FOREGROUND', 'FORE', 'FOREG', 'FOREGR', 'FOREGRO',
- 'FOREGROU', 'FOREGROUN', 'FORM', 'FORMAT', 'FORM', 'FORMA', 'FORMATTED',
- 'FORMATTE', 'FORM-LONG-INPUT', 'FORWARD', 'FORWARDS', 'FORWARD', 'FRAGMENT',
- 'FRAGMEN', 'FRAME', 'FRAM', 'FRAME-COL', 'FRAME-DB', 'FRAME-DOWN',
- 'FRAME-FIELD', 'FRAME-FILE', 'FRAME-INDEX', 'FRAME-INDE', 'FRAME-LINE',
- 'FRAME-NAME', 'FRAME-ROW', 'FRAME-SPACING', 'FRAME-SPA', 'FRAME-SPAC',
- 'FRAME-SPACI', 'FRAME-SPACIN', 'FRAME-VALUE', 'FRAME-VAL', 'FRAME-VALU',
- 'FRAME-X', 'FRAME-Y', 'FREQUENCY', 'FROM', 'FROM-CHARS', 'FROM-C',
- 'FROM-CH', 'FROM-CHA', 'FROM-CHAR', 'FROM-CURRENT', 'FROM-CUR', 'FROM-CURR',
- 'FROM-CURRE', 'FROM-CURREN', 'FROM-PIXELS', 'FROM-P', 'FROM-PI', 'FROM-PIX',
- 'FROM-PIXE', 'FROM-PIXEL', 'FULL-HEIGHT-CHARS', 'FULL-HEIGHT',
- 'FULL-HEIGHT-', 'FULL-HEIGHT-C', 'FULL-HEIGHT-CH', 'FULL-HEIGHT-CHA',
- 'FULL-HEIGHT-CHAR', 'FULL-HEIGHT-PIXELS', 'FULL-HEIGHT-P', 'FULL-HEIGHT-PI',
- 'FULL-HEIGHT-PIX', 'FULL-HEIGHT-PIXE', 'FULL-HEIGHT-PIXEL', 'FULL-PATHNAME',
- 'FULL-PATHN', 'FULL-PATHNA', 'FULL-PATHNAM', 'FULL-WIDTH-CHARS',
- 'FULL-WIDTH', 'FULL-WIDTH-', 'FULL-WIDTH-C', 'FULL-WIDTH-CH',
- 'FULL-WIDTH-CHA', 'FULL-WIDTH-CHAR', 'FULL-WIDTH-PIXELS', 'FULL-WIDTH-P',
- 'FULL-WIDTH-PI', 'FULL-WIDTH-PIX', 'FULL-WIDTH-PIXE', 'FULL-WIDTH-PIXEL',
- 'FUNCTION', 'FUNCTION-CALL-TYPE', 'GATEWAYS', 'GATEWAY', 'GE',
- 'GENERATE-MD5', 'GENERATE-PBE-KEY', 'GENERATE-PBE-SALT',
- 'GENERATE-RANDOM-KEY', 'GENERATE-UUID', 'GET', 'GET-ATTR-CALL-TYPE',
- 'GET-ATTRIBUTE-NODE', 'GET-BINARY-DATA', 'GET-BLUE-VALUE', 'GET-BLUE',
- 'GET-BLUE-', 'GET-BLUE-V', 'GET-BLUE-VA', 'GET-BLUE-VAL', 'GET-BLUE-VALU',
- 'GET-BROWSE-COLUMN', 'GET-BUFFER-HANDLEGETBYTE', 'GET-BYTE',
- 'GET-CALLBACK-PROC-CONTEXT', 'GET-CALLBACK-PROC-NAME', 'GET-CGI-LIST',
- 'GET-CGI-LONG-VALUE', 'GET-CGI-VALUE', 'GET-CODEPAGES', 'GET-COLLATIONS',
- 'GET-CONFIG-VALUE', 'GET-CURRENT', 'GET-DOUBLE', 'GET-DROPPED-FILE',
- 'GET-DYNAMIC', 'GET-ERROR-COLUMN', 'GET-ERROR-ROW', 'GET-FILE',
- 'GET-FILE-NAME', 'GET-FILE-OFFSET', 'GET-FILE-OFFSE', 'GET-FIRST',
- 'GET-FLOAT', 'GET-GREEN-VALUE', 'GET-GREEN', 'GET-GREEN-', 'GET-GREEN-V',
- 'GET-GREEN-VA', 'GET-GREEN-VAL', 'GET-GREEN-VALU',
- 'GET-INDEX-BY-NAMESPACE-NAME', 'GET-INDEX-BY-QNAME', 'GET-INT64',
- 'GET-ITERATION', 'GET-KEY-VALUE', 'GET-KEY-VAL', 'GET-KEY-VALU', 'GET-LAST',
- 'GET-LOCALNAME-BY-INDEX', 'GET-LONG', 'GET-MESSAGE', 'GET-NEXT',
- 'GET-NUMBER', 'GET-POINTER-VALUE', 'GET-PREV', 'GET-PRINTERS',
- 'GET-PROPERTY', 'GET-QNAME-BY-INDEX', 'GET-RED-VALUE', 'GET-RED',
- 'GET-RED-', 'GET-RED-V', 'GET-RED-VA', 'GET-RED-VAL', 'GET-RED-VALU',
- 'GET-REPOSITIONED-ROW', 'GET-RGB-VALUE', 'GET-SELECTED-WIDGET',
- 'GET-SELECTED', 'GET-SELECTED-', 'GET-SELECTED-W', 'GET-SELECTED-WI',
- 'GET-SELECTED-WID', 'GET-SELECTED-WIDG', 'GET-SELECTED-WIDGE', 'GET-SHORT',
- 'GET-SIGNATURE', 'GET-SIZE', 'GET-STRING', 'GET-TAB-ITEM',
- 'GET-TEXT-HEIGHT-CHARS', 'GET-TEXT-HEIGHT', 'GET-TEXT-HEIGHT-',
- 'GET-TEXT-HEIGHT-C', 'GET-TEXT-HEIGHT-CH', 'GET-TEXT-HEIGHT-CHA',
- 'GET-TEXT-HEIGHT-CHAR', 'GET-TEXT-HEIGHT-PIXELS', 'GET-TEXT-HEIGHT-P',
- 'GET-TEXT-HEIGHT-PI', 'GET-TEXT-HEIGHT-PIX', 'GET-TEXT-HEIGHT-PIXE',
- 'GET-TEXT-HEIGHT-PIXEL', 'GET-TEXT-WIDTH-CHARS', 'GET-TEXT-WIDTH',
- 'GET-TEXT-WIDTH-', 'GET-TEXT-WIDTH-C', 'GET-TEXT-WIDTH-CH',
- 'GET-TEXT-WIDTH-CHA', 'GET-TEXT-WIDTH-CHAR', 'GET-TEXT-WIDTH-PIXELS',
- 'GET-TEXT-WIDTH-P', 'GET-TEXT-WIDTH-PI', 'GET-TEXT-WIDTH-PIX',
- 'GET-TEXT-WIDTH-PIXE', 'GET-TEXT-WIDTH-PIXEL', 'GET-TYPE-BY-INDEX',
- 'GET-TYPE-BY-NAMESPACE-NAME', 'GET-TYPE-BY-QNAME', 'GET-UNSIGNED-LONG',
- 'GET-UNSIGNED-SHORT', 'GET-URI-BY-INDEX', 'GET-VALUE-BY-INDEX',
- 'GET-VALUE-BY-NAMESPACE-NAME', 'GET-VALUE-BY-QNAME', 'GET-WAIT-STATE',
- 'GLOBAL', 'GO-ON', 'GO-PENDING', 'GO-PEND', 'GO-PENDI', 'GO-PENDIN',
- 'GRANT', 'GRAPHIC-EDGE', 'GRAPHIC-E', 'GRAPHIC-ED', 'GRAPHIC-EDG',
- 'GRID-FACTOR-HORIZONTAL', 'GRID-FACTOR-H', 'GRID-FACTOR-HO',
- 'GRID-FACTOR-HOR', 'GRID-FACTOR-HORI', 'GRID-FACTOR-HORIZ',
- 'GRID-FACTOR-HORIZO', 'GRID-FACTOR-HORIZON', 'GRID-FACTOR-HORIZONT',
- 'GRID-FACTOR-HORIZONTA', 'GRID-FACTOR-VERTICAL', 'GRID-FACTOR-V',
- 'GRID-FACTOR-VE', 'GRID-FACTOR-VER', 'GRID-FACTOR-VERT', 'GRID-FACTOR-VERT',
- 'GRID-FACTOR-VERTI', 'GRID-FACTOR-VERTIC', 'GRID-FACTOR-VERTICA',
- 'GRID-SNAP', 'GRID-UNIT-HEIGHT-CHARS', 'GRID-UNIT-HEIGHT',
- 'GRID-UNIT-HEIGHT-', 'GRID-UNIT-HEIGHT-C', 'GRID-UNIT-HEIGHT-CH',
- 'GRID-UNIT-HEIGHT-CHA', 'GRID-UNIT-HEIGHT-PIXELS', 'GRID-UNIT-HEIGHT-P',
- 'GRID-UNIT-HEIGHT-PI', 'GRID-UNIT-HEIGHT-PIX', 'GRID-UNIT-HEIGHT-PIXE',
- 'GRID-UNIT-HEIGHT-PIXEL', 'GRID-UNIT-WIDTH-CHARS', 'GRID-UNIT-WIDTH',
- 'GRID-UNIT-WIDTH-', 'GRID-UNIT-WIDTH-C', 'GRID-UNIT-WIDTH-CH',
- 'GRID-UNIT-WIDTH-CHA', 'GRID-UNIT-WIDTH-CHAR', 'GRID-UNIT-WIDTH-PIXELS',
- 'GRID-UNIT-WIDTH-P', 'GRID-UNIT-WIDTH-PI', 'GRID-UNIT-WIDTH-PIX',
- 'GRID-UNIT-WIDTH-PIXE', 'GRID-UNIT-WIDTH-PIXEL', 'GRID-VISIBLE', 'GROUP',
- 'GT', 'GUID', 'HANDLER', 'HAS-RECORDS', 'HAVING', 'HEADER', 'HEIGHT-CHARS',
- 'HEIGHT', 'HEIGHT-', 'HEIGHT-C', 'HEIGHT-CH', 'HEIGHT-CHA', 'HEIGHT-CHAR',
- 'HEIGHT-PIXELS', 'HEIGHT-P', 'HEIGHT-PI', 'HEIGHT-PIX', 'HEIGHT-PIXE',
- 'HEIGHT-PIXEL', 'HELP', 'HEX-DECODE', 'HEX-ENCODE', 'HIDDEN', 'HIDE',
- 'HORIZONTAL', 'HORI', 'HORIZ', 'HORIZO', 'HORIZON', 'HORIZONT', 'HORIZONTA',
- 'HOST-BYTE-ORDER', 'HTML-CHARSET', 'HTML-END-OF-LINE', 'HTML-END-OF-PAGE',
- 'HTML-FRAME-BEGIN', 'HTML-FRAME-END', 'HTML-HEADER-BEGIN',
- 'HTML-HEADER-END', 'HTML-TITLE-BEGIN', 'HTML-TITLE-END', 'HWND', 'ICON',
- 'IF', 'IMAGE', 'IMAGE-DOWN', 'IMAGE-INSENSITIVE', 'IMAGE-SIZE',
- 'IMAGE-SIZE-CHARS', 'IMAGE-SIZE-C', 'IMAGE-SIZE-CH', 'IMAGE-SIZE-CHA',
- 'IMAGE-SIZE-CHAR', 'IMAGE-SIZE-PIXELS', 'IMAGE-SIZE-P', 'IMAGE-SIZE-PI',
- 'IMAGE-SIZE-PIX', 'IMAGE-SIZE-PIXE', 'IMAGE-SIZE-PIXEL', 'IMAGE-UP',
- 'IMMEDIATE-DISPLAY', 'IMPLEMENTS', 'IMPORT', 'IMPORT-PRINCIPAL', 'IN',
- 'INCREMENT-EXCLUSIVE-ID', 'INDEX', 'INDEXED-REPOSITION', 'INDEX-HINT',
- 'INDEX-INFORMATION', 'INDICATOR', 'INFORMATION', 'INFO', 'INFOR', 'INFORM',
- 'INFORMA', 'INFORMAT', 'INFORMATI', 'INFORMATIO', 'IN-HANDLE',
- 'INHERIT-BGCOLOR', 'INHERIT-BGC', 'INHERIT-BGCO', 'INHERIT-BGCOL',
- 'INHERIT-BGCOLO', 'INHERIT-FGCOLOR', 'INHERIT-FGC', 'INHERIT-FGCO',
- 'INHERIT-FGCOL', 'INHERIT-FGCOLO', 'INHERITS', 'INITIAL', 'INIT', 'INITI',
- 'INITIA', 'INITIAL-DIR', 'INITIAL-FILTER', 'INITIALIZE-DOCUMENT-TYPE',
- 'INITIATE', 'INNER-CHARS', 'INNER-LINES', 'INPUT', 'INPUT-OUTPUT',
- 'INPUT-O', 'INPUT-OU', 'INPUT-OUT', 'INPUT-OUTP', 'INPUT-OUTPU',
- 'INPUT-VALUE', 'INSERT', 'INSERT-ATTRIBUTE', 'INSERT-BACKTAB', 'INSERT-B',
- 'INSERT-BA', 'INSERT-BAC', 'INSERT-BACK', 'INSERT-BACKT', 'INSERT-BACKTA',
- 'INSERT-FILE', 'INSERT-ROW', 'INSERT-STRING', 'INSERT-TAB', 'INSERT-T',
- 'INSERT-TA', 'INTERFACE', 'INTERNAL-ENTRIES', 'INTO', 'INVOKE', 'IS',
- 'IS-ATTR-SPACE', 'IS-ATTR', 'IS-ATTR-', 'IS-ATTR-S', 'IS-ATTR-SP',
- 'IS-ATTR-SPA', 'IS-ATTR-SPAC', 'IS-CLASS', 'IS-CLAS', 'IS-LEAD-BYTE',
- 'IS-ATTR', 'IS-OPEN', 'IS-PARAMETER-SET', 'IS-ROW-SELECTED', 'IS-SELECTED',
- 'ITEM', 'ITEMS-PER-ROW', 'JOIN', 'JOIN-BY-SQLDB', 'KBLABEL',
- 'KEEP-CONNECTION-OPEN', 'KEEP-FRAME-Z-ORDER', 'KEEP-FRAME-Z',
- 'KEEP-FRAME-Z-', 'KEEP-FRAME-Z-O', 'KEEP-FRAME-Z-OR', 'KEEP-FRAME-Z-ORD',
- 'KEEP-FRAME-Z-ORDE', 'KEEP-MESSAGES', 'KEEP-SECURITY-CACHE',
- 'KEEP-TAB-ORDER', 'KEY', 'KEYCODE', 'KEY-CODE', 'KEYFUNCTION', 'KEYFUNC',
- 'KEYFUNCT', 'KEYFUNCTI', 'KEYFUNCTIO', 'KEY-FUNCTION', 'KEY-FUNC',
- 'KEY-FUNCT', 'KEY-FUNCTI', 'KEY-FUNCTIO', 'KEYLABEL', 'KEY-LABEL', 'KEYS',
- 'KEYWORD', 'KEYWORD-ALL', 'LABEL', 'LABEL-BGCOLOR', 'LABEL-BGC',
- 'LABEL-BGCO', 'LABEL-BGCOL', 'LABEL-BGCOLO', 'LABEL-DCOLOR', 'LABEL-DC',
- 'LABEL-DCO', 'LABEL-DCOL', 'LABEL-DCOLO', 'LABEL-FGCOLOR', 'LABEL-FGC',
- 'LABEL-FGCO', 'LABEL-FGCOL', 'LABEL-FGCOLO', 'LABEL-FONT', 'LABEL-PFCOLOR',
- 'LABEL-PFC', 'LABEL-PFCO', 'LABEL-PFCOL', 'LABEL-PFCOLO', 'LABELS',
- 'LANDSCAPE', 'LANGUAGES', 'LANGUAGE', 'LARGE', 'LARGE-TO-SMALL', 'LAST',
- 'LAST-ASYNCH-REQUEST', 'LAST-BATCH', 'LAST-CHILD', 'LAST-EVENT',
- 'LAST-EVEN', 'LAST-FORM', 'LASTKEY', 'LAST-KEY', 'LAST-OBJECT', 'LAST-OF',
- 'LAST-PROCEDURE', 'LAST-PROCE', 'LAST-PROCED', 'LAST-PROCEDU',
- 'LAST-PROCEDUR', 'LAST-SERVER', 'LAST-TAB-ITEM', 'LAST-TAB-I',
- 'LAST-TAB-IT', 'LAST-TAB-ITE', 'LC', 'LDBNAME', 'LE', 'LEAVE',
- 'LEFT-ALIGNED', 'LEFT-ALIGN', 'LEFT-ALIGNE', 'LEFT-TRIM', 'LENGTH',
- 'LIBRARY', 'LIKE', 'LIKE-SEQUENTIAL', 'LINE', 'LINE-COUNTER', 'LINE-COUNT',
- 'LINE-COUNTE', 'LIST-EVENTS', 'LISTING', 'LISTI', 'LISTIN',
- 'LIST-ITEM-PAIRS', 'LIST-ITEMS', 'LIST-PROPERTY-NAMES', 'LIST-QUERY-ATTRS',
- 'LIST-SET-ATTRS', 'LIST-WIDGETS', 'LITERAL-QUESTION', 'LITTLE-ENDIAN',
- 'LOAD', 'LOAD-DOMAINS', 'LOAD-ICON', 'LOAD-IMAGE', 'LOAD-IMAGE-DOWN',
- 'LOAD-IMAGE-INSENSITIVE', 'LOAD-IMAGE-UP', 'LOAD-MOUSE-POINTER',
- 'LOAD-MOUSE-P', 'LOAD-MOUSE-PO', 'LOAD-MOUSE-POI', 'LOAD-MOUSE-POIN',
- 'LOAD-MOUSE-POINT', 'LOAD-MOUSE-POINTE', 'LOAD-PICTURE', 'LOAD-SMALL-ICON',
- 'LOCAL-NAME', 'LOCATOR-COLUMN-NUMBER', 'LOCATOR-LINE-NUMBER',
- 'LOCATOR-PUBLIC-ID', 'LOCATOR-SYSTEM-ID', 'LOCATOR-TYPE', 'LOCKED',
- 'LOCK-REGISTRATION', 'LOG', 'LOG-AUDIT-EVENT', 'LOGIN-EXPIRATION-TIMESTAMP',
- 'LOGIN-HOST', 'LOGIN-STATE', 'LOG-MANAGER', 'LOGOUT', 'LOOKAHEAD', 'LOOKUP',
- 'LT', 'MACHINE-CLASS', 'MANDATORY', 'MANUAL-HIGHLIGHT', 'MAP',
- 'MARGIN-EXTRA', 'MARGIN-HEIGHT-CHARS', 'MARGIN-HEIGHT', 'MARGIN-HEIGHT-',
- 'MARGIN-HEIGHT-C', 'MARGIN-HEIGHT-CH', 'MARGIN-HEIGHT-CHA',
- 'MARGIN-HEIGHT-CHAR', 'MARGIN-HEIGHT-PIXELS', 'MARGIN-HEIGHT-P',
- 'MARGIN-HEIGHT-PI', 'MARGIN-HEIGHT-PIX', 'MARGIN-HEIGHT-PIXE',
- 'MARGIN-HEIGHT-PIXEL', 'MARGIN-WIDTH-CHARS', 'MARGIN-WIDTH',
- 'MARGIN-WIDTH-', 'MARGIN-WIDTH-C', 'MARGIN-WIDTH-CH', 'MARGIN-WIDTH-CHA',
- 'MARGIN-WIDTH-CHAR', 'MARGIN-WIDTH-PIXELS', 'MARGIN-WIDTH-P',
- 'MARGIN-WIDTH-PI', 'MARGIN-WIDTH-PIX', 'MARGIN-WIDTH-PIXE',
- 'MARGIN-WIDTH-PIXEL', 'MARK-NEW', 'MARK-ROW-STATE', 'MATCHES', 'MAX',
- 'MAX-BUTTON', 'MAX-CHARS', 'MAX-DATA-GUESS', 'MAX-HEIGHT',
- 'MAX-HEIGHT-CHARS', 'MAX-HEIGHT-C', 'MAX-HEIGHT-CH', 'MAX-HEIGHT-CHA',
- 'MAX-HEIGHT-CHAR', 'MAX-HEIGHT-PIXELS', 'MAX-HEIGHT-P', 'MAX-HEIGHT-PI',
- 'MAX-HEIGHT-PIX', 'MAX-HEIGHT-PIXE', 'MAX-HEIGHT-PIXEL', 'MAXIMIZE',
- 'MAXIMUM', 'MAX', 'MAXI', 'MAXIM', 'MAXIMU', 'MAXIMUM-LEVEL', 'MAX-ROWS',
- 'MAX-SIZE', 'MAX-VALUE', 'MAX-VAL', 'MAX-VALU', 'MAX-WIDTH',
- 'MAX-WIDTH-CHARS', 'MAX-WIDTH', 'MAX-WIDTH-', 'MAX-WIDTH-C', 'MAX-WIDTH-CH',
- 'MAX-WIDTH-CHA', 'MAX-WIDTH-CHAR', 'MAX-WIDTH-PIXELS', 'MAX-WIDTH-P',
- 'MAX-WIDTH-PI', 'MAX-WIDTH-PIX', 'MAX-WIDTH-PIXE', 'MAX-WIDTH-PIXEL',
- 'MD5-DIGEST', 'MEMBER', 'MEMPTR-TO-NODE-VALUE', 'MENU', 'MENUBAR',
- 'MENU-BAR', 'MENU-ITEM', 'MENU-KEY', 'MENU-K', 'MENU-KE', 'MENU-MOUSE',
- 'MENU-M', 'MENU-MO', 'MENU-MOU', 'MENU-MOUS', 'MERGE-BY-FIELD', 'MESSAGE',
- 'MESSAGE-AREA', 'MESSAGE-AREA-FONT', 'MESSAGE-LINES', 'METHOD', 'MIN',
- 'MIN-BUTTON', 'MIN-COLUMN-WIDTH-CHARS', 'MIN-COLUMN-WIDTH-C',
- 'MIN-COLUMN-WIDTH-CH', 'MIN-COLUMN-WIDTH-CHA', 'MIN-COLUMN-WIDTH-CHAR',
- 'MIN-COLUMN-WIDTH-PIXELS', 'MIN-COLUMN-WIDTH-P', 'MIN-COLUMN-WIDTH-PI',
- 'MIN-COLUMN-WIDTH-PIX', 'MIN-COLUMN-WIDTH-PIXE', 'MIN-COLUMN-WIDTH-PIXEL',
- 'MIN-HEIGHT-CHARS', 'MIN-HEIGHT', 'MIN-HEIGHT-', 'MIN-HEIGHT-C',
- 'MIN-HEIGHT-CH', 'MIN-HEIGHT-CHA', 'MIN-HEIGHT-CHAR', 'MIN-HEIGHT-PIXELS',
- 'MIN-HEIGHT-P', 'MIN-HEIGHT-PI', 'MIN-HEIGHT-PIX', 'MIN-HEIGHT-PIXE',
- 'MIN-HEIGHT-PIXEL', 'MINIMUM', 'MIN', 'MINI', 'MINIM', 'MINIMU', 'MIN-SIZE',
- 'MIN-VALUE', 'MIN-VAL', 'MIN-VALU', 'MIN-WIDTH-CHARS', 'MIN-WIDTH',
- 'MIN-WIDTH-', 'MIN-WIDTH-C', 'MIN-WIDTH-CH', 'MIN-WIDTH-CHA',
- 'MIN-WIDTH-CHAR', 'MIN-WIDTH-PIXELS', 'MIN-WIDTH-P', 'MIN-WIDTH-PI',
- 'MIN-WIDTH-PIX', 'MIN-WIDTH-PIXE', 'MIN-WIDTH-PIXEL', 'MODIFIED', 'MODULO',
- 'MOD', 'MODU', 'MODUL', 'MONTH', 'MOUSE', 'MOUSE-POINTER', 'MOUSE-P',
- 'MOUSE-PO', 'MOUSE-POI', 'MOUSE-POIN', 'MOUSE-POINT', 'MOUSE-POINTE',
- 'MOVABLE', 'MOVE-AFTER-TAB-ITEM', 'MOVE-AFTER', 'MOVE-AFTER-',
- 'MOVE-AFTER-T', 'MOVE-AFTER-TA', 'MOVE-AFTER-TAB', 'MOVE-AFTER-TAB-',
- 'MOVE-AFTER-TAB-I', 'MOVE-AFTER-TAB-IT', 'MOVE-AFTER-TAB-ITE',
- 'MOVE-BEFORE-TAB-ITEM', 'MOVE-BEFOR', 'MOVE-BEFORE', 'MOVE-BEFORE-',
- 'MOVE-BEFORE-T', 'MOVE-BEFORE-TA', 'MOVE-BEFORE-TAB', 'MOVE-BEFORE-TAB-',
- 'MOVE-BEFORE-TAB-I', 'MOVE-BEFORE-TAB-IT', 'MOVE-BEFORE-TAB-ITE',
- 'MOVE-COLUMN', 'MOVE-COL', 'MOVE-COLU', 'MOVE-COLUM', 'MOVE-TO-BOTTOM',
- 'MOVE-TO-B', 'MOVE-TO-BO', 'MOVE-TO-BOT', 'MOVE-TO-BOTT', 'MOVE-TO-BOTTO',
- 'MOVE-TO-EOF', 'MOVE-TO-TOP', 'MOVE-TO-T', 'MOVE-TO-TO', 'MPE',
- 'MULTI-COMPILE', 'MULTIPLE', 'MULTIPLE-KEY', 'MULTITASKING-INTERVAL',
- 'MUST-EXIST', 'NAME', 'NAMESPACE-PREFIX', 'NAMESPACE-URI', 'NATIVE', 'NE',
- 'NEEDS-APPSERVER-PROMPT', 'NEEDS-PROMPT', 'NEW', 'NEW-INSTANCE', 'NEW-ROW',
- 'NEXT', 'NEXT-COLUMN', 'NEXT-PROMPT', 'NEXT-ROWID', 'NEXT-SIBLING',
- 'NEXT-TAB-ITEM', 'NEXT-TAB-I', 'NEXT-TAB-IT', 'NEXT-TAB-ITE', 'NEXT-VALUE',
- 'NO', 'NO-APPLY', 'NO-ARRAY-MESSAGE', 'NO-ASSIGN', 'NO-ATTR-LIST',
- 'NO-ATTR', 'NO-ATTR-', 'NO-ATTR-L', 'NO-ATTR-LI', 'NO-ATTR-LIS',
- 'NO-ATTR-SPACE', 'NO-ATTR', 'NO-ATTR-', 'NO-ATTR-S', 'NO-ATTR-SP',
- 'NO-ATTR-SPA', 'NO-ATTR-SPAC', 'NO-AUTO-VALIDATE', 'NO-BIND-WHERE',
- 'NO-BOX', 'NO-CONSOLE', 'NO-CONVERT', 'NO-CONVERT-3D-COLORS',
- 'NO-CURRENT-VALUE', 'NO-DEBUG', 'NODE-VALUE-TO-MEMPTR', 'NO-DRAG',
- 'NO-ECHO', 'NO-EMPTY-SPACE', 'NO-ERROR', 'NO-FILL', 'NO-F', 'NO-FI',
- 'NO-FIL', 'NO-FOCUS', 'NO-HELP', 'NO-HIDE', 'NO-INDEX-HINT',
- 'NO-INHERIT-BGCOLOR', 'NO-INHERIT-BGC', 'NO-INHERIT-BGCO', 'LABEL-BGCOL',
- 'LABEL-BGCOLO', 'NO-INHERIT-FGCOLOR', 'NO-INHERIT-FGC', 'NO-INHERIT-FGCO',
- 'NO-INHERIT-FGCOL', 'NO-INHERIT-FGCOLO', 'NO-JOIN-BY-SQLDB', 'NO-LABELS',
- 'NO-LABE', 'NO-LOBS', 'NO-LOCK', 'NO-LOOKAHEAD', 'NO-MAP', 'NO-MESSAGE',
- 'NO-MES', 'NO-MESS', 'NO-MESSA', 'NO-MESSAG', 'NONAMESPACE-SCHEMA-LOCATION',
- 'NONE', 'NO-PAUSE', 'NO-PREFETCH', 'NO-PREFE', 'NO-PREFET', 'NO-PREFETC',
- 'NORMALIZE', 'NO-ROW-MARKERS', 'NO-SCROLLBAR-VERTICAL',
- 'NO-SEPARATE-CONNECTION', 'NO-SEPARATORS', 'NOT', 'NO-TAB-STOP',
- 'NOT-ACTIVE', 'NO-UNDERLINE', 'NO-UND', 'NO-UNDE', 'NO-UNDER', 'NO-UNDERL',
- 'NO-UNDERLI', 'NO-UNDERLIN', 'NO-UNDO', 'NO-VALIDATE', 'NO-VAL', 'NO-VALI',
- 'NO-VALID', 'NO-VALIDA', 'NO-VALIDAT', 'NOW', 'NO-WAIT', 'NO-WORD-WRAP',
- 'NULL', 'NUM-ALIASES', 'NUM-ALI', 'NUM-ALIA', 'NUM-ALIAS', 'NUM-ALIASE',
- 'NUM-BUFFERS', 'NUM-BUTTONS', 'NUM-BUT', 'NUM-BUTT', 'NUM-BUTTO',
- 'NUM-BUTTON', 'NUM-COLUMNS', 'NUM-COL', 'NUM-COLU', 'NUM-COLUM',
- 'NUM-COLUMN', 'NUM-COPIES', 'NUM-DBS', 'NUM-DROPPED-FILES', 'NUM-ENTRIES',
- 'NUMERIC', 'NUMERIC-FORMAT', 'NUMERIC-F', 'NUMERIC-FO', 'NUMERIC-FOR',
- 'NUMERIC-FORM', 'NUMERIC-FORMA', 'NUM-FIELDS', 'NUM-FORMATS', 'NUM-ITEMS',
- 'NUM-ITERATIONS', 'NUM-LINES', 'NUM-LOCKED-COLUMNS', 'NUM-LOCKED-COL',
- 'NUM-LOCKED-COLU', 'NUM-LOCKED-COLUM', 'NUM-LOCKED-COLUMN', 'NUM-MESSAGES',
- 'NUM-PARAMETERS', 'NUM-REFERENCES', 'NUM-REPLACED', 'NUM-RESULTS',
- 'NUM-SELECTED-ROWS', 'NUM-SELECTED-WIDGETS', 'NUM-SELECTED',
- 'NUM-SELECTED-', 'NUM-SELECTED-W', 'NUM-SELECTED-WI', 'NUM-SELECTED-WID',
- 'NUM-SELECTED-WIDG', 'NUM-SELECTED-WIDGE', 'NUM-SELECTED-WIDGET',
- 'NUM-TABS', 'NUM-TO-RETAIN', 'NUM-VISIBLE-COLUMNS', 'OCTET-LENGTH', 'OF',
- 'OFF', 'OK', 'OK-CANCEL', 'OLD', 'ON', 'ON-FRAME-BORDER', 'ON-FRAME',
- 'ON-FRAME-', 'ON-FRAME-B', 'ON-FRAME-BO', 'ON-FRAME-BOR', 'ON-FRAME-BORD',
- 'ON-FRAME-BORDE', 'OPEN', 'OPSYS', 'OPTION', 'OR', 'ORDERED-JOIN',
- 'ORDINAL', 'OS-APPEND', 'OS-COMMAND', 'OS-COPY', 'OS-CREATE-DIR',
- 'OS-DELETE', 'OS-DIR', 'OS-DRIVES', 'OS-DRIVE', 'OS-ERROR', 'OS-GETENV',
- 'OS-RENAME', 'OTHERWISE', 'OUTPUT', 'OVERLAY', 'OVERRIDE', 'OWNER', 'PAGE',
- 'PAGE-BOTTOM', 'PAGE-BOT', 'PAGE-BOTT', 'PAGE-BOTTO', 'PAGED',
- 'PAGE-NUMBER', 'PAGE-NUM', 'PAGE-NUMB', 'PAGE-NUMBE', 'PAGE-SIZE',
- 'PAGE-TOP', 'PAGE-WIDTH', 'PAGE-WID', 'PAGE-WIDT', 'PARAMETER', 'PARAM',
- 'PARAME', 'PARAMET', 'PARAMETE', 'PARENT', 'PARSE-STATUS', 'PARTIAL-KEY',
- 'PASCAL', 'PASSWORD-FIELD', 'PATHNAME', 'PAUSE', 'PBE-HASH-ALGORITHM',
- 'PBE-HASH-ALG', 'PBE-HASH-ALGO', 'PBE-HASH-ALGOR', 'PBE-HASH-ALGORI',
- 'PBE-HASH-ALGORIT', 'PBE-HASH-ALGORITH', 'PBE-KEY-ROUNDS', 'PDBNAME',
- 'PERSISTENT', 'PERSIST', 'PERSISTE', 'PERSISTEN',
- 'PERSISTENT-CACHE-DISABLED', 'PFCOLOR', 'PFC', 'PFCO', 'PFCOL', 'PFCOLO',
- 'PIXELS', 'PIXELS-PER-COLUMN', 'PIXELS-PER-COL', 'PIXELS-PER-COLU',
- 'PIXELS-PER-COLUM', 'PIXELS-PER-ROW', 'POPUP-MENU', 'POPUP-M', 'POPUP-ME',
- 'POPUP-MEN', 'POPUP-ONLY', 'POPUP-O', 'POPUP-ON', 'POPUP-ONL', 'PORTRAIT',
- 'POSITION', 'PRECISION', 'PREFER-DATASET', 'PREPARED', 'PREPARE-STRING',
- 'PREPROCESS', 'PREPROC', 'PREPROCE', 'PREPROCES', 'PRESELECT', 'PRESEL',
- 'PRESELE', 'PRESELEC', 'PREV', 'PREV-COLUMN', 'PREV-SIBLING',
- 'PREV-TAB-ITEM', 'PREV-TAB-I', 'PREV-TAB-IT', 'PREV-TAB-ITE', 'PRIMARY',
- 'PRINTER', 'PRINTER-CONTROL-HANDLE', 'PRINTER-HDC', 'PRINTER-NAME',
- 'PRINTER-PORT', 'PRINTER-SETUP', 'PRIVATE', 'PRIVATE-DATA', 'PRIVATE-D',
- 'PRIVATE-DA', 'PRIVATE-DAT', 'PRIVILEGES', 'PROCEDURE', 'PROCE', 'PROCED',
- 'PROCEDU', 'PROCEDUR', 'PROCEDURE-CALL-TYPE', 'PROCESS', 'PROC-HANDLE',
- 'PROC-HA', 'PROC-HAN', 'PROC-HAND', 'PROC-HANDL', 'PROC-STATUS', 'PROC-ST',
- 'PROC-STA', 'PROC-STAT', 'PROC-STATU', 'proc-text', 'proc-text-buffe',
- 'PROFILER', 'PROGRAM-NAME', 'PROGRESS', 'PROGRESS-SOURCE', 'PROGRESS-S',
- 'PROGRESS-SO', 'PROGRESS-SOU', 'PROGRESS-SOUR', 'PROGRESS-SOURC', 'PROMPT',
- 'PROMPT-FOR', 'PROMPT-F', 'PROMPT-FO', 'PROMSGS', 'PROPATH', 'PROPERTY',
- 'PROTECTED', 'PROVERSION', 'PROVERS', 'PROVERSI', 'PROVERSIO', 'PROXY',
- 'PROXY-PASSWORD', 'PROXY-USERID', 'PUBLIC', 'PUBLIC-ID', 'PUBLISH',
- 'PUBLISHED-EVENTS', 'PUT', 'PUTBYTE', 'PUT-BYTE', 'PUT-DOUBLE', 'PUT-FLOAT',
- 'PUT-INT64', 'PUT-KEY-VALUE', 'PUT-KEY-VAL', 'PUT-KEY-VALU', 'PUT-LONG',
- 'PUT-SHORT', 'PUT-STRING', 'PUT-UNSIGNED-LONG', 'QUERY', 'QUERY-CLOSE',
- 'QUERY-OFF-END', 'QUERY-OPEN', 'QUERY-PREPARE', 'QUERY-TUNING', 'QUESTION',
- 'QUIT', 'QUOTER', 'RADIO-BUTTONS', 'RADIO-SET', 'RANDOM', 'RAW-TRANSFER',
- 'RCODE-INFORMATION', 'RCODE-INFO', 'RCODE-INFOR', 'RCODE-INFORM',
- 'RCODE-INFORMA', 'RCODE-INFORMAT', 'RCODE-INFORMATI', 'RCODE-INFORMATIO',
- 'READ-AVAILABLE', 'READ-EXACT-NUM', 'READ-FILE', 'READKEY', 'READ-ONLY',
- 'READ-XML', 'READ-XMLSCHEMA', 'REAL', 'RECORD-LENGTH', 'RECTANGLE', 'RECT',
- 'RECTA', 'RECTAN', 'RECTANG', 'RECTANGL', 'RECURSIVE', 'REFERENCE-ONLY',
- 'REFRESH', 'REFRESHABLE', 'REFRESH-AUDIT-POLICY', 'REGISTER-DOMAIN',
- 'RELEASE', 'REMOTE', 'REMOVE-EVENTS-PROCEDURE', 'REMOVE-SUPER-PROCEDURE',
- 'REPEAT', 'REPLACE', 'REPLACE-SELECTION-TEXT', 'REPOSITION',
- 'REPOSITION-BACKWARD', 'REPOSITION-FORWARD', 'REPOSITION-MODE',
- 'REPOSITION-TO-ROW', 'REPOSITION-TO-ROWID', 'REQUEST', 'RESET', 'RESIZABLE',
- 'RESIZA', 'RESIZAB', 'RESIZABL', 'RESIZE', 'RESTART-ROW', 'RESTART-ROWID',
- 'RETAIN', 'RETAIN-SHAPE', 'RETRY', 'RETRY-CANCEL', 'RETURN',
- 'RETURN-INSERTED', 'RETURN-INS', 'RETURN-INSE', 'RETURN-INSER',
- 'RETURN-INSERT', 'RETURN-INSERTE', 'RETURNS', 'RETURN-TO-START-DIR',
- 'RETURN-TO-START-DI', 'RETURN-VALUE', 'RETURN-VAL', 'RETURN-VALU',
- 'RETURN-VALUE-DATA-TYPE', 'REVERSE-FROM', 'REVERT', 'REVOKE', 'RGB-VALUE',
- 'RIGHT-ALIGNED', 'RETURN-ALIGN', 'RETURN-ALIGNE', 'RIGHT-TRIM', 'R-INDEX',
- 'ROLES', 'ROUND', 'ROUTINE-LEVEL', 'ROW', 'ROW-HEIGHT-CHARS', 'HEIGHT',
- 'ROW-HEIGHT-PIXELS', 'HEIGHT-P', 'ROW-MARKERS', 'ROW-OF', 'ROW-RESIZABLE',
- 'RULE', 'RUN', 'RUN-PROCEDURE', 'SAVE', 'SAVE-AS', 'SAVE-FILE',
- 'SAX-COMPLETE', 'SAX-COMPLE', 'SAX-COMPLET', 'SAX-PARSE', 'SAX-PARSE-FIRST',
- 'SAX-PARSE-NEXT', 'SAX-PARSER-ERROR', 'SAX-RUNNING', 'SAX-UNINITIALIZED',
- 'SAX-WRITE-BEGIN', 'SAX-WRITE-COMPLETE', 'SAX-WRITE-CONTENT',
- 'SAX-WRITE-ELEMENT', 'SAX-WRITE-ERROR', 'SAX-WRITE-IDLE', 'SAX-WRITER',
- 'SAX-WRITE-TAG', 'SCHEMA', 'SCHEMA-LOCATION', 'SCHEMA-MARSHAL',
- 'SCHEMA-PATH', 'SCREEN', 'SCREEN-IO', 'SCREEN-LINES', 'SCREEN-VALUE',
- 'SCREEN-VAL', 'SCREEN-VALU', 'SCROLL', 'SCROLLABLE', 'SCROLLBAR-HORIZONTAL',
- 'SCROLLBAR-H', 'SCROLLBAR-HO', 'SCROLLBAR-HOR', 'SCROLLBAR-HORI',
- 'SCROLLBAR-HORIZ', 'SCROLLBAR-HORIZO', 'SCROLLBAR-HORIZON',
- 'SCROLLBAR-HORIZONT', 'SCROLLBAR-HORIZONTA', 'SCROLL-BARS',
- 'SCROLLBAR-VERTICAL', 'SCROLLBAR-V', 'SCROLLBAR-VE', 'SCROLLBAR-VER',
- 'SCROLLBAR-VERT', 'SCROLLBAR-VERTI', 'SCROLLBAR-VERTIC',
- 'SCROLLBAR-VERTICA', 'SCROLL-DELTA', 'SCROLLED-ROW-POSITION',
- 'SCROLLED-ROW-POS', 'SCROLLED-ROW-POSI', 'SCROLLED-ROW-POSIT',
- 'SCROLLED-ROW-POSITI', 'SCROLLED-ROW-POSITIO', 'SCROLLING', 'SCROLL-OFFSET',
- 'SCROLL-TO-CURRENT-ROW', 'SCROLL-TO-ITEM', 'SCROLL-TO-I', 'SCROLL-TO-IT',
- 'SCROLL-TO-ITE', 'SCROLL-TO-SELECTED-ROW', 'SDBNAME', 'SEAL',
- 'SEAL-TIMESTAMP', 'SEARCH', 'SEARCH-SELF', 'SEARCH-TARGET', 'SECTION',
- 'SECURITY-POLICY', 'SEEK', 'SELECT', 'SELECTABLE', 'SELECT-ALL', 'SELECTED',
- 'SELECT-FOCUSED-ROW', 'SELECTION', 'SELECTION-END', 'SELECTION-LIST',
- 'SELECTION-START', 'SELECTION-TEXT', 'SELECT-NEXT-ROW', 'SELECT-PREV-ROW',
- 'SELECT-ROW', 'SELF', 'SEND', 'send-sql-statement', 'send-sql', 'SENSITIVE',
- 'SEPARATE-CONNECTION', 'SEPARATOR-FGCOLOR', 'SEPARATORS', 'SERVER',
- 'SERVER-CONNECTION-BOUND', 'SERVER-CONNECTION-BOUND-REQUEST',
- 'SERVER-CONNECTION-CONTEXT', 'SERVER-CONNECTION-ID',
- 'SERVER-OPERATING-MODE', 'SESSION', 'SESSION-ID', 'SET', 'SET-APPL-CONTEXT',
- 'SET-ATTR-CALL-TYPE', 'SET-ATTRIBUTE-NODE', 'SET-BLUE-VALUE', 'SET-BLUE',
- 'SET-BLUE-', 'SET-BLUE-V', 'SET-BLUE-VA', 'SET-BLUE-VAL', 'SET-BLUE-VALU',
- 'SET-BREAK', 'SET-BUFFERS', 'SET-CALLBACK', 'SET-CLIENT', 'SET-COMMIT',
- 'SET-CONTENTS', 'SET-CURRENT-VALUE', 'SET-DB-CLIENT', 'SET-DYNAMIC',
- 'SET-EVENT-MANAGER-OPTION', 'SET-GREEN-VALUE', 'SET-GREEN', 'SET-GREEN-',
- 'SET-GREEN-V', 'SET-GREEN-VA', 'SET-GREEN-VAL', 'SET-GREEN-VALU',
- 'SET-INPUT-SOURCE', 'SET-OPTION', 'SET-OUTPUT-DESTINATION', 'SET-PARAMETER',
- 'SET-POINTER-VALUE', 'SET-PROPERTY', 'SET-RED-VALUE', 'SET-RED', 'SET-RED-',
- 'SET-RED-V', 'SET-RED-VA', 'SET-RED-VAL', 'SET-RED-VALU',
- 'SET-REPOSITIONED-ROW', 'SET-RGB-VALUE', 'SET-ROLLBACK', 'SET-SELECTION',
- 'SET-SIZE', 'SET-SORT-ARROW', 'SETUSERID', 'SETUSER', 'SETUSERI',
- 'SET-WAIT-STATE', 'SHA1-DIGEST', 'SHARED', 'SHARE-LOCK', 'SHARE', 'SHARE-',
- 'SHARE-L', 'SHARE-LO', 'SHARE-LOC', 'SHOW-IN-TASKBAR', 'SHOW-STATS',
- 'SHOW-STAT', 'SIDE-LABEL-HANDLE', 'SIDE-LABEL-H', 'SIDE-LABEL-HA',
- 'SIDE-LABEL-HAN', 'SIDE-LABEL-HAND', 'SIDE-LABEL-HANDL', 'SIDE-LABELS',
- 'SIDE-LAB', 'SIDE-LABE', 'SIDE-LABEL', 'SILENT', 'SIMPLE', 'SINGLE', 'SIZE',
- 'SIZE-CHARS', 'SIZE-C', 'SIZE-CH', 'SIZE-CHA', 'SIZE-CHAR', 'SIZE-PIXELS',
- 'SIZE-P', 'SIZE-PI', 'SIZE-PIX', 'SIZE-PIXE', 'SIZE-PIXEL', 'SKIP',
- 'SKIP-DELETED-RECORD', 'SLIDER', 'SMALL-ICON', 'SMALLINT', 'SMALL-TITLE',
- 'SOME', 'SORT', 'SORT-ASCENDING', 'SORT-NUMBER', 'SOURCE',
- 'SOURCE-PROCEDURE', 'SPACE', 'SQL', 'SQRT', 'SSL-SERVER-NAME', 'STANDALONE',
- 'START', 'START-DOCUMENT', 'START-ELEMENT', 'START-MOVE', 'START-RESIZE',
- 'START-ROW-RESIZE', 'STATE-DETAIL', 'STATIC', 'STATUS', 'STATUS-AREA',
- 'STATUS-AREA-FONT', 'STDCALL', 'STOP', 'STOP-PARSING', 'STOPPED', 'STOPPE',
- 'STORED-PROCEDURE', 'STORED-PROC', 'STORED-PROCE', 'STORED-PROCED',
- 'STORED-PROCEDU', 'STORED-PROCEDUR', 'STREAM', 'STREAM-HANDLE', 'STREAM-IO',
- 'STRETCH-TO-FIT', 'STRICT', 'STRING', 'STRING-VALUE', 'STRING-XREF',
- 'SUB-AVERAGE', 'SUB-AVE', 'SUB-AVER', 'SUB-AVERA', 'SUB-AVERAG',
- 'SUB-COUNT', 'SUB-MAXIMUM', 'SUM-MAX', 'SUM-MAXI', 'SUM-MAXIM',
- 'SUM-MAXIMU', 'SUB-MENU', 'SUBSUB-', 'MINIMUM', 'SUB-MIN', 'SUBSCRIBE',
- 'SUBSTITUTE', 'SUBST', 'SUBSTI', 'SUBSTIT', 'SUBSTITU', 'SUBSTITUT',
- 'SUBSTRING', 'SUBSTR', 'SUBSTRI', 'SUBSTRIN', 'SUB-TOTAL', 'SUBTYPE', 'SUM',
- 'SUPER', 'SUPER-PROCEDURES', 'SUPPRESS-NAMESPACE-PROCESSING',
- 'SUPPRESS-WARNINGS', 'SUPPRESS-W', 'SUPPRESS-WA', 'SUPPRESS-WAR',
- 'SUPPRESS-WARN', 'SUPPRESS-WARNI', 'SUPPRESS-WARNIN', 'SUPPRESS-WARNING',
- 'SYMMETRIC-ENCRYPTION-ALGORITHM', 'SYMMETRIC-ENCRYPTION-IV',
- 'SYMMETRIC-ENCRYPTION-KEY', 'SYMMETRIC-SUPPORT', 'SYSTEM-ALERT-BOXES',
- 'SYSTEM-ALERT', 'SYSTEM-ALERT-', 'SYSTEM-ALERT-B', 'SYSTEM-ALERT-BO',
- 'SYSTEM-ALERT-BOX', 'SYSTEM-ALERT-BOXE', 'SYSTEM-DIALOG', 'SYSTEM-HELP',
- 'SYSTEM-ID', 'TABLE', 'TABLE-HANDLE', 'TABLE-NUMBER', 'TAB-POSITION',
- 'TAB-STOP', 'TARGET', 'TARGET-PROCEDURE', 'TEMP-DIRECTORY', 'TEMP-DIR',
- 'TEMP-DIRE', 'TEMP-DIREC', 'TEMP-DIRECT', 'TEMP-DIRECTO', 'TEMP-DIRECTOR',
- 'TEMP-TABLE', 'TEMP-TABLE-PREPARE', 'TERM', 'TERMINAL', 'TERM', 'TERMI',
- 'TERMIN', 'TERMINA', 'TERMINATE', 'TEXT', 'TEXT-CURSOR', 'TEXT-SEG-GROW',
- 'TEXT-SELECTED', 'THEN', 'THIS-OBJECT', 'THIS-PROCEDURE', 'THREE-D',
- 'THROW', 'THROUGH', 'THRU', 'TIC-MARKS', 'TIME', 'TIME-SOURCE', 'TITLE',
- 'TITLE-BGCOLOR', 'TITLE-BGC', 'TITLE-BGCO', 'TITLE-BGCOL', 'TITLE-BGCOLO',
- 'TITLE-DCOLOR', 'TITLE-DC', 'TITLE-DCO', 'TITLE-DCOL', 'TITLE-DCOLO',
- 'TITLE-FGCOLOR', 'TITLE-FGC', 'TITLE-FGCO', 'TITLE-FGCOL', 'TITLE-FGCOLO',
- 'TITLE-FONT', 'TITLE-FO', 'TITLE-FON', 'TO', 'TODAY', 'TOGGLE-BOX',
- 'TOOLTIP', 'TOOLTIPS', 'TOPIC', 'TOP-NAV-QUERY', 'TOP-ONLY', 'TO-ROWID',
- 'TOTAL', 'TRAILING', 'TRANS', 'TRANSACTION', 'TRANSACTION-MODE',
- 'TRANS-INIT-PROCEDURE', 'TRANSPARENT', 'TRIGGER', 'TRIGGERS', 'TRIM',
- 'TRUE', 'TRUNCATE', 'TRUNC', 'TRUNCA', 'TRUNCAT', 'TYPE', 'TYPE-OF',
- 'UNBOX', 'UNBUFFERED', 'UNBUFF', 'UNBUFFE', 'UNBUFFER', 'UNBUFFERE',
- 'UNDERLINE', 'UNDERL', 'UNDERLI', 'UNDERLIN', 'UNDO', 'UNFORMATTED',
- 'UNFORM', 'UNFORMA', 'UNFORMAT', 'UNFORMATT', 'UNFORMATTE', 'UNION',
- 'UNIQUE', 'UNIQUE-ID', 'UNIQUE-MATCH', 'UNIX', 'UNLESS-HIDDEN', 'UNLOAD',
- 'UNSIGNED-LONG', 'UNSUBSCRIBE', 'UP', 'UPDATE', 'UPDATE-ATTRIBUTE', 'URL',
- 'URL-DECODE', 'URL-ENCODE', 'URL-PASSWORD', 'URL-USERID', 'USE',
- 'USE-DICT-EXPS', 'USE-FILENAME', 'USE-INDEX', 'USER', 'USE-REVVIDEO',
- 'USERID', 'USER-ID', 'USE-TEXT', 'USE-UNDERLINE', 'USE-WIDGET-POOL',
- 'USING', 'V6DISPLAY', 'V6FRAME', 'VALIDATE', 'VALIDATE-EXPRESSION',
- 'VALIDATE-MESSAGE', 'VALIDATE-SEAL', 'VALIDATION-ENABLED', 'VALID-EVENT',
- 'VALID-HANDLE', 'VALID-OBJECT', 'VALUE', 'VALUE-CHANGED', 'VALUES',
- 'VARIABLE', 'VAR', 'VARI', 'VARIA', 'VARIAB', 'VARIABL', 'VERBOSE',
- 'VERSION', 'VERTICAL', 'VERT', 'VERTI', 'VERTIC', 'VERTICA', 'VIEW',
- 'VIEW-AS', 'VIEW-FIRST-COLUMN-ON-REOPEN', 'VIRTUAL-HEIGHT-CHARS',
- 'VIRTUAL-HEIGHT', 'VIRTUAL-HEIGHT-', 'VIRTUAL-HEIGHT-C',
- 'VIRTUAL-HEIGHT-CH', 'VIRTUAL-HEIGHT-CHA', 'VIRTUAL-HEIGHT-CHAR',
- 'VIRTUAL-HEIGHT-PIXELS', 'VIRTUAL-HEIGHT-P', 'VIRTUAL-HEIGHT-PI',
- 'VIRTUAL-HEIGHT-PIX', 'VIRTUAL-HEIGHT-PIXE', 'VIRTUAL-HEIGHT-PIXEL',
- 'VIRTUAL-WIDTH-CHARS', 'VIRTUAL-WIDTH', 'VIRTUAL-WIDTH-', 'VIRTUAL-WIDTH-C',
- 'VIRTUAL-WIDTH-CH', 'VIRTUAL-WIDTH-CHA', 'VIRTUAL-WIDTH-CHAR',
- 'VIRTUAL-WIDTH-PIXELS', 'VIRTUAL-WIDTH-P', 'VIRTUAL-WIDTH-PI',
- 'VIRTUAL-WIDTH-PIX', 'VIRTUAL-WIDTH-PIXE', 'VIRTUAL-WIDTH-PIXEL', 'VISIBLE',
- 'VOID', 'WAIT', 'WAIT-FOR', 'WARNING', 'WEB-CONTEXT', 'WEEKDAY', 'WHEN',
- 'WHERE', 'WHILE', 'WIDGET', 'WIDGET-ENTER', 'WIDGET-E', 'WIDGET-EN',
- 'WIDGET-ENT', 'WIDGET-ENTE', 'WIDGET-ID', 'WIDGET-LEAVE', 'WIDGET-L',
- 'WIDGET-LE', 'WIDGET-LEA', 'WIDGET-LEAV', 'WIDGET-POOL', 'WIDTH',
- 'WIDTH-CHARS', 'WIDTH', 'WIDTH-', 'WIDTH-C', 'WIDTH-CH', 'WIDTH-CHA',
- 'WIDTH-CHAR', 'WIDTH-PIXELS', 'WIDTH-P', 'WIDTH-PI', 'WIDTH-PIX',
- 'WIDTH-PIXE', 'WIDTH-PIXEL', 'WINDOW', 'WINDOW-MAXIMIZED', 'WINDOW-MAXIM',
- 'WINDOW-MAXIMI', 'WINDOW-MAXIMIZ', 'WINDOW-MAXIMIZE', 'WINDOW-MINIMIZED',
- 'WINDOW-MINIM', 'WINDOW-MINIMI', 'WINDOW-MINIMIZ', 'WINDOW-MINIMIZE',
- 'WINDOW-NAME', 'WINDOW-NORMAL', 'WINDOW-STATE', 'WINDOW-STA', 'WINDOW-STAT',
- 'WINDOW-SYSTEM', 'WITH', 'WORD-INDEX', 'WORD-WRAP',
- 'WORK-AREA-HEIGHT-PIXELS', 'WORK-AREA-WIDTH-PIXELS', 'WORK-AREA-X',
- 'WORK-AREA-Y', 'WORKFILE', 'WORK-TABLE', 'WORK-TAB', 'WORK-TABL', 'WRITE',
- 'WRITE-CDATA', 'WRITE-CHARACTERS', 'WRITE-COMMENT', 'WRITE-DATA-ELEMENT',
- 'WRITE-EMPTY-ELEMENT', 'WRITE-ENTITY-REF', 'WRITE-EXTERNAL-DTD',
- 'WRITE-FRAGMENT', 'WRITE-MESSAGE', 'WRITE-PROCESSING-INSTRUCTION',
- 'WRITE-STATUS', 'WRITE-XML', 'WRITE-XMLSCHEMA', 'X', 'XCODE',
- 'XML-DATA-TYPE', 'XML-NODE-TYPE', 'XML-SCHEMA-PATH',
- 'XML-SUPPRESS-NAMESPACE-PROCESSING', 'X-OF', 'XREF', 'XREF-XML', 'Y',
- 'YEAR', 'YEAR-OFFSET', 'YES', 'YES-NO', 'YES-NO-CANCEL', 'Y-OF'
-]
diff --git a/pygments/lexers/_phpbuiltins.py b/pygments/lexers/_php_builtins.py
index 08eaaf2e..f1b64ced 100644
--- a/pygments/lexers/_phpbuiltins.py
+++ b/pygments/lexers/_php_builtins.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""
- pygments.lexers._phpbuiltins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ pygments.lexers._php_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file loads the function names and their modules from the
php webpage and generates itself.
@@ -12,13 +12,14 @@
internet connection. don't run that at home, use
a server ;-)
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+from __future__ import print_function
-MODULES = {'.NET': ['dotnet_load'],
- 'APC': ['apc_add',
+MODULES = {'.NET': ('dotnet_load',),
+ 'APC': ('apc_add',
'apc_bin_dump',
'apc_bin_dumpfile',
'apc_bin_load',
@@ -36,8 +37,8 @@ MODULES = {'.NET': ['dotnet_load'],
'apc_inc',
'apc_load_constants',
'apc_sma_info',
- 'apc_store'],
- 'APD': ['apd_breakpoint',
+ 'apc_store'),
+ 'APD': ('apd_breakpoint',
'apd_callstack',
'apd_clunk',
'apd_continue',
@@ -52,30 +53,27 @@ MODULES = {'.NET': ['dotnet_load'],
'apd_set_session_trace',
'apd_set_session',
'override_function',
- 'rename_function'],
- 'Aliases and deprecated Mysqli': ['mysqli_bind_param',
+ 'rename_function'),
+ 'Aliases and deprecated Mysqli': ('mysqli_bind_param',
'mysqli_bind_result',
'mysqli_client_encoding',
'mysqli_connect',
- 'mysqli_disable_reads_from_master',
'mysqli_disable_rpl_parse',
'mysqli_enable_reads_from_master',
'mysqli_enable_rpl_parse',
'mysqli_escape_string',
'mysqli_execute',
'mysqli_fetch',
+ 'mysqli_get_cache_stats',
'mysqli_get_metadata',
'mysqli_master_query',
'mysqli_param_count',
'mysqli_report',
'mysqli_rpl_parse_enabled',
'mysqli_rpl_probe',
- 'mysqli_rpl_query_type',
'mysqli_send_long_data',
- 'mysqli_send_query',
- 'mysqli_set_opt',
- 'mysqli_slave_query'],
- 'Apache': ['apache_child_terminate',
+ 'mysqli_slave_query'),
+ 'Apache': ('apache_child_terminate',
'apache_get_modules',
'apache_get_version',
'apache_getenv',
@@ -86,9 +84,10 @@ MODULES = {'.NET': ['dotnet_load'],
'apache_response_headers',
'apache_setenv',
'getallheaders',
- 'virtual'],
- 'Array': ['array_change_key_case',
+ 'virtual'),
+ 'Array': ('array_change_key_case',
'array_chunk',
+ 'array_column',
'array_combine',
'array_count_values',
'array_diff_assoc',
@@ -146,6 +145,7 @@ MODULES = {'.NET': ['dotnet_load'],
'end',
'extract',
'in_array',
+ 'key_exists',
'key',
'krsort',
'ksort',
@@ -163,15 +163,15 @@ MODULES = {'.NET': ['dotnet_load'],
'sort',
'uasort',
'uksort',
- 'usort'],
- 'BBCode': ['bbcode_add_element',
+ 'usort'),
+ 'BBCode': ('bbcode_add_element',
'bbcode_add_smiley',
'bbcode_create',
'bbcode_destroy',
'bbcode_parse',
'bbcode_set_arg_parser',
- 'bbcode_set_flags'],
- 'BC Math': ['bcadd',
+ 'bbcode_set_flags'),
+ 'BC Math': ('bcadd',
'bccomp',
'bcdiv',
'bcmod',
@@ -180,8 +180,9 @@ MODULES = {'.NET': ['dotnet_load'],
'bcpowmod',
'bcscale',
'bcsqrt',
- 'bcsub'],
- 'Bzip2': ['bzclose',
+ 'bcsub'),
+ 'Blenc': ('blenc_encrypt',),
+ 'Bzip2': ('bzclose',
'bzcompress',
'bzdecompress',
'bzerrno',
@@ -190,8 +191,8 @@ MODULES = {'.NET': ['dotnet_load'],
'bzflush',
'bzopen',
'bzread',
- 'bzwrite'],
- 'COM': ['com_addref',
+ 'bzwrite'),
+ 'COM': ('com_addref',
'com_create_guid',
'com_event_sink',
'com_get_active_object',
@@ -232,9 +233,8 @@ MODULES = {'.NET': ['dotnet_load'],
'variant_set_type',
'variant_set',
'variant_sub',
- 'variant_xor'],
- 'CUBRID': ['cubrid_affected_rows',
- 'cubrid_bind',
+ 'variant_xor'),
+ 'CUBRID': ('cubrid_bind',
'cubrid_close_prepare',
'cubrid_close_request',
'cubrid_col_get',
@@ -253,10 +253,12 @@ MODULES = {'.NET': ['dotnet_load'],
'cubrid_execute',
'cubrid_fetch',
'cubrid_free_result',
+ 'cubrid_get_autocommit',
'cubrid_get_charset',
'cubrid_get_class_name',
'cubrid_get_client_info',
'cubrid_get_db_parameter',
+ 'cubrid_get_query_timeout',
'cubrid_get_server_info',
'cubrid_get',
'cubrid_insert_id',
@@ -266,11 +268,27 @@ MODULES = {'.NET': ['dotnet_load'],
'cubrid_lob_get',
'cubrid_lob_send',
'cubrid_lob_size',
+ 'cubrid_lob2_bind',
+ 'cubrid_lob2_close',
+ 'cubrid_lob2_export',
+ 'cubrid_lob2_import',
+ 'cubrid_lob2_new',
+ 'cubrid_lob2_read',
+ 'cubrid_lob2_seek64',
+ 'cubrid_lob2_seek',
+ 'cubrid_lob2_size64',
+ 'cubrid_lob2_size',
+ 'cubrid_lob2_tell64',
+ 'cubrid_lob2_tell',
+ 'cubrid_lob2_write',
'cubrid_lock_read',
'cubrid_lock_write',
'cubrid_move_cursor',
+ 'cubrid_next_result',
'cubrid_num_cols',
'cubrid_num_rows',
+ 'cubrid_pconnect_with_url',
+ 'cubrid_pconnect',
'cubrid_prepare',
'cubrid_put',
'cubrid_rollback',
@@ -279,9 +297,12 @@ MODULES = {'.NET': ['dotnet_load'],
'cubrid_seq_insert',
'cubrid_seq_put',
'cubrid_set_add',
+ 'cubrid_set_autocommit',
+ 'cubrid_set_db_parameter',
'cubrid_set_drop',
- 'cubrid_version'],
- 'Cairo': ['cairo_create',
+ 'cubrid_set_query_timeout',
+ 'cubrid_version'),
+ 'Cairo': ('cairo_create',
'cairo_font_face_get_type',
'cairo_font_face_status',
'cairo_font_options_create',
@@ -375,8 +396,8 @@ MODULES = {'.NET': ['dotnet_load'],
'cairo_surface_write_to_png',
'cairo_svg_surface_create',
'cairo_svg_surface_restrict_to_version',
- 'cairo_svg_version_to_string'],
- 'Calendar': ['cal_days_in_month',
+ 'cairo_svg_version_to_string'),
+ 'Calendar': ('cal_days_in_month',
'cal_from_jd',
'cal_info',
'cal_to_jd',
@@ -393,8 +414,9 @@ MODULES = {'.NET': ['dotnet_load'],
'jdtounix',
'JewishToJD',
'JulianToJD',
- 'unixtojd'],
- 'Classes/Object': ['call_user_method_array',
+ 'unixtojd'),
+ 'Classes/Object': ('__autoload',
+ 'call_user_method_array',
'call_user_method',
'class_alias',
'class_exists',
@@ -404,38 +426,43 @@ MODULES = {'.NET': ['dotnet_load'],
'get_class',
'get_declared_classes',
'get_declared_interfaces',
+ 'get_declared_traits',
'get_object_vars',
'get_parent_class',
'interface_exists',
'is_a',
'is_subclass_of',
'method_exists',
- 'property_exists'],
- 'Classkit': ['classkit_import',
+ 'property_exists',
+ 'trait_exists'),
+ 'Classkit': ('classkit_import',
'classkit_method_add',
'classkit_method_copy',
'classkit_method_redefine',
'classkit_method_remove',
- 'classkit_method_rename'],
- 'Crack': ['crack_check',
+ 'classkit_method_rename'),
+ 'Crack': ('crack_check',
'crack_closedict',
'crack_getlastmessage',
- 'crack_opendict'],
- 'Ctype': ['ctype_alnum',
+ 'crack_opendict'),
+ 'Ctype': ('ctype_alnum',
'ctype_alpha',
'ctype_cntrl',
'ctype_digit',
'ctype_graph',
'ctype_lower',
'ctype_print',
- 'ctype_punct'],
- 'Cyrus': ['cyrus_authenticate',
+ 'ctype_punct',
+ 'ctype_space',
+ 'ctype_upper',
+ 'ctype_xdigit'),
+ 'Cyrus': ('cyrus_authenticate',
'cyrus_bind',
'cyrus_close',
'cyrus_connect',
'cyrus_query',
- 'cyrus_unbind'],
- 'DB++': ['dbplus_add',
+ 'cyrus_unbind'),
+ 'DB++': ('dbplus_add',
'dbplus_aql',
'dbplus_chdir',
'dbplus_close',
@@ -481,8 +508,8 @@ MODULES = {'.NET': ['dotnet_load'],
'dbplus_unselect',
'dbplus_update',
'dbplus_xlockrel',
- 'dbplus_xunlockrel'],
- 'DBA': ['dba_close',
+ 'dbplus_xunlockrel'),
+ 'DBA': ('dba_close',
'dba_delete',
'dba_exists',
'dba_fetch',
@@ -496,27 +523,13 @@ MODULES = {'.NET': ['dotnet_load'],
'dba_optimize',
'dba_popen',
'dba_replace',
- 'dba_sync'],
- 'DOM': ['dom_import_simplexml'],
- 'DOM XML (PHP 4)': ['domxml_new_doc',
- 'domxml_open_file',
- 'domxml_open_mem',
- 'domxml_version',
- 'domxml_xmltree',
- 'domxml_xslt_stylesheet_doc',
- 'domxml_xslt_stylesheet_file',
- 'domxml_xslt_stylesheet',
- 'domxml_xslt_version',
- 'xpath_eval_expression',
- 'xpath_eval',
- 'xpath_new_context',
- 'xpath_register_ns_auto',
- 'xpath_register_ns',
- 'xptr_eval',
- 'xptr_new_context'],
- 'Date/Time': ['checkdate',
+ 'dba_sync'),
+ 'DOM': ('dom_import_simplexml',),
+ 'Date/Time': ('checkdate',
'date_add',
'date_create_from_format',
+ 'date_create_immutable_from_format',
+ 'date_create_immutable',
'date_create',
'date_date_set',
'date_default_timezone_get',
@@ -562,17 +575,85 @@ MODULES = {'.NET': ['dotnet_load'],
'timezone_offset_get',
'timezone_open',
'timezone_transitions_get',
- 'timezone_version_get'],
- 'Direct IO': ['dio_close', 'dio_fcntl', 'dio_open'],
- 'Directory': ['chdir',
+ 'timezone_version_get'),
+ 'Direct IO': ('dio_close',
+ 'dio_fcntl',
+ 'dio_open',
+ 'dio_read',
+ 'dio_seek',
+ 'dio_stat',
+ 'dio_tcsetattr',
+ 'dio_truncate',
+ 'dio_write'),
+ 'Directory': ('chdir',
'chroot',
'closedir',
+ 'dir',
'getcwd',
'opendir',
'readdir',
'rewinddir',
- 'scandir'],
- 'Enchant': ['enchant_broker_describe',
+ 'scandir'),
+ 'Eio': ('eio_busy',
+ 'eio_cancel',
+ 'eio_chmod',
+ 'eio_chown',
+ 'eio_close',
+ 'eio_custom',
+ 'eio_dup2',
+ 'eio_event_loop',
+ 'eio_fallocate',
+ 'eio_fchmod',
+ 'eio_fchown',
+ 'eio_fdatasync',
+ 'eio_fstat',
+ 'eio_fstatvfs',
+ 'eio_fsync',
+ 'eio_ftruncate',
+ 'eio_futime',
+ 'eio_get_event_stream',
+ 'eio_get_last_error',
+ 'eio_grp_add',
+ 'eio_grp_cancel',
+ 'eio_grp_limit',
+ 'eio_grp',
+ 'eio_init',
+ 'eio_link',
+ 'eio_lstat',
+ 'eio_mkdir',
+ 'eio_mknod',
+ 'eio_nop',
+ 'eio_npending',
+ 'eio_nready',
+ 'eio_nreqs',
+ 'eio_nthreads',
+ 'eio_open',
+ 'eio_poll',
+ 'eio_read',
+ 'eio_readahead',
+ 'eio_readdir',
+ 'eio_readlink',
+ 'eio_realpath',
+ 'eio_rename',
+ 'eio_rmdir',
+ 'eio_seek',
+ 'eio_sendfile',
+ 'eio_set_max_idle',
+ 'eio_set_max_parallel',
+ 'eio_set_max_poll_reqs',
+ 'eio_set_max_poll_time',
+ 'eio_set_min_parallel',
+ 'eio_stat',
+ 'eio_statvfs',
+ 'eio_symlink',
+ 'eio_sync_file_range',
+ 'eio_sync',
+ 'eio_syncfs',
+ 'eio_truncate',
+ 'eio_unlink',
+ 'eio_utime',
+ 'eio_write'),
+ 'Enchant': ('enchant_broker_describe',
'enchant_broker_dict_exists',
'enchant_broker_free_dict',
'enchant_broker_free',
@@ -590,8 +671,8 @@ MODULES = {'.NET': ['dotnet_load'],
'enchant_dict_is_in_session',
'enchant_dict_quick_check',
'enchant_dict_store_replacement',
- 'enchant_dict_suggest'],
- 'Error Handling': ['debug_backtrace',
+ 'enchant_dict_suggest'),
+ 'Error Handling': ('debug_backtrace',
'debug_print_backtrace',
'error_get_last',
'error_log',
@@ -601,14 +682,14 @@ MODULES = {'.NET': ['dotnet_load'],
'set_error_handler',
'set_exception_handler',
'trigger_error',
- 'user_error'],
- 'Exif': ['exif_imagetype',
+ 'user_error'),
+ 'Exif': ('exif_imagetype',
'exif_read_data',
'exif_tagname',
'exif_thumbnail',
- 'read_exif_data'],
- 'Expect': ['expect_expectl'],
- 'FAM': ['fam_cancel_monitor',
+ 'read_exif_data'),
+ 'Expect': ('expect_expectl', 'expect_popen'),
+ 'FAM': ('fam_cancel_monitor',
'fam_close',
'fam_monitor_collection',
'fam_monitor_directory',
@@ -617,8 +698,8 @@ MODULES = {'.NET': ['dotnet_load'],
'fam_open',
'fam_pending',
'fam_resume_monitor',
- 'fam_suspend_monitor'],
- 'FDF': ['fdf_add_doc_javascript',
+ 'fam_suspend_monitor'),
+ 'FDF': ('fdf_add_doc_javascript',
'fdf_add_template',
'fdf_close',
'fdf_create',
@@ -652,8 +733,9 @@ MODULES = {'.NET': ['dotnet_load'],
'fdf_set_submit_form_action',
'fdf_set_target_frame',
'fdf_set_value',
- 'fdf_set_version'],
- 'FTP': ['ftp_alloc',
+ 'fdf_set_version'),
+ 'FPM': ('fastcgi_finish_request',),
+ 'FTP': ('ftp_alloc',
'ftp_cdup',
'ftp_chdir',
'ftp_chmod',
@@ -686,14 +768,155 @@ MODULES = {'.NET': ['dotnet_load'],
'ftp_site',
'ftp_size',
'ftp_ssl_connect',
- 'ftp_systype'],
- 'Fileinfo': ['finfo_buffer',
+ 'ftp_systype'),
+ 'Fann': ('fann_cascadetrain_on_data',
+ 'fann_cascadetrain_on_file',
+ 'fann_clear_scaling_params',
+ 'fann_copy',
+ 'fann_create_from_file',
+ 'fann_create_shortcut_array',
+ 'fann_create_shortcut',
+ 'fann_create_sparse_array',
+ 'fann_create_sparse',
+ 'fann_create_standard_array',
+ 'fann_create_standard',
+ 'fann_create_train_from_callback',
+ 'fann_create_train',
+ 'fann_descale_input',
+ 'fann_descale_output',
+ 'fann_descale_train',
+ 'fann_destroy_train',
+ 'fann_destroy',
+ 'fann_duplicate_train_data',
+ 'fann_get_activation_function',
+ 'fann_get_activation_steepness',
+ 'fann_get_bias_array',
+ 'fann_get_bit_fail_limit',
+ 'fann_get_bit_fail',
+ 'fann_get_cascade_activation_functions_count',
+ 'fann_get_cascade_activation_functions',
+ 'fann_get_cascade_activation_steepnesses_count',
+ 'fann_get_cascade_activation_steepnesses',
+ 'fann_get_cascade_candidate_change_fraction',
+ 'fann_get_cascade_candidate_limit',
+ 'fann_get_cascade_candidate_stagnation_epochs',
+ 'fann_get_cascade_max_cand_epochs',
+ 'fann_get_cascade_max_out_epochs',
+ 'fann_get_cascade_min_cand_epochs',
+ 'fann_get_cascade_min_out_epochs',
+ 'fann_get_cascade_num_candidate_groups',
+ 'fann_get_cascade_num_candidates',
+ 'fann_get_cascade_output_change_fraction',
+ 'fann_get_cascade_output_stagnation_epochs',
+ 'fann_get_cascade_weight_multiplier',
+ 'fann_get_connection_array',
+ 'fann_get_connection_rate',
+ 'fann_get_errno',
+ 'fann_get_errstr',
+ 'fann_get_layer_array',
+ 'fann_get_learning_momentum',
+ 'fann_get_learning_rate',
+ 'fann_get_MSE',
+ 'fann_get_network_type',
+ 'fann_get_num_input',
+ 'fann_get_num_layers',
+ 'fann_get_num_output',
+ 'fann_get_quickprop_decay',
+ 'fann_get_quickprop_mu',
+ 'fann_get_rprop_decrease_factor',
+ 'fann_get_rprop_delta_max',
+ 'fann_get_rprop_delta_min',
+ 'fann_get_rprop_delta_zero',
+ 'fann_get_rprop_increase_factor',
+ 'fann_get_sarprop_step_error_shift',
+ 'fann_get_sarprop_step_error_threshold_factor',
+ 'fann_get_sarprop_temperature',
+ 'fann_get_sarprop_weight_decay_shift',
+ 'fann_get_total_connections',
+ 'fann_get_total_neurons',
+ 'fann_get_train_error_function',
+ 'fann_get_train_stop_function',
+ 'fann_get_training_algorithm',
+ 'fann_init_weights',
+ 'fann_length_train_data',
+ 'fann_merge_train_data',
+ 'fann_num_input_train_data',
+ 'fann_num_output_train_data',
+ 'fann_print_error',
+ 'fann_randomize_weights',
+ 'fann_read_train_from_file',
+ 'fann_reset_errno',
+ 'fann_reset_errstr',
+ 'fann_reset_MSE',
+ 'fann_run',
+ 'fann_save_train',
+ 'fann_save',
+ 'fann_scale_input_train_data',
+ 'fann_scale_input',
+ 'fann_scale_output_train_data',
+ 'fann_scale_output',
+ 'fann_scale_train_data',
+ 'fann_scale_train',
+ 'fann_set_activation_function_hidden',
+ 'fann_set_activation_function_layer',
+ 'fann_set_activation_function_output',
+ 'fann_set_activation_function',
+ 'fann_set_activation_steepness_hidden',
+ 'fann_set_activation_steepness_layer',
+ 'fann_set_activation_steepness_output',
+ 'fann_set_activation_steepness',
+ 'fann_set_bit_fail_limit',
+ 'fann_set_callback',
+ 'fann_set_cascade_activation_functions',
+ 'fann_set_cascade_activation_steepnesses',
+ 'fann_set_cascade_candidate_change_fraction',
+ 'fann_set_cascade_candidate_limit',
+ 'fann_set_cascade_candidate_stagnation_epochs',
+ 'fann_set_cascade_max_cand_epochs',
+ 'fann_set_cascade_max_out_epochs',
+ 'fann_set_cascade_min_cand_epochs',
+ 'fann_set_cascade_min_out_epochs',
+ 'fann_set_cascade_num_candidate_groups',
+ 'fann_set_cascade_output_change_fraction',
+ 'fann_set_cascade_output_stagnation_epochs',
+ 'fann_set_cascade_weight_multiplier',
+ 'fann_set_error_log',
+ 'fann_set_input_scaling_params',
+ 'fann_set_learning_momentum',
+ 'fann_set_learning_rate',
+ 'fann_set_output_scaling_params',
+ 'fann_set_quickprop_decay',
+ 'fann_set_quickprop_mu',
+ 'fann_set_rprop_decrease_factor',
+ 'fann_set_rprop_delta_max',
+ 'fann_set_rprop_delta_min',
+ 'fann_set_rprop_delta_zero',
+ 'fann_set_rprop_increase_factor',
+ 'fann_set_sarprop_step_error_shift',
+ 'fann_set_sarprop_step_error_threshold_factor',
+ 'fann_set_sarprop_temperature',
+ 'fann_set_sarprop_weight_decay_shift',
+ 'fann_set_scaling_params',
+ 'fann_set_train_error_function',
+ 'fann_set_train_stop_function',
+ 'fann_set_training_algorithm',
+ 'fann_set_weight_array',
+ 'fann_set_weight',
+ 'fann_shuffle_train_data',
+ 'fann_subset_train_data',
+ 'fann_test_data',
+ 'fann_test',
+ 'fann_train_epoch',
+ 'fann_train_on_data',
+ 'fann_train_on_file',
+ 'fann_train'),
+ 'Fileinfo': ('finfo_buffer',
'finfo_close',
'finfo_file',
'finfo_open',
'finfo_set_flags',
- 'mime_content_type'],
- 'Filesystem': ['basename',
+ 'mime_content_type'),
+ 'Filesystem': ('basename',
'chgrp',
'chmod',
'chown',
@@ -772,15 +995,15 @@ MODULES = {'.NET': ['dotnet_load'],
'tmpfile',
'touch',
'umask',
- 'unlink'],
- 'Filter': ['filter_has_var',
+ 'unlink'),
+ 'Filter': ('filter_has_var',
'filter_id',
'filter_input_array',
'filter_input',
'filter_list',
'filter_var_array',
- 'filter_var'],
- 'Firebird/InterBase': ['ibase_add_user',
+ 'filter_var'),
+ 'Firebird/InterBase': ('ibase_add_user',
'ibase_affected_rows',
'ibase_backup',
'ibase_blob_add',
@@ -826,11 +1049,10 @@ MODULES = {'.NET': ['dotnet_load'],
'ibase_service_attach',
'ibase_service_detach',
'ibase_set_event_handler',
- 'ibase_timefmt',
'ibase_trans',
- 'ibase_wait_event'],
- 'FriBiDi': ['fribidi_log2vis'],
- 'FrontBase': ['fbsql_affected_rows',
+ 'ibase_wait_event'),
+ 'FriBiDi': ('fribidi_log2vis',),
+ 'FrontBase': ('fbsql_affected_rows',
'fbsql_autocommit',
'fbsql_blob_size',
'fbsql_change_user',
@@ -889,8 +1111,8 @@ MODULES = {'.NET': ['dotnet_load'],
'fbsql_table_name',
'fbsql_tablename',
'fbsql_username',
- 'fbsql_warnings'],
- 'Function handling': ['call_user_func_array',
+ 'fbsql_warnings'),
+ 'Function handling': ('call_user_func_array',
'call_user_func',
'create_function',
'forward_static_call_array',
@@ -902,12 +1124,122 @@ MODULES = {'.NET': ['dotnet_load'],
'get_defined_functions',
'register_shutdown_function',
'register_tick_function',
- 'unregister_tick_function'],
- 'GD and Image': ['gd_info',
+ 'unregister_tick_function'),
+ 'GD and Image': ('gd_info',
'getimagesize',
+ 'getimagesizefromstring',
'image_type_to_extension',
- 'image_type_to_mime_type'],
- 'GMP': ['gmp_abs',
+ 'image_type_to_mime_type',
+ 'image2wbmp',
+ 'imageaffine',
+ 'imageaffinematrixconcat',
+ 'imageaffinematrixget',
+ 'imagealphablending',
+ 'imageantialias',
+ 'imagearc',
+ 'imagechar',
+ 'imagecharup',
+ 'imagecolorallocate',
+ 'imagecolorallocatealpha',
+ 'imagecolorat',
+ 'imagecolorclosest',
+ 'imagecolorclosestalpha',
+ 'imagecolorclosesthwb',
+ 'imagecolordeallocate',
+ 'imagecolorexact',
+ 'imagecolorexactalpha',
+ 'imagecolormatch',
+ 'imagecolorresolve',
+ 'imagecolorresolvealpha',
+ 'imagecolorset',
+ 'imagecolorsforindex',
+ 'imagecolorstotal',
+ 'imagecolortransparent',
+ 'imageconvolution',
+ 'imagecopy',
+ 'imagecopymerge',
+ 'imagecopymergegray',
+ 'imagecopyresampled',
+ 'imagecopyresized',
+ 'imagecreate',
+ 'imagecreatefromgd2',
+ 'imagecreatefromgd2part',
+ 'imagecreatefromgd',
+ 'imagecreatefromgif',
+ 'imagecreatefromjpeg',
+ 'imagecreatefrompng',
+ 'imagecreatefromstring',
+ 'imagecreatefromwbmp',
+ 'imagecreatefromwebp',
+ 'imagecreatefromxbm',
+ 'imagecreatefromxpm',
+ 'imagecreatetruecolor',
+ 'imagecrop',
+ 'imagecropauto',
+ 'imagedashedline',
+ 'imagedestroy',
+ 'imageellipse',
+ 'imagefill',
+ 'imagefilledarc',
+ 'imagefilledellipse',
+ 'imagefilledpolygon',
+ 'imagefilledrectangle',
+ 'imagefilltoborder',
+ 'imagefilter',
+ 'imageflip',
+ 'imagefontheight',
+ 'imagefontwidth',
+ 'imageftbbox',
+ 'imagefttext',
+ 'imagegammacorrect',
+ 'imagegd2',
+ 'imagegd',
+ 'imagegif',
+ 'imagegrabscreen',
+ 'imagegrabwindow',
+ 'imageinterlace',
+ 'imageistruecolor',
+ 'imagejpeg',
+ 'imagelayereffect',
+ 'imageline',
+ 'imageloadfont',
+ 'imagepalettecopy',
+ 'imagepalettetotruecolor',
+ 'imagepng',
+ 'imagepolygon',
+ 'imagepsbbox',
+ 'imagepsencodefont',
+ 'imagepsextendfont',
+ 'imagepsfreefont',
+ 'imagepsloadfont',
+ 'imagepsslantfont',
+ 'imagepstext',
+ 'imagerectangle',
+ 'imagerotate',
+ 'imagesavealpha',
+ 'imagescale',
+ 'imagesetbrush',
+ 'imagesetinterpolation',
+ 'imagesetpixel',
+ 'imagesetstyle',
+ 'imagesetthickness',
+ 'imagesettile',
+ 'imagestring',
+ 'imagestringup',
+ 'imagesx',
+ 'imagesy',
+ 'imagetruecolortopalette',
+ 'imagettfbbox',
+ 'imagettftext',
+ 'imagetypes',
+ 'imagewbmp',
+ 'imagewebp',
+ 'imagexbm',
+ 'iptcembed',
+ 'iptcparse',
+ 'jpeg2wbmp',
+ 'png2wbmp'),
+ 'GMP': ('gmp_abs',
'gmp_add',
'gmp_and',
'gmp_clrbit',
@@ -947,8 +1279,9 @@ MODULES = {'.NET': ['dotnet_load'],
'gmp_strval',
'gmp_sub',
'gmp_testbit',
- 'gmp_xor'],
- 'GeoIP': ['geoip_continent_code_by_name',
+ 'gmp_xor'),
+ 'GeoIP': ('geoip_asnum_by_name',
+ 'geoip_continent_code_by_name',
'geoip_country_code_by_name',
'geoip_country_code3_by_name',
'geoip_country_name_by_name',
@@ -956,14 +1289,17 @@ MODULES = {'.NET': ['dotnet_load'],
'geoip_db_avail',
'geoip_db_filename',
'geoip_db_get_all_info',
+ 'geoip_domain_by_name',
'geoip_id_by_name',
'geoip_isp_by_name',
+ 'geoip_netspeedcell_by_name',
'geoip_org_by_name',
'geoip_record_by_name',
'geoip_region_by_name',
'geoip_region_name_by_code',
- 'geoip_time_zone_by_country_and_region'],
- 'Gettext': ['bind_textdomain_codeset',
+ 'geoip_setup_custom_directory',
+ 'geoip_time_zone_by_country_and_region'),
+ 'Gettext': ('bind_textdomain_codeset',
'bindtextdomain',
'dcgettext',
'dcngettext',
@@ -971,8 +1307,8 @@ MODULES = {'.NET': ['dotnet_load'],
'dngettext',
'gettext',
'ngettext',
- 'textdomain'],
- 'GnuPG': ['gnupg_adddecryptkey',
+ 'textdomain'),
+ 'GnuPG': ('gnupg_adddecryptkey',
'gnupg_addencryptkey',
'gnupg_addsignkey',
'gnupg_cleardecryptkeys',
@@ -992,9 +1328,9 @@ MODULES = {'.NET': ['dotnet_load'],
'gnupg_seterrormode',
'gnupg_setsignmode',
'gnupg_sign',
- 'gnupg_verify'],
- 'Gopher': ['gopher_parsedir'],
- 'Grapheme': ['grapheme_extract',
+ 'gnupg_verify'),
+ 'Gopher': ('gopher_parsedir',),
+ 'Grapheme': ('grapheme_extract',
'grapheme_stripos',
'grapheme_stristr',
'grapheme_strlen',
@@ -1002,8 +1338,8 @@ MODULES = {'.NET': ['dotnet_load'],
'grapheme_strripos',
'grapheme_strrpos',
'grapheme_strstr',
- 'grapheme_substr'],
- 'Gupnp': ['gupnp_context_get_host_ip',
+ 'grapheme_substr'),
+ 'Gupnp': ('gupnp_context_get_host_ip',
'gupnp_context_get_port',
'gupnp_context_get_subscription_timeout',
'gupnp_context_host_path',
@@ -1040,8 +1376,8 @@ MODULES = {'.NET': ['dotnet_load'],
'gupnp_service_proxy_get_subscribed',
'gupnp_service_proxy_remove_notify',
'gupnp_service_proxy_set_subscribed',
- 'gupnp_service_thaw_notify'],
- 'HTTP': ['http_cache_etag',
+ 'gupnp_service_thaw_notify'),
+ 'HTTP': ('http_cache_etag',
'http_cache_last_modified',
'http_chunked_decode',
'http_deflate',
@@ -1091,19 +1427,20 @@ MODULES = {'.NET': ['dotnet_load'],
'http_send_stream',
'http_throttle',
'http_build_str',
- 'http_build_url'],
- 'Hash': ['hash_algos',
+ 'http_build_url'),
+ 'Hash': ('hash_algos',
'hash_copy',
'hash_file',
'hash_final',
'hash_hmac_file',
'hash_hmac',
'hash_init',
+ 'hash_pbkdf2',
'hash_update_file',
'hash_update_stream',
'hash_update',
- 'hash'],
- 'Hyperwave': ['hw_Array2Objrec',
+ 'hash'),
+ 'Hyperwave': ('hw_Array2Objrec',
'hw_changeobject',
'hw_Children',
'hw_ChildrenObj',
@@ -1164,12 +1501,12 @@ MODULES = {'.NET': ['dotnet_load'],
'hw_setlinkroot',
'hw_stat',
'hw_Unlock',
- 'hw_Who'],
- 'Hyperwave API': ['hw_api_attribute',
+ 'hw_Who'),
+ 'Hyperwave API': ('hwapi_attribute_new',
+ 'hwapi_content_new',
'hwapi_hgcsp',
- 'hw_api_content',
- 'hw_api_object'],
- 'IBM DB2': ['db2_autocommit',
+ 'hwapi_object_new'),
+ 'IBM DB2': ('db2_autocommit',
'db2_bind_param',
'db2_client_info',
'db2_close',
@@ -1199,8 +1536,28 @@ MODULES = {'.NET': ['dotnet_load'],
'db2_free_result',
'db2_free_stmt',
'db2_get_option',
- 'db2_last_insert_id'],
- 'ID3': ['id3_get_frame_long_name',
+ 'db2_last_insert_id',
+ 'db2_lob_read',
+ 'db2_next_result',
+ 'db2_num_fields',
+ 'db2_num_rows',
+ 'db2_pclose',
+ 'db2_pconnect',
+ 'db2_prepare',
+ 'db2_primary_keys',
+ 'db2_procedure_columns',
+ 'db2_procedures',
+ 'db2_result',
+ 'db2_rollback',
+ 'db2_server_info',
+ 'db2_set_option',
+ 'db2_special_columns',
+ 'db2_statistics',
+ 'db2_stmt_error',
+ 'db2_stmt_errormsg',
+ 'db2_table_privileges',
+ 'db2_tables'),
+ 'ID3': ('id3_get_frame_long_name',
'id3_get_frame_short_name',
'id3_get_genre_id',
'id3_get_genre_list',
@@ -1208,9 +1565,9 @@ MODULES = {'.NET': ['dotnet_load'],
'id3_get_tag',
'id3_get_version',
'id3_remove_tag',
- 'id3_set_tag'],
- 'IDN': ['idn_to_ascii', 'idn_to_unicode', 'idn_to_utf8'],
- 'IIS': ['iis_add_server',
+ 'id3_set_tag'),
+ 'IDN': ('grapheme_substr', 'idn_to_ascii', 'idn_to_unicode', 'idn_to_utf8'),
+ 'IIS': ('iis_add_server',
'iis_get_dir_security',
'iis_get_script_map',
'iis_get_server_by_comment',
@@ -1225,8 +1582,8 @@ MODULES = {'.NET': ['dotnet_load'],
'iis_start_server',
'iis_start_service',
'iis_stop_server',
- 'iis_stop_service'],
- 'IMAP': ['imap_8bit',
+ 'iis_stop_service'),
+ 'IMAP': ('imap_8bit',
'imap_alerts',
'imap_append',
'imap_base64',
@@ -1236,6 +1593,7 @@ MODULES = {'.NET': ['dotnet_load'],
'imap_check',
'imap_clearflag_full',
'imap_close',
+ 'imap_create',
'imap_createmailbox',
'imap_delete',
'imap_deletemailbox',
@@ -1246,6 +1604,7 @@ MODULES = {'.NET': ['dotnet_load'],
'imap_fetchheader',
'imap_fetchmime',
'imap_fetchstructure',
+ 'imap_fetchtext',
'imap_gc',
'imap_get_quota',
'imap_get_quotaroot',
@@ -1273,12 +1632,14 @@ MODULES = {'.NET': ['dotnet_load'],
'imap_open',
'imap_ping',
'imap_qprint',
+ 'imap_rename',
'imap_renamemailbox',
'imap_reopen',
'imap_rfc822_parse_adrlist',
'imap_rfc822_parse_headers',
'imap_rfc822_write_address',
'imap_savebody',
+ 'imap_scan',
'imap_scanmailbox',
'imap_search',
'imap_set_quota',
@@ -1294,8 +1655,8 @@ MODULES = {'.NET': ['dotnet_load'],
'imap_unsubscribe',
'imap_utf7_decode',
'imap_utf7_encode',
- 'imap_utf8'],
- 'Informix': ['ifx_affected_rows',
+ 'imap_utf8'),
+ 'Informix': ('ifx_affected_rows',
'ifx_blobinfile_mode',
'ifx_byteasvarchar',
'ifx_close',
@@ -1332,8 +1693,8 @@ MODULES = {'.NET': ['dotnet_load'],
'ifxus_read_slob',
'ifxus_seek_slob',
'ifxus_tell_slob',
- 'ifxus_write_slob'],
- 'Ingres': ['ingres_autocommit_state',
+ 'ifxus_write_slob'),
+ 'Ingres': ('ingres_autocommit_state',
'ingres_autocommit',
'ingres_charset',
'ingres_close',
@@ -1366,16 +1727,19 @@ MODULES = {'.NET': ['dotnet_load'],
'ingres_result_seek',
'ingres_rollback',
'ingres_set_environment',
- 'ingres_unbuffered_query'],
- 'Inotify': ['inotify_add_watch',
+ 'ingres_unbuffered_query'),
+ 'Inotify': ('inotify_add_watch',
'inotify_init',
'inotify_queue_len',
'inotify_read',
- 'inotify_rm_watch'],
- 'JSON': ['json_decode', 'json_encode', 'json_last_error'],
- 'Java': ['java_last_exception_clear', 'java_last_exception_get'],
- 'Judy': ['judy_type', 'judy_version'],
- 'KADM5': ['kadm5_chpass_principal',
+ 'inotify_rm_watch'),
+ 'JSON': ('json_decode',
+ 'json_encode',
+ 'json_last_error_msg',
+ 'json_last_error'),
+ 'Java': ('java_last_exception_clear', 'java_last_exception_get'),
+ 'Judy': ('judy_type', 'judy_version'),
+ 'KADM5': ('kadm5_chpass_principal',
'kadm5_create_principal',
'kadm5_delete_principal',
'kadm5_destroy',
@@ -1384,13 +1748,15 @@ MODULES = {'.NET': ['dotnet_load'],
'kadm5_get_principal',
'kadm5_get_principals',
'kadm5_init_with_password',
- 'kadm5_modify_principal'],
- 'LDAP': ['ldap_8859_to_t61',
+ 'kadm5_modify_principal'),
+ 'LDAP': ('ldap_8859_to_t61',
'ldap_add',
'ldap_bind',
'ldap_close',
'ldap_compare',
'ldap_connect',
+ 'ldap_control_paged_result_response',
+ 'ldap_control_paged_result',
'ldap_count_entries',
'ldap_delete',
'ldap_dn2ufn',
@@ -1427,9 +1793,9 @@ MODULES = {'.NET': ['dotnet_load'],
'ldap_sort',
'ldap_start_tls',
'ldap_t61_to_8859',
- 'ldap_unbind'],
- 'LZF': ['lzf_compress', 'lzf_decompress', 'lzf_optimized_for'],
- 'Libevent': ['event_add',
+ 'ldap_unbind'),
+ 'LZF': ('lzf_compress', 'lzf_decompress', 'lzf_optimized_for'),
+ 'Libevent': ('event_add',
'event_base_free',
'event_base_loop',
'event_base_loopbreak',
@@ -1452,8 +1818,8 @@ MODULES = {'.NET': ['dotnet_load'],
'event_del',
'event_free',
'event_new',
- 'event_set'],
- 'Lotus Notes': ['notes_body',
+ 'event_set'),
+ 'Lotus Notes': ('notes_body',
'notes_copy_db',
'notes_create_db',
'notes_create_note',
@@ -1466,8 +1832,8 @@ MODULES = {'.NET': ['dotnet_load'],
'notes_nav_create',
'notes_search',
'notes_unread',
- 'notes_version'],
- 'MCVE': ['m_checkstatus',
+ 'notes_version'),
+ 'MCVE': ('m_checkstatus',
'm_completeauthorizations',
'm_connect',
'm_connectionerror',
@@ -1486,9 +1852,28 @@ MODULES = {'.NET': ['dotnet_load'],
'm_numcolumns',
'm_numrows',
'm_parsecommadelimited',
- 'm_responsekeys'],
- 'Mail': ['ezmlm_hash', 'mail'],
- 'Mailparse': ['mailparse_determine_best_xfer_encoding',
+ 'm_responsekeys',
+ 'm_responseparam',
+ 'm_returnstatus',
+ 'm_setblocking',
+ 'm_setdropfile',
+ 'm_setip',
+ 'm_setssl_cafile',
+ 'm_setssl_files',
+ 'm_setssl',
+ 'm_settimeout',
+ 'm_sslcert_gen_hash',
+ 'm_transactionssent',
+ 'm_transinqueue',
+ 'm_transkeyval',
+ 'm_transnew',
+ 'm_transsend',
+ 'm_uwait',
+ 'm_validateidentifier',
+ 'm_verifyconnection',
+ 'm_verifysslcert'),
+ 'Mail': ('ezmlm_hash', 'mail'),
+ 'Mailparse': ('mailparse_determine_best_xfer_encoding',
'mailparse_msg_create',
'mailparse_msg_extract_part_file',
'mailparse_msg_extract_part',
@@ -1501,8 +1886,8 @@ MODULES = {'.NET': ['dotnet_load'],
'mailparse_msg_parse',
'mailparse_rfc822_parse_addresses',
'mailparse_stream_encode',
- 'mailparse_uudecode_all'],
- 'Math': ['abs',
+ 'mailparse_uudecode_all'),
+ 'Math': ('abs',
'acos',
'acosh',
'asin',
@@ -1520,8 +1905,37 @@ MODULES = {'.NET': ['dotnet_load'],
'decoct',
'deg2rad',
'exp',
- 'expm1'],
- 'MaxDB': ['maxdb_affected_rows',
+ 'expm1',
+ 'floor',
+ 'fmod',
+ 'getrandmax',
+ 'hexdec',
+ 'hypot',
+ 'is_finite',
+ 'is_infinite',
+ 'is_nan',
+ 'lcg_value',
+ 'log10',
+ 'log1p',
+ 'log',
+ 'max',
+ 'min',
+ 'mt_getrandmax',
+ 'mt_rand',
+ 'mt_srand',
+ 'octdec',
+ 'pi',
+ 'pow',
+ 'rad2deg',
+ 'rand',
+ 'round',
+ 'sin',
+ 'sinh',
+ 'sqrt',
+ 'srand',
+ 'tan',
+ 'tanh'),
+ 'MaxDB': ('maxdb_affected_rows',
'maxdb_autocommit',
'maxdb_bind_param',
'maxdb_bind_result',
@@ -1598,8 +2012,32 @@ MODULES = {'.NET': ['dotnet_load'],
'maxdb_sqlstate',
'maxdb_ssl_set',
'maxdb_stat',
- 'maxdb_stmt_affected_rows'],
- 'Mcrypt': ['mcrypt_cbc',
+ 'maxdb_stmt_affected_rows',
+ 'maxdb_stmt_bind_param',
+ 'maxdb_stmt_bind_result',
+ 'maxdb_stmt_close_long_data',
+ 'maxdb_stmt_close',
+ 'maxdb_stmt_data_seek',
+ 'maxdb_stmt_errno',
+ 'maxdb_stmt_error',
+ 'maxdb_stmt_execute',
+ 'maxdb_stmt_fetch',
+ 'maxdb_stmt_free_result',
+ 'maxdb_stmt_init',
+ 'maxdb_stmt_num_rows',
+ 'maxdb_stmt_param_count',
+ 'maxdb_stmt_prepare',
+ 'maxdb_stmt_reset',
+ 'maxdb_stmt_result_metadata',
+ 'maxdb_stmt_send_long_data',
+ 'maxdb_stmt_sqlstate',
+ 'maxdb_stmt_store_result',
+ 'maxdb_store_result',
+ 'maxdb_thread_id',
+ 'maxdb_thread_safe',
+ 'maxdb_use_result',
+ 'maxdb_warning_count'),
+ 'Mcrypt': ('mcrypt_cbc',
'mcrypt_cfb',
'mcrypt_create_iv',
'mcrypt_decrypt',
@@ -1635,20 +2073,20 @@ MODULES = {'.NET': ['dotnet_load'],
'mcrypt_module_open',
'mcrypt_module_self_test',
'mcrypt_ofb',
- 'mdecrypt_generic'],
- 'Memcache': ['memcache_debug'],
- 'Mhash': ['mhash_count',
+ 'mdecrypt_generic'),
+ 'Memcache': ('memcache_debug',),
+ 'Mhash': ('mhash_count',
'mhash_get_block_size',
'mhash_get_hash_name',
'mhash_keygen_s2k',
- 'mhash'],
- 'Ming': ['ming_keypress',
+ 'mhash'),
+ 'Ming': ('ming_keypress',
'ming_setcubicthreshold',
'ming_setscale',
'ming_setswfcompression',
'ming_useconstants',
- 'ming_useswfversion'],
- 'Misc.': ['connection_aborted',
+ 'ming_useswfversion'),
+ 'Misc.': ('connection_aborted',
'connection_status',
'connection_timeout',
'constant',
@@ -1672,9 +2110,9 @@ MODULES = {'.NET': ['dotnet_load'],
'time_sleep_until',
'uniqid',
'unpack',
- 'usleep'],
- 'Mongo': ['bson_decode', 'bson_encode'],
- 'Msession': ['msession_connect',
+ 'usleep'),
+ 'Mongo': ('bson_decode', 'bson_encode'),
+ 'Msession': ('msession_connect',
'msession_count',
'msession_create',
'msession_destroy',
@@ -1694,8 +2132,8 @@ MODULES = {'.NET': ['dotnet_load'],
'msession_set',
'msession_timeout',
'msession_uniq',
- 'msession_unlock'],
- 'Mssql': ['mssql_bind',
+ 'msession_unlock'),
+ 'Mssql': ('mssql_bind',
'mssql_close',
'mssql_connect',
'mssql_data_seek',
@@ -1724,8 +2162,8 @@ MODULES = {'.NET': ['dotnet_load'],
'mssql_query',
'mssql_result',
'mssql_rows_affected',
- 'mssql_select_db'],
- 'Multibyte String': ['mb_check_encoding',
+ 'mssql_select_db'),
+ 'Multibyte String': ('mb_check_encoding',
'mb_convert_case',
'mb_convert_encoding',
'mb_convert_kana',
@@ -1738,6 +2176,7 @@ MODULES = {'.NET': ['dotnet_load'],
'mb_encode_numericentity',
'mb_encoding_aliases',
'mb_ereg_match',
+ 'mb_ereg_replace_callback',
'mb_ereg_replace',
'mb_ereg_search_getpos',
'mb_ereg_search_getregs',
@@ -1778,8 +2217,8 @@ MODULES = {'.NET': ['dotnet_load'],
'mb_strwidth',
'mb_substitute_character',
'mb_substr_count',
- 'mb_substr'],
- 'MySQL': ['mysql_affected_rows',
+ 'mb_substr'),
+ 'MySQL': ('mysql_affected_rows',
'mysql_client_encoding',
'mysql_close',
'mysql_connect',
@@ -1826,9 +2265,23 @@ MODULES = {'.NET': ['dotnet_load'],
'mysql_stat',
'mysql_tablename',
'mysql_thread_id',
- 'mysql_unbuffered_query'],
- 'NSAPI': ['nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'],
- 'Ncurses': ['ncurses_addch',
+ 'mysql_unbuffered_query'),
+ 'Mysqlnd_memcache': ('mysqlnd_memcache_get_config', 'mysqlnd_memcache_set'),
+ 'Mysqlnd_ms': ('mysqlnd_ms_dump_servers',
+ 'mysqlnd_ms_fabric_select_global',
+ 'mysqlnd_ms_fabric_select_shard',
+ 'mysqlnd_ms_get_last_gtid',
+ 'mysqlnd_ms_get_last_used_connection',
+ 'mysqlnd_ms_get_stats',
+ 'mysqlnd_ms_match_wild',
+ 'mysqlnd_ms_query_is_select',
+ 'mysqlnd_ms_set_qos',
+ 'mysqlnd_ms_set_user_pick_server'),
+ 'Mysqlnd_uh': ('mysqlnd_uh_convert_to_mysqlnd',
+ 'mysqlnd_uh_set_connection_proxy',
+ 'mysqlnd_uh_set_statement_proxy'),
+ 'NSAPI': ('nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'),
+ 'Ncurses': ('ncurses_addch',
'ncurses_addchnstr',
'ncurses_addchstr',
'ncurses_addnstr',
@@ -1987,8 +2440,8 @@ MODULES = {'.NET': ['dotnet_load'],
'ncurses_wrefresh',
'ncurses_wstandend',
'ncurses_wstandout',
- 'ncurses_wvline'],
- 'Network': ['checkdnsrr',
+ 'ncurses_wvline'),
+ 'Network': ('checkdnsrr',
'closelog',
'define_syslog_variables',
'dns_check_record',
@@ -1997,8 +2450,32 @@ MODULES = {'.NET': ['dotnet_load'],
'fsockopen',
'gethostbyaddr',
'gethostbyname',
- 'gethostbynamel'],
- 'Newt': ['newt_bell',
+ 'gethostbynamel',
+ 'gethostname',
+ 'getmxrr',
+ 'getprotobyname',
+ 'getprotobynumber',
+ 'getservbyname',
+ 'getservbyport',
+ 'header_register_callback',
+ 'header_remove',
+ 'header',
+ 'headers_list',
+ 'headers_sent',
+ 'http_response_code',
+ 'inet_ntop',
+ 'inet_pton',
+ 'ip2long',
+ 'long2ip',
+ 'openlog',
+ 'pfsockopen',
+ 'setcookie',
+ 'setrawcookie',
+ 'socket_get_status',
+ 'socket_set_blocking',
+ 'socket_set_timeout',
+ 'syslog'),
+ 'Newt': ('newt_bell',
'newt_button_bar',
'newt_button',
'newt_centered_window',
@@ -2018,11 +2495,107 @@ MODULES = {'.NET': ['dotnet_load'],
'newt_checkbox_tree_set_width',
'newt_checkbox_tree',
'newt_checkbox',
- 'newt_clear_key_buffer'],
- 'OAuth': ['oauth_get_sbs', 'oauth_urlencode'],
- 'OCI8': ['oci_bind_array_by_name',
+ 'newt_clear_key_buffer',
+ 'newt_cls',
+ 'newt_compact_button',
+ 'newt_component_add_callback',
+ 'newt_component_takes_focus',
+ 'newt_create_grid',
+ 'newt_cursor_off',
+ 'newt_cursor_on',
+ 'newt_delay',
+ 'newt_draw_form',
+ 'newt_draw_root_text',
+ 'newt_entry_get_value',
+ 'newt_entry_set_filter',
+ 'newt_entry_set_flags',
+ 'newt_entry_set',
+ 'newt_entry',
+ 'newt_finished',
+ 'newt_form_add_component',
+ 'newt_form_add_components',
+ 'newt_form_add_hot_key',
+ 'newt_form_destroy',
+ 'newt_form_get_current',
+ 'newt_form_run',
+ 'newt_form_set_background',
+ 'newt_form_set_height',
+ 'newt_form_set_size',
+ 'newt_form_set_timer',
+ 'newt_form_set_width',
+ 'newt_form_watch_fd',
+ 'newt_form',
+ 'newt_get_screen_size',
+ 'newt_grid_add_components_to_form',
+ 'newt_grid_basic_window',
+ 'newt_grid_free',
+ 'newt_grid_get_size',
+ 'newt_grid_h_close_stacked',
+ 'newt_grid_h_stacked',
+ 'newt_grid_place',
+ 'newt_grid_set_field',
+ 'newt_grid_simple_window',
+ 'newt_grid_v_close_stacked',
+ 'newt_grid_v_stacked',
+ 'newt_grid_wrapped_window_at',
+ 'newt_grid_wrapped_window',
+ 'newt_init',
+ 'newt_label_set_text',
+ 'newt_label',
+ 'newt_listbox_append_entry',
+ 'newt_listbox_clear_selection',
+ 'newt_listbox_clear',
+ 'newt_listbox_delete_entry',
+ 'newt_listbox_get_current',
+ 'newt_listbox_get_selection',
+ 'newt_listbox_insert_entry',
+ 'newt_listbox_item_count',
+ 'newt_listbox_select_item',
+ 'newt_listbox_set_current_by_key',
+ 'newt_listbox_set_current',
+ 'newt_listbox_set_data',
+ 'newt_listbox_set_entry',
+ 'newt_listbox_set_width',
+ 'newt_listbox',
+ 'newt_listitem_get_data',
+ 'newt_listitem_set',
+ 'newt_listitem',
+ 'newt_open_window',
+ 'newt_pop_help_line',
+ 'newt_pop_window',
+ 'newt_push_help_line',
+ 'newt_radio_get_current',
+ 'newt_radiobutton',
+ 'newt_redraw_help_line',
+ 'newt_reflow_text',
+ 'newt_refresh',
+ 'newt_resize_screen',
+ 'newt_resume',
+ 'newt_run_form',
+ 'newt_scale_set',
+ 'newt_scale',
+ 'newt_scrollbar_set',
+ 'newt_set_help_callback',
+ 'newt_set_suspend_callback',
+ 'newt_suspend',
+ 'newt_textbox_get_num_lines',
+ 'newt_textbox_reflowed',
+ 'newt_textbox_set_height',
+ 'newt_textbox_set_text',
+ 'newt_textbox',
+ 'newt_vertical_scrollbar',
+ 'newt_wait_for_key',
+ 'newt_win_choice',
+ 'newt_win_entries',
+ 'newt_win_menu',
+ 'newt_win_message',
+ 'newt_win_messagev',
+ 'newt_win_ternary'),
+ 'OAuth': ('oauth_get_sbs', 'oauth_urlencode'),
+ 'OCI8': ('oci_bind_array_by_name',
'oci_bind_by_name',
'oci_cancel',
+ 'oci_client_version',
'oci_close',
'oci_commit',
'oci_connect',
@@ -2042,7 +2615,9 @@ MODULES = {'.NET': ['dotnet_load'],
'oci_field_size',
'oci_field_type_raw',
'oci_field_type',
+ 'oci_free_descriptor',
'oci_free_statement',
+ 'oci_get_implicit_resultset',
'oci_internal_debug',
'oci_lob_copy',
'oci_lob_is_equal',
@@ -2064,8 +2639,8 @@ MODULES = {'.NET': ['dotnet_load'],
'oci_set_edition',
'oci_set_module_name',
'oci_set_prefetch',
- 'oci_statement_type'],
- 'ODBC': ['odbc_autocommit',
+ 'oci_statement_type'),
+ 'ODBC': ('odbc_autocommit',
'odbc_binmode',
'odbc_close_all',
'odbc_close',
@@ -2109,12 +2684,23 @@ MODULES = {'.NET': ['dotnet_load'],
'odbc_specialcolumns',
'odbc_statistics',
'odbc_tableprivileges',
- 'odbc_tables'],
- 'Object Aggregation': ['aggregate_info',
+ 'odbc_tables'),
+ 'OPcache': ('opcache_compile_file',
+ 'opcache_get_configuration',
+ 'opcache_get_status',
+ 'opcache_invalidate',
+ 'opcache_reset'),
+ 'Object Aggregation': ('aggregate_info',
'aggregate_methods_by_list',
- 'aggregate_methods_by_regexp'],
- 'Object overloading': ['overload'],
- 'OpenAL': ['openal_buffer_create',
+ 'aggregate_methods_by_regexp',
+ 'aggregate_methods',
+ 'aggregate_properties_by_list',
+ 'aggregate_properties_by_regexp',
+ 'aggregate_properties',
+ 'aggregate',
+ 'aggregation_info',
+ 'deaggregate'),
+ 'OpenAL': ('openal_buffer_create',
'openal_buffer_data',
'openal_buffer_destroy',
'openal_buffer_get',
@@ -2136,8 +2722,9 @@ MODULES = {'.NET': ['dotnet_load'],
'openal_source_rewind',
'openal_source_set',
'openal_source_stop',
- 'openal_stream'],
- 'OpenSSL': ['openssl_csr_export_to_file',
+ 'openal_stream'),
+ 'OpenSSL': ('openssl_cipher_iv_length',
+ 'openssl_csr_export_to_file',
'openssl_csr_export',
'openssl_csr_get_public_key',
'openssl_csr_get_subject',
@@ -2154,6 +2741,7 @@ MODULES = {'.NET': ['dotnet_load'],
'openssl_get_privatekey',
'openssl_get_publickey',
'openssl_open',
+ 'openssl_pbkdf2',
'openssl_pkcs12_export_to_file',
'openssl_pkcs12_export',
'openssl_pkcs12_read',
@@ -2175,6 +2763,10 @@ MODULES = {'.NET': ['dotnet_load'],
'openssl_random_pseudo_bytes',
'openssl_seal',
'openssl_sign',
+ 'openssl_spki_export_challenge',
+ 'openssl_spki_export',
+ 'openssl_spki_new',
+ 'openssl_spki_verify',
'openssl_verify',
'openssl_x509_check_private_key',
'openssl_x509_checkpurpose',
@@ -2182,8 +2774,8 @@ MODULES = {'.NET': ['dotnet_load'],
'openssl_x509_export',
'openssl_x509_free',
'openssl_x509_parse',
- 'openssl_x509_read'],
- 'Output Control': ['flush',
+ 'openssl_x509_read'),
+ 'Output Control': ('flush',
'ob_clean',
'ob_end_clean',
'ob_end_flush',
@@ -2199,8 +2791,8 @@ MODULES = {'.NET': ['dotnet_load'],
'ob_list_handlers',
'ob_start',
'output_add_rewrite_var',
- 'output_reset_rewrite_vars'],
- 'Ovrimos SQL': ['ovrimos_close',
+ 'output_reset_rewrite_vars'),
+ 'Ovrimos SQL': ('ovrimos_close',
'ovrimos_commit',
'ovrimos_connect',
'ovrimos_cursor',
@@ -2219,10 +2811,12 @@ MODULES = {'.NET': ['dotnet_load'],
'ovrimos_prepare',
'ovrimos_result_all',
'ovrimos_result',
- 'ovrimos_rollback'],
- 'PCNTL': ['pcntl_alarm',
+ 'ovrimos_rollback'),
+ 'PCNTL': ('pcntl_alarm',
+ 'pcntl_errno',
'pcntl_exec',
'pcntl_fork',
+ 'pcntl_get_last_error',
'pcntl_getpriority',
'pcntl_setpriority',
'pcntl_signal_dispatch',
@@ -2230,6 +2824,7 @@ MODULES = {'.NET': ['dotnet_load'],
'pcntl_sigprocmask',
'pcntl_sigtimedwait',
'pcntl_sigwaitinfo',
+ 'pcntl_strerror',
'pcntl_wait',
'pcntl_waitpid',
'pcntl_wexitstatus',
@@ -2237,8 +2832,8 @@ MODULES = {'.NET': ['dotnet_load'],
'pcntl_wifsignaled',
'pcntl_wifstopped',
'pcntl_wstopsig',
- 'pcntl_wtermsig'],
- 'PCRE': ['preg_filter',
+ 'pcntl_wtermsig'),
+ 'PCRE': ('preg_filter',
'preg_grep',
'preg_last_error',
'preg_match_all',
@@ -2246,8 +2841,8 @@ MODULES = {'.NET': ['dotnet_load'],
'preg_quote',
'preg_replace_callback',
'preg_replace',
- 'preg_split'],
- 'PDF': ['PDF_activate_item',
+ 'preg_split'),
+ 'PDF': ('PDF_activate_item',
'PDF_add_annotation',
'PDF_add_bookmark',
'PDF_add_launchlink',
@@ -2425,9 +3020,11 @@ MODULES = {'.NET': ['dotnet_load'],
'PDF_translate',
'PDF_utf16_to_utf8',
'PDF_utf32_to_utf16',
- 'PDF_utf8_to_utf16'],
- 'PHP Options/Info': ['assert_options',
+ 'PDF_utf8_to_utf16'),
+ 'PHP Options/Info': ('assert_options',
'assert',
+ 'cli_get_process_title',
+ 'cli_set_process_title',
'dl',
'extension_loaded',
'gc_collect_cycles',
@@ -2477,8 +3074,8 @@ MODULES = {'.NET': ['dotnet_load'],
'version_compare',
'zend_logo_guid',
'zend_thread_id',
- 'zend_version'],
- 'POSIX': ['posix_access',
+ 'zend_version'),
+ 'POSIX': ('posix_access',
'posix_ctermid',
'posix_errno',
'posix_get_last_error',
@@ -2513,15 +3110,15 @@ MODULES = {'.NET': ['dotnet_load'],
'posix_strerror',
'posix_times',
'posix_ttyname',
- 'posix_uname'],
- 'POSIX Regex': ['ereg_replace',
+ 'posix_uname'),
+ 'POSIX Regex': ('ereg_replace',
'ereg',
'eregi_replace',
'eregi',
'split',
'spliti',
- 'sql_regcase'],
- 'PS': ['ps_add_bookmark',
+ 'sql_regcase'),
+ 'PS': ('ps_add_bookmark',
'ps_add_launchlink',
'ps_add_locallink',
'ps_add_note',
@@ -2598,8 +3195,8 @@ MODULES = {'.NET': ['dotnet_load'],
'ps_symbol_name',
'ps_symbol_width',
'ps_symbol',
- 'ps_translate'],
- 'Paradox': ['px_close',
+ 'ps_translate'),
+ 'Paradox': ('px_close',
'px_create_fp',
'px_date2string',
'px_delete_record',
@@ -2623,11 +3220,15 @@ MODULES = {'.NET': ['dotnet_load'],
'px_set_targetencoding',
'px_set_value',
'px_timestamp2string',
- 'px_update_record'],
- 'Parsekit': ['parsekit_compile_file',
+ 'px_update_record'),
+ 'Parsekit': ('parsekit_compile_file',
'parsekit_compile_string',
- 'parsekit_func_arginfo'],
- 'PostgreSQL': ['pg_affected_rows',
+ 'parsekit_func_arginfo'),
+ 'Password Hashing': ('password_get_info',
+ 'password_hash',
+ 'password_needs_rehash',
+ 'password_verify'),
+ 'PostgreSQL': ('pg_affected_rows',
'pg_cancel_query',
'pg_client_encoding',
'pg_close',
@@ -2642,6 +3243,8 @@ MODULES = {'.NET': ['dotnet_load'],
'pg_delete',
'pg_end_copy',
'pg_escape_bytea',
+ 'pg_escape_identifier',
+ 'pg_escape_literal',
'pg_escape_string',
'pg_execute',
'pg_fetch_all_columns',
@@ -2677,6 +3280,7 @@ MODULES = {'.NET': ['dotnet_load'],
'pg_lo_read',
'pg_lo_seek',
'pg_lo_tell',
+ 'pg_lo_truncate',
'pg_lo_unlink',
'pg_lo_write',
'pg_meta_data',
@@ -2687,8 +3291,29 @@ MODULES = {'.NET': ['dotnet_load'],
'pg_pconnect',
'pg_ping',
'pg_port',
- 'pg_prepare'],
- 'Printer': ['printer_abort',
+ 'pg_prepare',
+ 'pg_put_line',
+ 'pg_query_params',
+ 'pg_query',
+ 'pg_result_error_field',
+ 'pg_result_error',
+ 'pg_result_seek',
+ 'pg_result_status',
+ 'pg_select',
+ 'pg_send_execute',
+ 'pg_send_prepare',
+ 'pg_send_query_params',
+ 'pg_send_query',
+ 'pg_set_client_encoding',
+ 'pg_set_error_verbosity',
+ 'pg_trace',
+ 'pg_transaction_status',
+ 'pg_tty',
+ 'pg_unescape_bytea',
+ 'pg_untrace',
+ 'pg_update',
+ 'pg_version'),
+ 'Printer': ('printer_abort',
'printer_close',
'printer_create_brush',
'printer_create_dc',
@@ -2718,8 +3343,9 @@ MODULES = {'.NET': ['dotnet_load'],
'printer_set_option',
'printer_start_doc',
'printer_start_page',
- 'printer_write'],
- 'Program execution': ['escapeshellarg',
+ 'printer_write'),
+ 'Proctitle': ('setproctitle', 'setthreadtitle'),
+ 'Program execution': ('escapeshellarg',
'escapeshellcmd',
'exec',
'passthru',
@@ -2729,8 +3355,8 @@ MODULES = {'.NET': ['dotnet_load'],
'proc_open',
'proc_terminate',
'shell_exec',
- 'system'],
- 'Pspell': ['pspell_add_to_personal',
+ 'system'),
+ 'Pspell': ('pspell_add_to_personal',
'pspell_add_to_session',
'pspell_check',
'pspell_clear_session',
@@ -2742,13 +3368,19 @@ MODULES = {'.NET': ['dotnet_load'],
'pspell_config_personal',
'pspell_config_repl',
'pspell_config_runtogether',
- 'pspell_config_save_repl'],
- 'RPM Reader': ['rpm_close',
+ 'pspell_config_save_repl',
+ 'pspell_new_config',
+ 'pspell_new_personal',
+ 'pspell_new',
+ 'pspell_save_wordlist',
+ 'pspell_store_replacement',
+ 'pspell_suggest'),
+ 'RPM Reader': ('rpm_close',
'rpm_get_tag',
'rpm_is_valid',
'rpm_open',
- 'rpm_version'],
- 'RRD': ['rrd_create',
+ 'rpm_version'),
+ 'RRD': ('rrd_create',
'rrd_error',
'rrd_fetch',
'rrd_first',
@@ -2759,8 +3391,10 @@ MODULES = {'.NET': ['dotnet_load'],
'rrd_restore',
'rrd_tune',
'rrd_update',
- 'rrd_xport'],
- 'Radius': ['radius_acct_open',
+ 'rrd_version',
+ 'rrd_xport',
+ 'rrdc_disconnect'),
+ 'Radius': ('radius_acct_open',
'radius_add_server',
'radius_auth_open',
'radius_close',
@@ -2772,6 +3406,8 @@ MODULES = {'.NET': ['dotnet_load'],
'radius_demangle_mppe_key',
'radius_demangle',
'radius_get_attr',
+ 'radius_get_tagged_attr_data',
+ 'radius_get_tagged_attr_tag',
'radius_get_vendor_attr',
'radius_put_addr',
'radius_put_attr',
@@ -2782,11 +3418,12 @@ MODULES = {'.NET': ['dotnet_load'],
'radius_put_vendor_int',
'radius_put_vendor_string',
'radius_request_authenticator',
+ 'radius_salt_encrypt_attr',
'radius_send_request',
'radius_server_secret',
- 'radius_strerror'],
- 'Rar': ['rar_wrapper_cache_stats'],
- 'Readline': ['readline_add_history',
+ 'radius_strerror'),
+ 'Rar': ('rar_wrapper_cache_stats',),
+ 'Readline': ('readline_add_history',
'readline_callback_handler_install',
'readline_callback_handler_remove',
'readline_callback_read_char',
@@ -2798,9 +3435,9 @@ MODULES = {'.NET': ['dotnet_load'],
'readline_read_history',
'readline_redisplay',
'readline_write_history',
- 'readline'],
- 'Recode': ['recode_file', 'recode_string', 'recode'],
- 'SNMP': ['snmp_get_quick_print',
+ 'readline'),
+ 'Recode': ('recode_file', 'recode_string', 'recode'),
+ 'SNMP': ('snmp_get_quick_print',
'snmp_get_valueretrieval',
'snmp_read_mib',
'snmp_set_enum_print',
@@ -2823,10 +3460,11 @@ MODULES = {'.NET': ['dotnet_load'],
'snmprealwalk',
'snmpset',
'snmpwalk',
- 'snmpwalkoid'],
- 'SOAP': ['is_soap_fault', 'use_soap_error_handler'],
- 'SPL': ['class_implements',
+ 'snmpwalkoid'),
+ 'SOAP': ('is_soap_fault', 'use_soap_error_handler'),
+ 'SPL': ('class_implements',
'class_parents',
+ 'class_uses',
'iterator_apply',
'iterator_count',
'iterator_to_array',
@@ -2837,10 +3475,76 @@ MODULES = {'.NET': ['dotnet_load'],
'spl_autoload_unregister',
'spl_autoload',
'spl_classes',
- 'spl_object_hash'],
- 'SPPLUS': ['calcul_hmac', 'calculhmac', 'nthmac', 'signeurlpaiement'],
- 'SQLite': ['sqlite_array_query', 'sqlite_busy_timeout', 'sqlite_changes'],
- 'SSH2': ['ssh2_auth_hostbased_file',
+ 'spl_object_hash'),
+ 'SPPLUS': ('calcul_hmac', 'calculhmac', 'nthmac', 'signeurlpaiement'),
+ 'SQLSRV': ('sqlsrv_begin_transaction',
+ 'sqlsrv_cancel',
+ 'sqlsrv_client_info',
+ 'sqlsrv_close',
+ 'sqlsrv_commit',
+ 'sqlsrv_configure',
+ 'sqlsrv_connect',
+ 'sqlsrv_errors',
+ 'sqlsrv_execute',
+ 'sqlsrv_fetch_array',
+ 'sqlsrv_fetch_object',
+ 'sqlsrv_fetch',
+ 'sqlsrv_field_metadata',
+ 'sqlsrv_free_stmt',
+ 'sqlsrv_get_config',
+ 'sqlsrv_get_field',
+ 'sqlsrv_has_rows',
+ 'sqlsrv_next_result',
+ 'sqlsrv_num_fields',
+ 'sqlsrv_num_rows',
+ 'sqlsrv_prepare',
+ 'sqlsrv_query',
+ 'sqlsrv_rollback',
+ 'sqlsrv_rows_affected',
+ 'sqlsrv_send_stream_data',
+ 'sqlsrv_server_info'),
+ 'SQLite': ('sqlite_array_query',
+ 'sqlite_busy_timeout',
+ 'sqlite_changes',
+ 'sqlite_close',
+ 'sqlite_column',
+ 'sqlite_create_aggregate',
+ 'sqlite_create_function',
+ 'sqlite_current',
+ 'sqlite_error_string',
+ 'sqlite_escape_string',
+ 'sqlite_exec',
+ 'sqlite_factory',
+ 'sqlite_fetch_all',
+ 'sqlite_fetch_array',
+ 'sqlite_fetch_column_types',
+ 'sqlite_fetch_object',
+ 'sqlite_fetch_single',
+ 'sqlite_fetch_string',
+ 'sqlite_field_name',
+ 'sqlite_has_more',
+ 'sqlite_has_prev',
+ 'sqlite_key',
+ 'sqlite_last_error',
+ 'sqlite_last_insert_rowid',
+ 'sqlite_libencoding',
+ 'sqlite_libversion',
+ 'sqlite_next',
+ 'sqlite_num_fields',
+ 'sqlite_num_rows',
+ 'sqlite_open',
+ 'sqlite_popen',
+ 'sqlite_prev',
+ 'sqlite_query',
+ 'sqlite_rewind',
+ 'sqlite_seek',
+ 'sqlite_single_query',
+ 'sqlite_udf_decode_binary',
+ 'sqlite_udf_encode_binary',
+ 'sqlite_unbuffered_query',
+ 'sqlite_valid'),
+ 'SSH2': ('ssh2_auth_agent',
+ 'ssh2_auth_hostbased_file',
'ssh2_auth_none',
'ssh2_auth_password',
'ssh2_auth_pubkey_file',
@@ -2855,6 +3559,7 @@ MODULES = {'.NET': ['dotnet_load'],
'ssh2_publickey_remove',
'ssh2_scp_recv',
'ssh2_scp_send',
+ 'ssh2_sftp_chmod',
'ssh2_sftp_lstat',
'ssh2_sftp_mkdir',
'ssh2_sftp_readlink',
@@ -2866,8 +3571,8 @@ MODULES = {'.NET': ['dotnet_load'],
'ssh2_sftp_unlink',
'ssh2_sftp',
'ssh2_shell',
- 'ssh2_tunnel'],
- 'SVN': ['svn_add',
+ 'ssh2_tunnel'),
+ 'SVN': ('svn_add',
'svn_auth_get_parameter',
'svn_auth_set_parameter',
'svn_blame',
@@ -2914,8 +3619,8 @@ MODULES = {'.NET': ['dotnet_load'],
'svn_repos_recover',
'svn_revert',
'svn_status',
- 'svn_update'],
- 'SWF': ['swf_actiongeturl',
+ 'svn_update'),
+ 'SWF': ('swf_actiongeturl',
'swf_actiongotoframe',
'swf_actiongotolabel',
'swf_actionnextframe',
@@ -2981,8 +3686,8 @@ MODULES = {'.NET': ['dotnet_load'],
'swf_startsymbol',
'swf_textwidth',
'swf_translate',
- 'swf_viewport'],
- 'Semaphore': ['ftok',
+ 'swf_viewport'),
+ 'Semaphore': ('ftok',
'msg_get_queue',
'msg_queue_exists',
'msg_receive',
@@ -3000,8 +3705,8 @@ MODULES = {'.NET': ['dotnet_load'],
'shm_has_var',
'shm_put_var',
'shm_remove_var',
- 'shm_remove'],
- 'Session': ['session_cache_expire',
+ 'shm_remove'),
+ 'Session': ('session_cache_expire',
'session_cache_limiter',
'session_commit',
'session_decode',
@@ -3013,33 +3718,36 @@ MODULES = {'.NET': ['dotnet_load'],
'session_module_name',
'session_name',
'session_regenerate_id',
+ 'session_register_shutdown',
'session_register',
'session_save_path',
'session_set_cookie_params',
'session_set_save_handler',
'session_start',
+ 'session_status',
'session_unregister',
'session_unset',
- 'session_write_close'],
- 'Session PgSQL': ['session_pgsql_add_error',
+ 'session_write_close'),
+ 'Session PgSQL': ('session_pgsql_add_error',
'session_pgsql_get_error',
'session_pgsql_get_field',
'session_pgsql_reset',
'session_pgsql_set_field',
- 'session_pgsql_status'],
- 'Shared Memory': ['shmop_close',
+ 'session_pgsql_status'),
+ 'Shared Memory': ('shmop_close',
'shmop_delete',
'shmop_open',
'shmop_read',
'shmop_size',
- 'shmop_write'],
- 'SimpleXML': ['simplexml_import_dom',
+ 'shmop_write'),
+ 'SimpleXML': ('simplexml_import_dom',
'simplexml_load_file',
- 'simplexml_load_string'],
- 'Socket': ['socket_accept',
+ 'simplexml_load_string'),
+ 'Socket': ('socket_accept',
'socket_bind',
'socket_clear_error',
'socket_close',
+ 'socket_cmsg_space',
'socket_connect',
'socket_create_listen',
'socket_create_pair',
@@ -3047,22 +3755,25 @@ MODULES = {'.NET': ['dotnet_load'],
'socket_get_option',
'socket_getpeername',
'socket_getsockname',
+ 'socket_import_stream',
'socket_last_error',
'socket_listen',
'socket_read',
'socket_recv',
'socket_recvfrom',
+ 'socket_recvmsg',
'socket_select',
'socket_send',
+ 'socket_sendmsg',
'socket_sendto',
'socket_set_block',
'socket_set_nonblock',
'socket_set_option',
'socket_shutdown',
'socket_strerror',
- 'socket_write'],
- 'Solr': ['solr_get_version'],
- 'Statistic': ['stats_absolute_deviation',
+ 'socket_write'),
+ 'Solr': ('solr_get_version',),
+ 'Statistic': ('stats_absolute_deviation',
'stats_cdf_beta',
'stats_cdf_binomial',
'stats_cdf_cauchy',
@@ -3129,9 +3840,9 @@ MODULES = {'.NET': ['dotnet_load'],
'stats_stat_paired_t',
'stats_stat_percentile',
'stats_stat_powersum',
- 'stats_variance'],
- 'Stomp': ['stomp_connect_error', 'stomp_version'],
- 'Stream': ['set_socket_blocking',
+ 'stats_variance'),
+ 'Stomp': ('stomp_connect_error', 'stomp_version'),
+ 'Stream': ('set_socket_blocking',
'stream_bucket_append',
'stream_bucket_make_writeable',
'stream_bucket_new',
@@ -3159,8 +3870,26 @@ MODULES = {'.NET': ['dotnet_load'],
'stream_notification_callback',
'stream_register_wrapper',
'stream_resolve_include_path',
- 'stream_select'],
- 'String': ['addcslashes',
+ 'stream_select',
+ 'stream_set_blocking',
+ 'stream_set_chunk_size',
+ 'stream_set_read_buffer',
+ 'stream_set_timeout',
+ 'stream_set_write_buffer',
+ 'stream_socket_accept',
+ 'stream_socket_client',
+ 'stream_socket_enable_crypto',
+ 'stream_socket_get_name',
+ 'stream_socket_pair',
+ 'stream_socket_recvfrom',
+ 'stream_socket_sendto',
+ 'stream_socket_server',
+ 'stream_socket_shutdown',
+ 'stream_supports_lock',
+ 'stream_wrapper_register',
+ 'stream_wrapper_restore',
+ 'stream_wrapper_unregister'),
+ 'String': ('addcslashes',
'addslashes',
'bin2hex',
'chop',
@@ -3178,6 +3907,7 @@ MODULES = {'.NET': ['dotnet_load'],
'get_html_translation_table',
'hebrev',
'hebrevc',
+ 'hex2bin',
'html_entity_decode',
'htmlentities',
'htmlspecialchars_decode',
@@ -3240,8 +3970,24 @@ MODULES = {'.NET': ['dotnet_load'],
'strrev',
'strripos',
'strrpos',
- 'strspn'],
- 'Sybase': ['sybase_affected_rows',
+ 'strspn',
+ 'strstr',
+ 'strtok',
+ 'strtolower',
+ 'strtoupper',
+ 'strtr',
+ 'substr_compare',
+ 'substr_count',
+ 'substr_replace',
+ 'substr',
+ 'trim',
+ 'ucfirst',
+ 'ucwords',
+ 'vfprintf',
+ 'vprintf',
+ 'vsprintf',
+ 'wordwrap'),
+ 'Sybase': ('sybase_affected_rows',
'sybase_close',
'sybase_connect',
'sybase_data_seek',
@@ -3265,22 +4011,185 @@ MODULES = {'.NET': ['dotnet_load'],
'sybase_result',
'sybase_select_db',
'sybase_set_message_handler',
- 'sybase_unbuffered_query'],
- 'TCP': ['tcpwrap_check'],
- 'Tidy': ['ob_tidyhandler',
+ 'sybase_unbuffered_query'),
+ 'TCP': ('tcpwrap_check',),
+ 'Taint': ('is_tainted', 'taint', 'untaint'),
+ 'Tidy': ('ob_tidyhandler',
'tidy_access_count',
'tidy_config_count',
'tidy_error_count',
- 'tidy_get_error_buffer',
'tidy_get_output',
'tidy_load_config',
'tidy_reset_config',
'tidy_save_config',
'tidy_set_encoding',
'tidy_setopt',
- 'tidy_warning_count'],
- 'Tokenizer': ['token_get_all', 'token_name'],
- 'URL': ['base64_decode',
+ 'tidy_warning_count'),
+ 'Tokenizer': ('token_get_all', 'token_name'),
+ 'Trader': ('trader_acos',
+ 'trader_ad',
+ 'trader_add',
+ 'trader_adosc',
+ 'trader_adx',
+ 'trader_adxr',
+ 'trader_apo',
+ 'trader_aroon',
+ 'trader_aroonosc',
+ 'trader_asin',
+ 'trader_atan',
+ 'trader_atr',
+ 'trader_avgprice',
+ 'trader_bbands',
+ 'trader_beta',
+ 'trader_bop',
+ 'trader_cci',
+ 'trader_cdl2crows',
+ 'trader_cdl3blackcrows',
+ 'trader_cdl3inside',
+ 'trader_cdl3linestrike',
+ 'trader_cdl3outside',
+ 'trader_cdl3starsinsouth',
+ 'trader_cdl3whitesoldiers',
+ 'trader_cdlabandonedbaby',
+ 'trader_cdladvanceblock',
+ 'trader_cdlbelthold',
+ 'trader_cdlbreakaway',
+ 'trader_cdlclosingmarubozu',
+ 'trader_cdlconcealbabyswall',
+ 'trader_cdlcounterattack',
+ 'trader_cdldarkcloudcover',
+ 'trader_cdldoji',
+ 'trader_cdldojistar',
+ 'trader_cdldragonflydoji',
+ 'trader_cdlengulfing',
+ 'trader_cdleveningdojistar',
+ 'trader_cdleveningstar',
+ 'trader_cdlgapsidesidewhite',
+ 'trader_cdlgravestonedoji',
+ 'trader_cdlhammer',
+ 'trader_cdlhangingman',
+ 'trader_cdlharami',
+ 'trader_cdlharamicross',
+ 'trader_cdlhighwave',
+ 'trader_cdlhikkake',
+ 'trader_cdlhikkakemod',
+ 'trader_cdlhomingpigeon',
+ 'trader_cdlidentical3crows',
+ 'trader_cdlinneck',
+ 'trader_cdlinvertedhammer',
+ 'trader_cdlkicking',
+ 'trader_cdlkickingbylength',
+ 'trader_cdlladderbottom',
+ 'trader_cdllongleggeddoji',
+ 'trader_cdllongline',
+ 'trader_cdlmarubozu',
+ 'trader_cdlmatchinglow',
+ 'trader_cdlmathold',
+ 'trader_cdlmorningdojistar',
+ 'trader_cdlmorningstar',
+ 'trader_cdlonneck',
+ 'trader_cdlpiercing',
+ 'trader_cdlrickshawman',
+ 'trader_cdlrisefall3methods',
+ 'trader_cdlseparatinglines',
+ 'trader_cdlshootingstar',
+ 'trader_cdlshortline',
+ 'trader_cdlspinningtop',
+ 'trader_cdlstalledpattern',
+ 'trader_cdlsticksandwich',
+ 'trader_cdltakuri',
+ 'trader_cdltasukigap',
+ 'trader_cdlthrusting',
+ 'trader_cdltristar',
+ 'trader_cdlunique3river',
+ 'trader_cdlupsidegap2crows',
+ 'trader_cdlxsidegap3methods',
+ 'trader_ceil',
+ 'trader_cmo',
+ 'trader_correl',
+ 'trader_cos',
+ 'trader_cosh',
+ 'trader_dema',
+ 'trader_div',
+ 'trader_dx',
+ 'trader_ema',
+ 'trader_errno',
+ 'trader_exp',
+ 'trader_floor',
+ 'trader_get_compat',
+ 'trader_get_unstable_period',
+ 'trader_ht_dcperiod',
+ 'trader_ht_dcphase',
+ 'trader_ht_phasor',
+ 'trader_ht_sine',
+ 'trader_ht_trendline',
+ 'trader_ht_trendmode',
+ 'trader_kama',
+ 'trader_linearreg_angle',
+ 'trader_linearreg_intercept',
+ 'trader_linearreg_slope',
+ 'trader_linearreg',
+ 'trader_ln',
+ 'trader_log10',
+ 'trader_ma',
+ 'trader_macd',
+ 'trader_macdext',
+ 'trader_macdfix',
+ 'trader_mama',
+ 'trader_mavp',
+ 'trader_max',
+ 'trader_maxindex',
+ 'trader_medprice',
+ 'trader_mfi',
+ 'trader_midpoint',
+ 'trader_midprice',
+ 'trader_min',
+ 'trader_minindex',
+ 'trader_minmax',
+ 'trader_minmaxindex',
+ 'trader_minus_di',
+ 'trader_minus_dm',
+ 'trader_mom',
+ 'trader_mult',
+ 'trader_natr',
+ 'trader_obv',
+ 'trader_plus_di',
+ 'trader_plus_dm',
+ 'trader_ppo',
+ 'trader_roc',
+ 'trader_rocp',
+ 'trader_rocr100',
+ 'trader_rocr',
+ 'trader_rsi',
+ 'trader_sar',
+ 'trader_sarext',
+ 'trader_set_compat',
+ 'trader_set_unstable_period',
+ 'trader_sin',
+ 'trader_sinh',
+ 'trader_sma',
+ 'trader_sqrt',
+ 'trader_stddev',
+ 'trader_stoch',
+ 'trader_stochf',
+ 'trader_stochrsi',
+ 'trader_sub',
+ 'trader_sum',
+ 'trader_t3',
+ 'trader_tan',
+ 'trader_tanh',
+ 'trader_tema',
+ 'trader_trange',
+ 'trader_trima',
+ 'trader_trix',
+ 'trader_tsf',
+ 'trader_typprice',
+ 'trader_ultosc',
+ 'trader_var',
+ 'trader_wclprice',
+ 'trader_willr',
+ 'trader_wma'),
+ 'URL': ('base64_decode',
'base64_encode',
'get_headers',
'get_meta_tags',
@@ -3289,8 +4198,22 @@ MODULES = {'.NET': ['dotnet_load'],
'rawurldecode',
'rawurlencode',
'urldecode',
- 'urlencode'],
- 'Variable handling': ['debug_zval_dump',
+ 'urlencode'),
+ 'Uopz': ('uopz_backup',
+ 'uopz_compose',
+ 'uopz_copy',
+ 'uopz_delete',
+ 'uopz_extend',
+ 'uopz_flags',
+ 'uopz_function',
+ 'uopz_implement',
+ 'uopz_overload',
+ 'uopz_redefine',
+ 'uopz_rename',
+ 'uopz_restore',
+ 'uopz_undefine'),
+ 'Variable handling': ('boolval',
+ 'debug_zval_dump',
'doubleval',
'empty',
'floatval',
@@ -3322,20 +4245,19 @@ MODULES = {'.NET': ['dotnet_load'],
'unserialize',
'unset',
'var_dump',
- 'var_export'],
- 'W32api': ['w32api_deftype',
+ 'var_export'),
+ 'W32api': ('w32api_deftype',
'w32api_init_dtype',
'w32api_invoke_function',
'w32api_register_function',
- 'w32api_set_call_method'],
- 'WDDX': ['wddx_add_vars',
+ 'w32api_set_call_method'),
+ 'WDDX': ('wddx_add_vars',
'wddx_deserialize',
'wddx_packet_end',
'wddx_packet_start',
'wddx_serialize_value',
- 'wddx_serialize_vars',
- 'wddx_unserialize'],
- 'WinCache': ['wincache_fcache_fileinfo',
+ 'wddx_serialize_vars'),
+ 'WinCache': ('wincache_fcache_fileinfo',
'wincache_fcache_meminfo',
'wincache_lock',
'wincache_ocache_fileinfo',
@@ -3356,9 +4278,32 @@ MODULES = {'.NET': ['dotnet_load'],
'wincache_ucache_info',
'wincache_ucache_meminfo',
'wincache_ucache_set',
- 'wincache_unlock'],
- 'XML Parser': ['utf8_decode'],
- 'XML-RPC': ['xmlrpc_decode_request',
+ 'wincache_unlock'),
+ 'XML Parser': ('utf8_decode',
+ 'utf8_encode',
+ 'xml_error_string',
+ 'xml_get_current_byte_index',
+ 'xml_get_current_column_number',
+ 'xml_get_current_line_number',
+ 'xml_get_error_code',
+ 'xml_parse_into_struct',
+ 'xml_parse',
+ 'xml_parser_create_ns',
+ 'xml_parser_create',
+ 'xml_parser_free',
+ 'xml_parser_get_option',
+ 'xml_parser_set_option',
+ 'xml_set_character_data_handler',
+ 'xml_set_default_handler',
+ 'xml_set_element_handler',
+ 'xml_set_end_namespace_decl_handler',
+ 'xml_set_external_entity_ref_handler',
+ 'xml_set_notation_decl_handler',
+ 'xml_set_object',
+ 'xml_set_processing_instruction_handler',
+ 'xml_set_start_namespace_decl_handler',
+ 'xml_set_unparsed_entity_decl_handler'),
+ 'XML-RPC': ('xmlrpc_decode_request',
'xmlrpc_decode',
'xmlrpc_encode_request',
'xmlrpc_encode',
@@ -3371,27 +4316,31 @@ MODULES = {'.NET': ['dotnet_load'],
'xmlrpc_server_destroy',
'xmlrpc_server_register_introspection_callback',
'xmlrpc_server_register_method',
- 'xmlrpc_set_type'],
- 'XSLT (PHP4)': ['xslt_backend_info',
- 'xslt_backend_name',
- 'xslt_backend_version',
- 'xslt_create',
- 'xslt_errno',
- 'xslt_error',
- 'xslt_free',
- 'xslt_getopt',
- 'xslt_process',
- 'xslt_set_base',
- 'xslt_set_encoding',
- 'xslt_set_error_handler',
- 'xslt_set_log',
- 'xslt_set_object',
- 'xslt_set_sax_handler',
- 'xslt_set_sax_handlers',
- 'xslt_set_scheme_handler',
- 'xslt_set_scheme_handlers',
- 'xslt_setopt'],
- 'YAZ': ['yaz_addinfo',
+ 'xmlrpc_set_type'),
+ 'XSLT (PHP 4)': ('xslt_backend_info',
+ 'xslt_backend_name',
+ 'xslt_backend_version',
+ 'xslt_create',
+ 'xslt_errno',
+ 'xslt_error',
+ 'xslt_free',
+ 'xslt_getopt',
+ 'xslt_process',
+ 'xslt_set_base',
+ 'xslt_set_encoding',
+ 'xslt_set_error_handler',
+ 'xslt_set_log',
+ 'xslt_set_object',
+ 'xslt_set_sax_handler',
+ 'xslt_set_sax_handlers',
+ 'xslt_set_scheme_handler',
+ 'xslt_set_scheme_handlers',
+ 'xslt_setopt'),
+ 'Xhprof': ('xhprof_disable',
+ 'xhprof_enable',
+ 'xhprof_sample_disable',
+ 'xhprof_sample_enable'),
+ 'YAZ': ('yaz_addinfo',
'yaz_ccl_conf',
'yaz_ccl_parse',
'yaz_close',
@@ -3415,8 +4364,8 @@ MODULES = {'.NET': ['dotnet_load'],
'yaz_set_option',
'yaz_sort',
'yaz_syntax',
- 'yaz_wait'],
- 'YP/NIS': ['yp_all',
+ 'yaz_wait'),
+ 'YP/NIS': ('yp_all',
'yp_cat',
'yp_err_string',
'yp_errno',
@@ -3425,13 +4374,13 @@ MODULES = {'.NET': ['dotnet_load'],
'yp_master',
'yp_match',
'yp_next',
- 'yp_order'],
- 'Yaml': ['yaml_emit_file',
+ 'yp_order'),
+ 'Yaml': ('yaml_emit_file',
'yaml_emit',
'yaml_parse_file',
'yaml_parse_url',
- 'yaml_parse'],
- 'Zip': ['zip_close',
+ 'yaml_parse'),
+ 'Zip': ('zip_close',
'zip_entry_close',
'zip_entry_compressedsize',
'zip_entry_compressionmethod',
@@ -3440,8 +4389,8 @@ MODULES = {'.NET': ['dotnet_load'],
'zip_entry_open',
'zip_entry_read',
'zip_open',
- 'zip_read'],
- 'Zlib': ['gzclose',
+ 'zip_read'),
+ 'Zlib': ('gzclose',
'gzcompress',
'gzdecode',
'gzdeflate',
@@ -3462,8 +4411,10 @@ MODULES = {'.NET': ['dotnet_load'],
'gzuncompress',
'gzwrite',
'readgzfile',
- 'zlib_get_coding_type'],
- 'bcompiler': ['bcompiler_load_exe',
+ 'zlib_decode',
+ 'zlib_encode',
+ 'zlib_get_coding_type'),
+ 'bcompiler': ('bcompiler_load_exe',
'bcompiler_load',
'bcompiler_parse_class',
'bcompiler_read',
@@ -3475,12 +4426,14 @@ MODULES = {'.NET': ['dotnet_load'],
'bcompiler_write_function',
'bcompiler_write_functions_from_file',
'bcompiler_write_header',
- 'bcompiler_write_included_filename'],
- 'cURL': ['curl_close',
+ 'bcompiler_write_included_filename'),
+ 'cURL': ('curl_close',
'curl_copy_handle',
'curl_errno',
'curl_error',
+ 'curl_escape',
'curl_exec',
+ 'curl_file_create',
'curl_getinfo',
'curl_init',
'curl_multi_add_handle',
@@ -3491,11 +4444,20 @@ MODULES = {'.NET': ['dotnet_load'],
'curl_multi_init',
'curl_multi_remove_handle',
'curl_multi_select',
+ 'curl_multi_setopt',
+ 'curl_multi_strerror',
+ 'curl_pause',
+ 'curl_reset',
'curl_setopt_array',
'curl_setopt',
- 'curl_version'],
- 'chdb': ['chdb_create'],
- 'dBase': ['dbase_add_record',
+ 'curl_share_close',
+ 'curl_share_init',
+ 'curl_share_setopt',
+ 'curl_strerror',
+ 'curl_unescape',
+ 'curl_version'),
+ 'chdb': ('chdb_create',),
+ 'dBase': ('dbase_add_record',
'dbase_close',
'dbase_create',
'dbase_delete_record',
@@ -3506,21 +4468,23 @@ MODULES = {'.NET': ['dotnet_load'],
'dbase_numrecords',
'dbase_open',
'dbase_pack',
- 'dbase_replace_record'],
- 'dbx': ['dbx_close',
+ 'dbase_replace_record'),
+ 'dbx': ('dbx_close',
'dbx_compare',
'dbx_connect',
'dbx_error',
'dbx_escape_string',
- 'dbx_fetch_row'],
- 'filePro': ['filepro_fieldcount',
+ 'dbx_fetch_row',
+ 'dbx_query',
+ 'dbx_sort'),
+ 'filePro': ('filepro_fieldcount',
'filepro_fieldname',
'filepro_fieldtype',
'filepro_fieldwidth',
'filepro_retrieve',
'filepro_rowcount',
- 'filepro'],
- 'iconv': ['iconv_get_encoding',
+ 'filepro'),
+ 'iconv': ('iconv_get_encoding',
'iconv_mime_decode_headers',
'iconv_mime_decode',
'iconv_mime_encode',
@@ -3530,19 +4494,20 @@ MODULES = {'.NET': ['dotnet_load'],
'iconv_strrpos',
'iconv_substr',
'iconv',
- 'ob_iconv_handler'],
- 'inclued': ['inclued_get_data'],
- 'intl': ['intl_error_name',
+ 'ob_iconv_handler'),
+ 'inclued': ('inclued_get_data',),
+ 'intl': ('intl_error_name',
'intl_get_error_code',
'intl_get_error_message',
- 'intl_is_failure'],
- 'libxml': ['libxml_clear_errors',
+ 'intl_is_failure'),
+ 'libxml': ('libxml_clear_errors',
'libxml_disable_entity_loader',
'libxml_get_errors',
'libxml_get_last_error',
+ 'libxml_set_external_entity_loader',
'libxml_set_streams_context',
- 'libxml_use_internal_errors'],
- 'mSQL': ['msql_affected_rows',
+ 'libxml_use_internal_errors'),
+ 'mSQL': ('msql_affected_rows',
'msql_close',
'msql_connect',
'msql_create_db',
@@ -3581,8 +4546,8 @@ MODULES = {'.NET': ['dotnet_load'],
'msql_result',
'msql_select_db',
'msql_tablename',
- 'msql'],
- 'mnoGoSearch': ['udm_add_search_limit',
+ 'msql'),
+ 'mnoGoSearch': ('udm_add_search_limit',
'udm_alloc_agent_array',
'udm_alloc_agent',
'udm_api_version',
@@ -3605,8 +4570,8 @@ MODULES = {'.NET': ['dotnet_load'],
'udm_hash32',
'udm_load_ispell_data',
'udm_open_stored',
- 'udm_set_agent_param'],
- 'mqseries': ['mqseries_back',
+ 'udm_set_agent_param'),
+ 'mqseries': ('mqseries_back',
'mqseries_begin',
'mqseries_close',
'mqseries_cmit',
@@ -3619,16 +4584,19 @@ MODULES = {'.NET': ['dotnet_load'],
'mqseries_put1',
'mqseries_put',
'mqseries_set',
- 'mqseries_strerror'],
- 'mysqlnd_qc': ['mysqlnd_qc_change_handler',
- 'mysqlnd_qc_clear_cache',
+ 'mqseries_strerror'),
+ 'mysqlnd_qc': ('mysqlnd_qc_clear_cache',
+ 'mysqlnd_qc_get_available_handlers',
'mysqlnd_qc_get_cache_info',
'mysqlnd_qc_get_core_stats',
- 'mysqlnd_qc_get_handler',
+ 'mysqlnd_qc_get_normalized_query_trace_log',
'mysqlnd_qc_get_query_trace_log',
- 'mysqlnd_qc_set_user_handlers'],
- 'qtdom': ['qdom_error', 'qdom_tree'],
- 'runkit': ['runkit_class_adopt',
+ 'mysqlnd_qc_set_cache_condition',
+ 'mysqlnd_qc_set_is_select',
+ 'mysqlnd_qc_set_storage_handler',
+ 'mysqlnd_qc_set_user_handlers'),
+ 'qtdom': ('qdom_error', 'qdom_tree'),
+ 'runkit': ('runkit_class_adopt',
'runkit_class_emancipate',
'runkit_constant_add',
'runkit_constant_redefine',
@@ -3648,11 +4616,11 @@ MODULES = {'.NET': ['dotnet_load'],
'runkit_method_rename',
'runkit_return_value_used',
'runkit_sandbox_output_handler',
- 'runkit_superglobals'],
- 'ssdeep': ['ssdeep_fuzzy_compare',
+ 'runkit_superglobals'),
+ 'ssdeep': ('ssdeep_fuzzy_compare',
'ssdeep_fuzzy_hash_filename',
- 'ssdeep_fuzzy_hash'],
- 'vpopmail': ['vpopmail_add_alias_domain_ex',
+ 'ssdeep_fuzzy_hash'),
+ 'vpopmail': ('vpopmail_add_alias_domain_ex',
'vpopmail_add_alias_domain',
'vpopmail_add_domain_ex',
'vpopmail_add_domain',
@@ -3668,9 +4636,9 @@ MODULES = {'.NET': ['dotnet_load'],
'vpopmail_del_user',
'vpopmail_error',
'vpopmail_passwd',
- 'vpopmail_set_user_quota'],
- 'win32ps': ['win32_ps_list_procs', 'win32_ps_stat_mem', 'win32_ps_stat_proc'],
- 'win32service': ['win32_continue_service',
+ 'vpopmail_set_user_quota'),
+ 'win32ps': ('win32_ps_list_procs', 'win32_ps_stat_mem', 'win32_ps_stat_proc'),
+ 'win32service': ('win32_continue_service',
'win32_create_service',
'win32_delete_service',
'win32_get_last_control_message',
@@ -3679,13 +4647,13 @@ MODULES = {'.NET': ['dotnet_load'],
'win32_set_service_status',
'win32_start_service_ctrl_dispatcher',
'win32_start_service',
- 'win32_stop_service'],
- 'xattr': ['xattr_get',
+ 'win32_stop_service'),
+ 'xattr': ('xattr_get',
'xattr_list',
'xattr_remove',
'xattr_set',
- 'xattr_supported'],
- 'xdiff': ['xdiff_file_bdiff_size',
+ 'xattr_supported'),
+ 'xdiff': ('xdiff_file_bdiff_size',
'xdiff_file_bdiff',
'xdiff_file_bpatch',
'xdiff_file_diff_binary',
@@ -3702,16 +4670,20 @@ MODULES = {'.NET': ['dotnet_load'],
'xdiff_string_merge3',
'xdiff_string_patch_binary',
'xdiff_string_patch',
- 'xdiff_string_rabdiff']}
+ 'xdiff_string_rabdiff')}
-if __name__ == '__main__':
+
+if __name__ == '__main__': # pragma: no cover
import glob
import os
import pprint
import re
import shutil
import tarfile
- import urllib
+ try:
+ from urllib import urlretrieve
+ except ImportError:
+ from urllib.request import urlretrieve
PHP_MANUAL_URL = 'http://us3.php.net/distributions/manual/php_manual_en.tar.gz'
PHP_MANUAL_DIR = './php-chunked-xhtml/'
@@ -3733,26 +4705,27 @@ if __name__ == '__main__':
module = search.group(1)
modules[module] = []
- elif '<h2>Table of Contents</h2>' in line:
+ elif 'href="function.' in line:
for match in function_re.finditer(line):
fn = match.group(1)
- if '-&gt;' not in fn and '::' not in fn:
+ if '-&gt;' not in fn and '::' not in fn and fn not in modules[module]:
modules[module].append(fn)
- # These are dummy manual pages, not actual functions
- if module == 'PHP Options/Info':
- modules[module].remove('main')
- elif module == 'Filesystem':
- modules[module].remove('delete')
+ if module:
+ # These are dummy manual pages, not actual functions
+ if module == 'PHP Options/Info':
+ modules[module].remove('main')
+
+ if module == 'Filesystem':
+ modules[module].remove('delete')
- if not modules[module]:
- del modules[module]
+ if not modules[module]:
+ del modules[module]
- break
return modules
def get_php_references():
- download = urllib.urlretrieve(PHP_MANUAL_URL)
+ download = urlretrieve(PHP_MANUAL_URL)
tar = tarfile.open(download[0])
tar.extractall()
tar.close()
@@ -3761,26 +4734,22 @@ if __name__ == '__main__':
os.remove(download[0])
def regenerate(filename, modules):
- f = open(filename)
- try:
- content = f.read()
- finally:
- f.close()
+ with open(filename) as fp:
+ content = fp.read()
header = content[:content.find('MODULES = {')]
footer = content[content.find("if __name__ == '__main__':"):]
- f = open(filename, 'w')
- f.write(header)
- f.write('MODULES = %s\n\n' % pprint.pformat(modules))
- f.write(footer)
- f.close()
+ with open(filename, 'w') as fp:
+ fp.write(header)
+ fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
+ fp.write(footer)
def run():
- print '>> Downloading Function Index'
+ print('>> Downloading Function Index')
modules = get_php_functions()
- total = sum(len(v) for v in modules.itervalues())
- print '%d functions found' % total
+ total = sum(len(v) for v in modules.values())
+ print('%d functions found' % total)
regenerate(__file__, modules)
shutil.rmtree(PHP_MANUAL_DIR)
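As an aside on how a table like MODULES is normally consumed: the sketch below uses hypothetical helper names and is not the PhpLexer's actual code. It flattens the module-to-functions mapping into one membership set so identifiers can be checked in constant time; the disabled parameter is only an illustrative stand-in for option handling such as the lexer's disabledmodules option.

# Illustrative sketch only (hypothetical helper, not Pygments' own code):
# flatten a MODULES-style mapping (module name -> function names) into a
# single membership set, optionally skipping disabled modules.
from pygments.lexers._php_builtins import MODULES

def build_function_set(disabled=()):
    funcs = set()
    for module, functions in MODULES.items():
        if module not in disabled:
            funcs.update(functions)
    return funcs

if __name__ == '__main__':
    builtins = build_function_set(disabled=('SWF',))
    print('%d builtin functions collected' % len(builtins))
    print('pg_escape_literal' in builtins)  # one of the names added by this update

Storing the data as a plain dict of tuples, as the patch above does, is what keeps a flattening step like this trivial and keeps the regeneration script purely mechanical.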
diff --git a/pygments/lexers/_postgres_builtins.py b/pygments/lexers/_postgres_builtins.py
index b2322137..671fa677 100644
--- a/pygments/lexers/_postgres_builtins.py
+++ b/pygments/lexers/_postgres_builtins.py
@@ -5,229 +5,617 @@
Self-updating data files for PostgreSQL lexer.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
-import urllib
-
-# One man's constant is another man's variable.
-SOURCE_URL = 'https://github.com/postgres/postgres/raw/master'
-KEYWORDS_URL = SOURCE_URL + '/doc/src/sgml/keywords.sgml'
-DATATYPES_URL = SOURCE_URL + '/doc/src/sgml/datatype.sgml'
-
-def update_myself():
- data_file = list(fetch(DATATYPES_URL))
- datatypes = parse_datatypes(data_file)
- pseudos = parse_pseudos(data_file)
-
- keywords = parse_keywords(fetch(KEYWORDS_URL))
- update_consts(__file__, 'DATATYPES', datatypes)
- update_consts(__file__, 'PSEUDO_TYPES', pseudos)
- update_consts(__file__, 'KEYWORDS', keywords)
-
-def parse_keywords(f):
- kw = []
- for m in re.finditer(
- r'\s*<entry><token>([^<]+)</token></entry>\s*'
- r'<entry>([^<]+)</entry>', f.read()):
- kw.append(m.group(1))
-
- if not kw:
- raise ValueError('no keyword found')
-
- kw.sort()
- return kw
-
-def parse_datatypes(f):
- dt = set()
- for line in f:
- if '<sect1' in line:
- break
- if '<entry><type>' not in line:
- continue
-
- # Parse a string such as
- # time [ (<replaceable>p</replaceable>) ] [ without time zone ]
- # into types "time" and "without time zone"
-
- # remove all the tags
- line = re.sub("<replaceable>[^<]+</replaceable>", "", line)
- line = re.sub("<[^>]+>", "", line)
-
- # Drop the parts containing braces
- for tmp in [t for tmp in line.split('[')
- for t in tmp.split(']') if "(" not in t]:
- for t in tmp.split(','):
- t = t.strip()
- if not t: continue
- dt.add(" ".join(t.split()))
-
- dt = list(dt)
- dt.sort()
- return dt
-
-def parse_pseudos(f):
- dt = []
- re_start = re.compile(r'\s*<table id="datatype-pseudotypes-table">')
- re_entry = re.compile(r'\s*<entry><type>([^<]+)</></entry>')
- re_end = re.compile(r'\s*</table>')
-
- f = iter(f)
- for line in f:
- if re_start.match(line) is not None:
- break
- else:
- raise ValueError('pseudo datatypes table not found')
-
- for line in f:
- m = re_entry.match(line)
- if m is not None:
- dt.append(m.group(1))
-
- if re_end.match(line) is not None:
- break
- else:
- raise ValueError('end of pseudo datatypes table not found')
-
- if not dt:
- raise ValueError('pseudo datatypes not found')
-
- return dt
-
-def fetch(url):
- return urllib.urlopen(url)
-
-def update_consts(filename, constname, content):
- f = open(filename)
- lines = f.readlines()
- f.close()
-
- # Line to start/end inserting
- re_start = re.compile(r'^%s\s*=\s*\[\s*$' % constname)
- re_end = re.compile(r'^\s*\]\s*$')
- start = [ n for n, l in enumerate(lines) if re_start.match(l) ]
- if not start:
- raise ValueError("couldn't find line containing '%s = ['" % constname)
- if len(start) > 1:
- raise ValueError("too many lines containing '%s = ['" % constname)
- start = start[0] + 1
-
- end = [ n for n, l in enumerate(lines) if n >= start and re_end.match(l) ]
- if not end:
- raise ValueError("couldn't find line containing ']' after %s " % constname)
- end = end[0]
-
- # Pack the new content in lines not too long
- content = [repr(item) for item in content ]
- new_lines = [[]]
- for item in content:
- if sum(map(len, new_lines[-1])) + 2 * len(new_lines[-1]) + len(item) + 4 > 75:
- new_lines.append([])
- new_lines[-1].append(item)
-
- lines[start:end] = [ " %s,\n" % ", ".join(items) for items in new_lines ]
-
- f = open(filename, 'w')
- f.write(''.join(lines))
- f.close()
-
# Autogenerated: please edit them if you like wasting your time.
-KEYWORDS = [
- 'ABORT', 'ABSOLUTE', 'ACCESS', 'ACTION', 'ADD', 'ADMIN', 'AFTER',
- 'AGGREGATE', 'ALL', 'ALSO', 'ALTER', 'ALWAYS', 'ANALYSE', 'ANALYZE',
- 'AND', 'ANY', 'ARRAY', 'AS', 'ASC', 'ASSERTION', 'ASSIGNMENT',
- 'ASYMMETRIC', 'AT', 'ATTRIBUTE', 'AUTHORIZATION', 'BACKWARD', 'BEFORE',
- 'BEGIN', 'BETWEEN', 'BIGINT', 'BINARY', 'BIT', 'BOOLEAN', 'BOTH', 'BY',
- 'CACHE', 'CALLED', 'CASCADE', 'CASCADED', 'CASE', 'CAST', 'CATALOG',
- 'CHAIN', 'CHAR', 'CHARACTER', 'CHARACTERISTICS', 'CHECK', 'CHECKPOINT',
- 'CLASS', 'CLOSE', 'CLUSTER', 'COALESCE', 'COLLATE', 'COLLATION',
- 'COLUMN', 'COMMENT', 'COMMENTS', 'COMMIT', 'COMMITTED', 'CONCURRENTLY',
- 'CONFIGURATION', 'CONNECTION', 'CONSTRAINT', 'CONSTRAINTS', 'CONTENT',
- 'CONTINUE', 'CONVERSION', 'COPY', 'COST', 'CREATE', 'CROSS', 'CSV',
- 'CURRENT', 'CURRENT_CATALOG', 'CURRENT_DATE', 'CURRENT_ROLE',
- 'CURRENT_SCHEMA', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_USER',
- 'CURSOR', 'CYCLE', 'DATA', 'DATABASE', 'DAY', 'DEALLOCATE', 'DEC',
- 'DECIMAL', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE', 'DEFERRED',
- 'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS', 'DESC', 'DICTIONARY',
- 'DISABLE', 'DISCARD', 'DISTINCT', 'DO', 'DOCUMENT', 'DOMAIN', 'DOUBLE',
- 'DROP', 'EACH', 'ELSE', 'ENABLE', 'ENCODING', 'ENCRYPTED', 'END',
- 'ENUM', 'ESCAPE', 'EXCEPT', 'EXCLUDE', 'EXCLUDING', 'EXCLUSIVE',
- 'EXECUTE', 'EXISTS', 'EXPLAIN', 'EXTENSION', 'EXTERNAL', 'EXTRACT',
- 'FALSE', 'FAMILY', 'FETCH', 'FIRST', 'FLOAT', 'FOLLOWING', 'FOR',
- 'FORCE', 'FOREIGN', 'FORWARD', 'FREEZE', 'FROM', 'FULL', 'FUNCTION',
- 'FUNCTIONS', 'GLOBAL', 'GRANT', 'GRANTED', 'GREATEST', 'GROUP',
- 'HANDLER', 'HAVING', 'HEADER', 'HOLD', 'HOUR', 'IDENTITY', 'IF',
- 'ILIKE', 'IMMEDIATE', 'IMMUTABLE', 'IMPLICIT', 'IN', 'INCLUDING',
- 'INCREMENT', 'INDEX', 'INDEXES', 'INHERIT', 'INHERITS', 'INITIALLY',
- 'INLINE', 'INNER', 'INOUT', 'INPUT', 'INSENSITIVE', 'INSERT', 'INSTEAD',
- 'INT', 'INTEGER', 'INTERSECT', 'INTERVAL', 'INTO', 'INVOKER', 'IS',
- 'ISNULL', 'ISOLATION', 'JOIN', 'KEY', 'LABEL', 'LANGUAGE', 'LARGE',
- 'LAST', 'LC_COLLATE', 'LC_CTYPE', 'LEADING', 'LEAST', 'LEFT', 'LEVEL',
- 'LIKE', 'LIMIT', 'LISTEN', 'LOAD', 'LOCAL', 'LOCALTIME',
- 'LOCALTIMESTAMP', 'LOCATION', 'LOCK', 'MAPPING', 'MATCH', 'MAXVALUE',
- 'MINUTE', 'MINVALUE', 'MODE', 'MONTH', 'MOVE', 'NAME', 'NAMES',
- 'NATIONAL', 'NATURAL', 'NCHAR', 'NEXT', 'NO', 'NONE', 'NOT', 'NOTHING',
- 'NOTIFY', 'NOTNULL', 'NOWAIT', 'NULL', 'NULLIF', 'NULLS', 'NUMERIC',
- 'OBJECT', 'OF', 'OFF', 'OFFSET', 'OIDS', 'ON', 'ONLY', 'OPERATOR',
- 'OPTION', 'OPTIONS', 'OR', 'ORDER', 'OUT', 'OUTER', 'OVER', 'OVERLAPS',
- 'OVERLAY', 'OWNED', 'OWNER', 'PARSER', 'PARTIAL', 'PARTITION',
- 'PASSING', 'PASSWORD', 'PLACING', 'PLANS', 'POSITION', 'PRECEDING',
- 'PRECISION', 'PREPARE', 'PREPARED', 'PRESERVE', 'PRIMARY', 'PRIOR',
- 'PRIVILEGES', 'PROCEDURAL', 'PROCEDURE', 'QUOTE', 'RANGE', 'READ',
- 'REAL', 'REASSIGN', 'RECHECK', 'RECURSIVE', 'REF', 'REFERENCES',
- 'REINDEX', 'RELATIVE', 'RELEASE', 'RENAME', 'REPEATABLE', 'REPLACE',
- 'REPLICA', 'RESET', 'RESTART', 'RESTRICT', 'RETURNING', 'RETURNS',
- 'REVOKE', 'RIGHT', 'ROLE', 'ROLLBACK', 'ROW', 'ROWS', 'RULE',
- 'SAVEPOINT', 'SCHEMA', 'SCROLL', 'SEARCH', 'SECOND', 'SECURITY',
- 'SELECT', 'SEQUENCE', 'SEQUENCES', 'SERIALIZABLE', 'SERVER', 'SESSION',
- 'SESSION_USER', 'SET', 'SETOF', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE',
- 'SMALLINT', 'SOME', 'STABLE', 'STANDALONE', 'START', 'STATEMENT',
- 'STATISTICS', 'STDIN', 'STDOUT', 'STORAGE', 'STRICT', 'STRIP',
- 'SUBSTRING', 'SYMMETRIC', 'SYSID', 'SYSTEM', 'TABLE', 'TABLES',
- 'TABLESPACE', 'TEMP', 'TEMPLATE', 'TEMPORARY', 'TEXT', 'THEN', 'TIME',
- 'TIMESTAMP', 'TO', 'TRAILING', 'TRANSACTION', 'TREAT', 'TRIGGER',
- 'TRIM', 'TRUE', 'TRUNCATE', 'TRUSTED', 'TYPE', 'UNBOUNDED',
- 'UNCOMMITTED', 'UNENCRYPTED', 'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN',
- 'UNLOGGED', 'UNTIL', 'UPDATE', 'USER', 'USING', 'VACUUM', 'VALID',
- 'VALIDATE', 'VALIDATOR', 'VALUE', 'VALUES', 'VARCHAR', 'VARIADIC',
- 'VARYING', 'VERBOSE', 'VERSION', 'VIEW', 'VOLATILE', 'WHEN', 'WHERE',
- 'WHITESPACE', 'WINDOW', 'WITH', 'WITHOUT', 'WORK', 'WRAPPER', 'WRITE',
- 'XML', 'XMLATTRIBUTES', 'XMLCONCAT', 'XMLELEMENT', 'XMLEXISTS',
- 'XMLFOREST', 'XMLPARSE', 'XMLPI', 'XMLROOT', 'XMLSERIALIZE', 'YEAR',
- 'YES', 'ZONE',
- ]
-
-DATATYPES = [
- 'bigint', 'bigserial', 'bit', 'bit varying', 'bool', 'boolean', 'box',
- 'bytea', 'char', 'character', 'character varying', 'cidr', 'circle',
- 'date', 'decimal', 'double precision', 'float4', 'float8', 'inet',
- 'int', 'int2', 'int4', 'int8', 'integer', 'interval', 'json', 'line',
- 'lseg', 'macaddr', 'money', 'numeric', 'path', 'point', 'polygon',
- 'real', 'serial', 'serial2', 'serial4', 'serial8', 'smallint',
- 'smallserial', 'text', 'time', 'timestamp', 'timestamptz', 'timetz',
- 'tsquery', 'tsvector', 'txid_snapshot', 'uuid', 'varbit', 'varchar',
- 'with time zone', 'without time zone', 'xml',
- ]
-
-PSEUDO_TYPES = [
- 'any', 'anyelement', 'anyarray', 'anynonarray', 'anyenum', 'anyrange',
- 'cstring', 'internal', 'language_handler', 'fdw_handler', 'record',
- 'trigger', 'void', 'opaque',
- ]
+KEYWORDS = (
+ 'ABORT',
+ 'ABSOLUTE',
+ 'ACCESS',
+ 'ACTION',
+ 'ADD',
+ 'ADMIN',
+ 'AFTER',
+ 'AGGREGATE',
+ 'ALL',
+ 'ALSO',
+ 'ALTER',
+ 'ALWAYS',
+ 'ANALYSE',
+ 'ANALYZE',
+ 'AND',
+ 'ANY',
+ 'ARRAY',
+ 'AS',
+ 'ASC',
+ 'ASSERTION',
+ 'ASSIGNMENT',
+ 'ASYMMETRIC',
+ 'AT',
+ 'ATTRIBUTE',
+ 'AUTHORIZATION',
+ 'BACKWARD',
+ 'BEFORE',
+ 'BEGIN',
+ 'BETWEEN',
+ 'BIGINT',
+ 'BINARY',
+ 'BIT',
+ 'BOOLEAN',
+ 'BOTH',
+ 'BY',
+ 'CACHE',
+ 'CALLED',
+ 'CASCADE',
+ 'CASCADED',
+ 'CASE',
+ 'CAST',
+ 'CATALOG',
+ 'CHAIN',
+ 'CHAR',
+ 'CHARACTER',
+ 'CHARACTERISTICS',
+ 'CHECK',
+ 'CHECKPOINT',
+ 'CLASS',
+ 'CLOSE',
+ 'CLUSTER',
+ 'COALESCE',
+ 'COLLATE',
+ 'COLLATION',
+ 'COLUMN',
+ 'COMMENT',
+ 'COMMENTS',
+ 'COMMIT',
+ 'COMMITTED',
+ 'CONCURRENTLY',
+ 'CONFIGURATION',
+ 'CONNECTION',
+ 'CONSTRAINT',
+ 'CONSTRAINTS',
+ 'CONTENT',
+ 'CONTINUE',
+ 'CONVERSION',
+ 'COPY',
+ 'COST',
+ 'CREATE',
+ 'CROSS',
+ 'CSV',
+ 'CURRENT',
+ 'CURRENT_CATALOG',
+ 'CURRENT_DATE',
+ 'CURRENT_ROLE',
+ 'CURRENT_SCHEMA',
+ 'CURRENT_TIME',
+ 'CURRENT_TIMESTAMP',
+ 'CURRENT_USER',
+ 'CURSOR',
+ 'CYCLE',
+ 'DATA',
+ 'DATABASE',
+ 'DAY',
+ 'DEALLOCATE',
+ 'DEC',
+ 'DECIMAL',
+ 'DECLARE',
+ 'DEFAULT',
+ 'DEFAULTS',
+ 'DEFERRABLE',
+ 'DEFERRED',
+ 'DEFINER',
+ 'DELETE',
+ 'DELIMITER',
+ 'DELIMITERS',
+ 'DESC',
+ 'DICTIONARY',
+ 'DISABLE',
+ 'DISCARD',
+ 'DISTINCT',
+ 'DO',
+ 'DOCUMENT',
+ 'DOMAIN',
+ 'DOUBLE',
+ 'DROP',
+ 'EACH',
+ 'ELSE',
+ 'ENABLE',
+ 'ENCODING',
+ 'ENCRYPTED',
+ 'END',
+ 'ENUM',
+ 'ESCAPE',
+ 'EVENT',
+ 'EXCEPT',
+ 'EXCLUDE',
+ 'EXCLUDING',
+ 'EXCLUSIVE',
+ 'EXECUTE',
+ 'EXISTS',
+ 'EXPLAIN',
+ 'EXTENSION',
+ 'EXTERNAL',
+ 'EXTRACT',
+ 'FALSE',
+ 'FAMILY',
+ 'FETCH',
+ 'FILTER',
+ 'FIRST',
+ 'FLOAT',
+ 'FOLLOWING',
+ 'FOR',
+ 'FORCE',
+ 'FOREIGN',
+ 'FORWARD',
+ 'FREEZE',
+ 'FROM',
+ 'FULL',
+ 'FUNCTION',
+ 'FUNCTIONS',
+ 'GLOBAL',
+ 'GRANT',
+ 'GRANTED',
+ 'GREATEST',
+ 'GROUP',
+ 'HANDLER',
+ 'HAVING',
+ 'HEADER',
+ 'HOLD',
+ 'HOUR',
+ 'IDENTITY',
+ 'IF',
+ 'ILIKE',
+ 'IMMEDIATE',
+ 'IMMUTABLE',
+ 'IMPLICIT',
+ 'IN',
+ 'INCLUDING',
+ 'INCREMENT',
+ 'INDEX',
+ 'INDEXES',
+ 'INHERIT',
+ 'INHERITS',
+ 'INITIALLY',
+ 'INLINE',
+ 'INNER',
+ 'INOUT',
+ 'INPUT',
+ 'INSENSITIVE',
+ 'INSERT',
+ 'INSTEAD',
+ 'INT',
+ 'INTEGER',
+ 'INTERSECT',
+ 'INTERVAL',
+ 'INTO',
+ 'INVOKER',
+ 'IS',
+ 'ISNULL',
+ 'ISOLATION',
+ 'JOIN',
+ 'KEY',
+ 'LABEL',
+ 'LANGUAGE',
+ 'LARGE',
+ 'LAST',
+ 'LATERAL',
+ 'LC_COLLATE',
+ 'LC_CTYPE',
+ 'LEADING',
+ 'LEAKPROOF',
+ 'LEAST',
+ 'LEFT',
+ 'LEVEL',
+ 'LIKE',
+ 'LIMIT',
+ 'LISTEN',
+ 'LOAD',
+ 'LOCAL',
+ 'LOCALTIME',
+ 'LOCALTIMESTAMP',
+ 'LOCATION',
+ 'LOCK',
+ 'MAPPING',
+ 'MATCH',
+ 'MATERIALIZED',
+ 'MAXVALUE',
+ 'MINUTE',
+ 'MINVALUE',
+ 'MODE',
+ 'MONTH',
+ 'MOVE',
+ 'NAME',
+ 'NAMES',
+ 'NATIONAL',
+ 'NATURAL',
+ 'NCHAR',
+ 'NEXT',
+ 'NO',
+ 'NONE',
+ 'NOT',
+ 'NOTHING',
+ 'NOTIFY',
+ 'NOTNULL',
+ 'NOWAIT',
+ 'NULL',
+ 'NULLIF',
+ 'NULLS',
+ 'NUMERIC',
+ 'OBJECT',
+ 'OF',
+ 'OFF',
+ 'OFFSET',
+ 'OIDS',
+ 'ON',
+ 'ONLY',
+ 'OPERATOR',
+ 'OPTION',
+ 'OPTIONS',
+ 'OR',
+ 'ORDER',
+ 'ORDINALITY',
+ 'OUT',
+ 'OUTER',
+ 'OVER',
+ 'OVERLAPS',
+ 'OVERLAY',
+ 'OWNED',
+ 'OWNER',
+ 'PARSER',
+ 'PARTIAL',
+ 'PARTITION',
+ 'PASSING',
+ 'PASSWORD',
+ 'PLACING',
+ 'PLANS',
+ 'POLICY',
+ 'POSITION',
+ 'PRECEDING',
+ 'PRECISION',
+ 'PREPARE',
+ 'PREPARED',
+ 'PRESERVE',
+ 'PRIMARY',
+ 'PRIOR',
+ 'PRIVILEGES',
+ 'PROCEDURAL',
+ 'PROCEDURE',
+ 'PROGRAM',
+ 'QUOTE',
+ 'RANGE',
+ 'READ',
+ 'REAL',
+ 'REASSIGN',
+ 'RECHECK',
+ 'RECURSIVE',
+ 'REF',
+ 'REFERENCES',
+ 'REFRESH',
+ 'REINDEX',
+ 'RELATIVE',
+ 'RELEASE',
+ 'RENAME',
+ 'REPEATABLE',
+ 'REPLACE',
+ 'REPLICA',
+ 'RESET',
+ 'RESTART',
+ 'RESTRICT',
+ 'RETURNING',
+ 'RETURNS',
+ 'REVOKE',
+ 'RIGHT',
+ 'ROLE',
+ 'ROLLBACK',
+ 'ROW',
+ 'ROWS',
+ 'RULE',
+ 'SAVEPOINT',
+ 'SCHEMA',
+ 'SCROLL',
+ 'SEARCH',
+ 'SECOND',
+ 'SECURITY',
+ 'SELECT',
+ 'SEQUENCE',
+ 'SEQUENCES',
+ 'SERIALIZABLE',
+ 'SERVER',
+ 'SESSION',
+ 'SESSION_USER',
+ 'SET',
+ 'SETOF',
+ 'SHARE',
+ 'SHOW',
+ 'SIMILAR',
+ 'SIMPLE',
+ 'SMALLINT',
+ 'SNAPSHOT',
+ 'SOME',
+ 'STABLE',
+ 'STANDALONE',
+ 'START',
+ 'STATEMENT',
+ 'STATISTICS',
+ 'STDIN',
+ 'STDOUT',
+ 'STORAGE',
+ 'STRICT',
+ 'STRIP',
+ 'SUBSTRING',
+ 'SYMMETRIC',
+ 'SYSID',
+ 'SYSTEM',
+ 'TABLE',
+ 'TABLES',
+ 'TABLESPACE',
+ 'TEMP',
+ 'TEMPLATE',
+ 'TEMPORARY',
+ 'TEXT',
+ 'THEN',
+ 'TIME',
+ 'TIMESTAMP',
+ 'TO',
+ 'TRAILING',
+ 'TRANSACTION',
+ 'TREAT',
+ 'TRIGGER',
+ 'TRIM',
+ 'TRUE',
+ 'TRUNCATE',
+ 'TRUSTED',
+ 'TYPE',
+ 'TYPES',
+ 'UNBOUNDED',
+ 'UNCOMMITTED',
+ 'UNENCRYPTED',
+ 'UNION',
+ 'UNIQUE',
+ 'UNKNOWN',
+ 'UNLISTEN',
+ 'UNLOGGED',
+ 'UNTIL',
+ 'UPDATE',
+ 'USER',
+ 'USING',
+ 'VACUUM',
+ 'VALID',
+ 'VALIDATE',
+ 'VALIDATOR',
+ 'VALUE',
+ 'VALUES',
+ 'VARCHAR',
+ 'VARIADIC',
+ 'VARYING',
+ 'VERBOSE',
+ 'VERSION',
+ 'VIEW',
+ 'VIEWS',
+ 'VOLATILE',
+ 'WHEN',
+ 'WHERE',
+ 'WHITESPACE',
+ 'WINDOW',
+ 'WITH',
+ 'WITHIN',
+ 'WITHOUT',
+ 'WORK',
+ 'WRAPPER',
+ 'WRITE',
+ 'XML',
+ 'XMLATTRIBUTES',
+ 'XMLCONCAT',
+ 'XMLELEMENT',
+ 'XMLEXISTS',
+ 'XMLFOREST',
+ 'XMLPARSE',
+ 'XMLPI',
+ 'XMLROOT',
+ 'XMLSERIALIZE',
+ 'YEAR',
+ 'YES',
+ 'ZONE',
+)
+
+DATATYPES = (
+ 'bigint',
+ 'bigserial',
+ 'bit',
+ 'bit varying',
+ 'bool',
+ 'boolean',
+ 'box',
+ 'bytea',
+ 'char',
+ 'character',
+ 'character varying',
+ 'cidr',
+ 'circle',
+ 'date',
+ 'decimal',
+ 'double precision',
+ 'float4',
+ 'float8',
+ 'inet',
+ 'int',
+ 'int2',
+ 'int4',
+ 'int8',
+ 'integer',
+ 'interval',
+ 'json',
+ 'jsonb',
+ 'line',
+ 'lseg',
+ 'macaddr',
+ 'money',
+ 'numeric',
+ 'path',
+ 'pg_lsn',
+ 'point',
+ 'polygon',
+ 'real',
+ 'serial',
+ 'serial2',
+ 'serial4',
+ 'serial8',
+ 'smallint',
+ 'smallserial',
+ 'text',
+ 'time',
+ 'timestamp',
+ 'timestamptz',
+ 'timetz',
+ 'tsquery',
+ 'tsvector',
+ 'txid_snapshot',
+ 'uuid',
+ 'varbit',
+ 'varchar',
+ 'with time zone',
+ 'without time zone',
+ 'xml',
+)
+
+PSEUDO_TYPES = (
+ 'any',
+ 'anyelement',
+ 'anyarray',
+ 'anynonarray',
+ 'anyenum',
+ 'anyrange',
+ 'cstring',
+ 'internal',
+ 'language_handler',
+ 'fdw_handler',
+ 'record',
+ 'trigger',
+ 'void',
+ 'opaque',
+)
# Remove 'trigger' from types
-PSEUDO_TYPES = sorted(set(PSEUDO_TYPES) - set(map(str.lower, KEYWORDS)))
+PSEUDO_TYPES = tuple(sorted(set(PSEUDO_TYPES) - set(map(str.lower, KEYWORDS))))
-PLPGSQL_KEYWORDS = [
+PLPGSQL_KEYWORDS = (
'ALIAS', 'CONSTANT', 'DIAGNOSTICS', 'ELSIF', 'EXCEPTION', 'EXIT',
'FOREACH', 'GET', 'LOOP', 'NOTICE', 'OPEN', 'PERFORM', 'QUERY', 'RAISE',
'RETURN', 'REVERSE', 'SQLSTATE', 'WHILE',
- ]
+)
+
+
+if __name__ == '__main__': # pragma: no cover
+ import re
+ try:
+ from urllib import urlopen
+ except ImportError:
+ from urllib.request import urlopen
+
+ from pygments.util import format_lines
+
+ # One man's constant is another man's variable.
+ SOURCE_URL = 'https://github.com/postgres/postgres/raw/master'
+ KEYWORDS_URL = SOURCE_URL + '/doc/src/sgml/keywords.sgml'
+ DATATYPES_URL = SOURCE_URL + '/doc/src/sgml/datatype.sgml'
+
+ def update_myself():
+ data_file = list(urlopen(DATATYPES_URL))
+ datatypes = parse_datatypes(data_file)
+ pseudos = parse_pseudos(data_file)
+
+ keywords = parse_keywords(urlopen(KEYWORDS_URL))
+ update_consts(__file__, 'DATATYPES', datatypes)
+ update_consts(__file__, 'PSEUDO_TYPES', pseudos)
+ update_consts(__file__, 'KEYWORDS', keywords)
+
+ def parse_keywords(f):
+ kw = []
+ for m in re.finditer(
+ r'\s*<entry><token>([^<]+)</token></entry>\s*'
+ r'<entry>([^<]+)</entry>', f.read()):
+ kw.append(m.group(1))
+
+ if not kw:
+ raise ValueError('no keyword found')
+
+ kw.sort()
+ return kw
+
+ def parse_datatypes(f):
+ dt = set()
+ for line in f:
+ if '<sect1' in line:
+ break
+ if '<entry><type>' not in line:
+ continue
+
+ # Parse a string such as
+ # time [ (<replaceable>p</replaceable>) ] [ without time zone ]
+ # into types "time" and "without time zone"
+
+ # remove all the tags
+ line = re.sub("<replaceable>[^<]+</replaceable>", "", line)
+ line = re.sub("<[^>]+>", "", line)
+
+ # Drop the parts containing braces
+ for tmp in [t for tmp in line.split('[')
+ for t in tmp.split(']') if "(" not in t]:
+ for t in tmp.split(','):
+ t = t.strip()
+ if not t: continue
+ dt.add(" ".join(t.split()))
+
+ dt = list(dt)
+ dt.sort()
+ return dt
+
+ def parse_pseudos(f):
+ dt = []
+ re_start = re.compile(r'\s*<table id="datatype-pseudotypes-table">')
+ re_entry = re.compile(r'\s*<entry><type>([^<]+)</></entry>')
+ re_end = re.compile(r'\s*</table>')
+
+ f = iter(f)
+ for line in f:
+ if re_start.match(line) is not None:
+ break
+ else:
+ raise ValueError('pseudo datatypes table not found')
+
+ for line in f:
+ m = re_entry.match(line)
+ if m is not None:
+ dt.append(m.group(1))
+
+ if re_end.match(line) is not None:
+ break
+ else:
+ raise ValueError('end of pseudo datatypes table not found')
+
+ if not dt:
+ raise ValueError('pseudo datatypes not found')
+
+ return dt
+
+ def update_consts(filename, constname, content):
+ with open(filename) as f:
+ data = f.read()
+
+ # Line to start/end inserting
+ re_match = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % constname, re.M | re.S)
+ m = re_match.search(data)
+ if not m:
+ raise ValueError('Could not find existing definition for %s' %
+ (constname,))
+
+ new_block = format_lines(constname, content)
+ data = data[:m.start()] + new_block + data[m.end():]
+
+ with open(filename, 'w') as f:
+ f.write(data)
-if __name__ == '__main__':
update_myself()
-
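For reference, generated tuples like KEYWORDS and DATATYPES are typically wired into a RegexLexer through pygments.lexer.words, which escapes each entry and compiles the whole list into one alternation regex. The toy lexer below is a sketch under that assumption, not the actual PostgresLexer rule set.

# Sketch only: a toy lexer showing how generated builtin tuples are
# typically consumed.  words() escapes the entries and builds one regex.
import re

from pygments.lexer import RegexLexer, words
from pygments.lexers._postgres_builtins import DATATYPES, KEYWORDS
from pygments.token import Keyword, Name, Text

class MiniSqlLexer(RegexLexer):
    name = 'MiniSQL'          # toy name, not a registered Pygments lexer
    flags = re.IGNORECASE
    tokens = {
        'root': [
            (r'\s+', Text),
            (words(DATATYPES, suffix=r'\b'), Name.Builtin),
            (words(KEYWORDS, suffix=r'\b'), Keyword),
            (r'[a-z_][a-z0-9_$]*', Name),
            (r'.', Text),
        ],
    }

if __name__ == '__main__':
    for tok, val in MiniSqlLexer().get_tokens("SELECT now() AT TIME ZONE 'UTC'"):
        print(tok, repr(val))

Keeping the constants as flat tuples, as this commit converts them to, is what makes a one-line words(...) rule possible and lets update_consts rewrite the block in place without touching the lexer rules.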
diff --git a/pygments/lexers/_scilab_builtins.py b/pygments/lexers/_scilab_builtins.py
index ed0dc819..85c99966 100644
--- a/pygments/lexers/_scilab_builtins.py
+++ b/pygments/lexers/_scilab_builtins.py
@@ -5,36 +5,3090 @@
Builtin list for the ScilabLexer.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-# These lists are generated automatically.
-# Run the following in a Scilab script:
-#
-# varType=["functions", "commands", "macros", "variables" ];
-# fd = mopen('list.txt','wt');
-#
-# for j=1:size(varType,"*")
-# myStr="";
-# a=completion("",varType(j));
-# myStr=varType(j)+"_kw = [";
-# for i=1:size(a,"*")
-# myStr = myStr + """" + a(i) + """";
-# if size(a,"*") <> i then
-# myStr = myStr + ","; end
-# end
-# myStr = myStr + "]";
-# mputl(myStr,fd);
-# end
-# mclose(fd);
-#
-# Then replace "$" by "\\$" manually.
-
-functions_kw = ["%XMLAttr_6","%XMLAttr_e","%XMLAttr_i_XMLElem","%XMLAttr_length","%XMLAttr_p","%XMLAttr_size","%XMLDoc_6","%XMLDoc_e","%XMLDoc_i_XMLList","%XMLDoc_p","%XMLElem_6","%XMLElem_e","%XMLElem_i_XMLDoc","%XMLElem_i_XMLElem","%XMLElem_i_XMLList","%XMLElem_p","%XMLList_6","%XMLList_e","%XMLList_i_XMLElem","%XMLList_i_XMLList","%XMLList_length","%XMLList_p","%XMLList_size","%XMLNs_6","%XMLNs_e","%XMLNs_i_XMLElem","%XMLNs_p","%XMLSet_6","%XMLSet_e","%XMLSet_length","%XMLSet_p","%XMLSet_size","%XMLValid_p","%b_i_XMLList","%c_i_XMLAttr","%c_i_XMLDoc","%c_i_XMLElem","%c_i_XMLList","%ce_i_XMLList","%fptr_i_XMLList","%h_i_XMLList","%hm_i_XMLList","%i_abs","%i_cumprod","%i_cumsum","%i_diag","%i_i_XMLList","%i_matrix","%i_max","%i_maxi","%i_min","%i_mini","%i_mput","%i_p","%i_prod","%i_sum","%i_tril","%i_triu","%ip_i_XMLList","%l_i_XMLList","%lss_i_XMLList","%mc_i_XMLList","%msp_full","%msp_i_XMLList","%msp_spget","%p_i_XMLList","%ptr_i_XMLList","%r_i_XMLList","%s_i_XMLList","%sp_i_XMLList","%spb_i_XMLList","%st_i_XMLList","Calendar","ClipBoard","Matplot","Matplot1","PlaySound","TCL_DeleteInterp","TCL_DoOneEvent","TCL_EvalFile","TCL_EvalStr","TCL_ExistArray","TCL_ExistInterp","TCL_ExistVar","TCL_GetVar","TCL_GetVersion","TCL_SetVar","TCL_UnsetVar","TCL_UpVar","_","_code2str","_str2code","about","abs","acos","addcb","addf","addhistory","addinter","amell","and","argn","arl2_ius","ascii","asin","atan","backslash","balanc","banner","base2dec","basename","bdiag","beep","besselh","besseli","besselj","besselk","bessely","beta","bezout","bfinit","blkfc1i","blkslvi","bool2s","browsehistory","browsevar","bsplin3val","buildDocv2","buildouttb","bvode","c_link","calerf","call","callblk","captions","cd","cdfbet","cdfbin","cdfchi","cdfchn","cdff","cdffnc","cdfgam","cdfnbn","cdfnor","cdfpoi","cdft","ceil","champ","champ1","chdir","chol","clc","clean","clear","clear_pixmap","clearfun","clearglobal","closeEditor","closeXcos","code2str","coeff","comp","completion","conj","contour2di","contr","conv2","convstr","copy","copyfile","corr","cos","coserror","createdir","cshep2d","ctree2","ctree3","ctree4","cumprod","cumsum","curblock","curblockc","dasrt","dassl","data2sig","debug","dec2base","deff","definedfields","degree","delbpt","delete","deletefile","delip","delmenu","det","dgettext","dhinf","diag","diary","diffobjs","disp","dispbpt","displayhistory","disposefftwlibrary","dlgamma","dnaupd","dneupd","double","draw","drawaxis","drawlater","drawnow","dsaupd","dsearch","dseupd","duplicate","editor","editvar","emptystr","end_scicosim","ereduc","errcatch","errclear","error","eval_cshep2d","exec","execstr","exists","exit","exp","expm","exportUI","export_to_hdf5","eye","fadj2sp","fec","feval","fft","fftw","fftw_flags","fftw_forget_wisdom","fftwlibraryisloaded","file","filebrowser","fileext","fileinfo","fileparts","filesep","find","findBD","findfiles","floor","format","fort","fprintfMat","freq","frexp","fromc","fromjava","fscanfMat","fsolve","fstair","full","fullpath","funcprot","funptr","gamma","gammaln","geom3d","get","get_absolute_file_path","get_fftw_wisdom","getblocklabel","getcallbackobject","getdate","getdebuginfo","getdefaultlanguage","getdrives","getdynlibext","getenv","getfield","gethistory","gethistoryfile","getinstalledlookandfeels","getio","getlanguage","getlongpathname","getlookandfeel","getmd5","getmemory","getmodules","getos","getpid","getrelativefilename","getscicosvars","getscilabmode","getshortpathname","gettext","getvariablesonstack","getversion","glist","global","glue","grand","grayplot","grep","gsort
","gstacksize","havewindow","helpbrowser","hess","hinf","historymanager","historysize","host","iconvert","iconvert","ieee","ilib_verbose","imag","impl","import_from_hdf5","imult","inpnvi","int","int16","int2d","int32","int3d","int8","interp","interp2d","interp3d","intg","intppty","inttype","inv","is_handle_valid","isalphanum","isascii","isdef","isdigit","isdir","isequal","isequalbitwise","iserror","isfile","isglobal","isletter","isreal","iswaitingforinput","javaclasspath","javalibrarypath","kron","lasterror","ldiv","ldivf","legendre","length","lib","librarieslist","libraryinfo","linear_interpn","lines","link","linmeq","list","load","loadScicos","loadfftwlibrary","loadhistory","log","log1p","lsq","lsq_splin","lsqrsolve","lsslist","lstcat","lstsize","ltitr","lu","ludel","lufact","luget","lusolve","macr2lst","macr2tree","matfile_close","matfile_listvar","matfile_open","matfile_varreadnext","matfile_varwrite","matrix","max","maxfiles","mclearerr","mclose","meof","merror","messagebox","mfprintf","mfscanf","mget","mgeti","mgetl","mgetstr","min","mlist","mode","model2blk","mopen","move","movefile","mprintf","mput","mputl","mputstr","mscanf","mseek","msprintf","msscanf","mtell","mtlb_mode","mtlb_sparse","mucomp","mulf","nearfloat","newaxes","newest","newfun","nnz","notify","number_properties","ode","odedc","ones","opentk","optim","or","ordmmd","parallel_concurrency","parallel_run","param3d","param3d1","part","pathconvert","pathsep","phase_simulation","plot2d","plot2d1","plot2d2","plot2d3","plot2d4","plot3d","plot3d1","pointer_xproperty","poly","ppol","pppdiv","predef","print","printf","printfigure","printsetupbox","prod","progressionbar","prompt","pwd","qld","qp_solve","qr","raise_window","rand","rankqr","rat","rcond","rdivf","read","read4b","readb","readgateway","readmps","real","realtime","realtimeinit","regexp","relocate_handle","remez","removedir","removelinehistory","res_with_prec","resethistory","residu","resume","return","ricc","ricc_old","rlist","roots","rotate_axes","round","rpem","rtitr","rubberbox","save","saveafterncommands","saveconsecutivecommands","savehistory","schur","sci_haltscicos","sci_tree2","sci_tree3","sci_tree4","sciargs","scicos_debug","scicos_debug_count","scicos_time","scicosim","scinotes","sctree","semidef","set","set_blockerror","set_fftw_wisdom","set_xproperty","setbpt","setdefaultlanguage","setenv","setfield","sethistoryfile","setlanguage","setlookandfeel","setmenu","sfact","sfinit","show_pixmap","show_window","showalluimenushandles","sident","sig2data","sign","simp","simp_mode","sin","size","slash","sleep","sorder","sparse","spchol","spcompack","spec","spget","splin","splin2d","splin3d","spones","sprintf","sqrt","stacksize","str2code","strcat","strchr","strcmp","strcspn","strindex","string","stringbox","stripblanks","strncpy","strrchr","strrev","strsplit","strspn","strstr","strsubst","strtod","strtok","subf","sum","svd","swap_handles","symfcti","syredi","system_getproperty","system_setproperty","ta2lpd","tan","taucs_chdel","taucs_chfact","taucs_chget","taucs_chinfo","taucs_chsolve","tempname","testmatrix","timer","tlist","tohome","tokens","toolbar","toprint","tr_zer","tril","triu","type","typename","uiDisplayTree","uicontextmenu","uicontrol","uigetcolor","uigetdir","uigetfile","uigetfont","uimenu","uint16","uint32","uint8","uipopup","uiputfile","uiwait","ulink","umf_ludel","umf_lufact","umf_luget","umf_luinfo","umf_lusolve","umfpack","unglue","unix","unsetmenu","unzoom","updatebrowsevar","usecanvas","user","var2vec","varn","vec2var","waitbar","warnBlockByUID","warnin
g","what","where","whereis","who","winsid","with_embedded_jre","with_module","writb","write","write4b","x_choose","x_choose_modeless","x_dialog","x_mdialog","xarc","xarcs","xarrows","xchange","xchoicesi","xclick","xcos","xcosAddToolsMenu","xcosConfigureXmlFile","xcosDiagramToScilab","xcosPalCategoryAdd","xcosPalDelete","xcosPalDisable","xcosPalEnable","xcosPalGenerateIcon","xcosPalLoad","xcosPalMove","xcosUpdateBlock","xdel","xfarc","xfarcs","xfpoly","xfpolys","xfrect","xget","xgetech","xgetmouse","xgraduate","xgrid","xlfont","xls_open","xls_read","xmlAddNs","xmlAsNumber","xmlAsText","xmlDTD","xmlDelete","xmlDocument","xmlDump","xmlElement","xmlFormat","xmlGetNsByHref","xmlGetNsByPrefix","xmlGetOpenDocs","xmlIsValidObject","xmlNs","xmlRead","xmlReadStr","xmlRelaxNG","xmlRemove","xmlSchema","xmlSetAttributes","xmlValidate","xmlWrite","xmlXPath","xname","xpause","xpoly","xpolys","xrect","xrects","xs2bmp","xs2eps","xs2gif","xs2jpg","xs2pdf","xs2png","xs2ppm","xs2ps","xs2svg","xsegs","xset","xsetech","xstring","xstringb","xtitle","zeros","znaupd","zneupd","zoom_rect"]
-
-commands_kw = ["abort","apropos","break","case","catch","clc","clear","continue","do","else","elseif","end","endfunction","exit","for","function","help","if","pause","pwd","quit","resume","return","select","then","try","what","while","who"]
-
-macros_kw = ["%0_i_st","%3d_i_h","%Block_xcosUpdateBlock","%TNELDER_p","%TNELDER_string","%TNMPLOT_p","%TNMPLOT_string","%TOPTIM_p","%TOPTIM_string","%TSIMPLEX_p","%TSIMPLEX_string","%_gsort","%_strsplit","%ar_p","%asn","%b_a_b","%b_a_s","%b_c_s","%b_c_spb","%b_cumprod","%b_cumsum","%b_d_s","%b_diag","%b_e","%b_f_s","%b_f_spb","%b_g_s","%b_g_spb","%b_h_s","%b_h_spb","%b_i_b","%b_i_ce","%b_i_h","%b_i_hm","%b_i_s","%b_i_sp","%b_i_spb","%b_i_st","%b_iconvert","%b_l_b","%b_l_s","%b_m_b","%b_m_s","%b_matrix","%b_n_hm","%b_o_hm","%b_p_s","%b_prod","%b_r_b","%b_r_s","%b_s_b","%b_s_s","%b_string","%b_sum","%b_tril","%b_triu","%b_x_b","%b_x_s","%c_a_c","%c_b_c","%c_b_s","%c_diag","%c_e","%c_eye","%c_f_s","%c_i_c","%c_i_ce","%c_i_h","%c_i_hm","%c_i_lss","%c_i_r","%c_i_s","%c_i_st","%c_matrix","%c_n_l","%c_n_st","%c_o_l","%c_o_st","%c_ones","%c_rand","%c_tril","%c_triu","%cblock_c_cblock","%cblock_c_s","%cblock_e","%cblock_f_cblock","%cblock_p","%cblock_size","%ce_6","%ce_c_ce","%ce_e","%ce_f_ce","%ce_i_ce","%ce_i_s","%ce_i_st","%ce_matrix","%ce_p","%ce_size","%ce_string","%ce_t","%champdat_i_h","%choose","%diagram_xcos","%dir_p","%fptr_i_st","%grayplot_i_h","%h_i_st","%hm_1_hm","%hm_1_s","%hm_2_hm","%hm_2_s","%hm_3_hm","%hm_3_s","%hm_4_hm","%hm_4_s","%hm_5","%hm_a_hm","%hm_a_r","%hm_a_s","%hm_abs","%hm_and","%hm_bool2s","%hm_c_hm","%hm_ceil","%hm_conj","%hm_cos","%hm_cumprod","%hm_cumsum","%hm_d_hm","%hm_d_s","%hm_degree","%hm_e","%hm_exp","%hm_f_hm","%hm_fft","%hm_find","%hm_floor","%hm_g_hm","%hm_h_hm","%hm_i_b","%hm_i_ce","%hm_i_hm","%hm_i_i","%hm_i_p","%hm_i_r","%hm_i_s","%hm_i_st","%hm_iconvert","%hm_imag","%hm_int","%hm_isnan","%hm_isreal","%hm_j_hm","%hm_j_s","%hm_k_hm","%hm_k_s","%hm_log","%hm_m_p","%hm_m_r","%hm_m_s","%hm_matrix","%hm_maxi","%hm_mean","%hm_median","%hm_mini","%hm_n_b","%hm_n_c","%hm_n_hm","%hm_n_i","%hm_n_p","%hm_n_s","%hm_o_b","%hm_o_c","%hm_o_hm","%hm_o_i","%hm_o_p","%hm_o_s","%hm_ones","%hm_or","%hm_p","%hm_prod","%hm_q_hm","%hm_r_s","%hm_rand","%hm_real","%hm_round","%hm_s","%hm_s_hm","%hm_s_r","%hm_s_s","%hm_sign","%hm_sin","%hm_size","%hm_sqrt","%hm_st_deviation","%hm_string","%hm_sum","%hm_x_hm","%hm_x_p","%hm_x_s","%hm_zeros","%i_1_s","%i_2_s","%i_3_s","%i_4_s","%i_Matplot","%i_a_i","%i_a_s","%i_and","%i_ascii","%i_b_s","%i_bezout","%i_champ","%i_champ1","%i_contour","%i_contour2d","%i_d_i","%i_d_s","%i_e","%i_fft","%i_g_i","%i_gcd","%i_h_i","%i_i_ce","%i_i_h","%i_i_hm","%i_i_i","%i_i_s","%i_i_st","%i_j_i","%i_j_s","%i_l_s","%i_lcm","%i_length","%i_m_i","%i_m_s","%i_mfprintf","%i_mprintf","%i_msprintf","%i_n_s","%i_o_s","%i_or","%i_p_i","%i_p_s","%i_plot2d","%i_plot2d1","%i_plot2d2","%i_q_s","%i_r_i","%i_r_s","%i_round","%i_s_i","%i_s_s","%i_sign","%i_string","%i_x_i","%i_x_s","%ip_a_s","%ip_i_st","%ip_m_s","%ip_n_ip","%ip_o_ip","%ip_p","%ip_s_s","%ip_string","%k","%l_i_h","%l_i_s","%l_i_st","%l_isequal","%l_n_c","%l_n_l","%l_n_m","%l_n_p","%l_n_s","%l_n_st","%l_o_c","%l_o_l","%l_o_m","%l_o_p","%l_o_s","%l_o_st","%lss_a_lss","%lss_a_p","%lss_a_r","%lss_a_s","%lss_c_lss","%lss_c_p","%lss_c_r","%lss_c_s","%lss_e","%lss_eye","%lss_f_lss","%lss_f_p","%lss_f_r","%lss_f_s","%lss_i_ce","%lss_i_lss","%lss_i_p","%lss_i_r","%lss_i_s","%lss_i_st","%lss_inv","%lss_l_lss","%lss_l_p","%lss_l_r","%lss_l_s","%lss_m_lss","%lss_m_p","%lss_m_r","%lss_m_s","%lss_n_lss","%lss_n_p","%lss_n_r","%lss_n_s","%lss_norm","%lss_o_lss","%lss_o_p","%lss_o_r","%lss_o_s","%lss_ones","%lss_r_lss","%lss_r_p","%lss_r_r","%lss_r_s","%lss_rand","%lss_s","%lss_s_lss","%lss_s_p","%lss_s_r","%lss_s_s","%ls
s_size","%lss_t","%lss_v_lss","%lss_v_p","%lss_v_r","%lss_v_s","%lt_i_s","%m_n_l","%m_o_l","%mc_i_h","%mc_i_s","%mc_i_st","%mc_n_st","%mc_o_st","%mc_string","%mps_p","%mps_string","%msp_a_s","%msp_abs","%msp_e","%msp_find","%msp_i_s","%msp_i_st","%msp_length","%msp_m_s","%msp_maxi","%msp_n_msp","%msp_nnz","%msp_o_msp","%msp_p","%msp_sparse","%msp_spones","%msp_t","%p_a_lss","%p_a_r","%p_c_lss","%p_c_r","%p_cumprod","%p_cumsum","%p_d_p","%p_d_r","%p_d_s","%p_det","%p_e","%p_f_lss","%p_f_r","%p_i_ce","%p_i_h","%p_i_hm","%p_i_lss","%p_i_p","%p_i_r","%p_i_s","%p_i_st","%p_inv","%p_j_s","%p_k_p","%p_k_r","%p_k_s","%p_l_lss","%p_l_p","%p_l_r","%p_l_s","%p_m_hm","%p_m_lss","%p_m_r","%p_matrix","%p_n_l","%p_n_lss","%p_n_r","%p_o_l","%p_o_lss","%p_o_r","%p_o_sp","%p_p_s","%p_prod","%p_q_p","%p_q_r","%p_q_s","%p_r_lss","%p_r_p","%p_r_r","%p_r_s","%p_s_lss","%p_s_r","%p_simp","%p_string","%p_sum","%p_v_lss","%p_v_p","%p_v_r","%p_v_s","%p_x_hm","%p_x_r","%p_y_p","%p_y_r","%p_y_s","%p_z_p","%p_z_r","%p_z_s","%r_a_hm","%r_a_lss","%r_a_p","%r_a_r","%r_a_s","%r_c_lss","%r_c_p","%r_c_r","%r_c_s","%r_clean","%r_cumprod","%r_d_p","%r_d_r","%r_d_s","%r_det","%r_diag","%r_e","%r_eye","%r_f_lss","%r_f_p","%r_f_r","%r_f_s","%r_i_ce","%r_i_hm","%r_i_lss","%r_i_p","%r_i_r","%r_i_s","%r_i_st","%r_inv","%r_j_s","%r_k_p","%r_k_r","%r_k_s","%r_l_lss","%r_l_p","%r_l_r","%r_l_s","%r_m_hm","%r_m_lss","%r_m_p","%r_m_r","%r_m_s","%r_matrix","%r_n_lss","%r_n_p","%r_n_r","%r_n_s","%r_norm","%r_o_lss","%r_o_p","%r_o_r","%r_o_s","%r_ones","%r_p","%r_p_s","%r_prod","%r_q_p","%r_q_r","%r_q_s","%r_r_lss","%r_r_p","%r_r_r","%r_r_s","%r_rand","%r_s","%r_s_hm","%r_s_lss","%r_s_p","%r_s_r","%r_s_s","%r_simp","%r_size","%r_string","%r_sum","%r_t","%r_tril","%r_triu","%r_v_lss","%r_v_p","%r_v_r","%r_v_s","%r_x_p","%r_x_r","%r_x_s","%r_y_p","%r_y_r","%r_y_s","%r_z_p","%r_z_r","%r_z_s","%s_1_hm","%s_1_i","%s_2_hm","%s_2_i","%s_3_hm","%s_3_i","%s_4_hm","%s_4_i","%s_5","%s_a_b","%s_a_hm","%s_a_i","%s_a_ip","%s_a_lss","%s_a_msp","%s_a_r","%s_a_sp","%s_and","%s_b_i","%s_b_s","%s_c_b","%s_c_cblock","%s_c_lss","%s_c_r","%s_c_sp","%s_d_b","%s_d_i","%s_d_p","%s_d_r","%s_d_sp","%s_e","%s_f_b","%s_f_cblock","%s_f_lss","%s_f_r","%s_f_sp","%s_g_b","%s_g_s","%s_h_b","%s_h_s","%s_i_b","%s_i_c","%s_i_ce","%s_i_h","%s_i_hm","%s_i_i","%s_i_lss","%s_i_p","%s_i_r","%s_i_s","%s_i_sp","%s_i_spb","%s_i_st","%s_j_i","%s_k_hm","%s_k_p","%s_k_r","%s_k_sp","%s_l_b","%s_l_hm","%s_l_i","%s_l_lss","%s_l_p","%s_l_r","%s_l_s","%s_l_sp","%s_m_b","%s_m_hm","%s_m_i","%s_m_ip","%s_m_lss","%s_m_msp","%s_m_r","%s_matrix","%s_n_hm","%s_n_i","%s_n_l","%s_n_lss","%s_n_r","%s_n_st","%s_o_hm","%s_o_i","%s_o_l","%s_o_lss","%s_o_r","%s_o_st","%s_or","%s_p_b","%s_p_i","%s_pow","%s_q_hm","%s_q_i","%s_q_p","%s_q_r","%s_q_sp","%s_r_b","%s_r_i","%s_r_lss","%s_r_p","%s_r_r","%s_r_s","%s_r_sp","%s_s_b","%s_s_hm","%s_s_i","%s_s_ip","%s_s_lss","%s_s_r","%s_s_sp","%s_simp","%s_v_lss","%s_v_p","%s_v_r","%s_v_s","%s_x_b","%s_x_hm","%s_x_i","%s_x_r","%s_y_p","%s_y_r","%s_y_sp","%s_z_p","%s_z_r","%s_z_sp","%sn","%sp_a_s","%sp_a_sp","%sp_and","%sp_c_s","%sp_ceil","%sp_cos","%sp_cumprod","%sp_cumsum","%sp_d_s","%sp_d_sp","%sp_diag","%sp_e","%sp_exp","%sp_f_s","%sp_floor","%sp_gsort","%sp_i_ce","%sp_i_h","%sp_i_s","%sp_i_sp","%sp_i_st","%sp_int","%sp_inv","%sp_k_s","%sp_k_sp","%sp_l_s","%sp_l_sp","%sp_length","%sp_norm","%sp_or","%sp_p_s","%sp_prod","%sp_q_s","%sp_q_sp","%sp_r_s","%sp_r_sp","%sp_round","%sp_s_s","%sp_s_sp","%sp_sin","%sp_sqrt","%sp_string","%sp_sum","%sp_tril","%sp_triu","%sp_y_s",
"%sp_y_sp","%sp_z_s","%sp_z_sp","%spb_and","%spb_c_b","%spb_cumprod","%spb_cumsum","%spb_diag","%spb_e","%spb_f_b","%spb_g_b","%spb_g_spb","%spb_h_b","%spb_h_spb","%spb_i_b","%spb_i_ce","%spb_i_h","%spb_i_st","%spb_or","%spb_prod","%spb_sum","%spb_tril","%spb_triu","%st_6","%st_c_st","%st_e","%st_f_st","%st_i_b","%st_i_c","%st_i_fptr","%st_i_h","%st_i_i","%st_i_ip","%st_i_lss","%st_i_msp","%st_i_p","%st_i_r","%st_i_s","%st_i_sp","%st_i_spb","%st_i_st","%st_matrix","%st_n_c","%st_n_l","%st_n_mc","%st_n_p","%st_n_s","%st_o_c","%st_o_l","%st_o_mc","%st_o_p","%st_o_s","%st_o_tl","%st_p","%st_size","%st_string","%st_t","%ticks_i_h","%xls_e","%xls_p","%xlssheet_e","%xlssheet_p","%xlssheet_size","%xlssheet_string","DominationRank","G_make","IsAScalar","NDcost","OS_Version","PlotSparse","ReadHBSparse","ReadmiMatrix","TCL_CreateSlave","WritemiMatrix","abcd","abinv","accept_func_default","accept_func_vfsa","acf","acosd","acosh","acoshm","acosm","acot","acotd","acoth","acsc","acscd","acsch","add_demo","add_help_chapter","add_module_help_chapter","add_param","add_profiling","adj2sp","aff2ab","ana_style","analpf","analyze","aplat","apropos","arhnk","arl2","arma2p","armac","armax","armax1","arobasestring2strings","arsimul","ascii2string","asciimat","asec","asecd","asech","asind","asinh","asinhm","asinm","assert_checkalmostequal","assert_checkequal","assert_checkerror","assert_checkfalse","assert_checkfilesequal","assert_checktrue","assert_comparecomplex","assert_computedigits","assert_cond2reltol","assert_cond2reqdigits","assert_generror","atand","atanh","atanhm","atanm","atomsAutoload","atomsAutoloadAdd","atomsAutoloadDel","atomsAutoloadList","atomsCategoryList","atomsCheckModule","atomsDepTreeShow","atomsGetConfig","atomsGetInstalled","atomsGetLoaded","atomsGetLoadedPath","atomsInstall","atomsIsInstalled","atomsIsLoaded","atomsList","atomsLoad","atomsRemove","atomsRepositoryAdd","atomsRepositoryDel","atomsRepositoryList","atomsRestoreConfig","atomsSaveConfig","atomsSearch","atomsSetConfig","atomsShow","atomsSystemInit","atomsSystemUpdate","atomsTest","atomsUpdate","atomsVersion","augment","auread","auwrite","balreal","bench_run","bilin","bilt","bin2dec","binomial","bitand","bitcmp","bitget","bitor","bitset","bitxor","black","blanks","bloc2exp","bloc2ss","block_parameter_error","bode","bstap","buttmag","bvodeS","bytecode","bytecodewalk","cainv","calendar","calfrq","canon","casc","cat","cat_code","cb_m2sci_gui","ccontrg","cell","cell2mat","cellstr","center","cepstrum","cfspec","char","chart","cheb1mag","cheb2mag","check_gateways","check_help","check_modules_xml","check_versions","chepol","chfact","chsolve","classmarkov","clean_help","clock","cls2dls","cmb_lin","cmndred","cmoment","coding_ga_binary","coding_ga_identity","coff","coffg","colcomp","colcompr","colinout","colregul","companion","complex","compute_initial_temp","cond","cond2sp","condestsp","config","configure_msifort","configure_msvc","cont_frm","cont_mat","contrss","conv","convert_to_float","convertindex","convol","convol2d","copfac","correl","cosd","cosh","coshm","cosm","cotd","cotg","coth","cothm","covar","createfun","createstruct","crossover_ga_binary","crossover_ga_default","csc","cscd","csch","csgn","csim","cspect","ctr_gram","czt","dae","daeoptions","damp","datafit","date","datenum","datevec","dbphi","dcf","ddp","dec2bin","dec2hex","dec2oct","del_help_chapter","del_module_help_chapter","demo_begin","demo_choose","demo_compiler","demo_end","demo_file_choice","demo_folder_choice","demo_function_choice","demo_gui","demo_mdialog","demo_messag
e","demo_run","demo_viewCode","denom","derivat","derivative","des2ss","des2tf","detectmsifort64tools","detectmsvc64tools","determ","detr","detrend","devtools_run_builder","dft","dhnorm","diff","diophant","dir","dirname","dispfiles","dllinfo","dscr","dsimul","dt_ility","dtsi","edit","edit_error","eigenmarkov","ell1mag","enlarge_shape","entropy","eomday","epred","eqfir","eqiir","equil","equil1","erf","erfc","erfcx","erfinv","etime","eval","evans","evstr","expression2code","extract_help_examples","factor","factorial","factors","faurre","ffilt","fft2","fftshift","fieldnames","filt_sinc","filter","findABCD","findAC","findBDK","findR","find_freq","find_links","find_scicos_version","findm","findmsifortcompiler","findmsvccompiler","findx0BD","firstnonsingleton","fit_dat","fix","fixedpointgcd","flipdim","flts","fminsearch","format_txt","fourplan","fprintf","frep2tf","freson","frfit","frmag","fscanf","fseek_origin","fsfirlin","fspec","fspecg","fstabst","ftest","ftuneq","fullfile","fullrf","fullrfk","fun2string","g_margin","gainplot","gamitg","gcare","gcd","gencompilationflags_unix","generateBlockImage","generateBlockImages","generic_i_ce","generic_i_h","generic_i_hm","generic_i_s","generic_i_st","genlib","genlib_old","genmarkov","geomean","getDiagramVersion","getModelicaPath","get_file_path","get_function_path","get_param","get_profile","get_scicos_version","getd","getscilabkeywords","getshell","gettklib","gfare","gfrancis","givens","glever","gmres","group","gschur","gspec","gtild","h2norm","h_cl","h_inf","h_inf_st","h_norm","hallchart","halt","hank","hankelsv","harmean","haveacompiler","head_comments","help","help_from_sci","help_skeleton","hermit","hex2dec","hilb","hilbert","horner","householder","hrmt","htrianr","hypermat","ifft","iir","iirgroup","iirlp","iirmod","ilib_build","ilib_compile","ilib_for_link","ilib_gen_Make","ilib_gen_Make_unix","ilib_gen_cleaner","ilib_gen_gateway","ilib_gen_loader","ilib_include_flag","ilib_mex_build","im_inv","importScicosDiagram","importScicosPal","importXcosDiagram","imrep2ss","ind2sub","inistate","init_ga_default","init_param","initial_scicos_tables","input","instruction2code","intc","intdec","integrate","interp1","interpln","intersect","intl","intsplin","inttrap","inv_coeff","invr","invrs","invsyslin","iqr","isLeapYear","is_absolute_path","is_param","iscell","iscellstr","isempty","isfield","isinf","isnan","isnum","issparse","isstruct","isvector","jmat","justify","kalm","karmarkar","kernel","kpure","krac2","kroneck","lattn","launchtest","lcf","lcm","lcmdiag","leastsq","leqe","leqr","lev","levin","lex_sort","lft","lin","lin2mu","lincos","lindquist","linf","linfn","linsolve","linspace","list2vec","list_param","listfiles","listfunctions","listvarinfile","lmisolver","lmitool","loadXcosLibs","loadmatfile","loadwave","log10","log2","logm","logspace","lqe","lqg","lqg2stan","lqg_ltr","lqr","ls","lyap","m2sci_gui","m_circle","macglov","macrovar","mad","makecell","manedit","mapsound","markp2ss","matfile2sci","mdelete","mean","meanf","median","mese","meshgrid","mfft","mfile2sci","minreal","minss","mkdir","modulo","moment","mrfit","msd","mstr2sci","mtlb","mtlb_0","mtlb_a","mtlb_all","mtlb_any","mtlb_axes","mtlb_axis","mtlb_beta","mtlb_box","mtlb_choices","mtlb_close","mtlb_colordef","mtlb_cond","mtlb_conv","mtlb_cov","mtlb_cumprod","mtlb_cumsum","mtlb_dec2hex","mtlb_delete","mtlb_diag","mtlb_diff","mtlb_dir","mtlb_double","mtlb_e","mtlb_echo","mtlb_error","mtlb_eval","mtlb_exist","mtlb_eye","mtlb_false","mtlb_fft","mtlb_fftshift","mtlb_filter","mtlb_find","mtlb_findstr","m
tlb_fliplr","mtlb_fopen","mtlb_format","mtlb_fprintf","mtlb_fread","mtlb_fscanf","mtlb_full","mtlb_fwrite","mtlb_get","mtlb_grid","mtlb_hold","mtlb_i","mtlb_ifft","mtlb_image","mtlb_imp","mtlb_int16","mtlb_int32","mtlb_int8","mtlb_is","mtlb_isa","mtlb_isfield","mtlb_isletter","mtlb_isspace","mtlb_l","mtlb_legendre","mtlb_linspace","mtlb_logic","mtlb_logical","mtlb_loglog","mtlb_lower","mtlb_max","mtlb_mean","mtlb_median","mtlb_mesh","mtlb_meshdom","mtlb_min","mtlb_more","mtlb_num2str","mtlb_ones","mtlb_pcolor","mtlb_plot","mtlb_prod","mtlb_qr","mtlb_qz","mtlb_rand","mtlb_randn","mtlb_rcond","mtlb_realmax","mtlb_realmin","mtlb_repmat","mtlb_s","mtlb_semilogx","mtlb_semilogy","mtlb_setstr","mtlb_size","mtlb_sort","mtlb_sortrows","mtlb_sprintf","mtlb_sscanf","mtlb_std","mtlb_strcmp","mtlb_strcmpi","mtlb_strfind","mtlb_strrep","mtlb_subplot","mtlb_sum","mtlb_t","mtlb_toeplitz","mtlb_tril","mtlb_triu","mtlb_true","mtlb_type","mtlb_uint16","mtlb_uint32","mtlb_uint8","mtlb_upper","mtlb_var","mtlb_zeros","mu2lin","mutation_ga_binary","mutation_ga_default","mvcorrel","mvvacov","nancumsum","nand2mean","nanmax","nanmean","nanmeanf","nanmedian","nanmin","nanstdev","nansum","narsimul","ndgrid","ndims","nehari","neigh_func_csa","neigh_func_default","neigh_func_fsa","neigh_func_vfsa","neldermead_cget","neldermead_configure","neldermead_costf","neldermead_defaultoutput","neldermead_destroy","neldermead_display","neldermead_function","neldermead_get","neldermead_log","neldermead_new","neldermead_restart","neldermead_search","neldermead_updatesimp","nextpow2","nfreq","nicholschart","nlev","nmplot_cget","nmplot_configure","nmplot_contour","nmplot_destroy","nmplot_display","nmplot_function","nmplot_get","nmplot_historyplot","nmplot_log","nmplot_new","nmplot_outputcmd","nmplot_restart","nmplot_search","nmplot_simplexhistory","noisegen","nonreg_test_run","norm","now","null","num2cell","numdiff","numer","nyquist","nyquistfrequencybounds","obs_gram","obscont","observer","obsv_mat","obsvss","oct2dec","odeoptions","optim_ga","optim_moga","optim_nsga","optim_nsga2","optim_sa","optimbase_cget","optimbase_checkbounds","optimbase_checkcostfun","optimbase_checkx0","optimbase_configure","optimbase_destroy","optimbase_display","optimbase_function","optimbase_get","optimbase_hasbounds","optimbase_hasconstraints","optimbase_hasnlcons","optimbase_histget","optimbase_histset","optimbase_incriter","optimbase_isfeasible","optimbase_isinbounds","optimbase_isinnonlincons","optimbase_log","optimbase_logshutdown","optimbase_logstartup","optimbase_new","optimbase_outputcmd","optimbase_outstruct","optimbase_proj2bnds","optimbase_set","optimbase_stoplog","optimbase_terminate","optimget","optimplotfunccount","optimplotfval","optimplotx","optimset","optimsimplex_center","optimsimplex_check","optimsimplex_compsomefv","optimsimplex_computefv","optimsimplex_deltafv","optimsimplex_deltafvmax","optimsimplex_destroy","optimsimplex_dirmat","optimsimplex_fvmean","optimsimplex_fvstdev","optimsimplex_fvvariance","optimsimplex_getall","optimsimplex_getallfv","optimsimplex_getallx","optimsimplex_getfv","optimsimplex_getn","optimsimplex_getnbve","optimsimplex_getve","optimsimplex_getx","optimsimplex_gradientfv","optimsimplex_log","optimsimplex_new","optimsimplex_print","optimsimplex_reflect","optimsimplex_setall","optimsimplex_setallfv","optimsimplex_setallx","optimsimplex_setfv","optimsimplex_setn","optimsimplex_setnbve","optimsimplex_setve","optimsimplex_setx","optimsimplex_shrink","optimsimplex_size","optimsimplex_sort","optimsimplex_tostring","op
timsimplex_xbar","orth","p_margin","pack","pareto_filter","parrot","pbig","pca","pcg","pdiv","pen2ea","pencan","pencost","penlaur","perctl","perl","perms","permute","pertrans","pfactors","pfss","phasemag","phaseplot","phc","pinv","playsnd","plotprofile","plzr","pmodulo","pol2des","pol2str","polar","polfact","prbs_a","prettyprint","primes","princomp","profile","proj","projsl","projspec","psmall","pspect","qmr","qpsolve","quart","quaskro","rafiter","randpencil","range","rank","read_csv","readxls","recompilefunction","recons","reglin","regress","remezb","remove_param","remove_profiling","repfreq","replace_Ix_by_Fx","repmat","reset_profiling","resize_matrix","returntoscilab","rhs2code","ric_desc","riccati","rmdir","routh_t","rowcomp","rowcompr","rowinout","rowregul","rowshuff","rref","sample","samplef","samwr","savematfile","savewave","scanf","sci2exp","sciGUI_init","sci_sparse","scicos_getvalue","scicos_simulate","scicos_workspace_init","scisptdemo","scitest","sdiff","sec","secd","sech","selection_ga_elitist","selection_ga_random","sensi","set_param","setdiff","sgrid","show_margins","show_pca","showprofile","signm","sinc","sincd","sind","sinh","sinhm","sinm","sm2des","sm2ss","smga","smooth","solve","sound","soundsec","sp2adj","spaninter","spanplus","spantwo","specfact","speye","sprand","spzeros","sqroot","sqrtm","squarewave","squeeze","srfaur","srkf","ss2des","ss2ss","ss2tf","sscanf","sskf","ssprint","ssrand","st_deviation","st_i_generic","st_ility","stabil","statgain","stdev","stdevf","steadycos","strange","strcmpi","struct","sub2ind","sva","svplot","sylm","sylv","sysconv","sysdiag","sysfact","syslin","syssize","system","systmat","tabul","tand","tanh","tanhm","tanm","tbx_build_blocks","tbx_build_cleaner","tbx_build_gateway","tbx_build_gateway_clean","tbx_build_gateway_loader","tbx_build_help","tbx_build_help_loader","tbx_build_loader","tbx_build_macros","tbx_build_src","tbx_builder","tbx_builder_gateway","tbx_builder_gateway_lang","tbx_builder_help","tbx_builder_help_lang","tbx_builder_macros","tbx_builder_src","tbx_builder_src_lang","temp_law_csa","temp_law_default","temp_law_fsa","temp_law_huang","temp_law_vfsa","test_clean","test_on_columns","test_run","test_run_level","testexamples","tf2des","tf2ss","thrownan","tic","time_id","toc","toeplitz","tokenpos","toolboxes","trace","trans","translatepaths","tree2code","trfmod","trianfml","trimmean","trisolve","trzeros","typeof","ui_observer","union","unique","unit_test_run","unix_g","unix_s","unix_w","unix_x","unobs","unpack","variance","variancef","vec2list","vectorfind","ver","warnobsolete","wavread","wavwrite","wcenter","weekday","wfir","wfir_gui","whereami","who_user","whos","wiener","wigner","winclose","window","winlist","with_javasci","with_macros_source","with_modelica_compiler","with_pvm","with_texmacs","with_tk","write_csv","xcosBlockEval","xcosBlockInterface","xcosCodeGeneration","xcosConfigureModelica","xcosPal","xcosPalAdd","xcosPalAddBlock","xcosPalExport","xcosShowBlockWarning","xcosValidateBlockSet","xcosValidateCompareBlock","xcos_compile","xcos_run","xcos_simulate","xcos_workspace_init","xmltochm","xmltoformat","xmltohtml","xmltojar","xmltopdf","xmltops","xmltoweb","yulewalk","zeropen","zgrid","zpbutt","zpch1","zpch2","zpell"]
-
-builtin_consts = ["\\$","%F","%T","%e","%eps","%f","%fftw","%gui","%i","%inf","%io","%modalWarning","%nan","%pi","%s","%t","%tk","%toolboxes","%toolboxes_dir","%z","PWD","SCI","SCIHOME","TMPDIR","a","ans","assertlib","atomslib","cacsdlib","compatibility_functilib","corelib","data_structureslib","demo_toolslib","development_toolslib","differential_equationlib","dynamic_linklib","elementary_functionslib","fd","fileiolib","functionslib","genetic_algorithmslib","helptoolslib","home","i","integerlib","interpolationlib","iolib","j","linear_algebralib","m2scilib","matiolib","modules_managerlib","myStr","neldermeadlib","optimbaselib","optimizationlib","optimsimplexlib","output_streamlib","overloadinglib","parameterslib","polynomialslib","scicos_autolib","scicos_utilslib","scinoteslib","signal_processinglib","simulated_annealinglib","soundlib","sparselib","special_functionslib","spreadsheetlib","statisticslib","stringlib","tclscilib","timelib","umfpacklib","varType","xcoslib"]
+# Autogenerated: these keyword lists are rebuilt by the __main__ block
+# below, which queries a local Scilab installation for its completions.
+
+commands_kw = (
+ 'abort',
+ 'apropos',
+ 'break',
+ 'case',
+ 'catch',
+ 'continue',
+ 'do',
+ 'else',
+ 'elseif',
+ 'end',
+ 'endfunction',
+ 'for',
+ 'function',
+ 'help',
+ 'if',
+ 'pause',
+ 'quit',
+ 'select',
+ 'then',
+ 'try',
+ 'while',
+)
+
+functions_kw = (
+ '!!_invoke_',
+ '%H5Object_e',
+ '%H5Object_fieldnames',
+ '%H5Object_p',
+ '%XMLAttr_6',
+ '%XMLAttr_e',
+ '%XMLAttr_i_XMLElem',
+ '%XMLAttr_length',
+ '%XMLAttr_p',
+ '%XMLAttr_size',
+ '%XMLDoc_6',
+ '%XMLDoc_e',
+ '%XMLDoc_i_XMLList',
+ '%XMLDoc_p',
+ '%XMLElem_6',
+ '%XMLElem_e',
+ '%XMLElem_i_XMLDoc',
+ '%XMLElem_i_XMLElem',
+ '%XMLElem_i_XMLList',
+ '%XMLElem_p',
+ '%XMLList_6',
+ '%XMLList_e',
+ '%XMLList_i_XMLElem',
+ '%XMLList_i_XMLList',
+ '%XMLList_length',
+ '%XMLList_p',
+ '%XMLList_size',
+ '%XMLNs_6',
+ '%XMLNs_e',
+ '%XMLNs_i_XMLElem',
+ '%XMLNs_p',
+ '%XMLSet_6',
+ '%XMLSet_e',
+ '%XMLSet_length',
+ '%XMLSet_p',
+ '%XMLSet_size',
+ '%XMLValid_p',
+ '%_EClass_6',
+ '%_EClass_e',
+ '%_EClass_p',
+ '%_EObj_0',
+ '%_EObj_1__EObj',
+ '%_EObj_1_b',
+ '%_EObj_1_c',
+ '%_EObj_1_i',
+ '%_EObj_1_s',
+ '%_EObj_2__EObj',
+ '%_EObj_2_b',
+ '%_EObj_2_c',
+ '%_EObj_2_i',
+ '%_EObj_2_s',
+ '%_EObj_3__EObj',
+ '%_EObj_3_b',
+ '%_EObj_3_c',
+ '%_EObj_3_i',
+ '%_EObj_3_s',
+ '%_EObj_4__EObj',
+ '%_EObj_4_b',
+ '%_EObj_4_c',
+ '%_EObj_4_i',
+ '%_EObj_4_s',
+ '%_EObj_5',
+ '%_EObj_6',
+ '%_EObj_a__EObj',
+ '%_EObj_a_b',
+ '%_EObj_a_c',
+ '%_EObj_a_i',
+ '%_EObj_a_s',
+ '%_EObj_d__EObj',
+ '%_EObj_d_b',
+ '%_EObj_d_c',
+ '%_EObj_d_i',
+ '%_EObj_d_s',
+ '%_EObj_disp',
+ '%_EObj_e',
+ '%_EObj_g__EObj',
+ '%_EObj_g_b',
+ '%_EObj_g_c',
+ '%_EObj_g_i',
+ '%_EObj_g_s',
+ '%_EObj_h__EObj',
+ '%_EObj_h_b',
+ '%_EObj_h_c',
+ '%_EObj_h_i',
+ '%_EObj_h_s',
+ '%_EObj_i__EObj',
+ '%_EObj_j__EObj',
+ '%_EObj_j_b',
+ '%_EObj_j_c',
+ '%_EObj_j_i',
+ '%_EObj_j_s',
+ '%_EObj_k__EObj',
+ '%_EObj_k_b',
+ '%_EObj_k_c',
+ '%_EObj_k_i',
+ '%_EObj_k_s',
+ '%_EObj_l__EObj',
+ '%_EObj_l_b',
+ '%_EObj_l_c',
+ '%_EObj_l_i',
+ '%_EObj_l_s',
+ '%_EObj_m__EObj',
+ '%_EObj_m_b',
+ '%_EObj_m_c',
+ '%_EObj_m_i',
+ '%_EObj_m_s',
+ '%_EObj_n__EObj',
+ '%_EObj_n_b',
+ '%_EObj_n_c',
+ '%_EObj_n_i',
+ '%_EObj_n_s',
+ '%_EObj_o__EObj',
+ '%_EObj_o_b',
+ '%_EObj_o_c',
+ '%_EObj_o_i',
+ '%_EObj_o_s',
+ '%_EObj_p',
+ '%_EObj_p__EObj',
+ '%_EObj_p_b',
+ '%_EObj_p_c',
+ '%_EObj_p_i',
+ '%_EObj_p_s',
+ '%_EObj_q__EObj',
+ '%_EObj_q_b',
+ '%_EObj_q_c',
+ '%_EObj_q_i',
+ '%_EObj_q_s',
+ '%_EObj_r__EObj',
+ '%_EObj_r_b',
+ '%_EObj_r_c',
+ '%_EObj_r_i',
+ '%_EObj_r_s',
+ '%_EObj_s__EObj',
+ '%_EObj_s_b',
+ '%_EObj_s_c',
+ '%_EObj_s_i',
+ '%_EObj_s_s',
+ '%_EObj_t',
+ '%_EObj_x__EObj',
+ '%_EObj_x_b',
+ '%_EObj_x_c',
+ '%_EObj_x_i',
+ '%_EObj_x_s',
+ '%_EObj_y__EObj',
+ '%_EObj_y_b',
+ '%_EObj_y_c',
+ '%_EObj_y_i',
+ '%_EObj_y_s',
+ '%_EObj_z__EObj',
+ '%_EObj_z_b',
+ '%_EObj_z_c',
+ '%_EObj_z_i',
+ '%_EObj_z_s',
+ '%_eigs',
+ '%_load',
+ '%b_1__EObj',
+ '%b_2__EObj',
+ '%b_3__EObj',
+ '%b_4__EObj',
+ '%b_a__EObj',
+ '%b_d__EObj',
+ '%b_g__EObj',
+ '%b_h__EObj',
+ '%b_i_XMLList',
+ '%b_i__EObj',
+ '%b_j__EObj',
+ '%b_k__EObj',
+ '%b_l__EObj',
+ '%b_m__EObj',
+ '%b_n__EObj',
+ '%b_o__EObj',
+ '%b_p__EObj',
+ '%b_q__EObj',
+ '%b_r__EObj',
+ '%b_s__EObj',
+ '%b_x__EObj',
+ '%b_y__EObj',
+ '%b_z__EObj',
+ '%c_1__EObj',
+ '%c_2__EObj',
+ '%c_3__EObj',
+ '%c_4__EObj',
+ '%c_a__EObj',
+ '%c_d__EObj',
+ '%c_g__EObj',
+ '%c_h__EObj',
+ '%c_i_XMLAttr',
+ '%c_i_XMLDoc',
+ '%c_i_XMLElem',
+ '%c_i_XMLList',
+ '%c_i__EObj',
+ '%c_j__EObj',
+ '%c_k__EObj',
+ '%c_l__EObj',
+ '%c_m__EObj',
+ '%c_n__EObj',
+ '%c_o__EObj',
+ '%c_p__EObj',
+ '%c_q__EObj',
+ '%c_r__EObj',
+ '%c_s__EObj',
+ '%c_x__EObj',
+ '%c_y__EObj',
+ '%c_z__EObj',
+ '%ce_i_XMLList',
+ '%fptr_i_XMLList',
+ '%h_i_XMLList',
+ '%hm_i_XMLList',
+ '%i_1__EObj',
+ '%i_2__EObj',
+ '%i_3__EObj',
+ '%i_4__EObj',
+ '%i_a__EObj',
+ '%i_abs',
+ '%i_cumprod',
+ '%i_cumsum',
+ '%i_d__EObj',
+ '%i_diag',
+ '%i_g__EObj',
+ '%i_h__EObj',
+ '%i_i_XMLList',
+ '%i_i__EObj',
+ '%i_j__EObj',
+ '%i_k__EObj',
+ '%i_l__EObj',
+ '%i_m__EObj',
+ '%i_matrix',
+ '%i_max',
+ '%i_maxi',
+ '%i_min',
+ '%i_mini',
+ '%i_mput',
+ '%i_n__EObj',
+ '%i_o__EObj',
+ '%i_p',
+ '%i_p__EObj',
+ '%i_prod',
+ '%i_q__EObj',
+ '%i_r__EObj',
+ '%i_s__EObj',
+ '%i_sum',
+ '%i_tril',
+ '%i_triu',
+ '%i_x__EObj',
+ '%i_y__EObj',
+ '%i_z__EObj',
+ '%ip_i_XMLList',
+ '%l_i_XMLList',
+ '%l_i__EObj',
+ '%lss_i_XMLList',
+ '%mc_i_XMLList',
+ '%msp_full',
+ '%msp_i_XMLList',
+ '%msp_spget',
+ '%p_i_XMLList',
+ '%ptr_i_XMLList',
+ '%r_i_XMLList',
+ '%s_1__EObj',
+ '%s_2__EObj',
+ '%s_3__EObj',
+ '%s_4__EObj',
+ '%s_a__EObj',
+ '%s_d__EObj',
+ '%s_g__EObj',
+ '%s_h__EObj',
+ '%s_i_XMLList',
+ '%s_i__EObj',
+ '%s_j__EObj',
+ '%s_k__EObj',
+ '%s_l__EObj',
+ '%s_m__EObj',
+ '%s_n__EObj',
+ '%s_o__EObj',
+ '%s_p__EObj',
+ '%s_q__EObj',
+ '%s_r__EObj',
+ '%s_s__EObj',
+ '%s_x__EObj',
+ '%s_y__EObj',
+ '%s_z__EObj',
+ '%sp_i_XMLList',
+ '%spb_i_XMLList',
+ '%st_i_XMLList',
+ 'Calendar',
+ 'ClipBoard',
+ 'Matplot',
+ 'Matplot1',
+ 'PlaySound',
+ 'TCL_DeleteInterp',
+ 'TCL_DoOneEvent',
+ 'TCL_EvalFile',
+ 'TCL_EvalStr',
+ 'TCL_ExistArray',
+ 'TCL_ExistInterp',
+ 'TCL_ExistVar',
+ 'TCL_GetVar',
+ 'TCL_GetVersion',
+ 'TCL_SetVar',
+ 'TCL_UnsetVar',
+ 'TCL_UpVar',
+ '_',
+ '_code2str',
+ '_d',
+ '_str2code',
+ 'about',
+ 'abs',
+ 'acos',
+ 'addModulePreferences',
+ 'addcolor',
+ 'addf',
+ 'addhistory',
+ 'addinter',
+ 'addlocalizationdomain',
+ 'amell',
+ 'and',
+ 'argn',
+ 'arl2_ius',
+ 'ascii',
+ 'asin',
+ 'atan',
+ 'backslash',
+ 'balanc',
+ 'banner',
+ 'base2dec',
+ 'basename',
+ 'bdiag',
+ 'beep',
+ 'besselh',
+ 'besseli',
+ 'besselj',
+ 'besselk',
+ 'bessely',
+ 'beta',
+ 'bezout',
+ 'bfinit',
+ 'blkfc1i',
+ 'blkslvi',
+ 'bool2s',
+ 'browsehistory',
+ 'browsevar',
+ 'bsplin3val',
+ 'buildDoc',
+ 'buildouttb',
+ 'bvode',
+ 'c_link',
+ 'call',
+ 'callblk',
+ 'captions',
+ 'cd',
+ 'cdfbet',
+ 'cdfbin',
+ 'cdfchi',
+ 'cdfchn',
+ 'cdff',
+ 'cdffnc',
+ 'cdfgam',
+ 'cdfnbn',
+ 'cdfnor',
+ 'cdfpoi',
+ 'cdft',
+ 'ceil',
+ 'champ',
+ 'champ1',
+ 'chdir',
+ 'chol',
+ 'clc',
+ 'clean',
+ 'clear',
+ 'clearfun',
+ 'clearglobal',
+ 'closeEditor',
+ 'closeEditvar',
+ 'closeXcos',
+ 'code2str',
+ 'coeff',
+ 'color',
+ 'comp',
+ 'completion',
+ 'conj',
+ 'contour2di',
+ 'contr',
+ 'conv2',
+ 'convstr',
+ 'copy',
+ 'copyfile',
+ 'corr',
+ 'cos',
+ 'coserror',
+ 'createdir',
+ 'cshep2d',
+ 'csvDefault',
+ 'csvIsnum',
+ 'csvRead',
+ 'csvStringToDouble',
+ 'csvTextScan',
+ 'csvWrite',
+ 'ctree2',
+ 'ctree3',
+ 'ctree4',
+ 'cumprod',
+ 'cumsum',
+ 'curblock',
+ 'curblockc',
+ 'daskr',
+ 'dasrt',
+ 'dassl',
+ 'data2sig',
+ 'datatipCreate',
+ 'datatipManagerMode',
+ 'datatipMove',
+ 'datatipRemove',
+ 'datatipSetDisplay',
+ 'datatipSetInterp',
+ 'datatipSetOrientation',
+ 'datatipSetStyle',
+ 'datatipToggle',
+ 'dawson',
+ 'dct',
+ 'debug',
+ 'dec2base',
+ 'deff',
+ 'definedfields',
+ 'degree',
+ 'delbpt',
+ 'delete',
+ 'deletefile',
+ 'delip',
+ 'delmenu',
+ 'det',
+ 'dgettext',
+ 'dhinf',
+ 'diag',
+ 'diary',
+ 'diffobjs',
+ 'disp',
+ 'dispbpt',
+ 'displayhistory',
+ 'disposefftwlibrary',
+ 'dlgamma',
+ 'dnaupd',
+ 'dneupd',
+ 'double',
+ 'drawaxis',
+ 'drawlater',
+ 'drawnow',
+ 'driver',
+ 'dsaupd',
+ 'dsearch',
+ 'dseupd',
+ 'dst',
+ 'duplicate',
+ 'editvar',
+ 'emptystr',
+ 'end_scicosim',
+ 'ereduc',
+ 'erf',
+ 'erfc',
+ 'erfcx',
+ 'erfi',
+ 'errcatch',
+ 'errclear',
+ 'error',
+ 'eval_cshep2d',
+ 'exec',
+ 'execstr',
+ 'exists',
+ 'exit',
+ 'exp',
+ 'expm',
+ 'exportUI',
+ 'export_to_hdf5',
+ 'eye',
+ 'fadj2sp',
+ 'fec',
+ 'feval',
+ 'fft',
+ 'fftw',
+ 'fftw_flags',
+ 'fftw_forget_wisdom',
+ 'fftwlibraryisloaded',
+ 'figure',
+ 'file',
+ 'filebrowser',
+ 'fileext',
+ 'fileinfo',
+ 'fileparts',
+ 'filesep',
+ 'find',
+ 'findBD',
+ 'findfiles',
+ 'fire_closing_finished',
+ 'floor',
+ 'format',
+ 'fort',
+ 'fprintfMat',
+ 'freq',
+ 'frexp',
+ 'fromc',
+ 'fromjava',
+ 'fscanfMat',
+ 'fsolve',
+ 'fstair',
+ 'full',
+ 'fullpath',
+ 'funcprot',
+ 'funptr',
+ 'gamma',
+ 'gammaln',
+ 'geom3d',
+ 'get',
+ 'getURL',
+ 'get_absolute_file_path',
+ 'get_fftw_wisdom',
+ 'getblocklabel',
+ 'getcallbackobject',
+ 'getdate',
+ 'getdebuginfo',
+ 'getdefaultlanguage',
+ 'getdrives',
+ 'getdynlibext',
+ 'getenv',
+ 'getfield',
+ 'gethistory',
+ 'gethistoryfile',
+ 'getinstalledlookandfeels',
+ 'getio',
+ 'getlanguage',
+ 'getlongpathname',
+ 'getlookandfeel',
+ 'getmd5',
+ 'getmemory',
+ 'getmodules',
+ 'getos',
+ 'getpid',
+ 'getrelativefilename',
+ 'getscicosvars',
+ 'getscilabmode',
+ 'getshortpathname',
+ 'gettext',
+ 'getvariablesonstack',
+ 'getversion',
+ 'glist',
+ 'global',
+ 'glue',
+ 'grand',
+ 'graphicfunction',
+ 'grayplot',
+ 'grep',
+ 'gsort',
+ 'gstacksize',
+ 'h5attr',
+ 'h5close',
+ 'h5cp',
+ 'h5dataset',
+ 'h5dump',
+ 'h5exists',
+ 'h5flush',
+ 'h5get',
+ 'h5group',
+ 'h5isArray',
+ 'h5isAttr',
+ 'h5isCompound',
+ 'h5isFile',
+ 'h5isGroup',
+ 'h5isList',
+ 'h5isRef',
+ 'h5isSet',
+ 'h5isSpace',
+ 'h5isType',
+ 'h5isVlen',
+ 'h5label',
+ 'h5ln',
+ 'h5ls',
+ 'h5mount',
+ 'h5mv',
+ 'h5open',
+ 'h5read',
+ 'h5readattr',
+ 'h5rm',
+ 'h5umount',
+ 'h5write',
+ 'h5writeattr',
+ 'havewindow',
+ 'helpbrowser',
+ 'hess',
+ 'hinf',
+ 'historymanager',
+ 'historysize',
+ 'host',
+ 'htmlDump',
+ 'htmlRead',
+ 'htmlReadStr',
+ 'htmlWrite',
+ 'iconvert',
+ 'ieee',
+ 'ilib_verbose',
+ 'imag',
+ 'impl',
+ 'import_from_hdf5',
+ 'imult',
+ 'inpnvi',
+ 'int',
+ 'int16',
+ 'int2d',
+ 'int32',
+ 'int3d',
+ 'int8',
+ 'interp',
+ 'interp2d',
+ 'interp3d',
+ 'intg',
+ 'intppty',
+ 'inttype',
+ 'inv',
+ 'invoke_lu',
+ 'is_handle_valid',
+ 'is_hdf5_file',
+ 'isalphanum',
+ 'isascii',
+ 'isdef',
+ 'isdigit',
+ 'isdir',
+ 'isequal',
+ 'isequalbitwise',
+ 'iserror',
+ 'isfile',
+ 'isglobal',
+ 'isletter',
+ 'isnum',
+ 'isreal',
+ 'iswaitingforinput',
+ 'jallowClassReloading',
+ 'jarray',
+ 'jautoTranspose',
+ 'jautoUnwrap',
+ 'javaclasspath',
+ 'javalibrarypath',
+ 'jcast',
+ 'jcompile',
+ 'jconvMatrixMethod',
+ 'jcreatejar',
+ 'jdeff',
+ 'jdisableTrace',
+ 'jenableTrace',
+ 'jexists',
+ 'jgetclassname',
+ 'jgetfield',
+ 'jgetfields',
+ 'jgetinfo',
+ 'jgetmethods',
+ 'jimport',
+ 'jinvoke',
+ 'jinvoke_db',
+ 'jnewInstance',
+ 'jremove',
+ 'jsetfield',
+ 'junwrap',
+ 'junwraprem',
+ 'jwrap',
+ 'jwrapinfloat',
+ 'kron',
+ 'lasterror',
+ 'ldiv',
+ 'ldivf',
+ 'legendre',
+ 'length',
+ 'lib',
+ 'librarieslist',
+ 'libraryinfo',
+ 'light',
+ 'linear_interpn',
+ 'lines',
+ 'link',
+ 'linmeq',
+ 'list',
+ 'listvar_in_hdf5',
+ 'load',
+ 'loadGui',
+ 'loadScicos',
+ 'loadXcos',
+ 'loadfftwlibrary',
+ 'loadhistory',
+ 'log',
+ 'log1p',
+ 'lsq',
+ 'lsq_splin',
+ 'lsqrsolve',
+ 'lsslist',
+ 'lstcat',
+ 'lstsize',
+ 'ltitr',
+ 'lu',
+ 'ludel',
+ 'lufact',
+ 'luget',
+ 'lusolve',
+ 'macr2lst',
+ 'macr2tree',
+ 'matfile_close',
+ 'matfile_listvar',
+ 'matfile_open',
+ 'matfile_varreadnext',
+ 'matfile_varwrite',
+ 'matrix',
+ 'max',
+ 'maxfiles',
+ 'mclearerr',
+ 'mclose',
+ 'meof',
+ 'merror',
+ 'messagebox',
+ 'mfprintf',
+ 'mfscanf',
+ 'mget',
+ 'mgeti',
+ 'mgetl',
+ 'mgetstr',
+ 'min',
+ 'mlist',
+ 'mode',
+ 'model2blk',
+ 'mopen',
+ 'move',
+ 'movefile',
+ 'mprintf',
+ 'mput',
+ 'mputl',
+ 'mputstr',
+ 'mscanf',
+ 'mseek',
+ 'msprintf',
+ 'msscanf',
+ 'mtell',
+ 'mtlb_mode',
+ 'mtlb_sparse',
+ 'mucomp',
+ 'mulf',
+ 'name2rgb',
+ 'nearfloat',
+ 'newaxes',
+ 'newest',
+ 'newfun',
+ 'nnz',
+ 'norm',
+ 'notify',
+ 'number_properties',
+ 'ode',
+ 'odedc',
+ 'ones',
+ 'openged',
+ 'opentk',
+ 'optim',
+ 'or',
+ 'ordmmd',
+ 'parallel_concurrency',
+ 'parallel_run',
+ 'param3d',
+ 'param3d1',
+ 'part',
+ 'pathconvert',
+ 'pathsep',
+ 'phase_simulation',
+ 'plot2d',
+ 'plot2d1',
+ 'plot2d2',
+ 'plot2d3',
+ 'plot2d4',
+ 'plot3d',
+ 'plot3d1',
+ 'plotbrowser',
+ 'pointer_xproperty',
+ 'poly',
+ 'ppol',
+ 'pppdiv',
+ 'predef',
+ 'preferences',
+ 'print',
+ 'printf',
+ 'printfigure',
+ 'printsetupbox',
+ 'prod',
+ 'progressionbar',
+ 'prompt',
+ 'pwd',
+ 'qld',
+ 'qp_solve',
+ 'qr',
+ 'raise_window',
+ 'rand',
+ 'rankqr',
+ 'rat',
+ 'rcond',
+ 'rdivf',
+ 'read',
+ 'read4b',
+ 'read_csv',
+ 'readb',
+ 'readgateway',
+ 'readmps',
+ 'real',
+ 'realtime',
+ 'realtimeinit',
+ 'regexp',
+ 'relocate_handle',
+ 'remez',
+ 'removeModulePreferences',
+ 'removedir',
+ 'removelinehistory',
+ 'res_with_prec',
+ 'resethistory',
+ 'residu',
+ 'resume',
+ 'return',
+ 'ricc',
+ 'rlist',
+ 'roots',
+ 'rotate_axes',
+ 'round',
+ 'rpem',
+ 'rtitr',
+ 'rubberbox',
+ 'save',
+ 'saveGui',
+ 'saveafterncommands',
+ 'saveconsecutivecommands',
+ 'savehistory',
+ 'schur',
+ 'sci_haltscicos',
+ 'sci_tree2',
+ 'sci_tree3',
+ 'sci_tree4',
+ 'sciargs',
+ 'scicos_debug',
+ 'scicos_debug_count',
+ 'scicos_time',
+ 'scicosim',
+ 'scinotes',
+ 'sctree',
+ 'semidef',
+ 'set',
+ 'set_blockerror',
+ 'set_fftw_wisdom',
+ 'set_xproperty',
+ 'setbpt',
+ 'setdefaultlanguage',
+ 'setenv',
+ 'setfield',
+ 'sethistoryfile',
+ 'setlanguage',
+ 'setlookandfeel',
+ 'setmenu',
+ 'sfact',
+ 'sfinit',
+ 'show_window',
+ 'sident',
+ 'sig2data',
+ 'sign',
+ 'simp',
+ 'simp_mode',
+ 'sin',
+ 'size',
+ 'slash',
+ 'sleep',
+ 'sorder',
+ 'sparse',
+ 'spchol',
+ 'spcompack',
+ 'spec',
+ 'spget',
+ 'splin',
+ 'splin2d',
+ 'splin3d',
+ 'splitURL',
+ 'spones',
+ 'sprintf',
+ 'sqrt',
+ 'stacksize',
+ 'str2code',
+ 'strcat',
+ 'strchr',
+ 'strcmp',
+ 'strcspn',
+ 'strindex',
+ 'string',
+ 'stringbox',
+ 'stripblanks',
+ 'strncpy',
+ 'strrchr',
+ 'strrev',
+ 'strsplit',
+ 'strspn',
+ 'strstr',
+ 'strsubst',
+ 'strtod',
+ 'strtok',
+ 'subf',
+ 'sum',
+ 'svd',
+ 'swap_handles',
+ 'symfcti',
+ 'syredi',
+ 'system_getproperty',
+ 'system_setproperty',
+ 'ta2lpd',
+ 'tan',
+ 'taucs_chdel',
+ 'taucs_chfact',
+ 'taucs_chget',
+ 'taucs_chinfo',
+ 'taucs_chsolve',
+ 'tempname',
+ 'testmatrix',
+ 'timer',
+ 'tlist',
+ 'tohome',
+ 'tokens',
+ 'toolbar',
+ 'toprint',
+ 'tr_zer',
+ 'tril',
+ 'triu',
+ 'type',
+ 'typename',
+ 'uiDisplayTree',
+ 'uicontextmenu',
+ 'uicontrol',
+ 'uigetcolor',
+ 'uigetdir',
+ 'uigetfile',
+ 'uigetfont',
+ 'uimenu',
+ 'uint16',
+ 'uint32',
+ 'uint8',
+ 'uipopup',
+ 'uiputfile',
+ 'uiwait',
+ 'ulink',
+ 'umf_ludel',
+ 'umf_lufact',
+ 'umf_luget',
+ 'umf_luinfo',
+ 'umf_lusolve',
+ 'umfpack',
+ 'unglue',
+ 'unix',
+ 'unsetmenu',
+ 'unzoom',
+ 'updatebrowsevar',
+ 'usecanvas',
+ 'useeditor',
+ 'user',
+ 'var2vec',
+ 'varn',
+ 'vec2var',
+ 'waitbar',
+ 'warnBlockByUID',
+ 'warning',
+ 'what',
+ 'where',
+ 'whereis',
+ 'who',
+ 'winsid',
+ 'with_module',
+ 'writb',
+ 'write',
+ 'write4b',
+ 'write_csv',
+ 'x_choose',
+ 'x_choose_modeless',
+ 'x_dialog',
+ 'x_mdialog',
+ 'xarc',
+ 'xarcs',
+ 'xarrows',
+ 'xchange',
+ 'xchoicesi',
+ 'xclick',
+ 'xcos',
+ 'xcosAddToolsMenu',
+ 'xcosConfigureXmlFile',
+ 'xcosDiagramToScilab',
+ 'xcosPalCategoryAdd',
+ 'xcosPalDelete',
+ 'xcosPalDisable',
+ 'xcosPalEnable',
+ 'xcosPalGenerateIcon',
+ 'xcosPalGet',
+ 'xcosPalLoad',
+ 'xcosPalMove',
+ 'xcosSimulationStarted',
+ 'xcosUpdateBlock',
+ 'xdel',
+ 'xend',
+ 'xfarc',
+ 'xfarcs',
+ 'xfpoly',
+ 'xfpolys',
+ 'xfrect',
+ 'xget',
+ 'xgetmouse',
+ 'xgraduate',
+ 'xgrid',
+ 'xinit',
+ 'xlfont',
+ 'xls_open',
+ 'xls_read',
+ 'xmlAddNs',
+ 'xmlAppend',
+ 'xmlAsNumber',
+ 'xmlAsText',
+ 'xmlDTD',
+ 'xmlDelete',
+ 'xmlDocument',
+ 'xmlDump',
+ 'xmlElement',
+ 'xmlFormat',
+ 'xmlGetNsByHref',
+ 'xmlGetNsByPrefix',
+ 'xmlGetOpenDocs',
+ 'xmlIsValidObject',
+ 'xmlName',
+ 'xmlNs',
+ 'xmlRead',
+ 'xmlReadStr',
+ 'xmlRelaxNG',
+ 'xmlRemove',
+ 'xmlSchema',
+ 'xmlSetAttributes',
+ 'xmlValidate',
+ 'xmlWrite',
+ 'xmlXPath',
+ 'xname',
+ 'xpause',
+ 'xpoly',
+ 'xpolys',
+ 'xrect',
+ 'xrects',
+ 'xs2bmp',
+ 'xs2emf',
+ 'xs2eps',
+ 'xs2gif',
+ 'xs2jpg',
+ 'xs2pdf',
+ 'xs2png',
+ 'xs2ppm',
+ 'xs2ps',
+ 'xs2svg',
+ 'xsegs',
+ 'xset',
+ 'xstring',
+ 'xstringb',
+ 'xtitle',
+ 'zeros',
+ 'znaupd',
+ 'zneupd',
+ 'zoom_rect',
+)
+
+macros_kw = (
+ '!_deff_wrapper',
+ '%0_i_st',
+ '%3d_i_h',
+ '%Block_xcosUpdateBlock',
+ '%TNELDER_p',
+ '%TNELDER_string',
+ '%TNMPLOT_p',
+ '%TNMPLOT_string',
+ '%TOPTIM_p',
+ '%TOPTIM_string',
+ '%TSIMPLEX_p',
+ '%TSIMPLEX_string',
+ '%_EVoid_p',
+ '%_gsort',
+ '%_listvarinfile',
+ '%_rlist',
+ '%_save',
+ '%_sodload',
+ '%_strsplit',
+ '%_unwrap',
+ '%ar_p',
+ '%asn',
+ '%b_a_b',
+ '%b_a_s',
+ '%b_c_s',
+ '%b_c_spb',
+ '%b_cumprod',
+ '%b_cumsum',
+ '%b_d_s',
+ '%b_diag',
+ '%b_e',
+ '%b_f_s',
+ '%b_f_spb',
+ '%b_g_s',
+ '%b_g_spb',
+ '%b_grand',
+ '%b_h_s',
+ '%b_h_spb',
+ '%b_i_b',
+ '%b_i_ce',
+ '%b_i_h',
+ '%b_i_hm',
+ '%b_i_s',
+ '%b_i_sp',
+ '%b_i_spb',
+ '%b_i_st',
+ '%b_iconvert',
+ '%b_l_b',
+ '%b_l_s',
+ '%b_m_b',
+ '%b_m_s',
+ '%b_matrix',
+ '%b_n_hm',
+ '%b_o_hm',
+ '%b_p_s',
+ '%b_prod',
+ '%b_r_b',
+ '%b_r_s',
+ '%b_s_b',
+ '%b_s_s',
+ '%b_string',
+ '%b_sum',
+ '%b_tril',
+ '%b_triu',
+ '%b_x_b',
+ '%b_x_s',
+ '%bicg',
+ '%bicgstab',
+ '%c_a_c',
+ '%c_b_c',
+ '%c_b_s',
+ '%c_diag',
+ '%c_dsearch',
+ '%c_e',
+ '%c_eye',
+ '%c_f_s',
+ '%c_grand',
+ '%c_i_c',
+ '%c_i_ce',
+ '%c_i_h',
+ '%c_i_hm',
+ '%c_i_lss',
+ '%c_i_r',
+ '%c_i_s',
+ '%c_i_st',
+ '%c_matrix',
+ '%c_n_l',
+ '%c_n_st',
+ '%c_o_l',
+ '%c_o_st',
+ '%c_ones',
+ '%c_rand',
+ '%c_tril',
+ '%c_triu',
+ '%cblock_c_cblock',
+ '%cblock_c_s',
+ '%cblock_e',
+ '%cblock_f_cblock',
+ '%cblock_p',
+ '%cblock_size',
+ '%ce_6',
+ '%ce_c_ce',
+ '%ce_e',
+ '%ce_f_ce',
+ '%ce_i_ce',
+ '%ce_i_s',
+ '%ce_i_st',
+ '%ce_matrix',
+ '%ce_p',
+ '%ce_size',
+ '%ce_string',
+ '%ce_t',
+ '%cgs',
+ '%champdat_i_h',
+ '%choose',
+ '%diagram_xcos',
+ '%dir_p',
+ '%fptr_i_st',
+ '%grand_perm',
+ '%grayplot_i_h',
+ '%h_i_st',
+ '%hmS_k_hmS_generic',
+ '%hm_1_hm',
+ '%hm_1_s',
+ '%hm_2_hm',
+ '%hm_2_s',
+ '%hm_3_hm',
+ '%hm_3_s',
+ '%hm_4_hm',
+ '%hm_4_s',
+ '%hm_5',
+ '%hm_a_hm',
+ '%hm_a_r',
+ '%hm_a_s',
+ '%hm_abs',
+ '%hm_and',
+ '%hm_bool2s',
+ '%hm_c_hm',
+ '%hm_ceil',
+ '%hm_conj',
+ '%hm_cos',
+ '%hm_cumprod',
+ '%hm_cumsum',
+ '%hm_d_hm',
+ '%hm_d_s',
+ '%hm_degree',
+ '%hm_dsearch',
+ '%hm_e',
+ '%hm_exp',
+ '%hm_eye',
+ '%hm_f_hm',
+ '%hm_find',
+ '%hm_floor',
+ '%hm_g_hm',
+ '%hm_grand',
+ '%hm_gsort',
+ '%hm_h_hm',
+ '%hm_i_b',
+ '%hm_i_ce',
+ '%hm_i_h',
+ '%hm_i_hm',
+ '%hm_i_i',
+ '%hm_i_p',
+ '%hm_i_r',
+ '%hm_i_s',
+ '%hm_i_st',
+ '%hm_iconvert',
+ '%hm_imag',
+ '%hm_int',
+ '%hm_isnan',
+ '%hm_isreal',
+ '%hm_j_hm',
+ '%hm_j_s',
+ '%hm_k_hm',
+ '%hm_k_s',
+ '%hm_log',
+ '%hm_m_p',
+ '%hm_m_r',
+ '%hm_m_s',
+ '%hm_matrix',
+ '%hm_max',
+ '%hm_mean',
+ '%hm_median',
+ '%hm_min',
+ '%hm_n_b',
+ '%hm_n_c',
+ '%hm_n_hm',
+ '%hm_n_i',
+ '%hm_n_p',
+ '%hm_n_s',
+ '%hm_o_b',
+ '%hm_o_c',
+ '%hm_o_hm',
+ '%hm_o_i',
+ '%hm_o_p',
+ '%hm_o_s',
+ '%hm_ones',
+ '%hm_or',
+ '%hm_p',
+ '%hm_prod',
+ '%hm_q_hm',
+ '%hm_r_s',
+ '%hm_rand',
+ '%hm_real',
+ '%hm_round',
+ '%hm_s',
+ '%hm_s_hm',
+ '%hm_s_r',
+ '%hm_s_s',
+ '%hm_sign',
+ '%hm_sin',
+ '%hm_size',
+ '%hm_sqrt',
+ '%hm_stdev',
+ '%hm_string',
+ '%hm_sum',
+ '%hm_x_hm',
+ '%hm_x_p',
+ '%hm_x_s',
+ '%hm_zeros',
+ '%i_1_s',
+ '%i_2_s',
+ '%i_3_s',
+ '%i_4_s',
+ '%i_Matplot',
+ '%i_a_i',
+ '%i_a_s',
+ '%i_and',
+ '%i_ascii',
+ '%i_b_s',
+ '%i_bezout',
+ '%i_champ',
+ '%i_champ1',
+ '%i_contour',
+ '%i_contour2d',
+ '%i_d_i',
+ '%i_d_s',
+ '%i_dsearch',
+ '%i_e',
+ '%i_fft',
+ '%i_g_i',
+ '%i_gcd',
+ '%i_grand',
+ '%i_h_i',
+ '%i_i_ce',
+ '%i_i_h',
+ '%i_i_hm',
+ '%i_i_i',
+ '%i_i_s',
+ '%i_i_st',
+ '%i_j_i',
+ '%i_j_s',
+ '%i_l_s',
+ '%i_lcm',
+ '%i_length',
+ '%i_m_i',
+ '%i_m_s',
+ '%i_mfprintf',
+ '%i_mprintf',
+ '%i_msprintf',
+ '%i_n_s',
+ '%i_o_s',
+ '%i_or',
+ '%i_p_i',
+ '%i_p_s',
+ '%i_plot2d',
+ '%i_plot2d1',
+ '%i_plot2d2',
+ '%i_q_s',
+ '%i_r_i',
+ '%i_r_s',
+ '%i_round',
+ '%i_s_i',
+ '%i_s_s',
+ '%i_sign',
+ '%i_string',
+ '%i_x_i',
+ '%i_x_s',
+ '%ip_a_s',
+ '%ip_i_st',
+ '%ip_m_s',
+ '%ip_n_ip',
+ '%ip_o_ip',
+ '%ip_p',
+ '%ip_part',
+ '%ip_s_s',
+ '%ip_string',
+ '%k',
+ '%l_i_h',
+ '%l_i_s',
+ '%l_i_st',
+ '%l_isequal',
+ '%l_n_c',
+ '%l_n_l',
+ '%l_n_m',
+ '%l_n_p',
+ '%l_n_s',
+ '%l_n_st',
+ '%l_o_c',
+ '%l_o_l',
+ '%l_o_m',
+ '%l_o_p',
+ '%l_o_s',
+ '%l_o_st',
+ '%lss_a_lss',
+ '%lss_a_p',
+ '%lss_a_r',
+ '%lss_a_s',
+ '%lss_c_lss',
+ '%lss_c_p',
+ '%lss_c_r',
+ '%lss_c_s',
+ '%lss_e',
+ '%lss_eye',
+ '%lss_f_lss',
+ '%lss_f_p',
+ '%lss_f_r',
+ '%lss_f_s',
+ '%lss_i_ce',
+ '%lss_i_lss',
+ '%lss_i_p',
+ '%lss_i_r',
+ '%lss_i_s',
+ '%lss_i_st',
+ '%lss_inv',
+ '%lss_l_lss',
+ '%lss_l_p',
+ '%lss_l_r',
+ '%lss_l_s',
+ '%lss_m_lss',
+ '%lss_m_p',
+ '%lss_m_r',
+ '%lss_m_s',
+ '%lss_n_lss',
+ '%lss_n_p',
+ '%lss_n_r',
+ '%lss_n_s',
+ '%lss_norm',
+ '%lss_o_lss',
+ '%lss_o_p',
+ '%lss_o_r',
+ '%lss_o_s',
+ '%lss_ones',
+ '%lss_r_lss',
+ '%lss_r_p',
+ '%lss_r_r',
+ '%lss_r_s',
+ '%lss_rand',
+ '%lss_s',
+ '%lss_s_lss',
+ '%lss_s_p',
+ '%lss_s_r',
+ '%lss_s_s',
+ '%lss_size',
+ '%lss_t',
+ '%lss_v_lss',
+ '%lss_v_p',
+ '%lss_v_r',
+ '%lss_v_s',
+ '%lt_i_s',
+ '%m_n_l',
+ '%m_o_l',
+ '%mc_i_h',
+ '%mc_i_s',
+ '%mc_i_st',
+ '%mc_n_st',
+ '%mc_o_st',
+ '%mc_string',
+ '%mps_p',
+ '%mps_string',
+ '%msp_a_s',
+ '%msp_abs',
+ '%msp_e',
+ '%msp_find',
+ '%msp_i_s',
+ '%msp_i_st',
+ '%msp_length',
+ '%msp_m_s',
+ '%msp_maxi',
+ '%msp_n_msp',
+ '%msp_nnz',
+ '%msp_o_msp',
+ '%msp_p',
+ '%msp_sparse',
+ '%msp_spones',
+ '%msp_t',
+ '%p_a_lss',
+ '%p_a_r',
+ '%p_c_lss',
+ '%p_c_r',
+ '%p_cumprod',
+ '%p_cumsum',
+ '%p_d_p',
+ '%p_d_r',
+ '%p_d_s',
+ '%p_det',
+ '%p_e',
+ '%p_f_lss',
+ '%p_f_r',
+ '%p_grand',
+ '%p_i_ce',
+ '%p_i_h',
+ '%p_i_hm',
+ '%p_i_lss',
+ '%p_i_p',
+ '%p_i_r',
+ '%p_i_s',
+ '%p_i_st',
+ '%p_inv',
+ '%p_j_s',
+ '%p_k_p',
+ '%p_k_r',
+ '%p_k_s',
+ '%p_l_lss',
+ '%p_l_p',
+ '%p_l_r',
+ '%p_l_s',
+ '%p_m_hm',
+ '%p_m_lss',
+ '%p_m_r',
+ '%p_matrix',
+ '%p_n_l',
+ '%p_n_lss',
+ '%p_n_r',
+ '%p_o_l',
+ '%p_o_lss',
+ '%p_o_r',
+ '%p_o_sp',
+ '%p_p_s',
+ '%p_part',
+ '%p_prod',
+ '%p_q_p',
+ '%p_q_r',
+ '%p_q_s',
+ '%p_r_lss',
+ '%p_r_p',
+ '%p_r_r',
+ '%p_r_s',
+ '%p_s_lss',
+ '%p_s_r',
+ '%p_simp',
+ '%p_string',
+ '%p_sum',
+ '%p_v_lss',
+ '%p_v_p',
+ '%p_v_r',
+ '%p_v_s',
+ '%p_x_hm',
+ '%p_x_r',
+ '%p_y_p',
+ '%p_y_r',
+ '%p_y_s',
+ '%p_z_p',
+ '%p_z_r',
+ '%p_z_s',
+ '%pcg',
+ '%plist_p',
+ '%plist_string',
+ '%r_0',
+ '%r_a_hm',
+ '%r_a_lss',
+ '%r_a_p',
+ '%r_a_r',
+ '%r_a_s',
+ '%r_c_lss',
+ '%r_c_p',
+ '%r_c_r',
+ '%r_c_s',
+ '%r_clean',
+ '%r_cumprod',
+ '%r_cumsum',
+ '%r_d_p',
+ '%r_d_r',
+ '%r_d_s',
+ '%r_det',
+ '%r_diag',
+ '%r_e',
+ '%r_eye',
+ '%r_f_lss',
+ '%r_f_p',
+ '%r_f_r',
+ '%r_f_s',
+ '%r_i_ce',
+ '%r_i_hm',
+ '%r_i_lss',
+ '%r_i_p',
+ '%r_i_r',
+ '%r_i_s',
+ '%r_i_st',
+ '%r_inv',
+ '%r_j_s',
+ '%r_k_p',
+ '%r_k_r',
+ '%r_k_s',
+ '%r_l_lss',
+ '%r_l_p',
+ '%r_l_r',
+ '%r_l_s',
+ '%r_m_hm',
+ '%r_m_lss',
+ '%r_m_p',
+ '%r_m_r',
+ '%r_m_s',
+ '%r_matrix',
+ '%r_n_lss',
+ '%r_n_p',
+ '%r_n_r',
+ '%r_n_s',
+ '%r_norm',
+ '%r_o_lss',
+ '%r_o_p',
+ '%r_o_r',
+ '%r_o_s',
+ '%r_ones',
+ '%r_p',
+ '%r_p_s',
+ '%r_prod',
+ '%r_q_p',
+ '%r_q_r',
+ '%r_q_s',
+ '%r_r_lss',
+ '%r_r_p',
+ '%r_r_r',
+ '%r_r_s',
+ '%r_rand',
+ '%r_s',
+ '%r_s_hm',
+ '%r_s_lss',
+ '%r_s_p',
+ '%r_s_r',
+ '%r_s_s',
+ '%r_simp',
+ '%r_size',
+ '%r_string',
+ '%r_sum',
+ '%r_t',
+ '%r_tril',
+ '%r_triu',
+ '%r_v_lss',
+ '%r_v_p',
+ '%r_v_r',
+ '%r_v_s',
+ '%r_varn',
+ '%r_x_p',
+ '%r_x_r',
+ '%r_x_s',
+ '%r_y_p',
+ '%r_y_r',
+ '%r_y_s',
+ '%r_z_p',
+ '%r_z_r',
+ '%r_z_s',
+ '%s_1_hm',
+ '%s_1_i',
+ '%s_2_hm',
+ '%s_2_i',
+ '%s_3_hm',
+ '%s_3_i',
+ '%s_4_hm',
+ '%s_4_i',
+ '%s_5',
+ '%s_a_b',
+ '%s_a_hm',
+ '%s_a_i',
+ '%s_a_ip',
+ '%s_a_lss',
+ '%s_a_msp',
+ '%s_a_r',
+ '%s_a_sp',
+ '%s_and',
+ '%s_b_i',
+ '%s_b_s',
+ '%s_bezout',
+ '%s_c_b',
+ '%s_c_cblock',
+ '%s_c_lss',
+ '%s_c_r',
+ '%s_c_sp',
+ '%s_d_b',
+ '%s_d_i',
+ '%s_d_p',
+ '%s_d_r',
+ '%s_d_sp',
+ '%s_e',
+ '%s_f_b',
+ '%s_f_cblock',
+ '%s_f_lss',
+ '%s_f_r',
+ '%s_f_sp',
+ '%s_g_b',
+ '%s_g_s',
+ '%s_gcd',
+ '%s_grand',
+ '%s_h_b',
+ '%s_h_s',
+ '%s_i_b',
+ '%s_i_c',
+ '%s_i_ce',
+ '%s_i_h',
+ '%s_i_hm',
+ '%s_i_i',
+ '%s_i_lss',
+ '%s_i_p',
+ '%s_i_r',
+ '%s_i_s',
+ '%s_i_sp',
+ '%s_i_spb',
+ '%s_i_st',
+ '%s_j_i',
+ '%s_k_hm',
+ '%s_k_p',
+ '%s_k_r',
+ '%s_k_sp',
+ '%s_l_b',
+ '%s_l_hm',
+ '%s_l_i',
+ '%s_l_lss',
+ '%s_l_p',
+ '%s_l_r',
+ '%s_l_s',
+ '%s_l_sp',
+ '%s_lcm',
+ '%s_m_b',
+ '%s_m_hm',
+ '%s_m_i',
+ '%s_m_ip',
+ '%s_m_lss',
+ '%s_m_msp',
+ '%s_m_r',
+ '%s_matrix',
+ '%s_n_hm',
+ '%s_n_i',
+ '%s_n_l',
+ '%s_n_lss',
+ '%s_n_r',
+ '%s_n_st',
+ '%s_o_hm',
+ '%s_o_i',
+ '%s_o_l',
+ '%s_o_lss',
+ '%s_o_r',
+ '%s_o_st',
+ '%s_or',
+ '%s_p_b',
+ '%s_p_i',
+ '%s_pow',
+ '%s_q_hm',
+ '%s_q_i',
+ '%s_q_p',
+ '%s_q_r',
+ '%s_q_sp',
+ '%s_r_b',
+ '%s_r_i',
+ '%s_r_lss',
+ '%s_r_p',
+ '%s_r_r',
+ '%s_r_s',
+ '%s_r_sp',
+ '%s_s_b',
+ '%s_s_hm',
+ '%s_s_i',
+ '%s_s_ip',
+ '%s_s_lss',
+ '%s_s_r',
+ '%s_s_sp',
+ '%s_simp',
+ '%s_v_lss',
+ '%s_v_p',
+ '%s_v_r',
+ '%s_v_s',
+ '%s_x_b',
+ '%s_x_hm',
+ '%s_x_i',
+ '%s_x_r',
+ '%s_y_p',
+ '%s_y_r',
+ '%s_y_sp',
+ '%s_z_p',
+ '%s_z_r',
+ '%s_z_sp',
+ '%sn',
+ '%sp_a_s',
+ '%sp_a_sp',
+ '%sp_and',
+ '%sp_c_s',
+ '%sp_ceil',
+ '%sp_conj',
+ '%sp_cos',
+ '%sp_cumprod',
+ '%sp_cumsum',
+ '%sp_d_s',
+ '%sp_d_sp',
+ '%sp_det',
+ '%sp_diag',
+ '%sp_e',
+ '%sp_exp',
+ '%sp_f_s',
+ '%sp_floor',
+ '%sp_grand',
+ '%sp_gsort',
+ '%sp_i_ce',
+ '%sp_i_h',
+ '%sp_i_s',
+ '%sp_i_sp',
+ '%sp_i_st',
+ '%sp_int',
+ '%sp_inv',
+ '%sp_k_s',
+ '%sp_k_sp',
+ '%sp_l_s',
+ '%sp_l_sp',
+ '%sp_length',
+ '%sp_max',
+ '%sp_min',
+ '%sp_norm',
+ '%sp_or',
+ '%sp_p_s',
+ '%sp_prod',
+ '%sp_q_s',
+ '%sp_q_sp',
+ '%sp_r_s',
+ '%sp_r_sp',
+ '%sp_round',
+ '%sp_s_s',
+ '%sp_s_sp',
+ '%sp_sin',
+ '%sp_sqrt',
+ '%sp_string',
+ '%sp_sum',
+ '%sp_tril',
+ '%sp_triu',
+ '%sp_y_s',
+ '%sp_y_sp',
+ '%sp_z_s',
+ '%sp_z_sp',
+ '%spb_and',
+ '%spb_c_b',
+ '%spb_cumprod',
+ '%spb_cumsum',
+ '%spb_diag',
+ '%spb_e',
+ '%spb_f_b',
+ '%spb_g_b',
+ '%spb_g_spb',
+ '%spb_h_b',
+ '%spb_h_spb',
+ '%spb_i_b',
+ '%spb_i_ce',
+ '%spb_i_h',
+ '%spb_i_st',
+ '%spb_or',
+ '%spb_prod',
+ '%spb_sum',
+ '%spb_tril',
+ '%spb_triu',
+ '%st_6',
+ '%st_c_st',
+ '%st_e',
+ '%st_f_st',
+ '%st_i_b',
+ '%st_i_c',
+ '%st_i_fptr',
+ '%st_i_h',
+ '%st_i_i',
+ '%st_i_ip',
+ '%st_i_lss',
+ '%st_i_msp',
+ '%st_i_p',
+ '%st_i_r',
+ '%st_i_s',
+ '%st_i_sp',
+ '%st_i_spb',
+ '%st_i_st',
+ '%st_matrix',
+ '%st_n_c',
+ '%st_n_l',
+ '%st_n_mc',
+ '%st_n_p',
+ '%st_n_s',
+ '%st_o_c',
+ '%st_o_l',
+ '%st_o_mc',
+ '%st_o_p',
+ '%st_o_s',
+ '%st_o_tl',
+ '%st_p',
+ '%st_size',
+ '%st_string',
+ '%st_t',
+ '%ticks_i_h',
+ '%xls_e',
+ '%xls_p',
+ '%xlssheet_e',
+ '%xlssheet_p',
+ '%xlssheet_size',
+ '%xlssheet_string',
+ 'DominationRank',
+ 'G_make',
+ 'IsAScalar',
+ 'NDcost',
+ 'OS_Version',
+ 'PlotSparse',
+ 'ReadHBSparse',
+ 'TCL_CreateSlave',
+ 'abcd',
+ 'abinv',
+ 'accept_func_default',
+ 'accept_func_vfsa',
+ 'acf',
+ 'acosd',
+ 'acosh',
+ 'acoshm',
+ 'acosm',
+ 'acot',
+ 'acotd',
+ 'acoth',
+ 'acsc',
+ 'acscd',
+ 'acsch',
+ 'add_demo',
+ 'add_help_chapter',
+ 'add_module_help_chapter',
+ 'add_param',
+ 'add_profiling',
+ 'adj2sp',
+ 'aff2ab',
+ 'ana_style',
+ 'analpf',
+ 'analyze',
+ 'aplat',
+ 'arhnk',
+ 'arl2',
+ 'arma2p',
+ 'arma2ss',
+ 'armac',
+ 'armax',
+ 'armax1',
+ 'arobasestring2strings',
+ 'arsimul',
+ 'ascii2string',
+ 'asciimat',
+ 'asec',
+ 'asecd',
+ 'asech',
+ 'asind',
+ 'asinh',
+ 'asinhm',
+ 'asinm',
+ 'assert_checkalmostequal',
+ 'assert_checkequal',
+ 'assert_checkerror',
+ 'assert_checkfalse',
+ 'assert_checkfilesequal',
+ 'assert_checktrue',
+ 'assert_comparecomplex',
+ 'assert_computedigits',
+ 'assert_cond2reltol',
+ 'assert_cond2reqdigits',
+ 'assert_generror',
+ 'atand',
+ 'atanh',
+ 'atanhm',
+ 'atanm',
+ 'atomsAutoload',
+ 'atomsAutoloadAdd',
+ 'atomsAutoloadDel',
+ 'atomsAutoloadList',
+ 'atomsCategoryList',
+ 'atomsCheckModule',
+ 'atomsDepTreeShow',
+ 'atomsGetConfig',
+ 'atomsGetInstalled',
+ 'atomsGetInstalledPath',
+ 'atomsGetLoaded',
+ 'atomsGetLoadedPath',
+ 'atomsInstall',
+ 'atomsIsInstalled',
+ 'atomsIsLoaded',
+ 'atomsList',
+ 'atomsLoad',
+ 'atomsQuit',
+ 'atomsRemove',
+ 'atomsRepositoryAdd',
+ 'atomsRepositoryDel',
+ 'atomsRepositoryList',
+ 'atomsRestoreConfig',
+ 'atomsSaveConfig',
+ 'atomsSearch',
+ 'atomsSetConfig',
+ 'atomsShow',
+ 'atomsSystemInit',
+ 'atomsSystemUpdate',
+ 'atomsTest',
+ 'atomsUpdate',
+ 'atomsVersion',
+ 'augment',
+ 'auread',
+ 'auwrite',
+ 'balreal',
+ 'bench_run',
+ 'bilin',
+ 'bilt',
+ 'bin2dec',
+ 'binomial',
+ 'bitand',
+ 'bitcmp',
+ 'bitget',
+ 'bitor',
+ 'bitset',
+ 'bitxor',
+ 'black',
+ 'blanks',
+ 'bloc2exp',
+ 'bloc2ss',
+ 'block_parameter_error',
+ 'bode',
+ 'bode_asymp',
+ 'bstap',
+ 'buttmag',
+ 'bvodeS',
+ 'bytecode',
+ 'bytecodewalk',
+ 'cainv',
+ 'calendar',
+ 'calerf',
+ 'calfrq',
+ 'canon',
+ 'casc',
+ 'cat',
+ 'cat_code',
+ 'cb_m2sci_gui',
+ 'ccontrg',
+ 'cell',
+ 'cell2mat',
+ 'cellstr',
+ 'center',
+ 'cepstrum',
+ 'cfspec',
+ 'char',
+ 'chart',
+ 'cheb1mag',
+ 'cheb2mag',
+ 'check_gateways',
+ 'check_modules_xml',
+ 'check_versions',
+ 'chepol',
+ 'chfact',
+ 'chsolve',
+ 'classmarkov',
+ 'clean_help',
+ 'clock',
+ 'cls2dls',
+ 'cmb_lin',
+ 'cmndred',
+ 'cmoment',
+ 'coding_ga_binary',
+ 'coding_ga_identity',
+ 'coff',
+ 'coffg',
+ 'colcomp',
+ 'colcompr',
+ 'colinout',
+ 'colregul',
+ 'companion',
+ 'complex',
+ 'compute_initial_temp',
+ 'cond',
+ 'cond2sp',
+ 'condestsp',
+ 'configure_msifort',
+ 'configure_msvc',
+ 'conjgrad',
+ 'cont_frm',
+ 'cont_mat',
+ 'contrss',
+ 'conv',
+ 'convert_to_float',
+ 'convertindex',
+ 'convol',
+ 'convol2d',
+ 'copfac',
+ 'correl',
+ 'cosd',
+ 'cosh',
+ 'coshm',
+ 'cosm',
+ 'cotd',
+ 'cotg',
+ 'coth',
+ 'cothm',
+ 'cov',
+ 'covar',
+ 'createXConfiguration',
+ 'createfun',
+ 'createstruct',
+ 'cross',
+ 'crossover_ga_binary',
+ 'crossover_ga_default',
+ 'csc',
+ 'cscd',
+ 'csch',
+ 'csgn',
+ 'csim',
+ 'cspect',
+ 'ctr_gram',
+ 'czt',
+ 'dae',
+ 'daeoptions',
+ 'damp',
+ 'datafit',
+ 'date',
+ 'datenum',
+ 'datevec',
+ 'dbphi',
+ 'dcf',
+ 'ddp',
+ 'dec2bin',
+ 'dec2hex',
+ 'dec2oct',
+ 'del_help_chapter',
+ 'del_module_help_chapter',
+ 'demo_begin',
+ 'demo_choose',
+ 'demo_compiler',
+ 'demo_end',
+ 'demo_file_choice',
+ 'demo_folder_choice',
+ 'demo_function_choice',
+ 'demo_gui',
+ 'demo_run',
+ 'demo_viewCode',
+ 'denom',
+ 'derivat',
+ 'derivative',
+ 'des2ss',
+ 'des2tf',
+ 'detectmsifort64tools',
+ 'detectmsvc64tools',
+ 'determ',
+ 'detr',
+ 'detrend',
+ 'devtools_run_builder',
+ 'dhnorm',
+ 'diff',
+ 'diophant',
+ 'dir',
+ 'dirname',
+ 'dispfiles',
+ 'dllinfo',
+ 'dscr',
+ 'dsimul',
+ 'dt_ility',
+ 'dtsi',
+ 'edit',
+ 'edit_error',
+ 'editor',
+ 'eigenmarkov',
+ 'eigs',
+ 'ell1mag',
+ 'enlarge_shape',
+ 'entropy',
+ 'eomday',
+ 'epred',
+ 'eqfir',
+ 'eqiir',
+ 'equil',
+ 'equil1',
+ 'erfinv',
+ 'etime',
+ 'eval',
+ 'evans',
+ 'evstr',
+ 'example_run',
+ 'expression2code',
+ 'extract_help_examples',
+ 'factor',
+ 'factorial',
+ 'factors',
+ 'faurre',
+ 'ffilt',
+ 'fft2',
+ 'fftshift',
+ 'fieldnames',
+ 'filt_sinc',
+ 'filter',
+ 'findABCD',
+ 'findAC',
+ 'findBDK',
+ 'findR',
+ 'find_freq',
+ 'find_links',
+ 'find_scicos_version',
+ 'findm',
+ 'findmsifortcompiler',
+ 'findmsvccompiler',
+ 'findx0BD',
+ 'firstnonsingleton',
+ 'fix',
+ 'fixedpointgcd',
+ 'flipdim',
+ 'flts',
+ 'fminsearch',
+ 'formatBlackTip',
+ 'formatBodeMagTip',
+ 'formatBodePhaseTip',
+ 'formatGainplotTip',
+ 'formatHallModuleTip',
+ 'formatHallPhaseTip',
+ 'formatNicholsGainTip',
+ 'formatNicholsPhaseTip',
+ 'formatNyquistTip',
+ 'formatPhaseplotTip',
+ 'formatSgridDampingTip',
+ 'formatSgridFreqTip',
+ 'formatZgridDampingTip',
+ 'formatZgridFreqTip',
+ 'format_txt',
+ 'fourplan',
+ 'frep2tf',
+ 'freson',
+ 'frfit',
+ 'frmag',
+ 'fseek_origin',
+ 'fsfirlin',
+ 'fspec',
+ 'fspecg',
+ 'fstabst',
+ 'ftest',
+ 'ftuneq',
+ 'fullfile',
+ 'fullrf',
+ 'fullrfk',
+ 'fun2string',
+ 'g_margin',
+ 'gainplot',
+ 'gamitg',
+ 'gcare',
+ 'gcd',
+ 'gencompilationflags_unix',
+ 'generateBlockImage',
+ 'generateBlockImages',
+ 'generic_i_ce',
+ 'generic_i_h',
+ 'generic_i_hm',
+ 'generic_i_s',
+ 'generic_i_st',
+ 'genlib',
+ 'genmarkov',
+ 'geomean',
+ 'getDiagramVersion',
+ 'getModelicaPath',
+ 'getPreferencesValue',
+ 'get_file_path',
+ 'get_function_path',
+ 'get_param',
+ 'get_profile',
+ 'get_scicos_version',
+ 'getd',
+ 'getscilabkeywords',
+ 'getshell',
+ 'gettklib',
+ 'gfare',
+ 'gfrancis',
+ 'givens',
+ 'glever',
+ 'gmres',
+ 'group',
+ 'gschur',
+ 'gspec',
+ 'gtild',
+ 'h2norm',
+ 'h_cl',
+ 'h_inf',
+ 'h_inf_st',
+ 'h_norm',
+ 'hallchart',
+ 'halt',
+ 'hank',
+ 'hankelsv',
+ 'harmean',
+ 'haveacompiler',
+ 'head_comments',
+ 'help_from_sci',
+ 'help_skeleton',
+ 'hermit',
+ 'hex2dec',
+ 'hilb',
+ 'hilbert',
+ 'histc',
+ 'horner',
+ 'householder',
+ 'hrmt',
+ 'htrianr',
+ 'hypermat',
+ 'idct',
+ 'idst',
+ 'ifft',
+ 'ifftshift',
+ 'iir',
+ 'iirgroup',
+ 'iirlp',
+ 'iirmod',
+ 'ilib_build',
+ 'ilib_build_jar',
+ 'ilib_compile',
+ 'ilib_for_link',
+ 'ilib_gen_Make',
+ 'ilib_gen_Make_unix',
+ 'ilib_gen_cleaner',
+ 'ilib_gen_gateway',
+ 'ilib_gen_loader',
+ 'ilib_include_flag',
+ 'ilib_mex_build',
+ 'im_inv',
+ 'importScicosDiagram',
+ 'importScicosPal',
+ 'importXcosDiagram',
+ 'imrep2ss',
+ 'ind2sub',
+ 'inistate',
+ 'init_ga_default',
+ 'init_param',
+ 'initial_scicos_tables',
+ 'input',
+ 'instruction2code',
+ 'intc',
+ 'intdec',
+ 'integrate',
+ 'interp1',
+ 'interpln',
+ 'intersect',
+ 'intl',
+ 'intsplin',
+ 'inttrap',
+ 'inv_coeff',
+ 'invr',
+ 'invrs',
+ 'invsyslin',
+ 'iqr',
+ 'isLeapYear',
+ 'is_absolute_path',
+ 'is_param',
+ 'iscell',
+ 'iscellstr',
+ 'iscolumn',
+ 'isempty',
+ 'isfield',
+ 'isinf',
+ 'ismatrix',
+ 'isnan',
+ 'isrow',
+ 'isscalar',
+ 'issparse',
+ 'issquare',
+ 'isstruct',
+ 'isvector',
+ 'jmat',
+ 'justify',
+ 'kalm',
+ 'karmarkar',
+ 'kernel',
+ 'kpure',
+ 'krac2',
+ 'kroneck',
+ 'lattn',
+ 'lattp',
+ 'launchtest',
+ 'lcf',
+ 'lcm',
+ 'lcmdiag',
+ 'leastsq',
+ 'leqe',
+ 'leqr',
+ 'lev',
+ 'levin',
+ 'lex_sort',
+ 'lft',
+ 'lin',
+ 'lin2mu',
+ 'lincos',
+ 'lindquist',
+ 'linf',
+ 'linfn',
+ 'linsolve',
+ 'linspace',
+ 'list2vec',
+ 'list_param',
+ 'listfiles',
+ 'listfunctions',
+ 'listvarinfile',
+ 'lmisolver',
+ 'lmitool',
+ 'loadXcosLibs',
+ 'loadmatfile',
+ 'loadwave',
+ 'log10',
+ 'log2',
+ 'logm',
+ 'logspace',
+ 'lqe',
+ 'lqg',
+ 'lqg2stan',
+ 'lqg_ltr',
+ 'lqr',
+ 'ls',
+ 'lyap',
+ 'm2sci_gui',
+ 'm_circle',
+ 'macglov',
+ 'macrovar',
+ 'mad',
+ 'makecell',
+ 'manedit',
+ 'mapsound',
+ 'markp2ss',
+ 'matfile2sci',
+ 'mdelete',
+ 'mean',
+ 'meanf',
+ 'median',
+ 'members',
+ 'mese',
+ 'meshgrid',
+ 'mfft',
+ 'mfile2sci',
+ 'minreal',
+ 'minss',
+ 'mkdir',
+ 'modulo',
+ 'moment',
+ 'mrfit',
+ 'msd',
+ 'mstr2sci',
+ 'mtlb',
+ 'mtlb_0',
+ 'mtlb_a',
+ 'mtlb_all',
+ 'mtlb_any',
+ 'mtlb_axes',
+ 'mtlb_axis',
+ 'mtlb_beta',
+ 'mtlb_box',
+ 'mtlb_choices',
+ 'mtlb_close',
+ 'mtlb_colordef',
+ 'mtlb_cond',
+ 'mtlb_cov',
+ 'mtlb_cumprod',
+ 'mtlb_cumsum',
+ 'mtlb_dec2hex',
+ 'mtlb_delete',
+ 'mtlb_diag',
+ 'mtlb_diff',
+ 'mtlb_dir',
+ 'mtlb_double',
+ 'mtlb_e',
+ 'mtlb_echo',
+ 'mtlb_error',
+ 'mtlb_eval',
+ 'mtlb_exist',
+ 'mtlb_eye',
+ 'mtlb_false',
+ 'mtlb_fft',
+ 'mtlb_fftshift',
+ 'mtlb_filter',
+ 'mtlb_find',
+ 'mtlb_findstr',
+ 'mtlb_fliplr',
+ 'mtlb_fopen',
+ 'mtlb_format',
+ 'mtlb_fprintf',
+ 'mtlb_fread',
+ 'mtlb_fscanf',
+ 'mtlb_full',
+ 'mtlb_fwrite',
+ 'mtlb_get',
+ 'mtlb_grid',
+ 'mtlb_hold',
+ 'mtlb_i',
+ 'mtlb_ifft',
+ 'mtlb_image',
+ 'mtlb_imp',
+ 'mtlb_int16',
+ 'mtlb_int32',
+ 'mtlb_int8',
+ 'mtlb_is',
+ 'mtlb_isa',
+ 'mtlb_isfield',
+ 'mtlb_isletter',
+ 'mtlb_isspace',
+ 'mtlb_l',
+ 'mtlb_legendre',
+ 'mtlb_linspace',
+ 'mtlb_logic',
+ 'mtlb_logical',
+ 'mtlb_loglog',
+ 'mtlb_lower',
+ 'mtlb_max',
+ 'mtlb_mean',
+ 'mtlb_median',
+ 'mtlb_mesh',
+ 'mtlb_meshdom',
+ 'mtlb_min',
+ 'mtlb_more',
+ 'mtlb_num2str',
+ 'mtlb_ones',
+ 'mtlb_pcolor',
+ 'mtlb_plot',
+ 'mtlb_prod',
+ 'mtlb_qr',
+ 'mtlb_qz',
+ 'mtlb_rand',
+ 'mtlb_randn',
+ 'mtlb_rcond',
+ 'mtlb_realmax',
+ 'mtlb_realmin',
+ 'mtlb_s',
+ 'mtlb_semilogx',
+ 'mtlb_semilogy',
+ 'mtlb_setstr',
+ 'mtlb_size',
+ 'mtlb_sort',
+ 'mtlb_sortrows',
+ 'mtlb_sprintf',
+ 'mtlb_sscanf',
+ 'mtlb_std',
+ 'mtlb_strcmp',
+ 'mtlb_strcmpi',
+ 'mtlb_strfind',
+ 'mtlb_strrep',
+ 'mtlb_subplot',
+ 'mtlb_sum',
+ 'mtlb_t',
+ 'mtlb_toeplitz',
+ 'mtlb_tril',
+ 'mtlb_triu',
+ 'mtlb_true',
+ 'mtlb_type',
+ 'mtlb_uint16',
+ 'mtlb_uint32',
+ 'mtlb_uint8',
+ 'mtlb_upper',
+ 'mtlb_var',
+ 'mtlb_zeros',
+ 'mu2lin',
+ 'mutation_ga_binary',
+ 'mutation_ga_default',
+ 'mvcorrel',
+ 'mvvacov',
+ 'nancumsum',
+ 'nand2mean',
+ 'nanmax',
+ 'nanmean',
+ 'nanmeanf',
+ 'nanmedian',
+ 'nanmin',
+ 'nanreglin',
+ 'nanstdev',
+ 'nansum',
+ 'narsimul',
+ 'ndgrid',
+ 'ndims',
+ 'nehari',
+ 'neigh_func_csa',
+ 'neigh_func_default',
+ 'neigh_func_fsa',
+ 'neigh_func_vfsa',
+ 'neldermead_cget',
+ 'neldermead_configure',
+ 'neldermead_costf',
+ 'neldermead_defaultoutput',
+ 'neldermead_destroy',
+ 'neldermead_function',
+ 'neldermead_get',
+ 'neldermead_log',
+ 'neldermead_new',
+ 'neldermead_restart',
+ 'neldermead_search',
+ 'neldermead_updatesimp',
+ 'nextpow2',
+ 'nfreq',
+ 'nicholschart',
+ 'nlev',
+ 'nmplot_cget',
+ 'nmplot_configure',
+ 'nmplot_contour',
+ 'nmplot_destroy',
+ 'nmplot_function',
+ 'nmplot_get',
+ 'nmplot_historyplot',
+ 'nmplot_log',
+ 'nmplot_new',
+ 'nmplot_outputcmd',
+ 'nmplot_restart',
+ 'nmplot_search',
+ 'nmplot_simplexhistory',
+ 'noisegen',
+ 'nonreg_test_run',
+ 'now',
+ 'nthroot',
+ 'null',
+ 'num2cell',
+ 'numderivative',
+ 'numdiff',
+ 'numer',
+ 'nyquist',
+ 'nyquistfrequencybounds',
+ 'obs_gram',
+ 'obscont',
+ 'observer',
+ 'obsv_mat',
+ 'obsvss',
+ 'oct2dec',
+ 'odeoptions',
+ 'optim_ga',
+ 'optim_moga',
+ 'optim_nsga',
+ 'optim_nsga2',
+ 'optim_sa',
+ 'optimbase_cget',
+ 'optimbase_checkbounds',
+ 'optimbase_checkcostfun',
+ 'optimbase_checkx0',
+ 'optimbase_configure',
+ 'optimbase_destroy',
+ 'optimbase_function',
+ 'optimbase_get',
+ 'optimbase_hasbounds',
+ 'optimbase_hasconstraints',
+ 'optimbase_hasnlcons',
+ 'optimbase_histget',
+ 'optimbase_histset',
+ 'optimbase_incriter',
+ 'optimbase_isfeasible',
+ 'optimbase_isinbounds',
+ 'optimbase_isinnonlincons',
+ 'optimbase_log',
+ 'optimbase_logshutdown',
+ 'optimbase_logstartup',
+ 'optimbase_new',
+ 'optimbase_outputcmd',
+ 'optimbase_outstruct',
+ 'optimbase_proj2bnds',
+ 'optimbase_set',
+ 'optimbase_stoplog',
+ 'optimbase_terminate',
+ 'optimget',
+ 'optimplotfunccount',
+ 'optimplotfval',
+ 'optimplotx',
+ 'optimset',
+ 'optimsimplex_center',
+ 'optimsimplex_check',
+ 'optimsimplex_compsomefv',
+ 'optimsimplex_computefv',
+ 'optimsimplex_deltafv',
+ 'optimsimplex_deltafvmax',
+ 'optimsimplex_destroy',
+ 'optimsimplex_dirmat',
+ 'optimsimplex_fvmean',
+ 'optimsimplex_fvstdev',
+ 'optimsimplex_fvvariance',
+ 'optimsimplex_getall',
+ 'optimsimplex_getallfv',
+ 'optimsimplex_getallx',
+ 'optimsimplex_getfv',
+ 'optimsimplex_getn',
+ 'optimsimplex_getnbve',
+ 'optimsimplex_getve',
+ 'optimsimplex_getx',
+ 'optimsimplex_gradientfv',
+ 'optimsimplex_log',
+ 'optimsimplex_new',
+ 'optimsimplex_reflect',
+ 'optimsimplex_setall',
+ 'optimsimplex_setallfv',
+ 'optimsimplex_setallx',
+ 'optimsimplex_setfv',
+ 'optimsimplex_setn',
+ 'optimsimplex_setnbve',
+ 'optimsimplex_setve',
+ 'optimsimplex_setx',
+ 'optimsimplex_shrink',
+ 'optimsimplex_size',
+ 'optimsimplex_sort',
+ 'optimsimplex_xbar',
+ 'orth',
+ 'output_ga_default',
+ 'output_moga_default',
+ 'output_nsga2_default',
+ 'output_nsga_default',
+ 'p_margin',
+ 'pack',
+ 'pareto_filter',
+ 'parrot',
+ 'pbig',
+ 'pca',
+ 'pcg',
+ 'pdiv',
+ 'pen2ea',
+ 'pencan',
+ 'pencost',
+ 'penlaur',
+ 'perctl',
+ 'perl',
+ 'perms',
+ 'permute',
+ 'pertrans',
+ 'pfactors',
+ 'pfss',
+ 'phasemag',
+ 'phaseplot',
+ 'phc',
+ 'pinv',
+ 'playsnd',
+ 'plotprofile',
+ 'plzr',
+ 'pmodulo',
+ 'pol2des',
+ 'pol2str',
+ 'polar',
+ 'polfact',
+ 'prbs_a',
+ 'prettyprint',
+ 'primes',
+ 'princomp',
+ 'profile',
+ 'proj',
+ 'projsl',
+ 'projspec',
+ 'psmall',
+ 'pspect',
+ 'qmr',
+ 'qpsolve',
+ 'quart',
+ 'quaskro',
+ 'rafiter',
+ 'randpencil',
+ 'range',
+ 'rank',
+ 'readxls',
+ 'recompilefunction',
+ 'recons',
+ 'reglin',
+ 'regress',
+ 'remezb',
+ 'remove_param',
+ 'remove_profiling',
+ 'repfreq',
+ 'replace_Ix_by_Fx',
+ 'repmat',
+ 'reset_profiling',
+ 'resize_matrix',
+ 'returntoscilab',
+ 'rhs2code',
+ 'ric_desc',
+ 'riccati',
+ 'rmdir',
+ 'routh_t',
+ 'rowcomp',
+ 'rowcompr',
+ 'rowinout',
+ 'rowregul',
+ 'rowshuff',
+ 'rref',
+ 'sample',
+ 'samplef',
+ 'samwr',
+ 'savematfile',
+ 'savewave',
+ 'scanf',
+ 'sci2exp',
+ 'sciGUI_init',
+ 'sci_sparse',
+ 'scicos_getvalue',
+ 'scicos_simulate',
+ 'scicos_workspace_init',
+ 'scisptdemo',
+ 'scitest',
+ 'sdiff',
+ 'sec',
+ 'secd',
+ 'sech',
+ 'selection_ga_elitist',
+ 'selection_ga_random',
+ 'sensi',
+ 'setPreferencesValue',
+ 'set_param',
+ 'setdiff',
+ 'sgrid',
+ 'show_margins',
+ 'show_pca',
+ 'showprofile',
+ 'signm',
+ 'sinc',
+ 'sincd',
+ 'sind',
+ 'sinh',
+ 'sinhm',
+ 'sinm',
+ 'sm2des',
+ 'sm2ss',
+ 'smga',
+ 'smooth',
+ 'solve',
+ 'sound',
+ 'soundsec',
+ 'sp2adj',
+ 'spaninter',
+ 'spanplus',
+ 'spantwo',
+ 'specfact',
+ 'speye',
+ 'sprand',
+ 'spzeros',
+ 'sqroot',
+ 'sqrtm',
+ 'squarewave',
+ 'squeeze',
+ 'srfaur',
+ 'srkf',
+ 'ss2des',
+ 'ss2ss',
+ 'ss2tf',
+ 'sskf',
+ 'ssprint',
+ 'ssrand',
+ 'st_deviation',
+ 'st_i_generic',
+ 'st_ility',
+ 'stabil',
+ 'statgain',
+ 'stdev',
+ 'stdevf',
+ 'steadycos',
+ 'strange',
+ 'strcmpi',
+ 'struct',
+ 'sub2ind',
+ 'sva',
+ 'svplot',
+ 'sylm',
+ 'sylv',
+ 'sysconv',
+ 'sysdiag',
+ 'sysfact',
+ 'syslin',
+ 'syssize',
+ 'system',
+ 'systmat',
+ 'tabul',
+ 'tand',
+ 'tanh',
+ 'tanhm',
+ 'tanm',
+ 'tbx_build_blocks',
+ 'tbx_build_cleaner',
+ 'tbx_build_gateway',
+ 'tbx_build_gateway_clean',
+ 'tbx_build_gateway_loader',
+ 'tbx_build_help',
+ 'tbx_build_help_loader',
+ 'tbx_build_loader',
+ 'tbx_build_localization',
+ 'tbx_build_macros',
+ 'tbx_build_pal_loader',
+ 'tbx_build_src',
+ 'tbx_builder',
+ 'tbx_builder_gateway',
+ 'tbx_builder_gateway_lang',
+ 'tbx_builder_help',
+ 'tbx_builder_help_lang',
+ 'tbx_builder_macros',
+ 'tbx_builder_src',
+ 'tbx_builder_src_lang',
+ 'tbx_generate_pofile',
+ 'temp_law_csa',
+ 'temp_law_default',
+ 'temp_law_fsa',
+ 'temp_law_huang',
+ 'temp_law_vfsa',
+ 'test_clean',
+ 'test_on_columns',
+ 'test_run',
+ 'test_run_level',
+ 'testexamples',
+ 'tf2des',
+ 'tf2ss',
+ 'thrownan',
+ 'tic',
+ 'time_id',
+ 'toc',
+ 'toeplitz',
+ 'tokenpos',
+ 'toolboxes',
+ 'trace',
+ 'trans',
+ 'translatepaths',
+ 'tree2code',
+ 'trfmod',
+ 'trianfml',
+ 'trimmean',
+ 'trisolve',
+ 'trzeros',
+ 'typeof',
+ 'ui_observer',
+ 'union',
+ 'unique',
+ 'unit_test_run',
+ 'unix_g',
+ 'unix_s',
+ 'unix_w',
+ 'unix_x',
+ 'unobs',
+ 'unpack',
+ 'unwrap',
+ 'variance',
+ 'variancef',
+ 'vec2list',
+ 'vectorfind',
+ 'ver',
+ 'warnobsolete',
+ 'wavread',
+ 'wavwrite',
+ 'wcenter',
+ 'weekday',
+ 'wfir',
+ 'wfir_gui',
+ 'whereami',
+ 'who_user',
+ 'whos',
+ 'wiener',
+ 'wigner',
+ 'window',
+ 'winlist',
+ 'with_javasci',
+ 'with_macros_source',
+ 'with_modelica_compiler',
+ 'with_tk',
+ 'xcorr',
+ 'xcosBlockEval',
+ 'xcosBlockInterface',
+ 'xcosCodeGeneration',
+ 'xcosConfigureModelica',
+ 'xcosPal',
+ 'xcosPalAdd',
+ 'xcosPalAddBlock',
+ 'xcosPalExport',
+ 'xcosPalGenerateAllIcons',
+ 'xcosShowBlockWarning',
+ 'xcosValidateBlockSet',
+ 'xcosValidateCompareBlock',
+ 'xcos_compile',
+ 'xcos_debug_gui',
+ 'xcos_run',
+ 'xcos_simulate',
+ 'xcov',
+ 'xmltochm',
+ 'xmltoformat',
+ 'xmltohtml',
+ 'xmltojar',
+ 'xmltopdf',
+ 'xmltops',
+ 'xmltoweb',
+ 'yulewalk',
+ 'zeropen',
+ 'zgrid',
+ 'zpbutt',
+ 'zpch1',
+ 'zpch2',
+ 'zpell',
+)
+
+variables_kw = (
+ '$',
+ '%F',
+ '%T',
+ '%e',
+ '%eps',
+ '%f',
+ '%fftw',
+ '%gui',
+ '%i',
+ '%inf',
+ '%io',
+ '%modalWarning',
+ '%nan',
+ '%pi',
+ '%s',
+ '%t',
+ '%tk',
+ '%toolboxes',
+ '%toolboxes_dir',
+ '%z',
+ 'PWD',
+ 'SCI',
+ 'SCIHOME',
+ 'TMPDIR',
+ 'arnoldilib',
+ 'assertlib',
+ 'atomslib',
+ 'cacsdlib',
+ 'compatibility_functilib',
+ 'corelib',
+ 'data_structureslib',
+ 'demo_toolslib',
+ 'development_toolslib',
+ 'differential_equationlib',
+ 'dynamic_linklib',
+ 'elementary_functionslib',
+ 'enull',
+ 'evoid',
+ 'external_objectslib',
+ 'fd',
+ 'fileiolib',
+ 'functionslib',
+ 'genetic_algorithmslib',
+ 'helptoolslib',
+ 'home',
+ 'integerlib',
+ 'interpolationlib',
+ 'iolib',
+ 'jnull',
+ 'jvoid',
+ 'linear_algebralib',
+ 'm2scilib',
+ 'matiolib',
+ 'modules_managerlib',
+ 'neldermeadlib',
+ 'optimbaselib',
+ 'optimizationlib',
+ 'optimsimplexlib',
+ 'output_streamlib',
+ 'overloadinglib',
+ 'parameterslib',
+ 'polynomialslib',
+ 'preferenceslib',
+ 'randliblib',
+ 'scicos_autolib',
+ 'scicos_utilslib',
+ 'scinoteslib',
+ 'signal_processinglib',
+ 'simulated_annealinglib',
+ 'soundlib',
+ 'sparselib',
+ 'special_functionslib',
+ 'spreadsheetlib',
+ 'statisticslib',
+ 'stringlib',
+ 'tclscilib',
+ 'timelib',
+ 'umfpacklib',
+ 'xcoslib',
+)
+
+
+if __name__ == '__main__': # pragma: no cover
+ import subprocess
+ from pygments.util import format_lines, duplicates_removed
+
+ mapping = {'variables': 'builtin'}
+
+ def extract_completion(var_type):
+ s = subprocess.Popen(['scilab', '-nwni'], stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ output = s.communicate('''\
+fd = mopen("/dev/stderr", "wt");
+mputl(strcat(completion("", "%s"), "||"), fd);
+mclose(fd)\n''' % var_type)
+ if '||' not in output[1]:
+ raise Exception(output[0])
+ # Invalid DISPLAY causes this to be output:
+ text = output[1].strip()
+ if text.startswith('Error: unable to open display \n'):
+ text = text[len('Error: unable to open display \n'):]
+ return text.split('||')
+
+ new_data = {}
+ seen = set() # only keep first type for a given word
+ for t in ('functions', 'commands', 'macros', 'variables'):
+ new_data[t] = duplicates_removed(extract_completion(t), seen)
+ seen.update(set(new_data[t]))
+
+
+ with open(__file__) as f:
+ content = f.read()
+
+ header = content[:content.find('# Autogenerated')]
+ footer = content[content.find("if __name__ == '__main__':"):]
+
+ with open(__file__, 'w') as f:
+ f.write(header)
+ f.write('# Autogenerated\n\n')
+ for k, v in sorted(new_data.iteritems()):
+ f.write(format_lines(k + '_kw', v) + '\n\n')
+ f.write(footer)
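
The `__main__` block above regenerates every `*_kw` tuple by driving a headless Scilab session (`scilab -nwni`), asking its `completion()` builtin for each keyword class ('functions', 'commands', 'macros', 'variables'), and rewriting everything after the `# Autogenerated` marker with `format_lines`. The sketch below is only an illustration of how such generated tuples are typically consumed; it is not the real ScilabLexer. It uses the `variables_kw` tuple shown above, and the companion names functions_kw, commands_kw and macros_kw mentioned in the comments are assumptions inferred from the regeneration loop.

# A minimal, self-contained sketch: fold one generated keyword tuple into a
# toy RegexLexer via pygments.lexer.words().  The companion tuples written by
# the regeneration loop (functions_kw, commands_kw, macros_kw) would be wired
# in exactly the same way.
from pygments.lexer import RegexLexer, words
from pygments.token import Name, Text

from pygments.lexers._scilab_builtins import variables_kw

class ScilabNameDemoLexer(RegexLexer):
    """Toy lexer that only separates predefined variables from other names."""
    name = 'ScilabNameDemo'
    tokens = {
        'root': [
            # words() escapes each entry, so names such as '%pi' or '$' are safe.
            (words(variables_kw, suffix=r'\b'), Name.Builtin),
            (r'[a-zA-Z_%$][\w%$]*', Name),
            (r'\s+', Text),
            (r'.', Text),  # operators, punctuation, anything else
        ],
    }

if __name__ == '__main__':
    # '%pi' and 'SCI' come from variables_kw; 'x' falls through to plain Name.
    print(list(ScilabNameDemoLexer().get_tokens('x = %pi + SCI')))
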
diff --git a/pygments/lexers/_sourcemod_builtins.py b/pygments/lexers/_sourcemod_builtins.py
new file mode 100644
index 00000000..9ebb1595
--- /dev/null
+++ b/pygments/lexers/_sourcemod_builtins.py
@@ -0,0 +1,1163 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._sourcemod_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file contains the names of SourceMod functions.
+ It is able to re-generate itself.
+
+ Do not edit the FUNCTIONS list by hand.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+FUNCTIONS = (
+ 'OnEntityCreated',
+ 'OnEntityDestroyed',
+ 'OnGetGameDescription',
+ 'OnLevelInit',
+ 'SDKHook',
+ 'SDKHookEx',
+ 'SDKUnhook',
+ 'SDKHooks_TakeDamage',
+ 'SDKHooks_DropWeapon',
+ 'TopMenuHandler',
+ 'CreateTopMenu',
+ 'LoadTopMenuConfig',
+ 'AddToTopMenu',
+ 'GetTopMenuInfoString',
+ 'GetTopMenuObjName',
+ 'RemoveFromTopMenu',
+ 'DisplayTopMenu',
+ 'DisplayTopMenuCategory',
+ 'FindTopMenuCategory',
+ 'SetTopMenuTitleCaching',
+ 'OnAdminMenuCreated',
+ 'OnAdminMenuReady',
+ 'GetAdminTopMenu',
+ 'AddTargetsToMenu',
+ 'AddTargetsToMenu2',
+ 'RedisplayAdminMenu',
+ 'TEHook',
+ 'AddTempEntHook',
+ 'RemoveTempEntHook',
+ 'TE_Start',
+ 'TE_IsValidProp',
+ 'TE_WriteNum',
+ 'TE_ReadNum',
+ 'TE_WriteFloat',
+ 'TE_ReadFloat',
+ 'TE_WriteVector',
+ 'TE_ReadVector',
+ 'TE_WriteAngles',
+ 'TE_WriteFloatArray',
+ 'TE_Send',
+ 'TE_WriteEncodedEnt',
+ 'TE_SendToAll',
+ 'TE_SendToClient',
+ 'CreateKeyValues',
+ 'KvSetString',
+ 'KvSetNum',
+ 'KvSetUInt64',
+ 'KvSetFloat',
+ 'KvSetColor',
+ 'KvSetVector',
+ 'KvGetString',
+ 'KvGetNum',
+ 'KvGetFloat',
+ 'KvGetColor',
+ 'KvGetUInt64',
+ 'KvGetVector',
+ 'KvJumpToKey',
+ 'KvJumpToKeySymbol',
+ 'KvGotoFirstSubKey',
+ 'KvGotoNextKey',
+ 'KvSavePosition',
+ 'KvDeleteKey',
+ 'KvDeleteThis',
+ 'KvGoBack',
+ 'KvRewind',
+ 'KvGetSectionName',
+ 'KvSetSectionName',
+ 'KvGetDataType',
+ 'KeyValuesToFile',
+ 'FileToKeyValues',
+ 'StringToKeyValues',
+ 'KvSetEscapeSequences',
+ 'KvNodesInStack',
+ 'KvCopySubkeys',
+ 'KvFindKeyById',
+ 'KvGetNameSymbol',
+ 'KvGetSectionSymbol',
+ 'TE_SetupSparks',
+ 'TE_SetupSmoke',
+ 'TE_SetupDust',
+ 'TE_SetupMuzzleFlash',
+ 'TE_SetupMetalSparks',
+ 'TE_SetupEnergySplash',
+ 'TE_SetupArmorRicochet',
+ 'TE_SetupGlowSprite',
+ 'TE_SetupExplosion',
+ 'TE_SetupBloodSprite',
+ 'TE_SetupBeamRingPoint',
+ 'TE_SetupBeamPoints',
+ 'TE_SetupBeamLaser',
+ 'TE_SetupBeamRing',
+ 'TE_SetupBeamFollow',
+ 'HookEvent',
+ 'HookEventEx',
+ 'UnhookEvent',
+ 'CreateEvent',
+ 'FireEvent',
+ 'CancelCreatedEvent',
+ 'GetEventBool',
+ 'SetEventBool',
+ 'GetEventInt',
+ 'SetEventInt',
+ 'GetEventFloat',
+ 'SetEventFloat',
+ 'GetEventString',
+ 'SetEventString',
+ 'GetEventName',
+ 'SetEventBroadcast',
+ 'GetUserMessageType',
+ 'GetUserMessageId',
+ 'GetUserMessageName',
+ 'StartMessage',
+ 'StartMessageEx',
+ 'EndMessage',
+ 'MsgHook',
+ 'MsgPostHook',
+ 'HookUserMessage',
+ 'UnhookUserMessage',
+ 'StartMessageAll',
+ 'StartMessageOne',
+ 'InactivateClient',
+ 'ReconnectClient',
+ 'GetMaxEntities',
+ 'GetEntityCount',
+ 'IsValidEntity',
+ 'IsValidEdict',
+ 'IsEntNetworkable',
+ 'CreateEdict',
+ 'RemoveEdict',
+ 'GetEdictFlags',
+ 'SetEdictFlags',
+ 'GetEdictClassname',
+ 'GetEntityNetClass',
+ 'ChangeEdictState',
+ 'GetEntData',
+ 'SetEntData',
+ 'GetEntDataFloat',
+ 'SetEntDataFloat',
+ 'GetEntDataEnt2',
+ 'SetEntDataEnt2',
+ 'GetEntDataVector',
+ 'SetEntDataVector',
+ 'GetEntDataString',
+ 'SetEntDataString',
+ 'FindSendPropOffs',
+ 'FindSendPropInfo',
+ 'FindDataMapOffs',
+ 'FindDataMapInfo',
+ 'GetEntSendPropOffs',
+ 'GetEntProp',
+ 'SetEntProp',
+ 'GetEntPropFloat',
+ 'SetEntPropFloat',
+ 'GetEntPropEnt',
+ 'SetEntPropEnt',
+ 'GetEntPropVector',
+ 'SetEntPropVector',
+ 'GetEntPropString',
+ 'SetEntPropString',
+ 'GetEntPropArraySize',
+ 'GetEntDataArray',
+ 'SetEntDataArray',
+ 'GetEntityAddress',
+ 'GetEntityClassname',
+ 'float',
+ 'FloatMul',
+ 'FloatDiv',
+ 'FloatAdd',
+ 'FloatSub',
+ 'FloatFraction',
+ 'RoundToZero',
+ 'RoundToCeil',
+ 'RoundToFloor',
+ 'RoundToNearest',
+ 'FloatCompare',
+ 'SquareRoot',
+ 'Pow',
+ 'Exponential',
+ 'Logarithm',
+ 'Sine',
+ 'Cosine',
+ 'Tangent',
+ 'FloatAbs',
+ 'ArcTangent',
+ 'ArcCosine',
+ 'ArcSine',
+ 'ArcTangent2',
+ 'RoundFloat',
+ 'operator%',
+ 'DegToRad',
+ 'RadToDeg',
+ 'GetURandomInt',
+ 'GetURandomFloat',
+ 'SetURandomSeed',
+ 'SetURandomSeedSimple',
+ 'RemovePlayerItem',
+ 'GivePlayerItem',
+ 'GetPlayerWeaponSlot',
+ 'IgniteEntity',
+ 'ExtinguishEntity',
+ 'TeleportEntity',
+ 'ForcePlayerSuicide',
+ 'SlapPlayer',
+ 'FindEntityByClassname',
+ 'GetClientEyeAngles',
+ 'CreateEntityByName',
+ 'DispatchSpawn',
+ 'DispatchKeyValue',
+ 'DispatchKeyValueFloat',
+ 'DispatchKeyValueVector',
+ 'GetClientAimTarget',
+ 'GetTeamCount',
+ 'GetTeamName',
+ 'GetTeamScore',
+ 'SetTeamScore',
+ 'GetTeamClientCount',
+ 'SetEntityModel',
+ 'GetPlayerDecalFile',
+ 'GetPlayerJingleFile',
+ 'GetServerNetStats',
+ 'EquipPlayerWeapon',
+ 'ActivateEntity',
+ 'SetClientInfo',
+ 'GivePlayerAmmo',
+ 'SetClientListeningFlags',
+ 'GetClientListeningFlags',
+ 'SetListenOverride',
+ 'GetListenOverride',
+ 'IsClientMuted',
+ 'TR_GetPointContents',
+ 'TR_GetPointContentsEnt',
+ 'TR_TraceRay',
+ 'TR_TraceHull',
+ 'TR_TraceRayFilter',
+ 'TR_TraceHullFilter',
+ 'TR_TraceRayEx',
+ 'TR_TraceHullEx',
+ 'TR_TraceRayFilterEx',
+ 'TR_TraceHullFilterEx',
+ 'TR_GetFraction',
+ 'TR_GetEndPosition',
+ 'TR_GetEntityIndex',
+ 'TR_DidHit',
+ 'TR_GetHitGroup',
+ 'TR_GetPlaneNormal',
+ 'TR_PointOutsideWorld',
+ 'SortIntegers',
+ 'SortFloats',
+ 'SortStrings',
+ 'SortFunc1D',
+ 'SortCustom1D',
+ 'SortCustom2D',
+ 'SortADTArray',
+ 'SortFuncADTArray',
+ 'SortADTArrayCustom',
+ 'CompileRegex',
+ 'MatchRegex',
+ 'GetRegexSubString',
+ 'SimpleRegexMatch',
+ 'TF2_GetPlayerClass',
+ 'TF2_SetPlayerClass',
+ 'TF2_RemoveWeaponSlot',
+ 'TF2_RemoveAllWeapons',
+ 'TF2_IsPlayerInCondition',
+ 'TF2_GetObjectType',
+ 'TF2_GetObjectMode',
+ 'NominateMap',
+ 'RemoveNominationByMap',
+ 'RemoveNominationByOwner',
+ 'GetExcludeMapList',
+ 'GetNominatedMapList',
+ 'CanMapChooserStartVote',
+ 'InitiateMapChooserVote',
+ 'HasEndOfMapVoteFinished',
+ 'EndOfMapVoteEnabled',
+ 'OnNominationRemoved',
+ 'OnMapVoteStarted',
+ 'CreateTimer',
+ 'KillTimer',
+ 'TriggerTimer',
+ 'GetTickedTime',
+ 'GetMapTimeLeft',
+ 'GetMapTimeLimit',
+ 'ExtendMapTimeLimit',
+ 'GetTickInterval',
+ 'OnMapTimeLeftChanged',
+ 'IsServerProcessing',
+ 'CreateDataTimer',
+ 'ByteCountToCells',
+ 'CreateArray',
+ 'ClearArray',
+ 'CloneArray',
+ 'ResizeArray',
+ 'GetArraySize',
+ 'PushArrayCell',
+ 'PushArrayString',
+ 'PushArrayArray',
+ 'GetArrayCell',
+ 'GetArrayString',
+ 'GetArrayArray',
+ 'SetArrayCell',
+ 'SetArrayString',
+ 'SetArrayArray',
+ 'ShiftArrayUp',
+ 'RemoveFromArray',
+ 'SwapArrayItems',
+ 'FindStringInArray',
+ 'FindValueInArray',
+ 'ProcessTargetString',
+ 'ReplyToTargetError',
+ 'MultiTargetFilter',
+ 'AddMultiTargetFilter',
+ 'RemoveMultiTargetFilter',
+ 'OnBanClient',
+ 'OnBanIdentity',
+ 'OnRemoveBan',
+ 'BanClient',
+ 'BanIdentity',
+ 'RemoveBan',
+ 'CreateTrie',
+ 'SetTrieValue',
+ 'SetTrieArray',
+ 'SetTrieString',
+ 'GetTrieValue',
+ 'GetTrieArray',
+ 'GetTrieString',
+ 'RemoveFromTrie',
+ 'ClearTrie',
+ 'GetTrieSize',
+ 'GetFunctionByName',
+ 'CreateGlobalForward',
+ 'CreateForward',
+ 'GetForwardFunctionCount',
+ 'AddToForward',
+ 'RemoveFromForward',
+ 'RemoveAllFromForward',
+ 'Call_StartForward',
+ 'Call_StartFunction',
+ 'Call_PushCell',
+ 'Call_PushCellRef',
+ 'Call_PushFloat',
+ 'Call_PushFloatRef',
+ 'Call_PushArray',
+ 'Call_PushArrayEx',
+ 'Call_PushString',
+ 'Call_PushStringEx',
+ 'Call_Finish',
+ 'Call_Cancel',
+ 'NativeCall',
+ 'CreateNative',
+ 'ThrowNativeError',
+ 'GetNativeStringLength',
+ 'GetNativeString',
+ 'SetNativeString',
+ 'GetNativeCell',
+ 'GetNativeCellRef',
+ 'SetNativeCellRef',
+ 'GetNativeArray',
+ 'SetNativeArray',
+ 'FormatNativeString',
+ 'RequestFrameCallback',
+ 'RequestFrame',
+ 'OnRebuildAdminCache',
+ 'DumpAdminCache',
+ 'AddCommandOverride',
+ 'GetCommandOverride',
+ 'UnsetCommandOverride',
+ 'CreateAdmGroup',
+ 'FindAdmGroup',
+ 'SetAdmGroupAddFlag',
+ 'GetAdmGroupAddFlag',
+ 'GetAdmGroupAddFlags',
+ 'SetAdmGroupImmuneFrom',
+ 'GetAdmGroupImmuneCount',
+ 'GetAdmGroupImmuneFrom',
+ 'AddAdmGroupCmdOverride',
+ 'GetAdmGroupCmdOverride',
+ 'RegisterAuthIdentType',
+ 'CreateAdmin',
+ 'GetAdminUsername',
+ 'BindAdminIdentity',
+ 'SetAdminFlag',
+ 'GetAdminFlag',
+ 'GetAdminFlags',
+ 'AdminInheritGroup',
+ 'GetAdminGroupCount',
+ 'GetAdminGroup',
+ 'SetAdminPassword',
+ 'GetAdminPassword',
+ 'FindAdminByIdentity',
+ 'RemoveAdmin',
+ 'FlagBitsToBitArray',
+ 'FlagBitArrayToBits',
+ 'FlagArrayToBits',
+ 'FlagBitsToArray',
+ 'FindFlagByName',
+ 'FindFlagByChar',
+ 'FindFlagChar',
+ 'ReadFlagString',
+ 'CanAdminTarget',
+ 'CreateAuthMethod',
+ 'SetAdmGroupImmunityLevel',
+ 'GetAdmGroupImmunityLevel',
+ 'SetAdminImmunityLevel',
+ 'GetAdminImmunityLevel',
+ 'FlagToBit',
+ 'BitToFlag',
+ 'ServerCommand',
+ 'ServerCommandEx',
+ 'InsertServerCommand',
+ 'ServerExecute',
+ 'ClientCommand',
+ 'FakeClientCommand',
+ 'FakeClientCommandEx',
+ 'PrintToServer',
+ 'PrintToConsole',
+ 'ReplyToCommand',
+ 'GetCmdReplySource',
+ 'SetCmdReplySource',
+ 'IsChatTrigger',
+ 'ShowActivity2',
+ 'ShowActivity',
+ 'ShowActivityEx',
+ 'FormatActivitySource',
+ 'SrvCmd',
+ 'RegServerCmd',
+ 'ConCmd',
+ 'RegConsoleCmd',
+ 'RegAdminCmd',
+ 'GetCmdArgs',
+ 'GetCmdArg',
+ 'GetCmdArgString',
+ 'CreateConVar',
+ 'FindConVar',
+ 'ConVarChanged',
+ 'HookConVarChange',
+ 'UnhookConVarChange',
+ 'GetConVarBool',
+ 'SetConVarBool',
+ 'GetConVarInt',
+ 'SetConVarInt',
+ 'GetConVarFloat',
+ 'SetConVarFloat',
+ 'GetConVarString',
+ 'SetConVarString',
+ 'ResetConVar',
+ 'GetConVarDefault',
+ 'GetConVarFlags',
+ 'SetConVarFlags',
+ 'GetConVarBounds',
+ 'SetConVarBounds',
+ 'GetConVarName',
+ 'QueryClientConVar',
+ 'GetCommandIterator',
+ 'ReadCommandIterator',
+ 'CheckCommandAccess',
+ 'CheckAccess',
+ 'IsValidConVarChar',
+ 'GetCommandFlags',
+ 'SetCommandFlags',
+ 'FindFirstConCommand',
+ 'FindNextConCommand',
+ 'SendConVarValue',
+ 'AddServerTag',
+ 'RemoveServerTag',
+ 'CommandListener',
+ 'AddCommandListener',
+ 'RemoveCommandListener',
+ 'CommandExists',
+ 'OnClientSayCommand',
+ 'OnClientSayCommand_Post',
+ 'TF2_IgnitePlayer',
+ 'TF2_RespawnPlayer',
+ 'TF2_RegeneratePlayer',
+ 'TF2_AddCondition',
+ 'TF2_RemoveCondition',
+ 'TF2_SetPlayerPowerPlay',
+ 'TF2_DisguisePlayer',
+ 'TF2_RemovePlayerDisguise',
+ 'TF2_StunPlayer',
+ 'TF2_MakeBleed',
+ 'TF2_GetClass',
+ 'TF2_CalcIsAttackCritical',
+ 'TF2_OnIsHolidayActive',
+ 'TF2_IsHolidayActive',
+ 'TF2_IsPlayerInDuel',
+ 'TF2_RemoveWearable',
+ 'TF2_OnConditionAdded',
+ 'TF2_OnConditionRemoved',
+ 'TF2_OnWaitingForPlayersStart',
+ 'TF2_OnWaitingForPlayersEnd',
+ 'TF2_OnPlayerTeleport',
+ 'SQL_Connect',
+ 'SQL_DefConnect',
+ 'SQL_ConnectCustom',
+ 'SQLite_UseDatabase',
+ 'SQL_CheckConfig',
+ 'SQL_GetDriver',
+ 'SQL_ReadDriver',
+ 'SQL_GetDriverIdent',
+ 'SQL_GetDriverProduct',
+ 'SQL_SetCharset',
+ 'SQL_GetAffectedRows',
+ 'SQL_GetInsertId',
+ 'SQL_GetError',
+ 'SQL_EscapeString',
+ 'SQL_QuoteString',
+ 'SQL_FastQuery',
+ 'SQL_Query',
+ 'SQL_PrepareQuery',
+ 'SQL_FetchMoreResults',
+ 'SQL_HasResultSet',
+ 'SQL_GetRowCount',
+ 'SQL_GetFieldCount',
+ 'SQL_FieldNumToName',
+ 'SQL_FieldNameToNum',
+ 'SQL_FetchRow',
+ 'SQL_MoreRows',
+ 'SQL_Rewind',
+ 'SQL_FetchString',
+ 'SQL_FetchFloat',
+ 'SQL_FetchInt',
+ 'SQL_IsFieldNull',
+ 'SQL_FetchSize',
+ 'SQL_BindParamInt',
+ 'SQL_BindParamFloat',
+ 'SQL_BindParamString',
+ 'SQL_Execute',
+ 'SQL_LockDatabase',
+ 'SQL_UnlockDatabase',
+ 'SQLTCallback',
+ 'SQL_IsSameConnection',
+ 'SQL_TConnect',
+ 'SQL_TQuery',
+ 'SQL_CreateTransaction',
+ 'SQL_AddQuery',
+ 'SQLTxnSuccess',
+ 'SQLTxnFailure',
+ 'SQL_ExecuteTransaction',
+ 'CloseHandle',
+ 'CloneHandle',
+ 'MenuHandler',
+ 'CreateMenu',
+ 'DisplayMenu',
+ 'DisplayMenuAtItem',
+ 'AddMenuItem',
+ 'InsertMenuItem',
+ 'RemoveMenuItem',
+ 'RemoveAllMenuItems',
+ 'GetMenuItem',
+ 'GetMenuSelectionPosition',
+ 'GetMenuItemCount',
+ 'SetMenuPagination',
+ 'GetMenuPagination',
+ 'GetMenuStyle',
+ 'SetMenuTitle',
+ 'GetMenuTitle',
+ 'CreatePanelFromMenu',
+ 'GetMenuExitButton',
+ 'SetMenuExitButton',
+ 'GetMenuExitBackButton',
+ 'SetMenuExitBackButton',
+ 'SetMenuNoVoteButton',
+ 'CancelMenu',
+ 'GetMenuOptionFlags',
+ 'SetMenuOptionFlags',
+ 'IsVoteInProgress',
+ 'CancelVote',
+ 'VoteMenu',
+ 'VoteMenuToAll',
+ 'VoteHandler',
+ 'SetVoteResultCallback',
+ 'CheckVoteDelay',
+ 'IsClientInVotePool',
+ 'RedrawClientVoteMenu',
+ 'GetMenuStyleHandle',
+ 'CreatePanel',
+ 'CreateMenuEx',
+ 'GetClientMenu',
+ 'CancelClientMenu',
+ 'GetMaxPageItems',
+ 'GetPanelStyle',
+ 'SetPanelTitle',
+ 'DrawPanelItem',
+ 'DrawPanelText',
+ 'CanPanelDrawFlags',
+ 'SetPanelKeys',
+ 'SendPanelToClient',
+ 'GetPanelTextRemaining',
+ 'GetPanelCurrentKey',
+ 'SetPanelCurrentKey',
+ 'RedrawMenuItem',
+ 'InternalShowMenu',
+ 'GetMenuVoteInfo',
+ 'IsNewVoteAllowed',
+ 'PrefetchSound',
+ 'EmitAmbientSound',
+ 'FadeClientVolume',
+ 'StopSound',
+ 'EmitSound',
+ 'EmitSentence',
+ 'GetDistGainFromSoundLevel',
+ 'AmbientSHook',
+ 'NormalSHook',
+ 'AddAmbientSoundHook',
+ 'AddNormalSoundHook',
+ 'RemoveAmbientSoundHook',
+ 'RemoveNormalSoundHook',
+ 'EmitSoundToClient',
+ 'EmitSoundToAll',
+ 'ATTN_TO_SNDLEVEL',
+ 'GetGameSoundParams',
+ 'EmitGameSound',
+ 'EmitAmbientGameSound',
+ 'EmitGameSoundToClient',
+ 'EmitGameSoundToAll',
+ 'PrecacheScriptSound',
+ 'strlen',
+ 'StrContains',
+ 'strcmp',
+ 'strncmp',
+ 'StrEqual',
+ 'strcopy',
+ 'Format',
+ 'FormatEx',
+ 'VFormat',
+ 'StringToInt',
+ 'StringToIntEx',
+ 'IntToString',
+ 'StringToFloat',
+ 'StringToFloatEx',
+ 'FloatToString',
+ 'BreakString',
+ 'TrimString',
+ 'SplitString',
+ 'ReplaceString',
+ 'ReplaceStringEx',
+ 'GetCharBytes',
+ 'IsCharAlpha',
+ 'IsCharNumeric',
+ 'IsCharSpace',
+ 'IsCharMB',
+ 'IsCharUpper',
+ 'IsCharLower',
+ 'StripQuotes',
+ 'CharToUpper',
+ 'CharToLower',
+ 'FindCharInString',
+ 'StrCat',
+ 'ExplodeString',
+ 'ImplodeStrings',
+ 'GetVectorLength',
+ 'GetVectorDistance',
+ 'GetVectorDotProduct',
+ 'GetVectorCrossProduct',
+ 'NormalizeVector',
+ 'GetAngleVectors',
+ 'GetVectorAngles',
+ 'GetVectorVectors',
+ 'AddVectors',
+ 'SubtractVectors',
+ 'ScaleVector',
+ 'NegateVector',
+ 'MakeVectorFromPoints',
+ 'BaseComm_IsClientGagged',
+ 'BaseComm_IsClientMuted',
+ 'BaseComm_SetClientGag',
+ 'BaseComm_SetClientMute',
+ 'FormatUserLogText',
+ 'FindPluginByFile',
+ 'FindTarget',
+ 'AcceptEntityInput',
+ 'SetVariantBool',
+ 'SetVariantString',
+ 'SetVariantInt',
+ 'SetVariantFloat',
+ 'SetVariantVector3D',
+ 'SetVariantPosVector3D',
+ 'SetVariantColor',
+ 'SetVariantEntity',
+ 'GameRules_GetProp',
+ 'GameRules_SetProp',
+ 'GameRules_GetPropFloat',
+ 'GameRules_SetPropFloat',
+ 'GameRules_GetPropEnt',
+ 'GameRules_SetPropEnt',
+ 'GameRules_GetPropVector',
+ 'GameRules_SetPropVector',
+ 'GameRules_GetPropString',
+ 'GameRules_SetPropString',
+ 'GameRules_GetRoundState',
+ 'OnClientConnect',
+ 'OnClientConnected',
+ 'OnClientPutInServer',
+ 'OnClientDisconnect',
+ 'OnClientDisconnect_Post',
+ 'OnClientCommand',
+ 'OnClientSettingsChanged',
+ 'OnClientAuthorized',
+ 'OnClientPreAdminCheck',
+ 'OnClientPostAdminFilter',
+ 'OnClientPostAdminCheck',
+ 'GetMaxClients',
+ 'GetMaxHumanPlayers',
+ 'GetClientCount',
+ 'GetClientName',
+ 'GetClientIP',
+ 'GetClientAuthString',
+ 'GetClientAuthId',
+ 'GetSteamAccountID',
+ 'GetClientUserId',
+ 'IsClientConnected',
+ 'IsClientInGame',
+ 'IsClientInKickQueue',
+ 'IsClientAuthorized',
+ 'IsFakeClient',
+ 'IsClientSourceTV',
+ 'IsClientReplay',
+ 'IsClientObserver',
+ 'IsPlayerAlive',
+ 'GetClientInfo',
+ 'GetClientTeam',
+ 'SetUserAdmin',
+ 'GetUserAdmin',
+ 'AddUserFlags',
+ 'RemoveUserFlags',
+ 'SetUserFlagBits',
+ 'GetUserFlagBits',
+ 'CanUserTarget',
+ 'RunAdminCacheChecks',
+ 'NotifyPostAdminCheck',
+ 'CreateFakeClient',
+ 'SetFakeClientConVar',
+ 'GetClientHealth',
+ 'GetClientModel',
+ 'GetClientWeapon',
+ 'GetClientMaxs',
+ 'GetClientMins',
+ 'GetClientAbsAngles',
+ 'GetClientAbsOrigin',
+ 'GetClientArmor',
+ 'GetClientDeaths',
+ 'GetClientFrags',
+ 'GetClientDataRate',
+ 'IsClientTimingOut',
+ 'GetClientTime',
+ 'GetClientLatency',
+ 'GetClientAvgLatency',
+ 'GetClientAvgLoss',
+ 'GetClientAvgChoke',
+ 'GetClientAvgData',
+ 'GetClientAvgPackets',
+ 'GetClientOfUserId',
+ 'KickClient',
+ 'KickClientEx',
+ 'ChangeClientTeam',
+ 'GetClientSerial',
+ 'GetClientFromSerial',
+ 'FindStringTable',
+ 'GetNumStringTables',
+ 'GetStringTableNumStrings',
+ 'GetStringTableMaxStrings',
+ 'GetStringTableName',
+ 'FindStringIndex',
+ 'ReadStringTable',
+ 'GetStringTableDataLength',
+ 'GetStringTableData',
+ 'SetStringTableData',
+ 'AddToStringTable',
+ 'LockStringTables',
+ 'AddFileToDownloadsTable',
+ 'GetEntityFlags',
+ 'SetEntityFlags',
+ 'GetEntityMoveType',
+ 'SetEntityMoveType',
+ 'GetEntityRenderMode',
+ 'SetEntityRenderMode',
+ 'GetEntityRenderFx',
+ 'SetEntityRenderFx',
+ 'SetEntityRenderColor',
+ 'GetEntityGravity',
+ 'SetEntityGravity',
+ 'SetEntityHealth',
+ 'GetClientButtons',
+ 'EntityOutput',
+ 'HookEntityOutput',
+ 'UnhookEntityOutput',
+ 'HookSingleEntityOutput',
+ 'UnhookSingleEntityOutput',
+ 'SMC_CreateParser',
+ 'SMC_ParseFile',
+ 'SMC_GetErrorString',
+ 'SMC_ParseStart',
+ 'SMC_SetParseStart',
+ 'SMC_ParseEnd',
+ 'SMC_SetParseEnd',
+ 'SMC_NewSection',
+ 'SMC_KeyValue',
+ 'SMC_EndSection',
+ 'SMC_SetReaders',
+ 'SMC_RawLine',
+ 'SMC_SetRawLine',
+ 'BfWriteBool',
+ 'BfWriteByte',
+ 'BfWriteChar',
+ 'BfWriteShort',
+ 'BfWriteWord',
+ 'BfWriteNum',
+ 'BfWriteFloat',
+ 'BfWriteString',
+ 'BfWriteEntity',
+ 'BfWriteAngle',
+ 'BfWriteCoord',
+ 'BfWriteVecCoord',
+ 'BfWriteVecNormal',
+ 'BfWriteAngles',
+ 'BfReadBool',
+ 'BfReadByte',
+ 'BfReadChar',
+ 'BfReadShort',
+ 'BfReadWord',
+ 'BfReadNum',
+ 'BfReadFloat',
+ 'BfReadString',
+ 'BfReadEntity',
+ 'BfReadAngle',
+ 'BfReadCoord',
+ 'BfReadVecCoord',
+ 'BfReadVecNormal',
+ 'BfReadAngles',
+ 'BfGetNumBytesLeft',
+ 'CreateProfiler',
+ 'StartProfiling',
+ 'StopProfiling',
+ 'GetProfilerTime',
+ 'OnPluginStart',
+ 'AskPluginLoad2',
+ 'OnPluginEnd',
+ 'OnPluginPauseChange',
+ 'OnGameFrame',
+ 'OnMapStart',
+ 'OnMapEnd',
+ 'OnConfigsExecuted',
+ 'OnAutoConfigsBuffered',
+ 'OnAllPluginsLoaded',
+ 'GetMyHandle',
+ 'GetPluginIterator',
+ 'MorePlugins',
+ 'ReadPlugin',
+ 'GetPluginStatus',
+ 'GetPluginFilename',
+ 'IsPluginDebugging',
+ 'GetPluginInfo',
+ 'FindPluginByNumber',
+ 'SetFailState',
+ 'ThrowError',
+ 'GetTime',
+ 'FormatTime',
+ 'LoadGameConfigFile',
+ 'GameConfGetOffset',
+ 'GameConfGetKeyValue',
+ 'GameConfGetAddress',
+ 'GetSysTickCount',
+ 'AutoExecConfig',
+ 'RegPluginLibrary',
+ 'LibraryExists',
+ 'GetExtensionFileStatus',
+ 'OnLibraryAdded',
+ 'OnLibraryRemoved',
+ 'ReadMapList',
+ 'SetMapListCompatBind',
+ 'OnClientFloodCheck',
+ 'OnClientFloodResult',
+ 'CanTestFeatures',
+ 'GetFeatureStatus',
+ 'RequireFeature',
+ 'LoadFromAddress',
+ 'StoreToAddress',
+ 'CreateStack',
+ 'PushStackCell',
+ 'PushStackString',
+ 'PushStackArray',
+ 'PopStackCell',
+ 'PopStackString',
+ 'PopStackArray',
+ 'IsStackEmpty',
+ 'PopStack',
+ 'OnPlayerRunCmd',
+ 'BuildPath',
+ 'OpenDirectory',
+ 'ReadDirEntry',
+ 'OpenFile',
+ 'DeleteFile',
+ 'ReadFileLine',
+ 'ReadFile',
+ 'ReadFileString',
+ 'WriteFile',
+ 'WriteFileString',
+ 'WriteFileLine',
+ 'ReadFileCell',
+ 'WriteFileCell',
+ 'IsEndOfFile',
+ 'FileSeek',
+ 'FilePosition',
+ 'FileExists',
+ 'RenameFile',
+ 'DirExists',
+ 'FileSize',
+ 'FlushFile',
+ 'RemoveDir',
+ 'CreateDirectory',
+ 'GetFileTime',
+ 'LogToOpenFile',
+ 'LogToOpenFileEx',
+ 'PbReadInt',
+ 'PbReadFloat',
+ 'PbReadBool',
+ 'PbReadString',
+ 'PbReadColor',
+ 'PbReadAngle',
+ 'PbReadVector',
+ 'PbReadVector2D',
+ 'PbGetRepeatedFieldCount',
+ 'PbSetInt',
+ 'PbSetFloat',
+ 'PbSetBool',
+ 'PbSetString',
+ 'PbSetColor',
+ 'PbSetAngle',
+ 'PbSetVector',
+ 'PbSetVector2D',
+ 'PbAddInt',
+ 'PbAddFloat',
+ 'PbAddBool',
+ 'PbAddString',
+ 'PbAddColor',
+ 'PbAddAngle',
+ 'PbAddVector',
+ 'PbAddVector2D',
+ 'PbRemoveRepeatedFieldValue',
+ 'PbReadMessage',
+ 'PbReadRepeatedMessage',
+ 'PbAddMessage',
+ 'SetNextMap',
+ 'GetNextMap',
+ 'ForceChangeLevel',
+ 'GetMapHistorySize',
+ 'GetMapHistory',
+ 'GeoipCode2',
+ 'GeoipCode3',
+ 'GeoipCountry',
+ 'MarkNativeAsOptional',
+ 'RegClientCookie',
+ 'FindClientCookie',
+ 'SetClientCookie',
+ 'GetClientCookie',
+ 'SetAuthIdCookie',
+ 'AreClientCookiesCached',
+ 'OnClientCookiesCached',
+ 'CookieMenuHandler',
+ 'SetCookiePrefabMenu',
+ 'SetCookieMenuItem',
+ 'ShowCookieMenu',
+ 'GetCookieIterator',
+ 'ReadCookieIterator',
+ 'GetCookieAccess',
+ 'GetClientCookieTime',
+ 'LoadTranslations',
+ 'SetGlobalTransTarget',
+ 'GetClientLanguage',
+ 'GetServerLanguage',
+ 'GetLanguageCount',
+ 'GetLanguageInfo',
+ 'SetClientLanguage',
+ 'GetLanguageByCode',
+ 'GetLanguageByName',
+ 'CS_OnBuyCommand',
+ 'CS_OnCSWeaponDrop',
+ 'CS_OnGetWeaponPrice',
+ 'CS_OnTerminateRound',
+ 'CS_RespawnPlayer',
+ 'CS_SwitchTeam',
+ 'CS_DropWeapon',
+ 'CS_TerminateRound',
+ 'CS_GetTranslatedWeaponAlias',
+ 'CS_GetWeaponPrice',
+ 'CS_GetClientClanTag',
+ 'CS_SetClientClanTag',
+ 'CS_GetTeamScore',
+ 'CS_SetTeamScore',
+ 'CS_GetMVPCount',
+ 'CS_SetMVPCount',
+ 'CS_GetClientContributionScore',
+ 'CS_SetClientContributionScore',
+ 'CS_GetClientAssists',
+ 'CS_SetClientAssists',
+ 'CS_AliasToWeaponID',
+ 'CS_WeaponIDToAlias',
+ 'CS_IsValidWeaponID',
+ 'CS_UpdateClientModel',
+ 'LogToGame',
+ 'SetRandomSeed',
+ 'GetRandomFloat',
+ 'GetRandomInt',
+ 'IsMapValid',
+ 'IsDedicatedServer',
+ 'GetEngineTime',
+ 'GetGameTime',
+ 'GetGameTickCount',
+ 'GetGameDescription',
+ 'GetGameFolderName',
+ 'GetCurrentMap',
+ 'PrecacheModel',
+ 'PrecacheSentenceFile',
+ 'PrecacheDecal',
+ 'PrecacheGeneric',
+ 'IsModelPrecached',
+ 'IsDecalPrecached',
+ 'IsGenericPrecached',
+ 'PrecacheSound',
+ 'IsSoundPrecached',
+ 'CreateDialog',
+ 'GetEngineVersion',
+ 'PrintToChat',
+ 'PrintToChatAll',
+ 'PrintCenterText',
+ 'PrintCenterTextAll',
+ 'PrintHintText',
+ 'PrintHintTextToAll',
+ 'ShowVGUIPanel',
+ 'CreateHudSynchronizer',
+ 'SetHudTextParams',
+ 'SetHudTextParamsEx',
+ 'ShowSyncHudText',
+ 'ClearSyncHud',
+ 'ShowHudText',
+ 'ShowMOTDPanel',
+ 'DisplayAskConnectBox',
+ 'EntIndexToEntRef',
+ 'EntRefToEntIndex',
+ 'MakeCompatEntRef',
+ 'SetClientViewEntity',
+ 'SetLightStyle',
+ 'GetClientEyePosition',
+ 'CreateDataPack',
+ 'WritePackCell',
+ 'WritePackFloat',
+ 'WritePackString',
+ 'ReadPackCell',
+ 'ReadPackFloat',
+ 'ReadPackString',
+ 'ResetPack',
+ 'GetPackPosition',
+ 'SetPackPosition',
+ 'IsPackReadable',
+ 'LogMessage',
+ 'LogToFile',
+ 'LogToFileEx',
+ 'LogAction',
+ 'LogError',
+ 'OnLogAction',
+ 'GameLogHook',
+ 'AddGameLogHook',
+ 'RemoveGameLogHook',
+ 'FindTeamByName',
+ 'StartPrepSDKCall',
+ 'PrepSDKCall_SetVirtual',
+ 'PrepSDKCall_SetSignature',
+ 'PrepSDKCall_SetAddress',
+ 'PrepSDKCall_SetFromConf',
+ 'PrepSDKCall_SetReturnInfo',
+ 'PrepSDKCall_AddParameter',
+ 'EndPrepSDKCall',
+ 'SDKCall',
+ 'GetPlayerResourceEntity',
+)
+
+
+if __name__ == '__main__': # pragma: no cover
+ import re
+ import sys
+ try:
+ from urllib import FancyURLopener
+ except ImportError:
+ from urllib.request import FancyURLopener
+
+ from pygments.util import format_lines
+
+ # urllib ends up wanting to import a module called 'math' -- if
+ # pygments/lexers is in the path, this ends badly.
+ for i in range(len(sys.path)-1, -1, -1):
+ if sys.path[i].endswith('/lexers'):
+ del sys.path[i]
+
+ class Opener(FancyURLopener):
+ version = 'Mozilla/5.0 (Pygments Sourcemod Builtins Update)'
+
+ opener = Opener()
+
+ def get_version():
+ f = opener.open('http://docs.sourcemod.net/api/index.php')
+ r = re.compile(r'SourceMod v\.<b>([\d\.]+(?:-\w+)?)</td>')
+ for line in f:
+ m = r.search(line)
+ if m is not None:
+ return m.groups()[0]
+ raise ValueError('No version in api docs')
+
+ def get_sm_functions():
+ f = opener.open('http://docs.sourcemod.net/api/SMfuncs.js')
+ r = re.compile(r'SMfunctions\[\d+\] = Array \("(?:public )?([^,]+)",".+"\);')
+ functions = []
+ for line in f:
+ m = r.match(line)
+ if m is not None:
+ functions.append(m.groups()[0])
+ return functions
+
+ def regenerate(filename, natives):
+ with open(filename) as fp:
+ content = fp.read()
+
+ header = content[:content.find('FUNCTIONS = (')]
+ footer = content[content.find("if __name__ == '__main__':")-1:]
+
+
+ with open(filename, 'w') as fp:
+ fp.write(header)
+ fp.write(format_lines('FUNCTIONS', natives))
+ fp.write(footer)
+
+ def run():
+ version = get_version()
+ print('> Downloading function index for SourceMod %s' % version)
+ functions = get_sm_functions()
+ print('> %d functions found:' % len(functions))
+
+ functionlist = []
+ for full_function_name in functions:
+ print('>> %s' % full_function_name)
+ functionlist.append(full_function_name)
+
+ regenerate(__file__, functionlist)
+
+
+ run()
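
The new `__main__` helper mirrors the one it replaces (see the deleted _sourcemodbuiltins.py below) but routes downloads through a FancyURLopener subclass with a custom User-Agent, writes the result with `format_lines` instead of `pprint`, and runs on both Python 2 and 3 thanks to the print_function import and the urllib/urllib.request fallback. The heart of it is the regex in `get_sm_functions()`; the snippet below exercises that regex on an illustrative line shaped the way the scraper expects, not a verbatim excerpt of SMfuncs.js.

# Stand-alone check of the scraping regex used by get_sm_functions() above.
# The sample line is hypothetical -- it only mimics the JavaScript shape the
# regex targets; it is not copied from docs.sourcemod.net.
import re

SM_FUNC_RE = re.compile(
    r'SMfunctions\[\d+\] = Array \("(?:public )?([^,]+)",".+"\);')

sample = 'SMfunctions[42] = Array ("public SDKHooks_TakeDamage","sdkhooks.inc");'
m = SM_FUNC_RE.match(sample)
print(m.group(1) if m else 'no match')   # prints: SDKHooks_TakeDamage
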
diff --git a/pygments/lexers/_sourcemodbuiltins.py b/pygments/lexers/_sourcemodbuiltins.py
deleted file mode 100644
index 0f6b4770..00000000
--- a/pygments/lexers/_sourcemodbuiltins.py
+++ /dev/null
@@ -1,1072 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._sourcemodbuiltins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file contains the names of SourceMod functions.
- It is able to re-generate itself.
-
- Do not edit the FUNCTIONS list by hand.
-
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-FUNCTIONS = ['TopMenuHandler',
- 'CreateTopMenu',
- 'LoadTopMenuConfig',
- 'AddToTopMenu',
- 'GetTopMenuInfoString',
- 'GetTopMenuObjName',
- 'RemoveFromTopMenu',
- 'DisplayTopMenu',
- 'FindTopMenuCategory',
- 'OnAdminMenuCreated',
- 'OnAdminMenuReady',
- 'GetAdminTopMenu',
- 'AddTargetsToMenu',
- 'AddTargetsToMenu2',
- 'RedisplayAdminMenu',
- 'TEHook',
- 'AddTempEntHook',
- 'RemoveTempEntHook',
- 'TE_Start',
- 'TE_IsValidProp',
- 'TE_WriteNum',
- 'TE_ReadNum',
- 'TE_WriteFloat',
- 'TE_ReadFloat',
- 'TE_WriteVector',
- 'TE_ReadVector',
- 'TE_WriteAngles',
- 'TE_WriteFloatArray',
- 'TE_Send',
- 'TE_WriteEncodedEnt',
- 'TE_SendToAll',
- 'TE_SendToClient',
- 'CreateKeyValues',
- 'KvSetString',
- 'KvSetNum',
- 'KvSetUInt64',
- 'KvSetFloat',
- 'KvSetColor',
- 'KvSetVector',
- 'KvGetString',
- 'KvGetNum',
- 'KvGetFloat',
- 'KvGetColor',
- 'KvGetUInt64',
- 'KvGetVector',
- 'KvJumpToKey',
- 'KvJumpToKeySymbol',
- 'KvGotoFirstSubKey',
- 'KvGotoNextKey',
- 'KvSavePosition',
- 'KvDeleteKey',
- 'KvDeleteThis',
- 'KvGoBack',
- 'KvRewind',
- 'KvGetSectionName',
- 'KvSetSectionName',
- 'KvGetDataType',
- 'KeyValuesToFile',
- 'FileToKeyValues',
- 'KvSetEscapeSequences',
- 'KvNodesInStack',
- 'KvCopySubkeys',
- 'KvFindKeyById',
- 'KvGetNameSymbol',
- 'KvGetSectionSymbol',
- 'TE_SetupSparks',
- 'TE_SetupSmoke',
- 'TE_SetupDust',
- 'TE_SetupMuzzleFlash',
- 'TE_SetupMetalSparks',
- 'TE_SetupEnergySplash',
- 'TE_SetupArmorRicochet',
- 'TE_SetupGlowSprite',
- 'TE_SetupExplosion',
- 'TE_SetupBloodSprite',
- 'TE_SetupBeamRingPoint',
- 'TE_SetupBeamPoints',
- 'TE_SetupBeamLaser',
- 'TE_SetupBeamRing',
- 'TE_SetupBeamFollow',
- 'HookEvent',
- 'HookEventEx',
- 'UnhookEvent',
- 'CreateEvent',
- 'FireEvent',
- 'CancelCreatedEvent',
- 'GetEventBool',
- 'SetEventBool',
- 'GetEventInt',
- 'SetEventInt',
- 'GetEventFloat',
- 'SetEventFloat',
- 'GetEventString',
- 'SetEventString',
- 'GetEventName',
- 'SetEventBroadcast',
- 'GetUserMessageId',
- 'GetUserMessageName',
- 'StartMessage',
- 'StartMessageEx',
- 'EndMessage',
- 'MsgHook',
- 'MsgPostHook',
- 'HookUserMessage',
- 'UnhookUserMessage',
- 'StartMessageAll',
- 'StartMessageOne',
- 'InactivateClient',
- 'ReconnectClient',
- 'GetMaxEntities',
- 'GetEntityCount',
- 'IsValidEntity',
- 'IsValidEdict',
- 'IsEntNetworkable',
- 'CreateEdict',
- 'RemoveEdict',
- 'GetEdictFlags',
- 'SetEdictFlags',
- 'GetEdictClassname',
- 'GetEntityNetClass',
- 'ChangeEdictState',
- 'GetEntData',
- 'SetEntData',
- 'GetEntDataFloat',
- 'SetEntDataFloat',
- 'GetEntDataEnt2',
- 'SetEntDataEnt2',
- 'GetEntDataVector',
- 'SetEntDataVector',
- 'GetEntDataString',
- 'SetEntDataString',
- 'FindSendPropOffs',
- 'FindSendPropInfo',
- 'FindDataMapOffs',
- 'GetEntSendPropOffs',
- 'GetEntProp',
- 'SetEntProp',
- 'GetEntPropFloat',
- 'SetEntPropFloat',
- 'GetEntPropEnt',
- 'SetEntPropEnt',
- 'GetEntPropVector',
- 'SetEntPropVector',
- 'GetEntPropString',
- 'SetEntPropString',
- 'GetEntPropArraySize',
- 'GetEntDataArray',
- 'SetEntDataArray',
- 'GetEntityClassname',
- 'float',
- 'FloatMul',
- 'FloatDiv',
- 'FloatAdd',
- 'FloatSub',
- 'FloatFraction',
- 'RoundToZero',
- 'RoundToCeil',
- 'RoundToFloor',
- 'RoundToNearest',
- 'FloatCompare',
- 'SquareRoot',
- 'Pow',
- 'Exponential',
- 'Logarithm',
- 'Sine',
- 'Cosine',
- 'Tangent',
- 'FloatAbs',
- 'ArcTangent',
- 'ArcCosine',
- 'ArcSine',
- 'ArcTangent2',
- 'RoundFloat',
- 'operator%',
- 'DegToRad',
- 'RadToDeg',
- 'GetURandomInt',
- 'GetURandomFloat',
- 'SetURandomSeed',
- 'SetURandomSeedSimple',
- 'RemovePlayerItem',
- 'GivePlayerItem',
- 'GetPlayerWeaponSlot',
- 'IgniteEntity',
- 'ExtinguishEntity',
- 'TeleportEntity',
- 'ForcePlayerSuicide',
- 'SlapPlayer',
- 'FindEntityByClassname',
- 'GetClientEyeAngles',
- 'CreateEntityByName',
- 'DispatchSpawn',
- 'DispatchKeyValue',
- 'DispatchKeyValueFloat',
- 'DispatchKeyValueVector',
- 'GetClientAimTarget',
- 'GetTeamCount',
- 'GetTeamName',
- 'GetTeamScore',
- 'SetTeamScore',
- 'GetTeamClientCount',
- 'SetEntityModel',
- 'GetPlayerDecalFile',
- 'GetServerNetStats',
- 'EquipPlayerWeapon',
- 'ActivateEntity',
- 'SetClientInfo',
- 'SetClientListeningFlags',
- 'GetClientListeningFlags',
- 'SetListenOverride',
- 'GetListenOverride',
- 'IsClientMuted',
- 'TR_GetPointContents',
- 'TR_GetPointContentsEnt',
- 'TR_TraceRay',
- 'TR_TraceHull',
- 'TR_TraceRayFilter',
- 'TR_TraceHullFilter',
- 'TR_TraceRayEx',
- 'TR_TraceHullEx',
- 'TR_TraceRayFilterEx',
- 'TR_TraceHullFilterEx',
- 'TR_GetFraction',
- 'TR_GetEndPosition',
- 'TR_GetEntityIndex',
- 'TR_DidHit',
- 'TR_GetHitGroup',
- 'TR_GetPlaneNormal',
- 'TR_PointOutsideWorld',
- 'SortIntegers',
- 'SortFloats',
- 'SortStrings',
- 'SortFunc1D',
- 'SortCustom1D',
- 'SortCustom2D',
- 'SortADTArray',
- 'SortFuncADTArray',
- 'SortADTArrayCustom',
- 'CompileRegex',
- 'MatchRegex',
- 'GetRegexSubString',
- 'SimpleRegexMatch',
- 'TF2_GetPlayerClass',
- 'TF2_SetPlayerClass',
- 'TF2_GetPlayerResourceData',
- 'TF2_SetPlayerResourceData',
- 'TF2_RemoveWeaponSlot',
- 'TF2_RemoveAllWeapons',
- 'TF2_IsPlayerInCondition',
- 'TF2_GetObjectType',
- 'TF2_GetObjectMode',
- 'NominateMap',
- 'RemoveNominationByMap',
- 'RemoveNominationByOwner',
- 'GetExcludeMapList',
- 'GetNominatedMapList',
- 'CanMapChooserStartVote',
- 'InitiateMapChooserVote',
- 'HasEndOfMapVoteFinished',
- 'EndOfMapVoteEnabled',
- 'OnNominationRemoved',
- 'OnMapVoteStarted',
- 'CreateTimer',
- 'KillTimer',
- 'TriggerTimer',
- 'GetTickedTime',
- 'GetMapTimeLeft',
- 'GetMapTimeLimit',
- 'ExtendMapTimeLimit',
- 'GetTickInterval',
- 'OnMapTimeLeftChanged',
- 'IsServerProcessing',
- 'CreateDataTimer',
- 'ByteCountToCells',
- 'CreateArray',
- 'ClearArray',
- 'CloneArray',
- 'ResizeArray',
- 'GetArraySize',
- 'PushArrayCell',
- 'PushArrayString',
- 'PushArrayArray',
- 'GetArrayCell',
- 'GetArrayString',
- 'GetArrayArray',
- 'SetArrayCell',
- 'SetArrayString',
- 'SetArrayArray',
- 'ShiftArrayUp',
- 'RemoveFromArray',
- 'SwapArrayItems',
- 'FindStringInArray',
- 'FindValueInArray',
- 'ProcessTargetString',
- 'ReplyToTargetError',
- 'MultiTargetFilter',
- 'AddMultiTargetFilter',
- 'RemoveMultiTargetFilter',
- 'OnBanClient',
- 'OnBanIdentity',
- 'OnRemoveBan',
- 'BanClient',
- 'BanIdentity',
- 'RemoveBan',
- 'CreateTrie',
- 'SetTrieValue',
- 'SetTrieArray',
- 'SetTrieString',
- 'GetTrieValue',
- 'GetTrieArray',
- 'GetTrieString',
- 'RemoveFromTrie',
- 'ClearTrie',
- 'GetTrieSize',
- 'GetFunctionByName',
- 'CreateGlobalForward',
- 'CreateForward',
- 'GetForwardFunctionCount',
- 'AddToForward',
- 'RemoveFromForward',
- 'RemoveAllFromForward',
- 'Call_StartForward',
- 'Call_StartFunction',
- 'Call_PushCell',
- 'Call_PushCellRef',
- 'Call_PushFloat',
- 'Call_PushFloatRef',
- 'Call_PushArray',
- 'Call_PushArrayEx',
- 'Call_PushString',
- 'Call_PushStringEx',
- 'Call_Finish',
- 'Call_Cancel',
- 'NativeCall',
- 'CreateNative',
- 'ThrowNativeError',
- 'GetNativeStringLength',
- 'GetNativeString',
- 'SetNativeString',
- 'GetNativeCell',
- 'GetNativeCellRef',
- 'SetNativeCellRef',
- 'GetNativeArray',
- 'SetNativeArray',
- 'FormatNativeString',
- 'OnRebuildAdminCache',
- 'DumpAdminCache',
- 'AddCommandOverride',
- 'GetCommandOverride',
- 'UnsetCommandOverride',
- 'CreateAdmGroup',
- 'FindAdmGroup',
- 'SetAdmGroupAddFlag',
- 'GetAdmGroupAddFlag',
- 'GetAdmGroupAddFlags',
- 'SetAdmGroupImmuneFrom',
- 'GetAdmGroupImmuneCount',
- 'GetAdmGroupImmuneFrom',
- 'AddAdmGroupCmdOverride',
- 'GetAdmGroupCmdOverride',
- 'RegisterAuthIdentType',
- 'CreateAdmin',
- 'GetAdminUsername',
- 'BindAdminIdentity',
- 'SetAdminFlag',
- 'GetAdminFlag',
- 'GetAdminFlags',
- 'AdminInheritGroup',
- 'GetAdminGroupCount',
- 'GetAdminGroup',
- 'SetAdminPassword',
- 'GetAdminPassword',
- 'FindAdminByIdentity',
- 'RemoveAdmin',
- 'FlagBitsToBitArray',
- 'FlagBitArrayToBits',
- 'FlagArrayToBits',
- 'FlagBitsToArray',
- 'FindFlagByName',
- 'FindFlagByChar',
- 'FindFlagChar',
- 'ReadFlagString',
- 'CanAdminTarget',
- 'CreateAuthMethod',
- 'SetAdmGroupImmunityLevel',
- 'GetAdmGroupImmunityLevel',
- 'SetAdminImmunityLevel',
- 'GetAdminImmunityLevel',
- 'FlagToBit',
- 'BitToFlag',
- 'ServerCommand',
- 'ServerCommandEx',
- 'InsertServerCommand',
- 'ServerExecute',
- 'ClientCommand',
- 'FakeClientCommand',
- 'FakeClientCommandEx',
- 'PrintToServer',
- 'PrintToConsole',
- 'ReplyToCommand',
- 'GetCmdReplySource',
- 'SetCmdReplySource',
- 'IsChatTrigger',
- 'ShowActivity2',
- 'ShowActivity',
- 'ShowActivityEx',
- 'FormatActivitySource',
- 'SrvCmd',
- 'RegServerCmd',
- 'ConCmd',
- 'RegConsoleCmd',
- 'RegAdminCmd',
- 'GetCmdArgs',
- 'GetCmdArg',
- 'GetCmdArgString',
- 'CreateConVar',
- 'FindConVar',
- 'ConVarChanged',
- 'HookConVarChange',
- 'UnhookConVarChange',
- 'GetConVarBool',
- 'SetConVarBool',
- 'GetConVarInt',
- 'SetConVarInt',
- 'GetConVarFloat',
- 'SetConVarFloat',
- 'GetConVarString',
- 'SetConVarString',
- 'ResetConVar',
- 'GetConVarDefault',
- 'GetConVarFlags',
- 'SetConVarFlags',
- 'GetConVarBounds',
- 'SetConVarBounds',
- 'GetConVarName',
- 'QueryClientConVar',
- 'GetCommandIterator',
- 'ReadCommandIterator',
- 'CheckCommandAccess',
- 'CheckAccess',
- 'IsValidConVarChar',
- 'GetCommandFlags',
- 'SetCommandFlags',
- 'FindFirstConCommand',
- 'FindNextConCommand',
- 'SendConVarValue',
- 'AddServerTag',
- 'RemoveServerTag',
- 'CommandListener',
- 'AddCommandListener',
- 'RemoveCommandListener',
- 'TF2_IgnitePlayer',
- 'TF2_RespawnPlayer',
- 'TF2_RegeneratePlayer',
- 'TF2_AddCondition',
- 'TF2_RemoveCondition',
- 'TF2_SetPlayerPowerPlay',
- 'TF2_DisguisePlayer',
- 'TF2_RemovePlayerDisguise',
- 'TF2_StunPlayer',
- 'TF2_MakeBleed',
- 'TF2_GetResourceEntity',
- 'TF2_GetClass',
- 'TF2_CalcIsAttackCritical',
- 'TF2_OnIsHolidayActive',
- 'TF2_IsPlayerInDuel',
- 'TF2_OnConditionAdded',
- 'TF2_OnConditionRemoved',
- 'TF2_OnWaitingForPlayersStart',
- 'TF2_OnWaitingForPlayersEnd',
- 'SQL_Connect',
- 'SQL_DefConnect',
- 'SQL_ConnectCustom',
- 'SQLite_UseDatabase',
- 'SQL_CheckConfig',
- 'SQL_GetDriver',
- 'SQL_ReadDriver',
- 'SQL_GetDriverIdent',
- 'SQL_GetDriverProduct',
- 'SQL_GetAffectedRows',
- 'SQL_GetInsertId',
- 'SQL_GetError',
- 'SQL_EscapeString',
- 'SQL_QuoteString',
- 'SQL_FastQuery',
- 'SQL_Query',
- 'SQL_PrepareQuery',
- 'SQL_FetchMoreResults',
- 'SQL_HasResultSet',
- 'SQL_GetRowCount',
- 'SQL_GetFieldCount',
- 'SQL_FieldNumToName',
- 'SQL_FieldNameToNum',
- 'SQL_FetchRow',
- 'SQL_MoreRows',
- 'SQL_Rewind',
- 'SQL_FetchString',
- 'SQL_FetchFloat',
- 'SQL_FetchInt',
- 'SQL_IsFieldNull',
- 'SQL_FetchSize',
- 'SQL_BindParamInt',
- 'SQL_BindParamFloat',
- 'SQL_BindParamString',
- 'SQL_Execute',
- 'SQL_LockDatabase',
- 'SQL_UnlockDatabase',
- 'SQLTCallback',
- 'SQL_IsSameConnection',
- 'SQL_TConnect',
- 'SQL_TQuery',
- 'CloseHandle',
- 'CloneHandle',
- 'MenuHandler',
- 'CreateMenu',
- 'DisplayMenu',
- 'DisplayMenuAtItem',
- 'AddMenuItem',
- 'InsertMenuItem',
- 'RemoveMenuItem',
- 'RemoveAllMenuItems',
- 'GetMenuItem',
- 'GetMenuSelectionPosition',
- 'GetMenuItemCount',
- 'SetMenuPagination',
- 'GetMenuPagination',
- 'GetMenuStyle',
- 'SetMenuTitle',
- 'GetMenuTitle',
- 'CreatePanelFromMenu',
- 'GetMenuExitButton',
- 'SetMenuExitButton',
- 'GetMenuExitBackButton',
- 'SetMenuExitBackButton',
- 'SetMenuNoVoteButton',
- 'CancelMenu',
- 'GetMenuOptionFlags',
- 'SetMenuOptionFlags',
- 'IsVoteInProgress',
- 'CancelVote',
- 'VoteMenu',
- 'VoteMenuToAll',
- 'VoteHandler',
- 'SetVoteResultCallback',
- 'CheckVoteDelay',
- 'IsClientInVotePool',
- 'RedrawClientVoteMenu',
- 'GetMenuStyleHandle',
- 'CreatePanel',
- 'CreateMenuEx',
- 'GetClientMenu',
- 'CancelClientMenu',
- 'GetMaxPageItems',
- 'GetPanelStyle',
- 'SetPanelTitle',
- 'DrawPanelItem',
- 'DrawPanelText',
- 'CanPanelDrawFlags',
- 'SetPanelKeys',
- 'SendPanelToClient',
- 'GetPanelTextRemaining',
- 'GetPanelCurrentKey',
- 'SetPanelCurrentKey',
- 'RedrawMenuItem',
- 'InternalShowMenu',
- 'GetMenuVoteInfo',
- 'IsNewVoteAllowed',
- 'PrefetchSound',
- 'EmitAmbientSound',
- 'FadeClientVolume',
- 'StopSound',
- 'EmitSound',
- 'EmitSentence',
- 'GetDistGainFromSoundLevel',
- 'AmbientSHook',
- 'NormalSHook',
- 'AddAmbientSoundHook',
- 'AddNormalSoundHook',
- 'RemoveAmbientSoundHook',
- 'RemoveNormalSoundHook',
- 'EmitSoundToClient',
- 'EmitSoundToAll',
- 'ATTN_TO_SNDLEVEL',
- 'strlen',
- 'StrContains',
- 'strcmp',
- 'strncmp',
- 'StrEqual',
- 'strcopy',
- 'Format',
- 'FormatEx',
- 'VFormat',
- 'StringToInt',
- 'StringToIntEx',
- 'IntToString',
- 'StringToFloat',
- 'StringToFloatEx',
- 'FloatToString',
- 'BreakString',
- 'TrimString',
- 'SplitString',
- 'ReplaceString',
- 'ReplaceStringEx',
- 'GetCharBytes',
- 'IsCharAlpha',
- 'IsCharNumeric',
- 'IsCharSpace',
- 'IsCharMB',
- 'IsCharUpper',
- 'IsCharLower',
- 'StripQuotes',
- 'CharToUpper',
- 'CharToLower',
- 'FindCharInString',
- 'StrCat',
- 'ExplodeString',
- 'ImplodeStrings',
- 'GetVectorLength',
- 'GetVectorDistance',
- 'GetVectorDotProduct',
- 'GetVectorCrossProduct',
- 'NormalizeVector',
- 'GetAngleVectors',
- 'GetVectorAngles',
- 'GetVectorVectors',
- 'AddVectors',
- 'SubtractVectors',
- 'ScaleVector',
- 'NegateVector',
- 'MakeVectorFromPoints',
- 'BaseComm_IsClientGagged',
- 'BaseComm_IsClientMuted',
- 'BaseComm_SetClientGag',
- 'BaseComm_SetClientMute',
- 'FormatUserLogText',
- 'FindPluginByFile',
- 'FindTarget',
- 'AcceptEntityInput',
- 'SetVariantBool',
- 'SetVariantString',
- 'SetVariantInt',
- 'SetVariantFloat',
- 'SetVariantVector3D',
- 'SetVariantPosVector3D',
- 'SetVariantColor',
- 'SetVariantEntity',
- 'GameRules_GetProp',
- 'GameRules_SetProp',
- 'GameRules_GetPropFloat',
- 'GameRules_SetPropFloat',
- 'GameRules_GetPropEnt',
- 'GameRules_SetPropEnt',
- 'GameRules_GetPropVector',
- 'GameRules_SetPropVector',
- 'GameRules_GetPropString',
- 'GameRules_SetPropString',
- 'GameRules_GetRoundState',
- 'OnClientConnect',
- 'OnClientConnected',
- 'OnClientPutInServer',
- 'OnClientDisconnect',
- 'OnClientDisconnect_Post',
- 'OnClientCommand',
- 'OnClientSettingsChanged',
- 'OnClientAuthorized',
- 'OnClientPreAdminCheck',
- 'OnClientPostAdminFilter',
- 'OnClientPostAdminCheck',
- 'GetMaxClients',
- 'GetClientCount',
- 'GetClientName',
- 'GetClientIP',
- 'GetClientAuthString',
- 'GetClientUserId',
- 'IsClientConnected',
- 'IsClientInGame',
- 'IsClientInKickQueue',
- 'IsClientAuthorized',
- 'IsFakeClient',
- 'IsClientSourceTV',
- 'IsClientReplay',
- 'IsClientObserver',
- 'IsPlayerAlive',
- 'GetClientInfo',
- 'GetClientTeam',
- 'SetUserAdmin',
- 'GetUserAdmin',
- 'AddUserFlags',
- 'RemoveUserFlags',
- 'SetUserFlagBits',
- 'GetUserFlagBits',
- 'CanUserTarget',
- 'RunAdminCacheChecks',
- 'NotifyPostAdminCheck',
- 'CreateFakeClient',
- 'SetFakeClientConVar',
- 'GetClientHealth',
- 'GetClientModel',
- 'GetClientWeapon',
- 'GetClientMaxs',
- 'GetClientMins',
- 'GetClientAbsAngles',
- 'GetClientAbsOrigin',
- 'GetClientArmor',
- 'GetClientDeaths',
- 'GetClientFrags',
- 'GetClientDataRate',
- 'IsClientTimingOut',
- 'GetClientTime',
- 'GetClientLatency',
- 'GetClientAvgLatency',
- 'GetClientAvgLoss',
- 'GetClientAvgChoke',
- 'GetClientAvgData',
- 'GetClientAvgPackets',
- 'GetClientOfUserId',
- 'KickClient',
- 'KickClientEx',
- 'ChangeClientTeam',
- 'GetClientSerial',
- 'GetClientFromSerial',
- 'FindStringTable',
- 'GetNumStringTables',
- 'GetStringTableNumStrings',
- 'GetStringTableMaxStrings',
- 'GetStringTableName',
- 'FindStringIndex',
- 'ReadStringTable',
- 'GetStringTableDataLength',
- 'GetStringTableData',
- 'SetStringTableData',
- 'AddToStringTable',
- 'LockStringTables',
- 'AddFileToDownloadsTable',
- 'GetEntityFlags',
- 'SetEntityFlags',
- 'GetEntityMoveType',
- 'SetEntityMoveType',
- 'GetEntityRenderMode',
- 'SetEntityRenderMode',
- 'GetEntityRenderFx',
- 'SetEntityRenderFx',
- 'SetEntityRenderColor',
- 'GetEntityGravity',
- 'SetEntityGravity',
- 'SetEntityHealth',
- 'GetClientButtons',
- 'EntityOutput',
- 'HookEntityOutput',
- 'UnhookEntityOutput',
- 'HookSingleEntityOutput',
- 'UnhookSingleEntityOutput',
- 'SMC_CreateParser',
- 'SMC_ParseFile',
- 'SMC_GetErrorString',
- 'SMC_ParseStart',
- 'SMC_SetParseStart',
- 'SMC_ParseEnd',
- 'SMC_SetParseEnd',
- 'SMC_NewSection',
- 'SMC_KeyValue',
- 'SMC_EndSection',
- 'SMC_SetReaders',
- 'SMC_RawLine',
- 'SMC_SetRawLine',
- 'BfWriteBool',
- 'BfWriteByte',
- 'BfWriteChar',
- 'BfWriteShort',
- 'BfWriteWord',
- 'BfWriteNum',
- 'BfWriteFloat',
- 'BfWriteString',
- 'BfWriteEntity',
- 'BfWriteAngle',
- 'BfWriteCoord',
- 'BfWriteVecCoord',
- 'BfWriteVecNormal',
- 'BfWriteAngles',
- 'BfReadBool',
- 'BfReadByte',
- 'BfReadChar',
- 'BfReadShort',
- 'BfReadWord',
- 'BfReadNum',
- 'BfReadFloat',
- 'BfReadString',
- 'BfReadEntity',
- 'BfReadAngle',
- 'BfReadCoord',
- 'BfReadVecCoord',
- 'BfReadVecNormal',
- 'BfReadAngles',
- 'BfGetNumBytesLeft',
- 'CreateProfiler',
- 'StartProfiling',
- 'StopProfiling',
- 'GetProfilerTime',
- 'OnPluginStart',
- 'AskPluginLoad2',
- 'OnPluginEnd',
- 'OnPluginPauseChange',
- 'OnGameFrame',
- 'OnMapStart',
- 'OnMapEnd',
- 'OnConfigsExecuted',
- 'OnAutoConfigsBuffered',
- 'OnAllPluginsLoaded',
- 'GetMyHandle',
- 'GetPluginIterator',
- 'MorePlugins',
- 'ReadPlugin',
- 'GetPluginStatus',
- 'GetPluginFilename',
- 'IsPluginDebugging',
- 'GetPluginInfo',
- 'FindPluginByNumber',
- 'SetFailState',
- 'ThrowError',
- 'GetTime',
- 'FormatTime',
- 'LoadGameConfigFile',
- 'GameConfGetOffset',
- 'GameConfGetKeyValue',
- 'GetSysTickCount',
- 'AutoExecConfig',
- 'RegPluginLibrary',
- 'LibraryExists',
- 'GetExtensionFileStatus',
- 'OnLibraryAdded',
- 'OnLibraryRemoved',
- 'ReadMapList',
- 'SetMapListCompatBind',
- 'OnClientFloodCheck',
- 'OnClientFloodResult',
- 'CanTestFeatures',
- 'GetFeatureStatus',
- 'RequireFeature',
- 'LoadFromAddress',
- 'StoreToAddress',
- 'CreateStack',
- 'PushStackCell',
- 'PushStackString',
- 'PushStackArray',
- 'PopStackCell',
- 'PopStackString',
- 'PopStackArray',
- 'IsStackEmpty',
- 'PopStack',
- 'OnPlayerRunCmd',
- 'BuildPath',
- 'OpenDirectory',
- 'ReadDirEntry',
- 'OpenFile',
- 'DeleteFile',
- 'ReadFileLine',
- 'ReadFile',
- 'ReadFileString',
- 'WriteFile',
- 'WriteFileString',
- 'WriteFileLine',
- 'ReadFileCell',
- 'WriteFileCell',
- 'IsEndOfFile',
- 'FileSeek',
- 'FilePosition',
- 'FileExists',
- 'RenameFile',
- 'DirExists',
- 'FileSize',
- 'FlushFile',
- 'RemoveDir',
- 'CreateDirectory',
- 'GetFileTime',
- 'LogToOpenFile',
- 'LogToOpenFileEx',
- 'SetNextMap',
- 'GetNextMap',
- 'ForceChangeLevel',
- 'GetMapHistorySize',
- 'GetMapHistory',
- 'GeoipCode2',
- 'GeoipCode3',
- 'GeoipCountry',
- 'MarkNativeAsOptional',
- 'RegClientCookie',
- 'FindClientCookie',
- 'SetClientCookie',
- 'GetClientCookie',
- 'SetAuthIdCookie',
- 'AreClientCookiesCached',
- 'OnClientCookiesCached',
- 'CookieMenuHandler',
- 'SetCookiePrefabMenu',
- 'SetCookieMenuItem',
- 'ShowCookieMenu',
- 'GetCookieIterator',
- 'ReadCookieIterator',
- 'GetCookieAccess',
- 'GetClientCookieTime',
- 'LoadTranslations',
- 'SetGlobalTransTarget',
- 'GetClientLanguage',
- 'GetServerLanguage',
- 'GetLanguageCount',
- 'GetLanguageInfo',
- 'SetClientLanguage',
- 'GetLanguageByCode',
- 'GetLanguageByName',
- 'CS_OnBuyCommand',
- 'CS_OnCSWeaponDrop',
- 'CS_OnGetWeaponPrice',
- 'CS_OnTerminateRound',
- 'CS_RespawnPlayer',
- 'CS_SwitchTeam',
- 'CS_DropWeapon',
- 'CS_TerminateRound',
- 'CS_GetTranslatedWeaponAlias',
- 'CS_GetWeaponPrice',
- 'CS_GetClientClanTag',
- 'CS_SetClientClanTag',
- 'LogToGame',
- 'SetRandomSeed',
- 'GetRandomFloat',
- 'GetRandomInt',
- 'IsMapValid',
- 'IsDedicatedServer',
- 'GetEngineTime',
- 'GetGameTime',
- 'GetGameTickCount',
- 'GetGameDescription',
- 'GetGameFolderName',
- 'GetCurrentMap',
- 'PrecacheModel',
- 'PrecacheSentenceFile',
- 'PrecacheDecal',
- 'PrecacheGeneric',
- 'IsModelPrecached',
- 'IsDecalPrecached',
- 'IsGenericPrecached',
- 'PrecacheSound',
- 'IsSoundPrecached',
- 'CreateDialog',
- 'GuessSDKVersion',
- 'PrintToChat',
- 'PrintToChatAll',
- 'PrintCenterText',
- 'PrintCenterTextAll',
- 'PrintHintText',
- 'PrintHintTextToAll',
- 'ShowVGUIPanel',
- 'CreateHudSynchronizer',
- 'SetHudTextParams',
- 'SetHudTextParamsEx',
- 'ShowSyncHudText',
- 'ClearSyncHud',
- 'ShowHudText',
- 'ShowMOTDPanel',
- 'DisplayAskConnectBox',
- 'EntIndexToEntRef',
- 'EntRefToEntIndex',
- 'MakeCompatEntRef',
- 'SetClientViewEntity',
- 'SetLightStyle',
- 'GetClientEyePosition',
- 'CreateDataPack',
- 'WritePackCell',
- 'WritePackFloat',
- 'WritePackString',
- 'ReadPackCell',
- 'ReadPackFloat',
- 'ReadPackString',
- 'ResetPack',
- 'GetPackPosition',
- 'SetPackPosition',
- 'IsPackReadable',
- 'LogMessage',
- 'LogMessageEx',
- 'LogToFile',
- 'LogToFileEx',
- 'LogAction',
- 'LogError',
- 'OnLogAction',
- 'GameLogHook',
- 'AddGameLogHook',
- 'RemoveGameLogHook',
- 'FindTeamByName',
- 'StartPrepSDKCall',
- 'PrepSDKCall_SetVirtual',
- 'PrepSDKCall_SetSignature',
- 'PrepSDKCall_SetFromConf',
- 'PrepSDKCall_SetReturnInfo',
- 'PrepSDKCall_AddParameter',
- 'EndPrepSDKCall',
- 'SDKCall']
-
-if __name__ == '__main__':
- import pprint
- import re
- import sys
- import urllib
-
- # urllib ends up wanting to import a module called 'math' -- if
- # pygments/lexers is in the path, this ends badly.
- for i in range(len(sys.path)-1, -1, -1):
- if sys.path[i].endswith('/lexers'):
- del sys.path[i]
-
- def get_version():
- f = urllib.urlopen('http://docs.sourcemod.net/api/index.php')
- r = re.compile(r'SourceMod v\.<b>([\d\.]+)</td>')
- for line in f:
- m = r.search(line)
- if m is not None:
- return m.groups()[0]
-
- def get_sm_functions():
- f = urllib.urlopen('http://docs.sourcemod.net/api/SMfuncs.js')
- r = re.compile(r'SMfunctions\[\d+\] = Array \("(?:public )?([^,]+)",".+"\);')
- functions = []
- for line in f:
- m = r.match(line)
- if m is not None:
- functions.append(m.groups()[0])
- return functions
-
- def regenerate(filename, natives):
- f = open(filename)
- try:
- content = f.read()
- finally:
- f.close()
-
- header = content[:content.find('FUNCTIONS = [')]
- footer = content[content.find("if __name__ == '__main__':"):]
-
-
- f = open(filename, 'w')
- f.write(header)
- f.write('FUNCTIONS = %s\n\n' % pprint.pformat(natives))
- f.write(footer)
- f.close()
-
- def run():
- version = get_version()
- print '> Downloading function index for SourceMod %s' % version
- functions = get_sm_functions()
- print '> %d functions found:' % len(functions)
-
- functionlist = []
- for full_function_name in functions:
- print '>> %s' % full_function_name
- functionlist.append(full_function_name)
-
- regenerate(__file__, functionlist)
-
-
- run()
diff --git a/pygments/lexers/_stan_builtins.py b/pygments/lexers/_stan_builtins.py
index 637072e4..0a225eba 100644
--- a/pygments/lexers/_stan_builtins.py
+++ b/pygments/lexers/_stan_builtins.py
@@ -1,18 +1,33 @@
# -*- coding: utf-8 -*-
"""
-pygments.lexers._stan_builtins
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ pygments.lexers._stan_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-This file contains the names of functions for Stan used by
-``pygments.lexers.math.StanLexer.
+ This file contains the names of functions for Stan used by
+ ``pygments.lexers.math.StanLexer``. This is for Stan language version 2.5.0.
-:copyright: Copyright 2013 by the Pygments team, see AUTHORS.
-:license: BSD, see LICENSE for details.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
"""
-KEYWORDS = ['else', 'for', 'if', 'in', 'lower', 'lp__', 'print', 'upper', 'while']
+KEYWORDS = (
+ 'else',
+ 'for',
+ 'if',
+ 'in',
+ 'increment_log_prob',
+ 'integrate_ode',
+ 'lp__',
+ 'print',
+ 'reject',
+ 'return',
+ 'while'
+)
-TYPES = [ 'corr_matrix',
+TYPES = (
+ 'cholesky_factor_corr',
+ 'cholesky_factor_cov',
+ 'corr_matrix',
'cov_matrix',
'int',
'matrix',
@@ -22,47 +37,70 @@ TYPES = [ 'corr_matrix',
'row_vector',
'simplex',
'unit_vector',
- 'vector']
+ 'vector',
+ 'void'
+)
-FUNCTIONS = [ 'Phi',
+FUNCTIONS = (
+ 'Phi',
'Phi_approx',
'abs',
'acos',
'acosh',
+ 'append_col',
+ 'append_row',
'asin',
'asinh',
'atan',
'atan2',
'atanh',
+ 'bernoulli_ccdf_log',
'bernoulli_cdf',
+ 'bernoulli_cdf_log',
'bernoulli_log',
'bernoulli_logit_log',
'bernoulli_rng',
+ 'bessel_first_kind',
+ 'bessel_second_kind',
+ 'beta_binomial_ccdf_log',
'beta_binomial_cdf',
+ 'beta_binomial_cdf_log',
'beta_binomial_log',
'beta_binomial_rng',
+ 'beta_ccdf_log',
'beta_cdf',
+ 'beta_cdf_log',
'beta_log',
'beta_rng',
'binary_log_loss',
+ 'binomial_ccdf_log',
'binomial_cdf',
+ 'binomial_cdf_log',
'binomial_coefficient_log',
'binomial_log',
'binomial_logit_log',
'binomial_rng',
'block',
'categorical_log',
+ 'categorical_logit_log',
'categorical_rng',
+ 'cauchy_ccdf_log',
'cauchy_cdf',
+ 'cauchy_cdf_log',
'cauchy_log',
'cauchy_rng',
'cbrt',
'ceil',
+ 'chi_square_ccdf_log',
+ 'chi_square_cdf',
+ 'chi_square_cdf_log',
'chi_square_log',
'chi_square_rng',
'cholesky_decompose',
'col',
'cols',
+ 'columns_dot_product',
+ 'columns_dot_self',
'cos',
'cosh',
'crossprod',
@@ -72,56 +110,90 @@ FUNCTIONS = [ 'Phi',
'diag_post_multiply',
'diag_pre_multiply',
'diagonal',
+ 'digamma',
'dims',
'dirichlet_log',
'dirichlet_rng',
+ 'distance',
'dot_product',
'dot_self',
+ 'double_exponential_ccdf_log',
+ 'double_exponential_cdf',
+ 'double_exponential_cdf_log',
'double_exponential_log',
'double_exponential_rng',
'e',
'eigenvalues_sym',
'eigenvectors_sym',
- 'epsilon',
'erf',
'erfc',
'exp',
'exp2',
+ 'exp_mod_normal_ccdf_log',
'exp_mod_normal_cdf',
+ 'exp_mod_normal_cdf_log',
'exp_mod_normal_log',
'exp_mod_normal_rng',
'expm1',
+ 'exponential_ccdf_log',
'exponential_cdf',
+ 'exponential_cdf_log',
'exponential_log',
'exponential_rng',
'fabs',
+ 'falling_factorial',
'fdim',
'floor',
'fma',
'fmax',
'fmin',
'fmod',
+ 'frechet_ccdf_log',
+ 'frechet_cdf',
+ 'frechet_cdf_log',
+ 'frechet_log',
+ 'frechet_rng',
+ 'gamma_ccdf_log',
+ 'gamma_cdf',
+ 'gamma_cdf_log',
'gamma_log',
+ 'gamma_p',
+ 'gamma_q',
'gamma_rng',
+ 'gaussian_dlm_obs_log',
+ 'get_lp',
+ 'gumbel_ccdf_log',
'gumbel_cdf',
+ 'gumbel_cdf_log',
'gumbel_log',
'gumbel_rng',
+ 'head',
'hypergeometric_log',
'hypergeometric_rng',
'hypot',
'if_else',
'int_step',
+ 'inv',
+ 'inv_chi_square_ccdf_log',
'inv_chi_square_cdf',
+ 'inv_chi_square_cdf_log',
'inv_chi_square_log',
'inv_chi_square_rng',
'inv_cloglog',
+ 'inv_gamma_ccdf_log',
'inv_gamma_cdf',
+ 'inv_gamma_cdf_log',
'inv_gamma_log',
'inv_gamma_rng',
'inv_logit',
+ 'inv_sqrt',
+ 'inv_square',
'inv_wishart_log',
'inv_wishart_rng',
'inverse',
+ 'inverse_spd',
+ 'is_inf',
+ 'is_nan',
'lbeta',
'lgamma',
'lkj_corr_cholesky_log',
@@ -133,116 +205,194 @@ FUNCTIONS = [ 'Phi',
'log',
'log10',
'log1m',
+ 'log1m_exp',
'log1m_inv_logit',
'log1p',
'log1p_exp',
'log2',
'log_determinant',
+ 'log_diff_exp',
+ 'log_falling_factorial',
'log_inv_logit',
+ 'log_rising_factorial',
+ 'log_softmax',
'log_sum_exp',
+ 'logistic_ccdf_log',
'logistic_cdf',
+ 'logistic_cdf_log',
'logistic_log',
'logistic_rng',
'logit',
+ 'lognormal_ccdf_log',
'lognormal_cdf',
+ 'lognormal_cdf_log',
'lognormal_log',
'lognormal_rng',
+ 'machine_precision',
'max',
'mdivide_left_tri_low',
'mdivide_right_tri_low',
'mean',
'min',
+ 'modified_bessel_first_kind',
+ 'modified_bessel_second_kind',
+ 'multi_gp_log',
'multi_normal_cholesky_log',
+ 'multi_normal_cholesky_rng',
'multi_normal_log',
'multi_normal_prec_log',
'multi_normal_rng',
'multi_student_t_log',
'multi_student_t_rng',
- 'multinomial_cdf',
'multinomial_log',
'multinomial_rng',
'multiply_log',
'multiply_lower_tri_self_transpose',
+ 'neg_binomial_2_log',
+ 'neg_binomial_2_log_log',
+ 'neg_binomial_2_log_rng',
+ 'neg_binomial_2_rng',
+ 'neg_binomial_ccdf_log',
'neg_binomial_cdf',
+ 'neg_binomial_cdf_log',
'neg_binomial_log',
'neg_binomial_rng',
- 'negative_epsilon',
'negative_infinity',
+ 'normal_ccdf_log',
'normal_cdf',
+ 'normal_cdf_log',
'normal_log',
'normal_rng',
'not_a_number',
+ 'num_elements',
'ordered_logistic_log',
'ordered_logistic_rng',
'owens_t',
+ 'pareto_ccdf_log',
'pareto_cdf',
+ 'pareto_cdf_log',
'pareto_log',
'pareto_rng',
+ 'pareto_type_2_ccdf_log',
+ 'pareto_type_2_cdf',
+ 'pareto_type_2_cdf_log',
+ 'pareto_type_2_log',
+ 'pareto_type_2_rng',
'pi',
+ 'poisson_ccdf_log',
'poisson_cdf',
+ 'poisson_cdf_log',
'poisson_log',
'poisson_log_log',
'poisson_rng',
'positive_infinity',
'pow',
'prod',
+ 'qr_Q',
+ 'qr_R',
+ 'quad_form',
+ 'quad_form_diag',
+ 'quad_form_sym',
+ 'rank',
+ 'rayleigh_ccdf_log',
+ 'rayleigh_cdf',
+ 'rayleigh_cdf_log',
+ 'rayleigh_log',
+ 'rayleigh_rng',
'rep_array',
'rep_matrix',
'rep_row_vector',
'rep_vector',
+ 'rising_factorial',
'round',
'row',
'rows',
+ 'rows_dot_product',
+ 'rows_dot_self',
+ 'scaled_inv_chi_square_ccdf_log',
'scaled_inv_chi_square_cdf',
+ 'scaled_inv_chi_square_cdf_log',
'scaled_inv_chi_square_log',
'scaled_inv_chi_square_rng',
'sd',
+ 'segment',
'sin',
'singular_values',
'sinh',
'size',
+ 'skew_normal_ccdf_log',
'skew_normal_cdf',
+ 'skew_normal_cdf_log',
'skew_normal_log',
'skew_normal_rng',
'softmax',
+ 'sort_asc',
+ 'sort_desc',
+ 'sort_indices_asc',
+ 'sort_indices_desc',
'sqrt',
'sqrt2',
'square',
+ 'squared_distance',
'step',
+ 'student_t_ccdf_log',
'student_t_cdf',
+ 'student_t_cdf_log',
'student_t_log',
'student_t_rng',
+ 'sub_col',
+ 'sub_row',
'sum',
+ 'tail',
'tan',
'tanh',
'tcrossprod',
'tgamma',
+ 'to_array_1d',
+ 'to_array_2d',
+ 'to_matrix',
+ 'to_row_vector',
+ 'to_vector',
'trace',
+ 'trace_gen_quad_form',
+ 'trace_quad_form',
+ 'trigamma',
'trunc',
+ 'uniform_ccdf_log',
+ 'uniform_cdf',
+ 'uniform_cdf_log',
'uniform_log',
'uniform_rng',
'variance',
+ 'von_mises_log',
+ 'von_mises_rng',
+ 'weibull_ccdf_log',
'weibull_cdf',
+ 'weibull_cdf_log',
'weibull_log',
'weibull_rng',
'wishart_log',
- 'wishart_rng']
+ 'wishart_rng'
+)
-DISTRIBUTIONS = [ 'bernoulli',
+DISTRIBUTIONS = (
+ 'bernoulli',
'bernoulli_logit',
'beta',
'beta_binomial',
'binomial',
- 'binomial_coefficient',
'binomial_logit',
'categorical',
+ 'categorical_logit',
'cauchy',
'chi_square',
'dirichlet',
'double_exponential',
'exp_mod_normal',
'exponential',
+ 'frechet',
'gamma',
+ 'gaussian_dlm_obs',
'gumbel',
'hypergeometric',
'inv_chi_square',
@@ -253,26 +403,33 @@ DISTRIBUTIONS = [ 'bernoulli',
'lkj_cov',
'logistic',
'lognormal',
+ 'multi_gp',
'multi_normal',
'multi_normal_cholesky',
'multi_normal_prec',
'multi_student_t',
'multinomial',
- 'multiply',
'neg_binomial',
+ 'neg_binomial_2',
+ 'neg_binomial_2_log',
'normal',
'ordered_logistic',
'pareto',
+ 'pareto_type_2',
'poisson',
'poisson_log',
+ 'rayleigh',
'scaled_inv_chi_square',
'skew_normal',
'student_t',
'uniform',
+ 'von_mises',
'weibull',
- 'wishart']
+ 'wishart'
+)
-RESERVED = [ 'alignas',
+RESERVED = (
+ 'alignas',
'alignof',
'and',
'and_eq',
@@ -307,6 +464,7 @@ RESERVED = [ 'alignas',
'false',
'float',
'friend',
+ 'fvar',
'goto',
'inline',
'int',
@@ -327,7 +485,6 @@ RESERVED = [ 'alignas',
'register',
'reinterpret_cast',
'repeat',
- 'return',
'short',
'signed',
'sizeof',
@@ -351,10 +508,12 @@ RESERVED = [ 'alignas',
'unsigned',
'until',
'using',
+ 'var',
'virtual',
'void',
'volatile',
'wchar_t',
'xor',
- 'xor_eq']
+ 'xor_eq'
+)
diff --git a/pygments/lexers/_vim_builtins.py b/pygments/lexers/_vim_builtins.py
new file mode 100644
index 00000000..e9b5fa1e
--- /dev/null
+++ b/pygments/lexers/_vim_builtins.py
@@ -0,0 +1,1939 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._vim_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file is autogenerated by scripts/get_vimkw.py
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+# Split up into multiple functions so it's importable by Jython, which has a
+# per-method size limit.
+
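The comment above is the load-bearing design note for this generated module: Jython compiles each Python function into a single JVM method, and a JVM method body is capped at roughly 64 KiB of bytecode, so one enormous module-level literal can fail to compile there. Wrapping each keyword table in its own _get*() function keeps every compiled method safely under that limit. Below is a minimal sketch of the same pattern with a tiny table and a hypothetical lookup helper; neither the sample entries nor lookup() are part of the generated file.

def _get_example():
    # the big literal lives inside a function so the compiled method stays small
    var = (
        ('bn', 'bnext'),      # (abbreviation, full command name)
        ('bufdo', 'bufdo'),
    )
    return var

example = _get_example()

def lookup(word, table):
    # return the full name when word matches either half of an (abbrev, full) pair
    for abbrev, full in table:
        if word in (abbrev, full):
            return full
    return None

assert lookup('bn', example) == 'bnext'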
+def _getauto():
+ var = (
+ ('BufAdd','BufAdd'),
+ ('BufCreate','BufCreate'),
+ ('BufDelete','BufDelete'),
+ ('BufEnter','BufEnter'),
+ ('BufFilePost','BufFilePost'),
+ ('BufFilePre','BufFilePre'),
+ ('BufHidden','BufHidden'),
+ ('BufLeave','BufLeave'),
+ ('BufNew','BufNew'),
+ ('BufNewFile','BufNewFile'),
+ ('BufRead','BufRead'),
+ ('BufReadCmd','BufReadCmd'),
+ ('BufReadPost','BufReadPost'),
+ ('BufReadPre','BufReadPre'),
+ ('BufUnload','BufUnload'),
+ ('BufWinEnter','BufWinEnter'),
+ ('BufWinLeave','BufWinLeave'),
+ ('BufWipeout','BufWipeout'),
+ ('BufWrite','BufWrite'),
+ ('BufWriteCmd','BufWriteCmd'),
+ ('BufWritePost','BufWritePost'),
+ ('BufWritePre','BufWritePre'),
+ ('Cmd','Cmd'),
+ ('CmdwinEnter','CmdwinEnter'),
+ ('CmdwinLeave','CmdwinLeave'),
+ ('ColorScheme','ColorScheme'),
+ ('CompleteDone','CompleteDone'),
+ ('CursorHold','CursorHold'),
+ ('CursorHoldI','CursorHoldI'),
+ ('CursorMoved','CursorMoved'),
+ ('CursorMovedI','CursorMovedI'),
+ ('EncodingChanged','EncodingChanged'),
+ ('FileAppendCmd','FileAppendCmd'),
+ ('FileAppendPost','FileAppendPost'),
+ ('FileAppendPre','FileAppendPre'),
+ ('FileChangedRO','FileChangedRO'),
+ ('FileChangedShell','FileChangedShell'),
+ ('FileChangedShellPost','FileChangedShellPost'),
+ ('FileEncoding','FileEncoding'),
+ ('FileReadCmd','FileReadCmd'),
+ ('FileReadPost','FileReadPost'),
+ ('FileReadPre','FileReadPre'),
+ ('FileType','FileType'),
+ ('FileWriteCmd','FileWriteCmd'),
+ ('FileWritePost','FileWritePost'),
+ ('FileWritePre','FileWritePre'),
+ ('FilterReadPost','FilterReadPost'),
+ ('FilterReadPre','FilterReadPre'),
+ ('FilterWritePost','FilterWritePost'),
+ ('FilterWritePre','FilterWritePre'),
+ ('FocusGained','FocusGained'),
+ ('FocusLost','FocusLost'),
+ ('FuncUndefined','FuncUndefined'),
+ ('GUIEnter','GUIEnter'),
+ ('GUIFailed','GUIFailed'),
+ ('InsertChange','InsertChange'),
+ ('InsertCharPre','InsertCharPre'),
+ ('InsertEnter','InsertEnter'),
+ ('InsertLeave','InsertLeave'),
+ ('MenuPopup','MenuPopup'),
+ ('QuickFixCmdPost','QuickFixCmdPost'),
+ ('QuickFixCmdPre','QuickFixCmdPre'),
+ ('QuitPre','QuitPre'),
+ ('RemoteReply','RemoteReply'),
+ ('SessionLoadPost','SessionLoadPost'),
+ ('ShellCmdPost','ShellCmdPost'),
+ ('ShellFilterPost','ShellFilterPost'),
+ ('SourceCmd','SourceCmd'),
+ ('SourcePre','SourcePre'),
+ ('SpellFileMissing','SpellFileMissing'),
+ ('StdinReadPost','StdinReadPost'),
+ ('StdinReadPre','StdinReadPre'),
+ ('SwapExists','SwapExists'),
+ ('Syntax','Syntax'),
+ ('TabEnter','TabEnter'),
+ ('TabLeave','TabLeave'),
+ ('TermChanged','TermChanged'),
+ ('TermResponse','TermResponse'),
+ ('TextChanged','TextChanged'),
+ ('TextChangedI','TextChangedI'),
+ ('User','User'),
+ ('UserGettingBored','UserGettingBored'),
+ ('VimEnter','VimEnter'),
+ ('VimLeave','VimLeave'),
+ ('VimLeavePre','VimLeavePre'),
+ ('VimResized','VimResized'),
+ ('WinEnter','WinEnter'),
+ ('WinLeave','WinLeave'),
+ ('event','event'),
+ )
+ return var
+auto = _getauto()
+
+def _getcommand():
+ var = (
+ ('a','a'),
+ ('ab','ab'),
+ ('abc','abclear'),
+ ('abo','aboveleft'),
+ ('al','all'),
+ ('ar','ar'),
+ ('ar','args'),
+ ('arga','argadd'),
+ ('argd','argdelete'),
+ ('argdo','argdo'),
+ ('arge','argedit'),
+ ('argg','argglobal'),
+ ('argl','arglocal'),
+ ('argu','argument'),
+ ('as','ascii'),
+ ('au','au'),
+ ('b','buffer'),
+ ('bN','bNext'),
+ ('ba','ball'),
+ ('bad','badd'),
+ ('bd','bdelete'),
+ ('bel','belowright'),
+ ('bf','bfirst'),
+ ('bl','blast'),
+ ('bm','bmodified'),
+ ('bn','bnext'),
+ ('bo','botright'),
+ ('bp','bprevious'),
+ ('br','br'),
+ ('br','brewind'),
+ ('brea','break'),
+ ('breaka','breakadd'),
+ ('breakd','breakdel'),
+ ('breakl','breaklist'),
+ ('bro','browse'),
+ ('bu','bu'),
+ ('buf','buf'),
+ ('bufdo','bufdo'),
+ ('buffers','buffers'),
+ ('bun','bunload'),
+ ('bw','bwipeout'),
+ ('c','c'),
+ ('c','change'),
+ ('cN','cN'),
+ ('cN','cNext'),
+ ('cNf','cNf'),
+ ('cNf','cNfile'),
+ ('cabc','cabclear'),
+ ('cad','cad'),
+ ('cad','caddexpr'),
+ ('caddb','caddbuffer'),
+ ('caddf','caddfile'),
+ ('cal','call'),
+ ('cat','catch'),
+ ('cb','cbuffer'),
+ ('cc','cc'),
+ ('ccl','cclose'),
+ ('cd','cd'),
+ ('ce','center'),
+ ('cex','cexpr'),
+ ('cf','cfile'),
+ ('cfir','cfirst'),
+ ('cg','cgetfile'),
+ ('cgetb','cgetbuffer'),
+ ('cgete','cgetexpr'),
+ ('changes','changes'),
+ ('chd','chdir'),
+ ('che','checkpath'),
+ ('checkt','checktime'),
+ ('cl','cl'),
+ ('cl','clist'),
+ ('cla','clast'),
+ ('clo','close'),
+ ('cmapc','cmapclear'),
+ ('cn','cn'),
+ ('cn','cnext'),
+ ('cnew','cnewer'),
+ ('cnf','cnf'),
+ ('cnf','cnfile'),
+ ('co','copy'),
+ ('col','colder'),
+ ('colo','colorscheme'),
+ ('com','com'),
+ ('comc','comclear'),
+ ('comp','compiler'),
+ ('con','con'),
+ ('con','continue'),
+ ('conf','confirm'),
+ ('cope','copen'),
+ ('cp','cprevious'),
+ ('cpf','cpfile'),
+ ('cq','cquit'),
+ ('cr','crewind'),
+ ('cs','cs'),
+ ('cscope','cscope'),
+ ('cstag','cstag'),
+ ('cuna','cunabbrev'),
+ ('cw','cwindow'),
+ ('d','d'),
+ ('d','delete'),
+ ('de','de'),
+ ('debug','debug'),
+ ('debugg','debuggreedy'),
+ ('del','del'),
+ ('delc','delcommand'),
+ ('delel','delel'),
+ ('delep','delep'),
+ ('deletel','deletel'),
+ ('deletep','deletep'),
+ ('deletl','deletl'),
+ ('deletp','deletp'),
+ ('delf','delf'),
+ ('delf','delfunction'),
+ ('dell','dell'),
+ ('delm','delmarks'),
+ ('delp','delp'),
+ ('dep','dep'),
+ ('di','di'),
+ ('di','display'),
+ ('diffg','diffget'),
+ ('diffo','diffoff'),
+ ('diffp','diffpatch'),
+ ('diffpu','diffput'),
+ ('diffs','diffsplit'),
+ ('difft','diffthis'),
+ ('diffu','diffupdate'),
+ ('dig','dig'),
+ ('dig','digraphs'),
+ ('dir','dir'),
+ ('dj','djump'),
+ ('dl','dl'),
+ ('dli','dlist'),
+ ('do','do'),
+ ('doau','doau'),
+ ('dp','dp'),
+ ('dr','drop'),
+ ('ds','dsearch'),
+ ('dsp','dsplit'),
+ ('e','e'),
+ ('e','edit'),
+ ('ea','ea'),
+ ('earlier','earlier'),
+ ('ec','ec'),
+ ('echoe','echoerr'),
+ ('echom','echomsg'),
+ ('echon','echon'),
+ ('el','else'),
+ ('elsei','elseif'),
+ ('em','emenu'),
+ ('en','en'),
+ ('en','endif'),
+ ('endf','endf'),
+ ('endf','endfunction'),
+ ('endfo','endfor'),
+ ('endfun','endfun'),
+ ('endt','endtry'),
+ ('endw','endwhile'),
+ ('ene','enew'),
+ ('ex','ex'),
+ ('exi','exit'),
+ ('exu','exusage'),
+ ('f','f'),
+ ('f','file'),
+ ('files','files'),
+ ('filet','filet'),
+ ('filetype','filetype'),
+ ('fin','fin'),
+ ('fin','find'),
+ ('fina','finally'),
+ ('fini','finish'),
+ ('fir','first'),
+ ('fix','fixdel'),
+ ('fo','fold'),
+ ('foldc','foldclose'),
+ ('foldd','folddoopen'),
+ ('folddoc','folddoclosed'),
+ ('foldo','foldopen'),
+ ('for','for'),
+ ('fu','fu'),
+ ('fu','function'),
+ ('fun','fun'),
+ ('g','g'),
+ ('go','goto'),
+ ('gr','grep'),
+ ('grepa','grepadd'),
+ ('gui','gui'),
+ ('gvim','gvim'),
+ ('h','h'),
+ ('h','help'),
+ ('ha','hardcopy'),
+ ('helpf','helpfind'),
+ ('helpg','helpgrep'),
+ ('helpt','helptags'),
+ ('hi','hi'),
+ ('hid','hide'),
+ ('his','history'),
+ ('i','i'),
+ ('ia','ia'),
+ ('iabc','iabclear'),
+ ('if','if'),
+ ('ij','ijump'),
+ ('il','ilist'),
+ ('imapc','imapclear'),
+ ('in','in'),
+ ('intro','intro'),
+ ('is','isearch'),
+ ('isp','isplit'),
+ ('iuna','iunabbrev'),
+ ('j','join'),
+ ('ju','jumps'),
+ ('k','k'),
+ ('kee','keepmarks'),
+ ('keepa','keepa'),
+ ('keepalt','keepalt'),
+ ('keepj','keepjumps'),
+ ('keepp','keeppatterns'),
+ ('l','l'),
+ ('l','list'),
+ ('lN','lN'),
+ ('lN','lNext'),
+ ('lNf','lNf'),
+ ('lNf','lNfile'),
+ ('la','la'),
+ ('la','last'),
+ ('lad','lad'),
+ ('lad','laddexpr'),
+ ('laddb','laddbuffer'),
+ ('laddf','laddfile'),
+ ('lan','lan'),
+ ('lan','language'),
+ ('lat','lat'),
+ ('later','later'),
+ ('lb','lbuffer'),
+ ('lc','lcd'),
+ ('lch','lchdir'),
+ ('lcl','lclose'),
+ ('lcs','lcs'),
+ ('lcscope','lcscope'),
+ ('le','left'),
+ ('lefta','leftabove'),
+ ('lex','lexpr'),
+ ('lf','lfile'),
+ ('lfir','lfirst'),
+ ('lg','lgetfile'),
+ ('lgetb','lgetbuffer'),
+ ('lgete','lgetexpr'),
+ ('lgr','lgrep'),
+ ('lgrepa','lgrepadd'),
+ ('lh','lhelpgrep'),
+ ('ll','ll'),
+ ('lla','llast'),
+ ('lli','llist'),
+ ('lmak','lmake'),
+ ('lmapc','lmapclear'),
+ ('lne','lne'),
+ ('lne','lnext'),
+ ('lnew','lnewer'),
+ ('lnf','lnf'),
+ ('lnf','lnfile'),
+ ('lo','lo'),
+ ('lo','loadview'),
+ ('loadk','loadk'),
+ ('loadkeymap','loadkeymap'),
+ ('loc','lockmarks'),
+ ('lockv','lockvar'),
+ ('lol','lolder'),
+ ('lop','lopen'),
+ ('lp','lprevious'),
+ ('lpf','lpfile'),
+ ('lr','lrewind'),
+ ('ls','ls'),
+ ('lt','ltag'),
+ ('lua','lua'),
+ ('luado','luado'),
+ ('luafile','luafile'),
+ ('lv','lvimgrep'),
+ ('lvimgrepa','lvimgrepadd'),
+ ('lw','lwindow'),
+ ('m','move'),
+ ('ma','ma'),
+ ('ma','mark'),
+ ('mak','make'),
+ ('marks','marks'),
+ ('mat','match'),
+ ('menut','menut'),
+ ('menut','menutranslate'),
+ ('mes','mes'),
+ ('messages','messages'),
+ ('mk','mk'),
+ ('mk','mkexrc'),
+ ('mks','mksession'),
+ ('mksp','mkspell'),
+ ('mkv','mkv'),
+ ('mkv','mkvimrc'),
+ ('mkvie','mkview'),
+ ('mo','mo'),
+ ('mod','mode'),
+ ('mz','mz'),
+ ('mz','mzscheme'),
+ ('mzf','mzfile'),
+ ('n','n'),
+ ('n','next'),
+ ('nb','nbkey'),
+ ('nbc','nbclose'),
+ ('nbs','nbstart'),
+ ('ne','ne'),
+ ('new','new'),
+ ('nmapc','nmapclear'),
+ ('noa','noa'),
+ ('noautocmd','noautocmd'),
+ ('noh','nohlsearch'),
+ ('nu','number'),
+ ('o','o'),
+ ('o','open'),
+ ('ol','oldfiles'),
+ ('omapc','omapclear'),
+ ('on','only'),
+ ('opt','options'),
+ ('ownsyntax','ownsyntax'),
+ ('p','p'),
+ ('p','print'),
+ ('pc','pclose'),
+ ('pe','pe'),
+ ('pe','perl'),
+ ('ped','pedit'),
+ ('perld','perldo'),
+ ('po','pop'),
+ ('popu','popu'),
+ ('popu','popup'),
+ ('pp','ppop'),
+ ('pr','pr'),
+ ('pre','preserve'),
+ ('prev','previous'),
+ ('pro','pro'),
+ ('prof','profile'),
+ ('profd','profdel'),
+ ('promptf','promptfind'),
+ ('promptr','promptrepl'),
+ ('ps','psearch'),
+ ('ptN','ptN'),
+ ('ptN','ptNext'),
+ ('pta','ptag'),
+ ('ptf','ptfirst'),
+ ('ptj','ptjump'),
+ ('ptl','ptlast'),
+ ('ptn','ptn'),
+ ('ptn','ptnext'),
+ ('ptp','ptprevious'),
+ ('ptr','ptrewind'),
+ ('pts','ptselect'),
+ ('pu','put'),
+ ('pw','pwd'),
+ ('py','py'),
+ ('py','python'),
+ ('py3','py3'),
+ ('py3','py3'),
+ ('py3do','py3do'),
+ ('pydo','pydo'),
+ ('pyf','pyfile'),
+ ('python3','python3'),
+ ('q','q'),
+ ('q','quit'),
+ ('qa','qall'),
+ ('quita','quitall'),
+ ('r','r'),
+ ('r','read'),
+ ('re','re'),
+ ('rec','recover'),
+ ('red','red'),
+ ('red','redo'),
+ ('redi','redir'),
+ ('redr','redraw'),
+ ('redraws','redrawstatus'),
+ ('reg','registers'),
+ ('res','resize'),
+ ('ret','retab'),
+ ('retu','return'),
+ ('rew','rewind'),
+ ('ri','right'),
+ ('rightb','rightbelow'),
+ ('ru','ru'),
+ ('ru','runtime'),
+ ('rub','ruby'),
+ ('rubyd','rubydo'),
+ ('rubyf','rubyfile'),
+ ('rundo','rundo'),
+ ('rv','rviminfo'),
+ ('sN','sNext'),
+ ('sa','sargument'),
+ ('sal','sall'),
+ ('san','sandbox'),
+ ('sav','saveas'),
+ ('sb','sbuffer'),
+ ('sbN','sbNext'),
+ ('sba','sball'),
+ ('sbf','sbfirst'),
+ ('sbl','sblast'),
+ ('sbm','sbmodified'),
+ ('sbn','sbnext'),
+ ('sbp','sbprevious'),
+ ('sbr','sbrewind'),
+ ('scrip','scrip'),
+ ('scrip','scriptnames'),
+ ('scripte','scriptencoding'),
+ ('scs','scs'),
+ ('scscope','scscope'),
+ ('se','set'),
+ ('setf','setfiletype'),
+ ('setg','setglobal'),
+ ('setl','setlocal'),
+ ('sf','sfind'),
+ ('sfir','sfirst'),
+ ('sh','shell'),
+ ('si','si'),
+ ('sig','sig'),
+ ('sign','sign'),
+ ('sil','silent'),
+ ('sim','simalt'),
+ ('sl','sl'),
+ ('sl','sleep'),
+ ('sla','slast'),
+ ('sm','smagic'),
+ ('sm','smap'),
+ ('sme','sme'),
+ ('smenu','smenu'),
+ ('sn','snext'),
+ ('sni','sniff'),
+ ('sno','snomagic'),
+ ('snoreme','snoreme'),
+ ('snoremenu','snoremenu'),
+ ('so','so'),
+ ('so','source'),
+ ('sor','sort'),
+ ('sp','split'),
+ ('spe','spe'),
+ ('spe','spellgood'),
+ ('spelld','spelldump'),
+ ('spelli','spellinfo'),
+ ('spellr','spellrepall'),
+ ('spellu','spellundo'),
+ ('spellw','spellwrong'),
+ ('spr','sprevious'),
+ ('sre','srewind'),
+ ('st','st'),
+ ('st','stop'),
+ ('sta','stag'),
+ ('star','star'),
+ ('star','startinsert'),
+ ('start','start'),
+ ('startg','startgreplace'),
+ ('startr','startreplace'),
+ ('stj','stjump'),
+ ('stopi','stopinsert'),
+ ('sts','stselect'),
+ ('sun','sunhide'),
+ ('sunme','sunme'),
+ ('sunmenu','sunmenu'),
+ ('sus','suspend'),
+ ('sv','sview'),
+ ('sw','swapname'),
+ ('sy','sy'),
+ ('syn','syn'),
+ ('sync','sync'),
+ ('syncbind','syncbind'),
+ ('syntime','syntime'),
+ ('t','t'),
+ ('tN','tN'),
+ ('tN','tNext'),
+ ('ta','ta'),
+ ('ta','tag'),
+ ('tab','tab'),
+ ('tabN','tabN'),
+ ('tabN','tabNext'),
+ ('tabc','tabclose'),
+ ('tabd','tabdo'),
+ ('tabe','tabedit'),
+ ('tabf','tabfind'),
+ ('tabfir','tabfirst'),
+ ('tabl','tablast'),
+ ('tabm','tabmove'),
+ ('tabn','tabnext'),
+ ('tabnew','tabnew'),
+ ('tabo','tabonly'),
+ ('tabp','tabprevious'),
+ ('tabr','tabrewind'),
+ ('tabs','tabs'),
+ ('tags','tags'),
+ ('tc','tcl'),
+ ('tcld','tcldo'),
+ ('tclf','tclfile'),
+ ('te','tearoff'),
+ ('tf','tfirst'),
+ ('th','throw'),
+ ('tj','tjump'),
+ ('tl','tlast'),
+ ('tm','tm'),
+ ('tm','tmenu'),
+ ('tn','tn'),
+ ('tn','tnext'),
+ ('to','topleft'),
+ ('tp','tprevious'),
+ ('tr','tr'),
+ ('tr','trewind'),
+ ('try','try'),
+ ('ts','tselect'),
+ ('tu','tu'),
+ ('tu','tunmenu'),
+ ('u','u'),
+ ('u','undo'),
+ ('un','un'),
+ ('una','unabbreviate'),
+ ('undoj','undojoin'),
+ ('undol','undolist'),
+ ('unh','unhide'),
+ ('unl','unl'),
+ ('unlo','unlockvar'),
+ ('uns','unsilent'),
+ ('up','update'),
+ ('v','v'),
+ ('ve','ve'),
+ ('ve','version'),
+ ('verb','verbose'),
+ ('vert','vertical'),
+ ('vi','vi'),
+ ('vi','visual'),
+ ('vie','view'),
+ ('vim','vimgrep'),
+ ('vimgrepa','vimgrepadd'),
+ ('viu','viusage'),
+ ('vmapc','vmapclear'),
+ ('vne','vnew'),
+ ('vs','vsplit'),
+ ('w','w'),
+ ('w','write'),
+ ('wN','wNext'),
+ ('wa','wall'),
+ ('wh','while'),
+ ('win','win'),
+ ('win','winsize'),
+ ('winc','wincmd'),
+ ('windo','windo'),
+ ('winp','winpos'),
+ ('wn','wnext'),
+ ('wp','wprevious'),
+ ('wq','wq'),
+ ('wqa','wqall'),
+ ('ws','wsverb'),
+ ('wundo','wundo'),
+ ('wv','wviminfo'),
+ ('x','x'),
+ ('x','xit'),
+ ('xa','xall'),
+ ('xmapc','xmapclear'),
+ ('xme','xme'),
+ ('xmenu','xmenu'),
+ ('xnoreme','xnoreme'),
+ ('xnoremenu','xnoremenu'),
+ ('xunme','xunme'),
+ ('xunmenu','xunmenu'),
+ ('xwininfo','xwininfo'),
+ ('y','yank'),
+ )
+ return var
+command = _getcommand()
+
+def _getoption():
+ var = (
+ ('acd','acd'),
+ ('ai','ai'),
+ ('akm','akm'),
+ ('al','al'),
+ ('aleph','aleph'),
+ ('allowrevins','allowrevins'),
+ ('altkeymap','altkeymap'),
+ ('ambiwidth','ambiwidth'),
+ ('ambw','ambw'),
+ ('anti','anti'),
+ ('antialias','antialias'),
+ ('ar','ar'),
+ ('arab','arab'),
+ ('arabic','arabic'),
+ ('arabicshape','arabicshape'),
+ ('ari','ari'),
+ ('arshape','arshape'),
+ ('autochdir','autochdir'),
+ ('autoindent','autoindent'),
+ ('autoread','autoread'),
+ ('autowrite','autowrite'),
+ ('autowriteall','autowriteall'),
+ ('aw','aw'),
+ ('awa','awa'),
+ ('background','background'),
+ ('backspace','backspace'),
+ ('backup','backup'),
+ ('backupcopy','backupcopy'),
+ ('backupdir','backupdir'),
+ ('backupext','backupext'),
+ ('backupskip','backupskip'),
+ ('balloondelay','balloondelay'),
+ ('ballooneval','ballooneval'),
+ ('balloonexpr','balloonexpr'),
+ ('bdir','bdir'),
+ ('bdlay','bdlay'),
+ ('beval','beval'),
+ ('bex','bex'),
+ ('bexpr','bexpr'),
+ ('bg','bg'),
+ ('bh','bh'),
+ ('bin','bin'),
+ ('binary','binary'),
+ ('biosk','biosk'),
+ ('bioskey','bioskey'),
+ ('bk','bk'),
+ ('bkc','bkc'),
+ ('bl','bl'),
+ ('bomb','bomb'),
+ ('breakat','breakat'),
+ ('brk','brk'),
+ ('browsedir','browsedir'),
+ ('bs','bs'),
+ ('bsdir','bsdir'),
+ ('bsk','bsk'),
+ ('bt','bt'),
+ ('bufhidden','bufhidden'),
+ ('buflisted','buflisted'),
+ ('buftype','buftype'),
+ ('casemap','casemap'),
+ ('cb','cb'),
+ ('cc','cc'),
+ ('ccv','ccv'),
+ ('cd','cd'),
+ ('cdpath','cdpath'),
+ ('cedit','cedit'),
+ ('cf','cf'),
+ ('cfu','cfu'),
+ ('ch','ch'),
+ ('charconvert','charconvert'),
+ ('ci','ci'),
+ ('cin','cin'),
+ ('cindent','cindent'),
+ ('cink','cink'),
+ ('cinkeys','cinkeys'),
+ ('cino','cino'),
+ ('cinoptions','cinoptions'),
+ ('cinw','cinw'),
+ ('cinwords','cinwords'),
+ ('clipboard','clipboard'),
+ ('cmdheight','cmdheight'),
+ ('cmdwinheight','cmdwinheight'),
+ ('cmp','cmp'),
+ ('cms','cms'),
+ ('co','co'),
+ ('cocu','cocu'),
+ ('cole','cole'),
+ ('colorcolumn','colorcolumn'),
+ ('columns','columns'),
+ ('com','com'),
+ ('comments','comments'),
+ ('commentstring','commentstring'),
+ ('compatible','compatible'),
+ ('complete','complete'),
+ ('completefunc','completefunc'),
+ ('completeopt','completeopt'),
+ ('concealcursor','concealcursor'),
+ ('conceallevel','conceallevel'),
+ ('confirm','confirm'),
+ ('consk','consk'),
+ ('conskey','conskey'),
+ ('copyindent','copyindent'),
+ ('cot','cot'),
+ ('cp','cp'),
+ ('cpo','cpo'),
+ ('cpoptions','cpoptions'),
+ ('cpt','cpt'),
+ ('crb','crb'),
+ ('cryptmethod','cryptmethod'),
+ ('cscopepathcomp','cscopepathcomp'),
+ ('cscopeprg','cscopeprg'),
+ ('cscopequickfix','cscopequickfix'),
+ ('cscoperelative','cscoperelative'),
+ ('cscopetag','cscopetag'),
+ ('cscopetagorder','cscopetagorder'),
+ ('cscopeverbose','cscopeverbose'),
+ ('cspc','cspc'),
+ ('csprg','csprg'),
+ ('csqf','csqf'),
+ ('csre','csre'),
+ ('cst','cst'),
+ ('csto','csto'),
+ ('csverb','csverb'),
+ ('cuc','cuc'),
+ ('cul','cul'),
+ ('cursorbind','cursorbind'),
+ ('cursorcolumn','cursorcolumn'),
+ ('cursorline','cursorline'),
+ ('cwh','cwh'),
+ ('debug','debug'),
+ ('deco','deco'),
+ ('def','def'),
+ ('define','define'),
+ ('delcombine','delcombine'),
+ ('dex','dex'),
+ ('dg','dg'),
+ ('dict','dict'),
+ ('dictionary','dictionary'),
+ ('diff','diff'),
+ ('diffexpr','diffexpr'),
+ ('diffopt','diffopt'),
+ ('digraph','digraph'),
+ ('dip','dip'),
+ ('dir','dir'),
+ ('directory','directory'),
+ ('display','display'),
+ ('dy','dy'),
+ ('ea','ea'),
+ ('ead','ead'),
+ ('eadirection','eadirection'),
+ ('eb','eb'),
+ ('ed','ed'),
+ ('edcompatible','edcompatible'),
+ ('ef','ef'),
+ ('efm','efm'),
+ ('ei','ei'),
+ ('ek','ek'),
+ ('enc','enc'),
+ ('encoding','encoding'),
+ ('endofline','endofline'),
+ ('eol','eol'),
+ ('ep','ep'),
+ ('equalalways','equalalways'),
+ ('equalprg','equalprg'),
+ ('errorbells','errorbells'),
+ ('errorfile','errorfile'),
+ ('errorformat','errorformat'),
+ ('esckeys','esckeys'),
+ ('et','et'),
+ ('eventignore','eventignore'),
+ ('ex','ex'),
+ ('expandtab','expandtab'),
+ ('exrc','exrc'),
+ ('fcl','fcl'),
+ ('fcs','fcs'),
+ ('fdc','fdc'),
+ ('fde','fde'),
+ ('fdi','fdi'),
+ ('fdl','fdl'),
+ ('fdls','fdls'),
+ ('fdm','fdm'),
+ ('fdn','fdn'),
+ ('fdo','fdo'),
+ ('fdt','fdt'),
+ ('fen','fen'),
+ ('fenc','fenc'),
+ ('fencs','fencs'),
+ ('fex','fex'),
+ ('ff','ff'),
+ ('ffs','ffs'),
+ ('fic','fic'),
+ ('fileencoding','fileencoding'),
+ ('fileencodings','fileencodings'),
+ ('fileformat','fileformat'),
+ ('fileformats','fileformats'),
+ ('fileignorecase','fileignorecase'),
+ ('filetype','filetype'),
+ ('fillchars','fillchars'),
+ ('fk','fk'),
+ ('fkmap','fkmap'),
+ ('flp','flp'),
+ ('fml','fml'),
+ ('fmr','fmr'),
+ ('fo','fo'),
+ ('foldclose','foldclose'),
+ ('foldcolumn','foldcolumn'),
+ ('foldenable','foldenable'),
+ ('foldexpr','foldexpr'),
+ ('foldignore','foldignore'),
+ ('foldlevel','foldlevel'),
+ ('foldlevelstart','foldlevelstart'),
+ ('foldmarker','foldmarker'),
+ ('foldmethod','foldmethod'),
+ ('foldminlines','foldminlines'),
+ ('foldnestmax','foldnestmax'),
+ ('foldopen','foldopen'),
+ ('foldtext','foldtext'),
+ ('formatexpr','formatexpr'),
+ ('formatlistpat','formatlistpat'),
+ ('formatoptions','formatoptions'),
+ ('formatprg','formatprg'),
+ ('fp','fp'),
+ ('fs','fs'),
+ ('fsync','fsync'),
+ ('ft','ft'),
+ ('gcr','gcr'),
+ ('gd','gd'),
+ ('gdefault','gdefault'),
+ ('gfm','gfm'),
+ ('gfn','gfn'),
+ ('gfs','gfs'),
+ ('gfw','gfw'),
+ ('ghr','ghr'),
+ ('go','go'),
+ ('gp','gp'),
+ ('grepformat','grepformat'),
+ ('grepprg','grepprg'),
+ ('gtl','gtl'),
+ ('gtt','gtt'),
+ ('guicursor','guicursor'),
+ ('guifont','guifont'),
+ ('guifontset','guifontset'),
+ ('guifontwide','guifontwide'),
+ ('guiheadroom','guiheadroom'),
+ ('guioptions','guioptions'),
+ ('guipty','guipty'),
+ ('guitablabel','guitablabel'),
+ ('guitabtooltip','guitabtooltip'),
+ ('helpfile','helpfile'),
+ ('helpheight','helpheight'),
+ ('helplang','helplang'),
+ ('hf','hf'),
+ ('hh','hh'),
+ ('hi','hi'),
+ ('hid','hid'),
+ ('hidden','hidden'),
+ ('highlight','highlight'),
+ ('history','history'),
+ ('hk','hk'),
+ ('hkmap','hkmap'),
+ ('hkmapp','hkmapp'),
+ ('hkp','hkp'),
+ ('hl','hl'),
+ ('hlg','hlg'),
+ ('hls','hls'),
+ ('hlsearch','hlsearch'),
+ ('ic','ic'),
+ ('icon','icon'),
+ ('iconstring','iconstring'),
+ ('ignorecase','ignorecase'),
+ ('im','im'),
+ ('imactivatefunc','imactivatefunc'),
+ ('imactivatekey','imactivatekey'),
+ ('imaf','imaf'),
+ ('imak','imak'),
+ ('imc','imc'),
+ ('imcmdline','imcmdline'),
+ ('imd','imd'),
+ ('imdisable','imdisable'),
+ ('imi','imi'),
+ ('iminsert','iminsert'),
+ ('ims','ims'),
+ ('imsearch','imsearch'),
+ ('imsf','imsf'),
+ ('imstatusfunc','imstatusfunc'),
+ ('inc','inc'),
+ ('include','include'),
+ ('includeexpr','includeexpr'),
+ ('incsearch','incsearch'),
+ ('inde','inde'),
+ ('indentexpr','indentexpr'),
+ ('indentkeys','indentkeys'),
+ ('indk','indk'),
+ ('inex','inex'),
+ ('inf','inf'),
+ ('infercase','infercase'),
+ ('inoremap','inoremap'),
+ ('insertmode','insertmode'),
+ ('invacd','invacd'),
+ ('invai','invai'),
+ ('invakm','invakm'),
+ ('invallowrevins','invallowrevins'),
+ ('invaltkeymap','invaltkeymap'),
+ ('invanti','invanti'),
+ ('invantialias','invantialias'),
+ ('invar','invar'),
+ ('invarab','invarab'),
+ ('invarabic','invarabic'),
+ ('invarabicshape','invarabicshape'),
+ ('invari','invari'),
+ ('invarshape','invarshape'),
+ ('invautochdir','invautochdir'),
+ ('invautoindent','invautoindent'),
+ ('invautoread','invautoread'),
+ ('invautowrite','invautowrite'),
+ ('invautowriteall','invautowriteall'),
+ ('invaw','invaw'),
+ ('invawa','invawa'),
+ ('invbackup','invbackup'),
+ ('invballooneval','invballooneval'),
+ ('invbeval','invbeval'),
+ ('invbin','invbin'),
+ ('invbinary','invbinary'),
+ ('invbiosk','invbiosk'),
+ ('invbioskey','invbioskey'),
+ ('invbk','invbk'),
+ ('invbl','invbl'),
+ ('invbomb','invbomb'),
+ ('invbuflisted','invbuflisted'),
+ ('invcf','invcf'),
+ ('invci','invci'),
+ ('invcin','invcin'),
+ ('invcindent','invcindent'),
+ ('invcompatible','invcompatible'),
+ ('invconfirm','invconfirm'),
+ ('invconsk','invconsk'),
+ ('invconskey','invconskey'),
+ ('invcopyindent','invcopyindent'),
+ ('invcp','invcp'),
+ ('invcrb','invcrb'),
+ ('invcscoperelative','invcscoperelative'),
+ ('invcscopetag','invcscopetag'),
+ ('invcscopeverbose','invcscopeverbose'),
+ ('invcsre','invcsre'),
+ ('invcst','invcst'),
+ ('invcsverb','invcsverb'),
+ ('invcuc','invcuc'),
+ ('invcul','invcul'),
+ ('invcursorbind','invcursorbind'),
+ ('invcursorcolumn','invcursorcolumn'),
+ ('invcursorline','invcursorline'),
+ ('invdeco','invdeco'),
+ ('invdelcombine','invdelcombine'),
+ ('invdg','invdg'),
+ ('invdiff','invdiff'),
+ ('invdigraph','invdigraph'),
+ ('invea','invea'),
+ ('inveb','inveb'),
+ ('inved','inved'),
+ ('invedcompatible','invedcompatible'),
+ ('invek','invek'),
+ ('invendofline','invendofline'),
+ ('inveol','inveol'),
+ ('invequalalways','invequalalways'),
+ ('inverrorbells','inverrorbells'),
+ ('invesckeys','invesckeys'),
+ ('invet','invet'),
+ ('invex','invex'),
+ ('invexpandtab','invexpandtab'),
+ ('invexrc','invexrc'),
+ ('invfen','invfen'),
+ ('invfic','invfic'),
+ ('invfileignorecase','invfileignorecase'),
+ ('invfk','invfk'),
+ ('invfkmap','invfkmap'),
+ ('invfoldenable','invfoldenable'),
+ ('invgd','invgd'),
+ ('invgdefault','invgdefault'),
+ ('invguipty','invguipty'),
+ ('invhid','invhid'),
+ ('invhidden','invhidden'),
+ ('invhk','invhk'),
+ ('invhkmap','invhkmap'),
+ ('invhkmapp','invhkmapp'),
+ ('invhkp','invhkp'),
+ ('invhls','invhls'),
+ ('invhlsearch','invhlsearch'),
+ ('invic','invic'),
+ ('invicon','invicon'),
+ ('invignorecase','invignorecase'),
+ ('invim','invim'),
+ ('invimc','invimc'),
+ ('invimcmdline','invimcmdline'),
+ ('invimd','invimd'),
+ ('invimdisable','invimdisable'),
+ ('invincsearch','invincsearch'),
+ ('invinf','invinf'),
+ ('invinfercase','invinfercase'),
+ ('invinsertmode','invinsertmode'),
+ ('invis','invis'),
+ ('invjoinspaces','invjoinspaces'),
+ ('invjs','invjs'),
+ ('invlazyredraw','invlazyredraw'),
+ ('invlbr','invlbr'),
+ ('invlinebreak','invlinebreak'),
+ ('invlisp','invlisp'),
+ ('invlist','invlist'),
+ ('invloadplugins','invloadplugins'),
+ ('invlpl','invlpl'),
+ ('invlz','invlz'),
+ ('invma','invma'),
+ ('invmacatsui','invmacatsui'),
+ ('invmagic','invmagic'),
+ ('invmh','invmh'),
+ ('invml','invml'),
+ ('invmod','invmod'),
+ ('invmodeline','invmodeline'),
+ ('invmodifiable','invmodifiable'),
+ ('invmodified','invmodified'),
+ ('invmore','invmore'),
+ ('invmousef','invmousef'),
+ ('invmousefocus','invmousefocus'),
+ ('invmousehide','invmousehide'),
+ ('invnu','invnu'),
+ ('invnumber','invnumber'),
+ ('invodev','invodev'),
+ ('invopendevice','invopendevice'),
+ ('invpaste','invpaste'),
+ ('invpi','invpi'),
+ ('invpreserveindent','invpreserveindent'),
+ ('invpreviewwindow','invpreviewwindow'),
+ ('invprompt','invprompt'),
+ ('invpvw','invpvw'),
+ ('invreadonly','invreadonly'),
+ ('invrelativenumber','invrelativenumber'),
+ ('invremap','invremap'),
+ ('invrestorescreen','invrestorescreen'),
+ ('invrevins','invrevins'),
+ ('invri','invri'),
+ ('invrightleft','invrightleft'),
+ ('invrl','invrl'),
+ ('invrnu','invrnu'),
+ ('invro','invro'),
+ ('invrs','invrs'),
+ ('invru','invru'),
+ ('invruler','invruler'),
+ ('invsb','invsb'),
+ ('invsc','invsc'),
+ ('invscb','invscb'),
+ ('invscrollbind','invscrollbind'),
+ ('invscs','invscs'),
+ ('invsecure','invsecure'),
+ ('invsft','invsft'),
+ ('invshellslash','invshellslash'),
+ ('invshelltemp','invshelltemp'),
+ ('invshiftround','invshiftround'),
+ ('invshortname','invshortname'),
+ ('invshowcmd','invshowcmd'),
+ ('invshowfulltag','invshowfulltag'),
+ ('invshowmatch','invshowmatch'),
+ ('invshowmode','invshowmode'),
+ ('invsi','invsi'),
+ ('invsm','invsm'),
+ ('invsmartcase','invsmartcase'),
+ ('invsmartindent','invsmartindent'),
+ ('invsmarttab','invsmarttab'),
+ ('invsmd','invsmd'),
+ ('invsn','invsn'),
+ ('invsol','invsol'),
+ ('invspell','invspell'),
+ ('invsplitbelow','invsplitbelow'),
+ ('invsplitright','invsplitright'),
+ ('invspr','invspr'),
+ ('invsr','invsr'),
+ ('invssl','invssl'),
+ ('invsta','invsta'),
+ ('invstartofline','invstartofline'),
+ ('invstmp','invstmp'),
+ ('invswapfile','invswapfile'),
+ ('invswf','invswf'),
+ ('invta','invta'),
+ ('invtagbsearch','invtagbsearch'),
+ ('invtagrelative','invtagrelative'),
+ ('invtagstack','invtagstack'),
+ ('invtbi','invtbi'),
+ ('invtbidi','invtbidi'),
+ ('invtbs','invtbs'),
+ ('invtermbidi','invtermbidi'),
+ ('invterse','invterse'),
+ ('invtextauto','invtextauto'),
+ ('invtextmode','invtextmode'),
+ ('invtf','invtf'),
+ ('invtgst','invtgst'),
+ ('invtildeop','invtildeop'),
+ ('invtimeout','invtimeout'),
+ ('invtitle','invtitle'),
+ ('invto','invto'),
+ ('invtop','invtop'),
+ ('invtr','invtr'),
+ ('invttimeout','invttimeout'),
+ ('invttybuiltin','invttybuiltin'),
+ ('invttyfast','invttyfast'),
+ ('invtx','invtx'),
+ ('invudf','invudf'),
+ ('invundofile','invundofile'),
+ ('invvb','invvb'),
+ ('invvisualbell','invvisualbell'),
+ ('invwa','invwa'),
+ ('invwarn','invwarn'),
+ ('invwb','invwb'),
+ ('invweirdinvert','invweirdinvert'),
+ ('invwfh','invwfh'),
+ ('invwfw','invwfw'),
+ ('invwic','invwic'),
+ ('invwildignorecase','invwildignorecase'),
+ ('invwildmenu','invwildmenu'),
+ ('invwinfixheight','invwinfixheight'),
+ ('invwinfixwidth','invwinfixwidth'),
+ ('invwiv','invwiv'),
+ ('invwmnu','invwmnu'),
+ ('invwrap','invwrap'),
+ ('invwrapscan','invwrapscan'),
+ ('invwrite','invwrite'),
+ ('invwriteany','invwriteany'),
+ ('invwritebackup','invwritebackup'),
+ ('invws','invws'),
+ ('is','is'),
+ ('isf','isf'),
+ ('isfname','isfname'),
+ ('isi','isi'),
+ ('isident','isident'),
+ ('isk','isk'),
+ ('iskeyword','iskeyword'),
+ ('isp','isp'),
+ ('isprint','isprint'),
+ ('joinspaces','joinspaces'),
+ ('js','js'),
+ ('key','key'),
+ ('keymap','keymap'),
+ ('keymodel','keymodel'),
+ ('keywordprg','keywordprg'),
+ ('km','km'),
+ ('kmp','kmp'),
+ ('kp','kp'),
+ ('langmap','langmap'),
+ ('langmenu','langmenu'),
+ ('laststatus','laststatus'),
+ ('lazyredraw','lazyredraw'),
+ ('lbr','lbr'),
+ ('lcs','lcs'),
+ ('linebreak','linebreak'),
+ ('lines','lines'),
+ ('linespace','linespace'),
+ ('lisp','lisp'),
+ ('lispwords','lispwords'),
+ ('list','list'),
+ ('listchars','listchars'),
+ ('lm','lm'),
+ ('lmap','lmap'),
+ ('loadplugins','loadplugins'),
+ ('lpl','lpl'),
+ ('ls','ls'),
+ ('lsp','lsp'),
+ ('lw','lw'),
+ ('lz','lz'),
+ ('ma','ma'),
+ ('macatsui','macatsui'),
+ ('magic','magic'),
+ ('makeef','makeef'),
+ ('makeprg','makeprg'),
+ ('mat','mat'),
+ ('matchpairs','matchpairs'),
+ ('matchtime','matchtime'),
+ ('maxcombine','maxcombine'),
+ ('maxfuncdepth','maxfuncdepth'),
+ ('maxmapdepth','maxmapdepth'),
+ ('maxmem','maxmem'),
+ ('maxmempattern','maxmempattern'),
+ ('maxmemtot','maxmemtot'),
+ ('mco','mco'),
+ ('mef','mef'),
+ ('menuitems','menuitems'),
+ ('mfd','mfd'),
+ ('mh','mh'),
+ ('mis','mis'),
+ ('mkspellmem','mkspellmem'),
+ ('ml','ml'),
+ ('mls','mls'),
+ ('mm','mm'),
+ ('mmd','mmd'),
+ ('mmp','mmp'),
+ ('mmt','mmt'),
+ ('mod','mod'),
+ ('modeline','modeline'),
+ ('modelines','modelines'),
+ ('modifiable','modifiable'),
+ ('modified','modified'),
+ ('more','more'),
+ ('mouse','mouse'),
+ ('mousef','mousef'),
+ ('mousefocus','mousefocus'),
+ ('mousehide','mousehide'),
+ ('mousem','mousem'),
+ ('mousemodel','mousemodel'),
+ ('mouses','mouses'),
+ ('mouseshape','mouseshape'),
+ ('mouset','mouset'),
+ ('mousetime','mousetime'),
+ ('mp','mp'),
+ ('mps','mps'),
+ ('msm','msm'),
+ ('mzq','mzq'),
+ ('mzquantum','mzquantum'),
+ ('nf','nf'),
+ ('nnoremap','nnoremap'),
+ ('noacd','noacd'),
+ ('noai','noai'),
+ ('noakm','noakm'),
+ ('noallowrevins','noallowrevins'),
+ ('noaltkeymap','noaltkeymap'),
+ ('noanti','noanti'),
+ ('noantialias','noantialias'),
+ ('noar','noar'),
+ ('noarab','noarab'),
+ ('noarabic','noarabic'),
+ ('noarabicshape','noarabicshape'),
+ ('noari','noari'),
+ ('noarshape','noarshape'),
+ ('noautochdir','noautochdir'),
+ ('noautoindent','noautoindent'),
+ ('noautoread','noautoread'),
+ ('noautowrite','noautowrite'),
+ ('noautowriteall','noautowriteall'),
+ ('noaw','noaw'),
+ ('noawa','noawa'),
+ ('nobackup','nobackup'),
+ ('noballooneval','noballooneval'),
+ ('nobeval','nobeval'),
+ ('nobin','nobin'),
+ ('nobinary','nobinary'),
+ ('nobiosk','nobiosk'),
+ ('nobioskey','nobioskey'),
+ ('nobk','nobk'),
+ ('nobl','nobl'),
+ ('nobomb','nobomb'),
+ ('nobuflisted','nobuflisted'),
+ ('nocf','nocf'),
+ ('noci','noci'),
+ ('nocin','nocin'),
+ ('nocindent','nocindent'),
+ ('nocompatible','nocompatible'),
+ ('noconfirm','noconfirm'),
+ ('noconsk','noconsk'),
+ ('noconskey','noconskey'),
+ ('nocopyindent','nocopyindent'),
+ ('nocp','nocp'),
+ ('nocrb','nocrb'),
+ ('nocscoperelative','nocscoperelative'),
+ ('nocscopetag','nocscopetag'),
+ ('nocscopeverbose','nocscopeverbose'),
+ ('nocsre','nocsre'),
+ ('nocst','nocst'),
+ ('nocsverb','nocsverb'),
+ ('nocuc','nocuc'),
+ ('nocul','nocul'),
+ ('nocursorbind','nocursorbind'),
+ ('nocursorcolumn','nocursorcolumn'),
+ ('nocursorline','nocursorline'),
+ ('nodeco','nodeco'),
+ ('nodelcombine','nodelcombine'),
+ ('nodg','nodg'),
+ ('nodiff','nodiff'),
+ ('nodigraph','nodigraph'),
+ ('noea','noea'),
+ ('noeb','noeb'),
+ ('noed','noed'),
+ ('noedcompatible','noedcompatible'),
+ ('noek','noek'),
+ ('noendofline','noendofline'),
+ ('noeol','noeol'),
+ ('noequalalways','noequalalways'),
+ ('noerrorbells','noerrorbells'),
+ ('noesckeys','noesckeys'),
+ ('noet','noet'),
+ ('noex','noex'),
+ ('noexpandtab','noexpandtab'),
+ ('noexrc','noexrc'),
+ ('nofen','nofen'),
+ ('nofic','nofic'),
+ ('nofileignorecase','nofileignorecase'),
+ ('nofk','nofk'),
+ ('nofkmap','nofkmap'),
+ ('nofoldenable','nofoldenable'),
+ ('nogd','nogd'),
+ ('nogdefault','nogdefault'),
+ ('noguipty','noguipty'),
+ ('nohid','nohid'),
+ ('nohidden','nohidden'),
+ ('nohk','nohk'),
+ ('nohkmap','nohkmap'),
+ ('nohkmapp','nohkmapp'),
+ ('nohkp','nohkp'),
+ ('nohls','nohls'),
+ ('nohlsearch','nohlsearch'),
+ ('noic','noic'),
+ ('noicon','noicon'),
+ ('noignorecase','noignorecase'),
+ ('noim','noim'),
+ ('noimc','noimc'),
+ ('noimcmdline','noimcmdline'),
+ ('noimd','noimd'),
+ ('noimdisable','noimdisable'),
+ ('noincsearch','noincsearch'),
+ ('noinf','noinf'),
+ ('noinfercase','noinfercase'),
+ ('noinsertmode','noinsertmode'),
+ ('nois','nois'),
+ ('nojoinspaces','nojoinspaces'),
+ ('nojs','nojs'),
+ ('nolazyredraw','nolazyredraw'),
+ ('nolbr','nolbr'),
+ ('nolinebreak','nolinebreak'),
+ ('nolisp','nolisp'),
+ ('nolist','nolist'),
+ ('noloadplugins','noloadplugins'),
+ ('nolpl','nolpl'),
+ ('nolz','nolz'),
+ ('noma','noma'),
+ ('nomacatsui','nomacatsui'),
+ ('nomagic','nomagic'),
+ ('nomh','nomh'),
+ ('noml','noml'),
+ ('nomod','nomod'),
+ ('nomodeline','nomodeline'),
+ ('nomodifiable','nomodifiable'),
+ ('nomodified','nomodified'),
+ ('nomore','nomore'),
+ ('nomousef','nomousef'),
+ ('nomousefocus','nomousefocus'),
+ ('nomousehide','nomousehide'),
+ ('nonu','nonu'),
+ ('nonumber','nonumber'),
+ ('noodev','noodev'),
+ ('noopendevice','noopendevice'),
+ ('nopaste','nopaste'),
+ ('nopi','nopi'),
+ ('nopreserveindent','nopreserveindent'),
+ ('nopreviewwindow','nopreviewwindow'),
+ ('noprompt','noprompt'),
+ ('nopvw','nopvw'),
+ ('noreadonly','noreadonly'),
+ ('norelativenumber','norelativenumber'),
+ ('noremap','noremap'),
+ ('norestorescreen','norestorescreen'),
+ ('norevins','norevins'),
+ ('nori','nori'),
+ ('norightleft','norightleft'),
+ ('norl','norl'),
+ ('nornu','nornu'),
+ ('noro','noro'),
+ ('nors','nors'),
+ ('noru','noru'),
+ ('noruler','noruler'),
+ ('nosb','nosb'),
+ ('nosc','nosc'),
+ ('noscb','noscb'),
+ ('noscrollbind','noscrollbind'),
+ ('noscs','noscs'),
+ ('nosecure','nosecure'),
+ ('nosft','nosft'),
+ ('noshellslash','noshellslash'),
+ ('noshelltemp','noshelltemp'),
+ ('noshiftround','noshiftround'),
+ ('noshortname','noshortname'),
+ ('noshowcmd','noshowcmd'),
+ ('noshowfulltag','noshowfulltag'),
+ ('noshowmatch','noshowmatch'),
+ ('noshowmode','noshowmode'),
+ ('nosi','nosi'),
+ ('nosm','nosm'),
+ ('nosmartcase','nosmartcase'),
+ ('nosmartindent','nosmartindent'),
+ ('nosmarttab','nosmarttab'),
+ ('nosmd','nosmd'),
+ ('nosn','nosn'),
+ ('nosol','nosol'),
+ ('nospell','nospell'),
+ ('nosplitbelow','nosplitbelow'),
+ ('nosplitright','nosplitright'),
+ ('nospr','nospr'),
+ ('nosr','nosr'),
+ ('nossl','nossl'),
+ ('nosta','nosta'),
+ ('nostartofline','nostartofline'),
+ ('nostmp','nostmp'),
+ ('noswapfile','noswapfile'),
+ ('noswf','noswf'),
+ ('nota','nota'),
+ ('notagbsearch','notagbsearch'),
+ ('notagrelative','notagrelative'),
+ ('notagstack','notagstack'),
+ ('notbi','notbi'),
+ ('notbidi','notbidi'),
+ ('notbs','notbs'),
+ ('notermbidi','notermbidi'),
+ ('noterse','noterse'),
+ ('notextauto','notextauto'),
+ ('notextmode','notextmode'),
+ ('notf','notf'),
+ ('notgst','notgst'),
+ ('notildeop','notildeop'),
+ ('notimeout','notimeout'),
+ ('notitle','notitle'),
+ ('noto','noto'),
+ ('notop','notop'),
+ ('notr','notr'),
+ ('nottimeout','nottimeout'),
+ ('nottybuiltin','nottybuiltin'),
+ ('nottyfast','nottyfast'),
+ ('notx','notx'),
+ ('noudf','noudf'),
+ ('noundofile','noundofile'),
+ ('novb','novb'),
+ ('novisualbell','novisualbell'),
+ ('nowa','nowa'),
+ ('nowarn','nowarn'),
+ ('nowb','nowb'),
+ ('noweirdinvert','noweirdinvert'),
+ ('nowfh','nowfh'),
+ ('nowfw','nowfw'),
+ ('nowic','nowic'),
+ ('nowildignorecase','nowildignorecase'),
+ ('nowildmenu','nowildmenu'),
+ ('nowinfixheight','nowinfixheight'),
+ ('nowinfixwidth','nowinfixwidth'),
+ ('nowiv','nowiv'),
+ ('nowmnu','nowmnu'),
+ ('nowrap','nowrap'),
+ ('nowrapscan','nowrapscan'),
+ ('nowrite','nowrite'),
+ ('nowriteany','nowriteany'),
+ ('nowritebackup','nowritebackup'),
+ ('nows','nows'),
+ ('nrformats','nrformats'),
+ ('nu','nu'),
+ ('number','number'),
+ ('numberwidth','numberwidth'),
+ ('nuw','nuw'),
+ ('odev','odev'),
+ ('oft','oft'),
+ ('ofu','ofu'),
+ ('omnifunc','omnifunc'),
+ ('opendevice','opendevice'),
+ ('operatorfunc','operatorfunc'),
+ ('opfunc','opfunc'),
+ ('osfiletype','osfiletype'),
+ ('pa','pa'),
+ ('para','para'),
+ ('paragraphs','paragraphs'),
+ ('paste','paste'),
+ ('pastetoggle','pastetoggle'),
+ ('patchexpr','patchexpr'),
+ ('patchmode','patchmode'),
+ ('path','path'),
+ ('pdev','pdev'),
+ ('penc','penc'),
+ ('pex','pex'),
+ ('pexpr','pexpr'),
+ ('pfn','pfn'),
+ ('ph','ph'),
+ ('pheader','pheader'),
+ ('pi','pi'),
+ ('pm','pm'),
+ ('pmbcs','pmbcs'),
+ ('pmbfn','pmbfn'),
+ ('popt','popt'),
+ ('preserveindent','preserveindent'),
+ ('previewheight','previewheight'),
+ ('previewwindow','previewwindow'),
+ ('printdevice','printdevice'),
+ ('printencoding','printencoding'),
+ ('printexpr','printexpr'),
+ ('printfont','printfont'),
+ ('printheader','printheader'),
+ ('printmbcharset','printmbcharset'),
+ ('printmbfont','printmbfont'),
+ ('printoptions','printoptions'),
+ ('prompt','prompt'),
+ ('pt','pt'),
+ ('pumheight','pumheight'),
+ ('pvh','pvh'),
+ ('pvw','pvw'),
+ ('qe','qe'),
+ ('quoteescape','quoteescape'),
+ ('rdt','rdt'),
+ ('re','re'),
+ ('readonly','readonly'),
+ ('redrawtime','redrawtime'),
+ ('regexpengine','regexpengine'),
+ ('relativenumber','relativenumber'),
+ ('remap','remap'),
+ ('report','report'),
+ ('restorescreen','restorescreen'),
+ ('revins','revins'),
+ ('ri','ri'),
+ ('rightleft','rightleft'),
+ ('rightleftcmd','rightleftcmd'),
+ ('rl','rl'),
+ ('rlc','rlc'),
+ ('rnu','rnu'),
+ ('ro','ro'),
+ ('rs','rs'),
+ ('rtp','rtp'),
+ ('ru','ru'),
+ ('ruf','ruf'),
+ ('ruler','ruler'),
+ ('rulerformat','rulerformat'),
+ ('runtimepath','runtimepath'),
+ ('sb','sb'),
+ ('sbo','sbo'),
+ ('sbr','sbr'),
+ ('sc','sc'),
+ ('scb','scb'),
+ ('scr','scr'),
+ ('scroll','scroll'),
+ ('scrollbind','scrollbind'),
+ ('scrolljump','scrolljump'),
+ ('scrolloff','scrolloff'),
+ ('scrollopt','scrollopt'),
+ ('scs','scs'),
+ ('sect','sect'),
+ ('sections','sections'),
+ ('secure','secure'),
+ ('sel','sel'),
+ ('selection','selection'),
+ ('selectmode','selectmode'),
+ ('sessionoptions','sessionoptions'),
+ ('sft','sft'),
+ ('sh','sh'),
+ ('shcf','shcf'),
+ ('shell','shell'),
+ ('shellcmdflag','shellcmdflag'),
+ ('shellpipe','shellpipe'),
+ ('shellquote','shellquote'),
+ ('shellredir','shellredir'),
+ ('shellslash','shellslash'),
+ ('shelltemp','shelltemp'),
+ ('shelltype','shelltype'),
+ ('shellxescape','shellxescape'),
+ ('shellxquote','shellxquote'),
+ ('shiftround','shiftround'),
+ ('shiftwidth','shiftwidth'),
+ ('shm','shm'),
+ ('shortmess','shortmess'),
+ ('shortname','shortname'),
+ ('showbreak','showbreak'),
+ ('showcmd','showcmd'),
+ ('showfulltag','showfulltag'),
+ ('showmatch','showmatch'),
+ ('showmode','showmode'),
+ ('showtabline','showtabline'),
+ ('shq','shq'),
+ ('si','si'),
+ ('sidescroll','sidescroll'),
+ ('sidescrolloff','sidescrolloff'),
+ ('siso','siso'),
+ ('sj','sj'),
+ ('slm','slm'),
+ ('sm','sm'),
+ ('smartcase','smartcase'),
+ ('smartindent','smartindent'),
+ ('smarttab','smarttab'),
+ ('smc','smc'),
+ ('smd','smd'),
+ ('sn','sn'),
+ ('so','so'),
+ ('softtabstop','softtabstop'),
+ ('sol','sol'),
+ ('sp','sp'),
+ ('spc','spc'),
+ ('spell','spell'),
+ ('spellcapcheck','spellcapcheck'),
+ ('spellfile','spellfile'),
+ ('spelllang','spelllang'),
+ ('spellsuggest','spellsuggest'),
+ ('spf','spf'),
+ ('spl','spl'),
+ ('splitbelow','splitbelow'),
+ ('splitright','splitright'),
+ ('spr','spr'),
+ ('sps','sps'),
+ ('sr','sr'),
+ ('srr','srr'),
+ ('ss','ss'),
+ ('ssl','ssl'),
+ ('ssop','ssop'),
+ ('st','st'),
+ ('sta','sta'),
+ ('stal','stal'),
+ ('startofline','startofline'),
+ ('statusline','statusline'),
+ ('stl','stl'),
+ ('stmp','stmp'),
+ ('sts','sts'),
+ ('su','su'),
+ ('sua','sua'),
+ ('suffixes','suffixes'),
+ ('suffixesadd','suffixesadd'),
+ ('sw','sw'),
+ ('swapfile','swapfile'),
+ ('swapsync','swapsync'),
+ ('swb','swb'),
+ ('swf','swf'),
+ ('switchbuf','switchbuf'),
+ ('sws','sws'),
+ ('sxe','sxe'),
+ ('sxq','sxq'),
+ ('syn','syn'),
+ ('synmaxcol','synmaxcol'),
+ ('syntax','syntax'),
+ ('t_AB','t_AB'),
+ ('t_AF','t_AF'),
+ ('t_AL','t_AL'),
+ ('t_CS','t_CS'),
+ ('t_CV','t_CV'),
+ ('t_Ce','t_Ce'),
+ ('t_Co','t_Co'),
+ ('t_Cs','t_Cs'),
+ ('t_DL','t_DL'),
+ ('t_EI','t_EI'),
+ ('t_F1','t_F1'),
+ ('t_F2','t_F2'),
+ ('t_F3','t_F3'),
+ ('t_F4','t_F4'),
+ ('t_F5','t_F5'),
+ ('t_F6','t_F6'),
+ ('t_F7','t_F7'),
+ ('t_F8','t_F8'),
+ ('t_F9','t_F9'),
+ ('t_IE','t_IE'),
+ ('t_IS','t_IS'),
+ ('t_K1','t_K1'),
+ ('t_K3','t_K3'),
+ ('t_K4','t_K4'),
+ ('t_K5','t_K5'),
+ ('t_K6','t_K6'),
+ ('t_K7','t_K7'),
+ ('t_K8','t_K8'),
+ ('t_K9','t_K9'),
+ ('t_KA','t_KA'),
+ ('t_KB','t_KB'),
+ ('t_KC','t_KC'),
+ ('t_KD','t_KD'),
+ ('t_KE','t_KE'),
+ ('t_KF','t_KF'),
+ ('t_KG','t_KG'),
+ ('t_KH','t_KH'),
+ ('t_KI','t_KI'),
+ ('t_KJ','t_KJ'),
+ ('t_KK','t_KK'),
+ ('t_KL','t_KL'),
+ ('t_RI','t_RI'),
+ ('t_RV','t_RV'),
+ ('t_SI','t_SI'),
+ ('t_Sb','t_Sb'),
+ ('t_Sf','t_Sf'),
+ ('t_WP','t_WP'),
+ ('t_WS','t_WS'),
+ ('t_ZH','t_ZH'),
+ ('t_ZR','t_ZR'),
+ ('t_al','t_al'),
+ ('t_bc','t_bc'),
+ ('t_cd','t_cd'),
+ ('t_ce','t_ce'),
+ ('t_cl','t_cl'),
+ ('t_cm','t_cm'),
+ ('t_cs','t_cs'),
+ ('t_da','t_da'),
+ ('t_db','t_db'),
+ ('t_dl','t_dl'),
+ ('t_fs','t_fs'),
+ ('t_k1','t_k1'),
+ ('t_k2','t_k2'),
+ ('t_k3','t_k3'),
+ ('t_k4','t_k4'),
+ ('t_k5','t_k5'),
+ ('t_k6','t_k6'),
+ ('t_k7','t_k7'),
+ ('t_k8','t_k8'),
+ ('t_k9','t_k9'),
+ ('t_kB','t_kB'),
+ ('t_kD','t_kD'),
+ ('t_kI','t_kI'),
+ ('t_kN','t_kN'),
+ ('t_kP','t_kP'),
+ ('t_kb','t_kb'),
+ ('t_kd','t_kd'),
+ ('t_ke','t_ke'),
+ ('t_kh','t_kh'),
+ ('t_kl','t_kl'),
+ ('t_kr','t_kr'),
+ ('t_ks','t_ks'),
+ ('t_ku','t_ku'),
+ ('t_le','t_le'),
+ ('t_mb','t_mb'),
+ ('t_md','t_md'),
+ ('t_me','t_me'),
+ ('t_mr','t_mr'),
+ ('t_ms','t_ms'),
+ ('t_nd','t_nd'),
+ ('t_op','t_op'),
+ ('t_se','t_se'),
+ ('t_so','t_so'),
+ ('t_sr','t_sr'),
+ ('t_te','t_te'),
+ ('t_ti','t_ti'),
+ ('t_ts','t_ts'),
+ ('t_u7','t_u7'),
+ ('t_ue','t_ue'),
+ ('t_us','t_us'),
+ ('t_ut','t_ut'),
+ ('t_vb','t_vb'),
+ ('t_ve','t_ve'),
+ ('t_vi','t_vi'),
+ ('t_vs','t_vs'),
+ ('t_xs','t_xs'),
+ ('ta','ta'),
+ ('tabline','tabline'),
+ ('tabpagemax','tabpagemax'),
+ ('tabstop','tabstop'),
+ ('tag','tag'),
+ ('tagbsearch','tagbsearch'),
+ ('taglength','taglength'),
+ ('tagrelative','tagrelative'),
+ ('tags','tags'),
+ ('tagstack','tagstack'),
+ ('tal','tal'),
+ ('tb','tb'),
+ ('tbi','tbi'),
+ ('tbidi','tbidi'),
+ ('tbis','tbis'),
+ ('tbs','tbs'),
+ ('tenc','tenc'),
+ ('term','term'),
+ ('termbidi','termbidi'),
+ ('termencoding','termencoding'),
+ ('terse','terse'),
+ ('textauto','textauto'),
+ ('textmode','textmode'),
+ ('textwidth','textwidth'),
+ ('tf','tf'),
+ ('tgst','tgst'),
+ ('thesaurus','thesaurus'),
+ ('tildeop','tildeop'),
+ ('timeout','timeout'),
+ ('timeoutlen','timeoutlen'),
+ ('title','title'),
+ ('titlelen','titlelen'),
+ ('titleold','titleold'),
+ ('titlestring','titlestring'),
+ ('tl','tl'),
+ ('tm','tm'),
+ ('to','to'),
+ ('toolbar','toolbar'),
+ ('toolbariconsize','toolbariconsize'),
+ ('top','top'),
+ ('tpm','tpm'),
+ ('tr','tr'),
+ ('ts','ts'),
+ ('tsl','tsl'),
+ ('tsr','tsr'),
+ ('ttimeout','ttimeout'),
+ ('ttimeoutlen','ttimeoutlen'),
+ ('ttm','ttm'),
+ ('tty','tty'),
+ ('ttybuiltin','ttybuiltin'),
+ ('ttyfast','ttyfast'),
+ ('ttym','ttym'),
+ ('ttymouse','ttymouse'),
+ ('ttyscroll','ttyscroll'),
+ ('ttytype','ttytype'),
+ ('tw','tw'),
+ ('tx','tx'),
+ ('uc','uc'),
+ ('udf','udf'),
+ ('udir','udir'),
+ ('ul','ul'),
+ ('undodir','undodir'),
+ ('undofile','undofile'),
+ ('undolevels','undolevels'),
+ ('undoreload','undoreload'),
+ ('updatecount','updatecount'),
+ ('updatetime','updatetime'),
+ ('ur','ur'),
+ ('ut','ut'),
+ ('vb','vb'),
+ ('vbs','vbs'),
+ ('vdir','vdir'),
+ ('ve','ve'),
+ ('verbose','verbose'),
+ ('verbosefile','verbosefile'),
+ ('vfile','vfile'),
+ ('vi','vi'),
+ ('viewdir','viewdir'),
+ ('viewoptions','viewoptions'),
+ ('viminfo','viminfo'),
+ ('virtualedit','virtualedit'),
+ ('visualbell','visualbell'),
+ ('vnoremap','vnoremap'),
+ ('vop','vop'),
+ ('wa','wa'),
+ ('wak','wak'),
+ ('warn','warn'),
+ ('wb','wb'),
+ ('wc','wc'),
+ ('wcm','wcm'),
+ ('wd','wd'),
+ ('weirdinvert','weirdinvert'),
+ ('wfh','wfh'),
+ ('wfw','wfw'),
+ ('wh','wh'),
+ ('whichwrap','whichwrap'),
+ ('wi','wi'),
+ ('wic','wic'),
+ ('wig','wig'),
+ ('wildchar','wildchar'),
+ ('wildcharm','wildcharm'),
+ ('wildignore','wildignore'),
+ ('wildignorecase','wildignorecase'),
+ ('wildmenu','wildmenu'),
+ ('wildmode','wildmode'),
+ ('wildoptions','wildoptions'),
+ ('wim','wim'),
+ ('winaltkeys','winaltkeys'),
+ ('window','window'),
+ ('winfixheight','winfixheight'),
+ ('winfixwidth','winfixwidth'),
+ ('winheight','winheight'),
+ ('winminheight','winminheight'),
+ ('winminwidth','winminwidth'),
+ ('winwidth','winwidth'),
+ ('wiv','wiv'),
+ ('wiw','wiw'),
+ ('wm','wm'),
+ ('wmh','wmh'),
+ ('wmnu','wmnu'),
+ ('wmw','wmw'),
+ ('wop','wop'),
+ ('wrap','wrap'),
+ ('wrapmargin','wrapmargin'),
+ ('wrapscan','wrapscan'),
+ ('write','write'),
+ ('writeany','writeany'),
+ ('writebackup','writebackup'),
+ ('writedelay','writedelay'),
+ ('ws','ws'),
+ ('ww','ww'),
+ )
+ return var
+option = _getoption()
+
diff --git a/pygments/lexers/_vimbuiltins.py b/pygments/lexers/_vimbuiltins.py
deleted file mode 100644
index 9fc1b15e..00000000
--- a/pygments/lexers/_vimbuiltins.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Split up in multiple functions so it's importable by jython, which has a
-# per-method size limit.
-
-def _getauto():
- return [('BufAdd','BufAdd'),('BufCreate','BufCreate'),('BufDelete','BufDelete'),('BufEnter','BufEnter'),('BufFilePost','BufFilePost'),('BufFilePre','BufFilePre'),('BufHidden','BufHidden'),('BufLeave','BufLeave'),('BufNew','BufNew'),('BufNewFile','BufNewFile'),('BufRead','BufRead'),('BufReadCmd','BufReadCmd'),('BufReadPost','BufReadPost'),('BufReadPre','BufReadPre'),('BufUnload','BufUnload'),('BufWinEnter','BufWinEnter'),('BufWinLeave','BufWinLeave'),('BufWipeout','BufWipeout'),('BufWrite','BufWrite'),('BufWriteCmd','BufWriteCmd'),('BufWritePost','BufWritePost'),('BufWritePre','BufWritePre'),('Cmd','Cmd'),('CmdwinEnter','CmdwinEnter'),('CmdwinLeave','CmdwinLeave'),('ColorScheme','ColorScheme'),('CursorHold','CursorHold'),('CursorHoldI','CursorHoldI'),('CursorMoved','CursorMoved'),('CursorMovedI','CursorMovedI'),('EncodingChanged','EncodingChanged'),('FileAppendCmd','FileAppendCmd'),('FileAppendPost','FileAppendPost'),('FileAppendPre','FileAppendPre'),('FileChangedRO','FileChangedRO'),('FileChangedShell','FileChangedShell'),('FileChangedShellPost','FileChangedShellPost'),('FileEncoding','FileEncoding'),('FileReadCmd','FileReadCmd'),('FileReadPost','FileReadPost'),('FileReadPre','FileReadPre'),('FileType','FileType'),('FileWriteCmd','FileWriteCmd'),('FileWritePost','FileWritePost'),('FileWritePre','FileWritePre'),('FilterReadPost','FilterReadPost'),('FilterReadPre','FilterReadPre'),('FilterWritePost','FilterWritePost'),('FilterWritePre','FilterWritePre'),('FocusGained','FocusGained'),('FocusLost','FocusLost'),('FuncUndefined','FuncUndefined'),('GUIEnter','GUIEnter'),('GUIFailed','GUIFailed'),('InsertChange','InsertChange'),('InsertCharPre','InsertCharPre'),('InsertEnter','InsertEnter'),('InsertLeave','InsertLeave'),('MenuPopup','MenuPopup'),('QuickFixCmdPost','QuickFixCmdPost'),('QuickFixCmdPre','QuickFixCmdPre'),('RemoteReply','RemoteReply'),('SessionLoadPost','SessionLoadPost'),('ShellCmdPost','ShellCmdPost'),('ShellFilterPost','ShellFilterPost'),('SourceCmd','SourceCmd'),('SourcePre','SourcePre'),('SpellFileMissing','SpellFileMissing'),('StdinReadPost','StdinReadPost'),('StdinReadPre','StdinReadPre'),('SwapExists','SwapExists'),('Syntax','Syntax'),('TabEnter','TabEnter'),('TabLeave','TabLeave'),('TermChanged','TermChanged'),('TermResponse','TermResponse'),('User','User'),('UserGettingBored','UserGettingBored'),('VimEnter','VimEnter'),('VimLeave','VimLeave'),('VimLeavePre','VimLeavePre'),('VimResized','VimResized'),('WinEnter','WinEnter'),('WinLeave','WinLeave'),('event','event')]
-def _getcommand():
- return [('Allargs','Allargs'),('DiffOrig','DiffOrig'),('Error','Error'),('Man','Man'),('MyCommand','MyCommand'),('Mycmd','Mycmd'),('N','N'),('N','Next'),('P','P'),('P','Print'),('Ren','Ren'),('Rena','Rena'),('Renu','Renu'),('TOhtml','TOhtml'),('X','X'),('XMLent','XMLent'),('XMLns','XMLns'),('a','a'),('ab','ab'),('abc','abclear'),('abo','aboveleft'),('al','all'),('ar','ar'),('ar','args'),('arga','argadd'),('argd','argdelete'),('argdo','argdo'),('arge','argedit'),('argg','argglobal'),('argl','arglocal'),('argu','argument'),('as','ascii'),('au','au'),('b','buffer'),('bN','bNext'),('ba','ball'),('bad','badd'),('bar','bar'),('bd','bdelete'),('bel','belowright'),('bf','bfirst'),('bl','blast'),('bm','bmodified'),('bn','bnext'),('bo','botright'),('bp','bprevious'),('br','br'),('br','brewind'),('brea','break'),('breaka','breakadd'),('breakd','breakdel'),('breakl','breaklist'),('bro','browse'),('browseset','browseset'),('bu','bu'),('buf','buf'),('bufdo','bufdo'),('buffers','buffers'),('bun','bunload'),('bw','bwipeout'),('c','c'),('c','change'),('cN','cN'),('cN','cNext'),('cNf','cNf'),('cNf','cNfile'),('cabc','cabclear'),('cad','cad'),('cad','caddexpr'),('caddb','caddbuffer'),('caddf','caddfile'),('cal','call'),('cat','catch'),('cb','cbuffer'),('cc','cc'),('ccl','cclose'),('cd','cd'),('ce','center'),('cex','cexpr'),('cf','cfile'),('cfir','cfirst'),('cg','cgetfile'),('cgetb','cgetbuffer'),('cgete','cgetexpr'),('changes','changes'),('chd','chdir'),('che','checkpath'),('checkt','checktime'),('cl','cl'),('cl','clist'),('cla','clast'),('clo','close'),('cmapc','cmapclear'),('cmdname','cmdname'),('cn','cn'),('cn','cnext'),('cnew','cnewer'),('cnf','cnf'),('cnf','cnfile'),('co','copy'),('col','colder'),('colo','colorscheme'),('com','com'),('comc','comclear'),('comment','comment'),('comp','compiler'),('con','con'),('con','continue'),('conf','confirm'),('cope','copen'),('count','count'),('cp','cprevious'),('cpf','cpfile'),('cq','cquit'),('cr','crewind'),('cs','cs'),('cscope','cscope'),('cstag','cstag'),('cuna','cunabbrev'),('cw','cwindow'),('d','d'),('d','delete'),('de','de'),('debug','debug'),('debugg','debuggreedy'),('del','del'),('delc','delcommand'),('delf','delf'),('delf','delfunction'),('delm','delmarks'),('di','di'),('di','display'),('diffg','diffget'),('diffo','diffo'),('diffoff','diffoff'),('diffp','diffp'),('diffpatch','diffpatch'),('diffpu','diffput'),('diffsplit','diffsplit'),('difft','difft'),('diffthis','diffthis'),('diffu','diffupdate'),('dig','dig'),('dig','digraphs'),('dj','djump'),('dl','dlist'),('do','do'),('doau','doau'),('dr','drop'),('ds','dsearch'),('dsp','dsplit'),('dwim','dwim'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','edit'),('ea','ea'),('earlier','earlier'),('ec','ec'),('echoe','echoerr'),('echom','echomsg'),('echon','echon'),('el','else'),('elsei','elseif'),('em','emenu'),('emenu','emenu'),('en','en'),('en','endif'),('endf','endf'),('endf','endfunction'),('endfo','endfor'),('endfun','endfun'),('endt','endtry'),('endw','endwhile'),('ene','enew'),('ex','ex'),('exi','exit'),('exu','exusage'),('f','f'),('f','file'),('filename','filename'),('files','files'),('filet','filet'),('filetype','filetype'),('fin','fin'),('fin','find'),('fina','finally'),('fini','finish'),('fir','first'),('fix','fixdel'),('fo','fold'),('foldc','foldclose'),('foldd','folddoopen'),('folddoc','folddoclosed'),('foldo','foldopen'),('for','for'),('fu','fu'),('fu','function'),('fun','fun'),('g','g'),('get','get'),('go','goto'),('gr','grep'),('grepa','grepadd'),('gs'
,'gs'),('gs','gs'),('gui','gui'),('gvim','gvim'),('h','h'),('h','h'),('h','h'),('h','h'),('h','help'),('ha','hardcopy'),('helpf','helpfind'),('helpg','helpgrep'),('helpt','helptags'),('hi','hi'),('hid','hide'),('his','history'),('i','i'),('ia','ia'),('iabc','iabclear'),('if','if'),('ij','ijump'),('il','ilist'),('imapc','imapclear'),('in','in'),('index','index'),('intro','intro'),('is','isearch'),('isp','isplit'),('iuna','iunabbrev'),('j','join'),('ju','jumps'),('k','k'),('kee','keepmarks'),('keepa','keepa'),('keepalt','keepalt'),('keepj','keepjumps'),('l','l'),('l','list'),('lN','lN'),('lN','lNext'),('lNf','lNf'),('lNf','lNfile'),('la','la'),('la','last'),('lad','lad'),('lad','laddexpr'),('laddb','laddbuffer'),('laddf','laddfile'),('lan','lan'),('lan','language'),('lat','lat'),('later','later'),('lb','lbuffer'),('lc','lcd'),('lch','lchdir'),('lcl','lclose'),('lcs','lcs'),('lcscope','lcscope'),('le','left'),('lefta','leftabove'),('let','let'),('lex','lexpr'),('lf','lfile'),('lfir','lfirst'),('lg','lgetfile'),('lgetb','lgetbuffer'),('lgete','lgetexpr'),('lgr','lgrep'),('lgrepa','lgrepadd'),('lh','lhelpgrep'),('ll','ll'),('lla','llast'),('lli','llist'),('lmak','lmake'),('lmapc','lmapclear'),('lne','lne'),('lne','lnext'),('lnew','lnewer'),('lnf','lnf'),('lnf','lnfile'),('lo','lo'),('lo','loadview'),('loadk','loadk'),('loadkeymap','loadkeymap'),('loc','lockmarks'),('locale','locale'),('lockv','lockvar'),('lol','lolder'),('lop','lopen'),('lp','lprevious'),('lpf','lpfile'),('lr','lrewind'),('ls','ls'),('lt','ltag'),('lua','lua'),('luado','luado'),('luafile','luafile'),('lv','lvimgrep'),('lvimgrepa','lvimgrepadd'),('lw','lwindow'),('m','move'),('ma','ma'),('ma','mark'),('main','main'),('main','main'),('mak','make'),('marks','marks'),('mat','match'),('menut','menut'),('menut','menutranslate'),('mes','mes'),('messages','messages'),('mk','mk'),('mk','mkexrc'),('mkdir','mkdir'),('mks','mksession'),('mksp','mkspell'),('mkv','mkv'),('mkv','mkvimrc'),('mkvie','mkview'),('mo','mo'),('mod','mode'),('mv','mv'),('mz','mz'),('mz','mzscheme'),('mzf','mzfile'),('n','n'),('n','n'),('n','next'),('nb','nbkey'),('nbc','nbclose'),('nbs','nbstart'),('ne','ne'),('new','new'),('nkf','nkf'),('nmapc','nmapclear'),('noa','noa'),('noautocmd','noautocmd'),('noh','nohlsearch'),('nu','number'),('o','o'),('o','open'),('ol','oldfiles'),('omapc','omapclear'),('on','only'),('opt','options'),('ownsyntax','ownsyntax'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','print'),('pat','pat'),('pat','pat'),('pc','pclose'),('pe','pe'),('pe','perl'),('ped','pedit'),('perld','perldo'),('po','pop'),('popu','popu'),('popu','popup'),('pp','ppop'),('pr','pr'),('pre','preserve'),('prev','previous'),('pro','pro'),('prof','profile'),('profd','profdel'),('promptf','promptfind'),('promptr','promptrepl'),('ps','psearch'),('ptN','ptN'),('ptN','ptNext'),('pta','ptag'),('ptf','ptfirst'),('ptj','ptjump'),('ptl','ptlast'),('ptn','ptn'),('ptn','ptnext'),('ptp','ptprevious'),('ptr','ptrewind'),('pts','ptselect'),('pu','put'),('pw','pwd'),('py','py'),('py','python'),('py3','py3'),('py3','py3'),('py3file','py3file'),('pyf','pyfile'),('python3','python3'),('q','q'),('q','quit'),('qa','qall'),('quita','quitall'),('quote','quote'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','read'),('re','re'),('rec','recover'),('red','red'),('red','redo'),('redi','redir'),('redr','redraw'),('redraws','redrawstatus'),('reg','registers'),('res','resize'),('ret','r
etab'),('retu','return'),('rew','rewind'),('ri','right'),('rightb','rightbelow'),('ru','ru'),('ru','runtime'),('rub','ruby'),('rubyd','rubydo'),('rubyf','rubyfile'),('rundo','rundo'),('rv','rviminfo'),('s','s'),('s','s'),('s','s'),('s','s'),('sN','sNext'),('sa','sargument'),('sal','sall'),('san','sandbox'),('sav','saveas'),('sb','sbuffer'),('sbN','sbNext'),('sba','sball'),('sbf','sbfirst'),('sbl','sblast'),('sbm','sbmodified'),('sbn','sbnext'),('sbp','sbprevious'),('sbr','sbrewind'),('scrip','scrip'),('scrip','scriptnames'),('scripte','scriptencoding'),('scs','scs'),('scscope','scscope'),('se','set'),('setf','setfiletype'),('setg','setglobal'),('setl','setlocal'),('sf','sfind'),('sfir','sfirst'),('sh','shell'),('si','si'),('sig','sig'),('sign','sign'),('sil','silent'),('sim','simalt'),('sl','sl'),('sl','sleep'),('sla','slast'),('sm','smagic'),('sm','smap'),('sme','sme'),('smenu','smenu'),('sn','snext'),('sni','sniff'),('sno','snomagic'),('snoreme','snoreme'),('snoremenu','snoremenu'),('so','so'),('so','source'),('sor','sort'),('sp','split'),('spe','spe'),('spe','spellgood'),('spelld','spelldump'),('spelli','spellinfo'),('spellr','spellrepall'),('spellu','spellundo'),('spellw','spellwrong'),('spr','sprevious'),('sre','srewind'),('st','st'),('st','stop'),('sta','stag'),('star','star'),('star','startinsert'),('start','start'),('startg','startgreplace'),('startr','startreplace'),('stj','stjump'),('stopi','stopinsert'),('sts','stselect'),('sub','sub'),('sub','sub'),('sun','sunhide'),('sunme','sunme'),('sunmenu','sunmenu'),('sus','suspend'),('sv','sview'),('sw','swapname'),('sy','sy'),('syn','syn'),('sync','sync'),('syncbind','syncbind'),('synlist','synlist'),('t','t'),('t','t'),('t','t'),('tN','tN'),('tN','tNext'),('ta','ta'),('ta','tag'),('tab','tab'),('tabN','tabN'),('tabN','tabNext'),('tabc','tabclose'),('tabd','tabdo'),('tabe','tabedit'),('tabf','tabfind'),('tabfir','tabfirst'),('tabl','tablast'),('tabm','tabmove'),('tabn','tabnext'),('tabnew','tabnew'),('tabo','tabonly'),('tabp','tabprevious'),('tabr','tabrewind'),('tabs','tabs'),('tags','tags'),('tc','tcl'),('tcld','tcldo'),('tclf','tclfile'),('te','tearoff'),('tf','tfirst'),('th','throw'),('tj','tjump'),('tl','tlast'),('tm','tm'),('tm','tmenu'),('tn','tn'),('tn','tnext'),('to','topleft'),('tp','tprevious'),('tr','tr'),('tr','trewind'),('try','try'),('ts','tselect'),('tu','tu'),('tu','tunmenu'),('u','u'),('u','undo'),('un','un'),('una','unabbreviate'),('undoj','undojoin'),('undol','undolist'),('unh','unhide'),('unl','unl'),('unlo','unlockvar'),('uns','unsilent'),('up','update'),('v','v'),('ve','ve'),('ve','version'),('verb','verbose'),('version','version'),('version','version'),('vert','vertical'),('vi','vi'),('vi','visual'),('vie','view'),('vim','vimgrep'),('vimgrepa','vimgrepadd'),('viu','viusage'),('vmapc','vmapclear'),('vne','vnew'),('vs','vsplit'),('w','w'),('w','write'),('wN','wNext'),('wa','wall'),('wh','while'),('win','win'),('win','winsize'),('winc','wincmd'),('windo','windo'),('winp','winpos'),('wn','wnext'),('wp','wprevious'),('wq','wq'),('wqa','wqall'),('ws','wsverb'),('wundo','wundo'),('wv','wviminfo'),('x','x'),('x','xit'),('xa','xall'),('xmapc','xmapclear'),('xme','xme'),('xmenu','xmenu'),('xnoreme','xnoreme'),('xnoremenu','xnoremenu'),('xterm','xterm'),('xunme','xunme'),('xunmenu','xunmenu'),('xwininfo','xwininfo'),('y','yank')]
-def _getoption():
- return [('acd','acd'),('ai','ai'),('akm','akm'),('al','al'),('aleph','aleph'),('allowrevins','allowrevins'),('altkeymap','altkeymap'),('ambiwidth','ambiwidth'),('ambw','ambw'),('anti','anti'),('antialias','antialias'),('ar','ar'),('arab','arab'),('arabic','arabic'),('arabicshape','arabicshape'),('ari','ari'),('arshape','arshape'),('autochdir','autochdir'),('autoindent','autoindent'),('autoread','autoread'),('autowrite','autowrite'),('autowriteall','autowriteall'),('aw','aw'),('awa','awa'),('background','background'),('backspace','backspace'),('backup','backup'),('backupcopy','backupcopy'),('backupdir','backupdir'),('backupext','backupext'),('backupskip','backupskip'),('balloondelay','balloondelay'),('ballooneval','ballooneval'),('balloonexpr','balloonexpr'),('bdir','bdir'),('bdlay','bdlay'),('beval','beval'),('bex','bex'),('bexpr','bexpr'),('bg','bg'),('bh','bh'),('bin','bin'),('binary','binary'),('biosk','biosk'),('bioskey','bioskey'),('bk','bk'),('bkc','bkc'),('bl','bl'),('bomb','bomb'),('breakat','breakat'),('brk','brk'),('browsedir','browsedir'),('bs','bs'),('bsdir','bsdir'),('bsk','bsk'),('bt','bt'),('bufhidden','bufhidden'),('buflisted','buflisted'),('buftype','buftype'),('casemap','casemap'),('cb','cb'),('cc','cc'),('ccv','ccv'),('cd','cd'),('cdpath','cdpath'),('cedit','cedit'),('cf','cf'),('cfu','cfu'),('ch','ch'),('charconvert','charconvert'),('ci','ci'),('cin','cin'),('cindent','cindent'),('cink','cink'),('cinkeys','cinkeys'),('cino','cino'),('cinoptions','cinoptions'),('cinw','cinw'),('cinwords','cinwords'),('clipboard','clipboard'),('cmdheight','cmdheight'),('cmdwinheight','cmdwinheight'),('cmp','cmp'),('cms','cms'),('co','co'),('cocu','cocu'),('cole','cole'),('colorcolumn','colorcolumn'),('columns','columns'),('com','com'),('comments','comments'),('commentstring','commentstring'),('compatible','compatible'),('complete','complete'),('completefunc','completefunc'),('completeopt','completeopt'),('concealcursor','concealcursor'),('conceallevel','conceallevel'),('confirm','confirm'),('consk','consk'),('conskey','conskey'),('copyindent','copyindent'),('cot','cot'),('cp','cp'),('cpo','cpo'),('cpoptions','cpoptions'),('cpt','cpt'),('crb','crb'),('cryptmethod','cryptmethod'),('cscopepathcomp','cscopepathcomp'),('cscopeprg','cscopeprg'),('cscopequickfix','cscopequickfix'),('cscoperelative','cscoperelative'),('cscopetag','cscopetag'),('cscopetagorder','cscopetagorder'),('cscopeverbose','cscopeverbose'),('cspc','cspc'),('csprg','csprg'),('csqf','csqf'),('csre','csre'),('cst','cst'),('csto','csto'),('csverb','csverb'),('cuc','cuc'),('cul','cul'),('cursorbind','cursorbind'),('cursorcolumn','cursorcolumn'),('cursorline','cursorline'),('cwh','cwh'),('debug','debug'),('deco','deco'),('def','def'),('define','define'),('delcombine','delcombine'),('dex','dex'),('dg','dg'),('dict','dict'),('dictionary','dictionary'),('diff','diff'),('diffexpr','diffexpr'),('diffopt','diffopt'),('digraph','digraph'),('dip','dip'),('dir','dir'),('directory','directory'),('display','display'),('dy','dy'),('ea','ea'),('ead','ead'),('eadirection','eadirection'),('eb','eb'),('ed','ed'),('edcompatible','edcompatible'),('ef','ef'),('efm','efm'),('ei','ei'),('ek','ek'),('enc','enc'),('encoding','encoding'),('endofline','endofline'),('eol','eol'),('ep','ep'),('equalalways','equalalways'),('equalprg','equalprg'),('errorbells','errorbells'),('errorfile','errorfile'),('errorformat','errorformat'),('esckeys','esckeys'),('et','et'),('eventignore','eventignore'),('ex','ex'),('expandtab','expandtab'),('exrc','exrc'),('fcl','fcl')
,('fcs','fcs'),('fdc','fdc'),('fde','fde'),('fdi','fdi'),('fdl','fdl'),('fdls','fdls'),('fdm','fdm'),('fdn','fdn'),('fdo','fdo'),('fdt','fdt'),('fen','fen'),('fenc','fenc'),('fencs','fencs'),('fex','fex'),('ff','ff'),('ffs','ffs'),('fileencoding','fileencoding'),('fileencodings','fileencodings'),('fileformat','fileformat'),('fileformats','fileformats'),('filetype','filetype'),('fillchars','fillchars'),('fk','fk'),('fkmap','fkmap'),('flp','flp'),('fml','fml'),('fmr','fmr'),('fo','fo'),('foldclose','foldclose'),('foldcolumn','foldcolumn'),('foldenable','foldenable'),('foldexpr','foldexpr'),('foldignore','foldignore'),('foldlevel','foldlevel'),('foldlevelstart','foldlevelstart'),('foldmarker','foldmarker'),('foldmethod','foldmethod'),('foldminlines','foldminlines'),('foldnestmax','foldnestmax'),('foldopen','foldopen'),('foldtext','foldtext'),('formatexpr','formatexpr'),('formatlistpat','formatlistpat'),('formatoptions','formatoptions'),('formatprg','formatprg'),('fp','fp'),('fs','fs'),('fsync','fsync'),('ft','ft'),('gcr','gcr'),('gd','gd'),('gdefault','gdefault'),('gfm','gfm'),('gfn','gfn'),('gfs','gfs'),('gfw','gfw'),('ghr','ghr'),('go','go'),('gp','gp'),('grepformat','grepformat'),('grepprg','grepprg'),('gtl','gtl'),('gtt','gtt'),('guicursor','guicursor'),('guifont','guifont'),('guifontset','guifontset'),('guifontwide','guifontwide'),('guiheadroom','guiheadroom'),('guioptions','guioptions'),('guipty','guipty'),('guitablabel','guitablabel'),('guitabtooltip','guitabtooltip'),('helpfile','helpfile'),('helpheight','helpheight'),('helplang','helplang'),('hf','hf'),('hh','hh'),('hi','hi'),('hid','hid'),('hidden','hidden'),('highlight','highlight'),('history','history'),('hk','hk'),('hkmap','hkmap'),('hkmapp','hkmapp'),('hkp','hkp'),('hl','hl'),('hlg','hlg'),('hls','hls'),('hlsearch','hlsearch'),('ic','ic'),('icon','icon'),('iconstring','iconstring'),('ignorecase','ignorecase'),('im','im'),('imactivatekey','imactivatekey'),('imak','imak'),('imc','imc'),('imcmdline','imcmdline'),('imd','imd'),('imdisable','imdisable'),('imi','imi'),('iminsert','iminsert'),('ims','ims'),('imsearch','imsearch'),('inc','inc'),('include','include'),('includeexpr','includeexpr'),('incsearch','incsearch'),('inde','inde'),('indentexpr','indentexpr'),('indentkeys','indentkeys'),('indk','indk'),('inex','inex'),('inf','inf'),('infercase','infercase'),('inoremap','inoremap'),('insertmode','insertmode'),('invacd','invacd'),('invai','invai'),('invakm','invakm'),('invallowrevins','invallowrevins'),('invaltkeymap','invaltkeymap'),('invanti','invanti'),('invantialias','invantialias'),('invar','invar'),('invarab','invarab'),('invarabic','invarabic'),('invarabicshape','invarabicshape'),('invari','invari'),('invarshape','invarshape'),('invautochdir','invautochdir'),('invautoindent','invautoindent'),('invautoread','invautoread'),('invautowrite','invautowrite'),('invautowriteall','invautowriteall'),('invaw','invaw'),('invawa','invawa'),('invbackup','invbackup'),('invballooneval','invballooneval'),('invbeval','invbeval'),('invbin','invbin'),('invbinary','invbinary'),('invbiosk','invbiosk'),('invbioskey','invbioskey'),('invbk','invbk'),('invbl','invbl'),('invbomb','invbomb'),('invbuflisted','invbuflisted'),('invcf','invcf'),('invci','invci'),('invcin','invcin'),('invcindent','invcindent'),('invcompatible','invcompatible'),('invconfirm','invconfirm'),('invconsk','invconsk'),('invconskey','invconskey'),('invcopyindent','invcopyindent'),('invcp','invcp'),('invcrb','invcrb'),('invcscopetag','invcscopetag'),('invcscopeverbose','invcscopeverbos
e'),('invcst','invcst'),('invcsverb','invcsverb'),('invcuc','invcuc'),('invcul','invcul'),('invcursorbind','invcursorbind'),('invcursorcolumn','invcursorcolumn'),('invcursorline','invcursorline'),('invdeco','invdeco'),('invdelcombine','invdelcombine'),('invdg','invdg'),('invdiff','invdiff'),('invdigraph','invdigraph'),('invea','invea'),('inveb','inveb'),('inved','inved'),('invedcompatible','invedcompatible'),('invek','invek'),('invendofline','invendofline'),('inveol','inveol'),('invequalalways','invequalalways'),('inverrorbells','inverrorbells'),('invesckeys','invesckeys'),('invet','invet'),('invex','invex'),('invexpandtab','invexpandtab'),('invexrc','invexrc'),('invfen','invfen'),('invfk','invfk'),('invfkmap','invfkmap'),('invfoldenable','invfoldenable'),('invgd','invgd'),('invgdefault','invgdefault'),('invguipty','invguipty'),('invhid','invhid'),('invhidden','invhidden'),('invhk','invhk'),('invhkmap','invhkmap'),('invhkmapp','invhkmapp'),('invhkp','invhkp'),('invhls','invhls'),('invhlsearch','invhlsearch'),('invic','invic'),('invicon','invicon'),('invignorecase','invignorecase'),('invim','invim'),('invimc','invimc'),('invimcmdline','invimcmdline'),('invimd','invimd'),('invimdisable','invimdisable'),('invincsearch','invincsearch'),('invinf','invinf'),('invinfercase','invinfercase'),('invinsertmode','invinsertmode'),('invis','invis'),('invjoinspaces','invjoinspaces'),('invjs','invjs'),('invlazyredraw','invlazyredraw'),('invlbr','invlbr'),('invlinebreak','invlinebreak'),('invlisp','invlisp'),('invlist','invlist'),('invloadplugins','invloadplugins'),('invlpl','invlpl'),('invlz','invlz'),('invma','invma'),('invmacatsui','invmacatsui'),('invmagic','invmagic'),('invmh','invmh'),('invml','invml'),('invmod','invmod'),('invmodeline','invmodeline'),('invmodifiable','invmodifiable'),('invmodified','invmodified'),('invmore','invmore'),('invmousef','invmousef'),('invmousefocus','invmousefocus'),('invmousehide','invmousehide'),('invnu','invnu'),('invnumber','invnumber'),('invodev','invodev'),('invopendevice','invopendevice'),('invpaste','invpaste'),('invpi','invpi'),('invpreserveindent','invpreserveindent'),('invpreviewwindow','invpreviewwindow'),('invprompt','invprompt'),('invpvw','invpvw'),('invreadonly','invreadonly'),('invrelativenumber','invrelativenumber'),('invremap','invremap'),('invrestorescreen','invrestorescreen'),('invrevins','invrevins'),('invri','invri'),('invrightleft','invrightleft'),('invrl','invrl'),('invrnu','invrnu'),('invro','invro'),('invrs','invrs'),('invru','invru'),('invruler','invruler'),('invsb','invsb'),('invsc','invsc'),('invscb','invscb'),('invscrollbind','invscrollbind'),('invscs','invscs'),('invsecure','invsecure'),('invsft','invsft'),('invshellslash','invshellslash'),('invshelltemp','invshelltemp'),('invshiftround','invshiftround'),('invshortname','invshortname'),('invshowcmd','invshowcmd'),('invshowfulltag','invshowfulltag'),('invshowmatch','invshowmatch'),('invshowmode','invshowmode'),('invsi','invsi'),('invsm','invsm'),('invsmartcase','invsmartcase'),('invsmartindent','invsmartindent'),('invsmarttab','invsmarttab'),('invsmd','invsmd'),('invsn','invsn'),('invsol','invsol'),('invspell','invspell'),('invsplitbelow','invsplitbelow'),('invsplitright','invsplitright'),('invspr','invspr'),('invsr','invsr'),('invssl','invssl'),('invsta','invsta'),('invstartofline','invstartofline'),('invstmp','invstmp'),('invswapfile','invswapfile'),('invswf','invswf'),('invta','invta'),('invtagbsearch','invtagbsearch'),('invtagrelative','invtagrelative'),('invtagstack','invtagstack'),('invtb
i','invtbi'),('invtbidi','invtbidi'),('invtbs','invtbs'),('invtermbidi','invtermbidi'),('invterse','invterse'),('invtextauto','invtextauto'),('invtextmode','invtextmode'),('invtf','invtf'),('invtgst','invtgst'),('invtildeop','invtildeop'),('invtimeout','invtimeout'),('invtitle','invtitle'),('invto','invto'),('invtop','invtop'),('invtr','invtr'),('invttimeout','invttimeout'),('invttybuiltin','invttybuiltin'),('invttyfast','invttyfast'),('invtx','invtx'),('invvb','invvb'),('invvisualbell','invvisualbell'),('invwa','invwa'),('invwarn','invwarn'),('invwb','invwb'),('invweirdinvert','invweirdinvert'),('invwfh','invwfh'),('invwfw','invwfw'),('invwildignorecase','invwildignorecase'),('invwildmenu','invwildmenu'),('invwinfixheight','invwinfixheight'),('invwinfixwidth','invwinfixwidth'),('invwiv','invwiv'),('invwmnu','invwmnu'),('invwrap','invwrap'),('invwrapscan','invwrapscan'),('invwrite','invwrite'),('invwriteany','invwriteany'),('invwritebackup','invwritebackup'),('invws','invws'),('is','is'),('isf','isf'),('isfname','isfname'),('isi','isi'),('isident','isident'),('isk','isk'),('iskeyword','iskeyword'),('isp','isp'),('isprint','isprint'),('joinspaces','joinspaces'),('js','js'),('key','key'),('keymap','keymap'),('keymodel','keymodel'),('keywordprg','keywordprg'),('km','km'),('kmp','kmp'),('kp','kp'),('langmap','langmap'),('langmenu','langmenu'),('laststatus','laststatus'),('lazyredraw','lazyredraw'),('lbr','lbr'),('lcs','lcs'),('linebreak','linebreak'),('lines','lines'),('linespace','linespace'),('lisp','lisp'),('lispwords','lispwords'),('list','list'),('listchars','listchars'),('lm','lm'),('lmap','lmap'),('loadplugins','loadplugins'),('lpl','lpl'),('ls','ls'),('lsp','lsp'),('lw','lw'),('lz','lz'),('ma','ma'),('macatsui','macatsui'),('magic','magic'),('makeef','makeef'),('makeprg','makeprg'),('mat','mat'),('matchpairs','matchpairs'),('matchtime','matchtime'),('maxcombine','maxcombine'),('maxfuncdepth','maxfuncdepth'),('maxmapdepth','maxmapdepth'),('maxmem','maxmem'),('maxmempattern','maxmempattern'),('maxmemtot','maxmemtot'),('mco','mco'),('mef','mef'),('menuitems','menuitems'),('mfd','mfd'),('mh','mh'),('mis','mis'),('mkspellmem','mkspellmem'),('ml','ml'),('mls','mls'),('mm','mm'),('mmd','mmd'),('mmp','mmp'),('mmt','mmt'),('mod','mod'),('modeline','modeline'),('modelines','modelines'),('modifiable','modifiable'),('modified','modified'),('more','more'),('mouse','mouse'),('mousef','mousef'),('mousefocus','mousefocus'),('mousehide','mousehide'),('mousem','mousem'),('mousemodel','mousemodel'),('mouses','mouses'),('mouseshape','mouseshape'),('mouset','mouset'),('mousetime','mousetime'),('mp','mp'),('mps','mps'),('msm','msm'),('mzq','mzq'),('mzquantum','mzquantum'),('nf','nf'),('nnoremap','nnoremap'),('noacd','noacd'),('noai','noai'),('noakm','noakm'),('noallowrevins','noallowrevins'),('noaltkeymap','noaltkeymap'),('noanti','noanti'),('noantialias','noantialias'),('noar','noar'),('noarab','noarab'),('noarabic','noarabic'),('noarabicshape','noarabicshape'),('noari','noari'),('noarshape','noarshape'),('noautochdir','noautochdir'),('noautoindent','noautoindent'),('noautoread','noautoread'),('noautowrite','noautowrite'),('noautowriteall','noautowriteall'),('noaw','noaw'),('noawa','noawa'),('nobackup','nobackup'),('noballooneval','noballooneval'),('nobeval','nobeval'),('nobin','nobin'),('nobinary','nobinary'),('nobiosk','nobiosk'),('nobioskey','nobioskey'),('nobk','nobk'),('nobl','nobl'),('nobomb','nobomb'),('nobuflisted','nobuflisted'),('nocf','nocf'),('noci','noci'),('nocin','nocin'),('nocindent','nocind
ent'),('nocompatible','nocompatible'),('noconfirm','noconfirm'),('noconsk','noconsk'),('noconskey','noconskey'),('nocopyindent','nocopyindent'),('nocp','nocp'),('nocrb','nocrb'),('nocscopetag','nocscopetag'),('nocscopeverbose','nocscopeverbose'),('nocst','nocst'),('nocsverb','nocsverb'),('nocuc','nocuc'),('nocul','nocul'),('nocursorbind','nocursorbind'),('nocursorcolumn','nocursorcolumn'),('nocursorline','nocursorline'),('nodeco','nodeco'),('nodelcombine','nodelcombine'),('nodg','nodg'),('nodiff','nodiff'),('nodigraph','nodigraph'),('noea','noea'),('noeb','noeb'),('noed','noed'),('noedcompatible','noedcompatible'),('noek','noek'),('noendofline','noendofline'),('noeol','noeol'),('noequalalways','noequalalways'),('noerrorbells','noerrorbells'),('noesckeys','noesckeys'),('noet','noet'),('noex','noex'),('noexpandtab','noexpandtab'),('noexrc','noexrc'),('nofen','nofen'),('nofk','nofk'),('nofkmap','nofkmap'),('nofoldenable','nofoldenable'),('nogd','nogd'),('nogdefault','nogdefault'),('noguipty','noguipty'),('nohid','nohid'),('nohidden','nohidden'),('nohk','nohk'),('nohkmap','nohkmap'),('nohkmapp','nohkmapp'),('nohkp','nohkp'),('nohls','nohls'),('nohlsearch','nohlsearch'),('noic','noic'),('noicon','noicon'),('noignorecase','noignorecase'),('noim','noim'),('noimc','noimc'),('noimcmdline','noimcmdline'),('noimd','noimd'),('noimdisable','noimdisable'),('noincsearch','noincsearch'),('noinf','noinf'),('noinfercase','noinfercase'),('noinsertmode','noinsertmode'),('nois','nois'),('nojoinspaces','nojoinspaces'),('nojs','nojs'),('nolazyredraw','nolazyredraw'),('nolbr','nolbr'),('nolinebreak','nolinebreak'),('nolisp','nolisp'),('nolist','nolist'),('noloadplugins','noloadplugins'),('nolpl','nolpl'),('nolz','nolz'),('noma','noma'),('nomacatsui','nomacatsui'),('nomagic','nomagic'),('nomh','nomh'),('noml','noml'),('nomod','nomod'),('nomodeline','nomodeline'),('nomodifiable','nomodifiable'),('nomodified','nomodified'),('nomore','nomore'),('nomousef','nomousef'),('nomousefocus','nomousefocus'),('nomousehide','nomousehide'),('nonu','nonu'),('nonumber','nonumber'),('noodev','noodev'),('noopendevice','noopendevice'),('nopaste','nopaste'),('nopi','nopi'),('nopreserveindent','nopreserveindent'),('nopreviewwindow','nopreviewwindow'),('noprompt','noprompt'),('nopvw','nopvw'),('noreadonly','noreadonly'),('norelativenumber','norelativenumber'),('noremap','noremap'),('norestorescreen','norestorescreen'),('norevins','norevins'),('nori','nori'),('norightleft','norightleft'),('norl','norl'),('nornu','nornu'),('noro','noro'),('nors','nors'),('noru','noru'),('noruler','noruler'),('nosb','nosb'),('nosc','nosc'),('noscb','noscb'),('noscrollbind','noscrollbind'),('noscs','noscs'),('nosecure','nosecure'),('nosft','nosft'),('noshellslash','noshellslash'),('noshelltemp','noshelltemp'),('noshiftround','noshiftround'),('noshortname','noshortname'),('noshowcmd','noshowcmd'),('noshowfulltag','noshowfulltag'),('noshowmatch','noshowmatch'),('noshowmode','noshowmode'),('nosi','nosi'),('nosm','nosm'),('nosmartcase','nosmartcase'),('nosmartindent','nosmartindent'),('nosmarttab','nosmarttab'),('nosmd','nosmd'),('nosn','nosn'),('nosol','nosol'),('nospell','nospell'),('nosplitbelow','nosplitbelow'),('nosplitright','nosplitright'),('nospr','nospr'),('nosr','nosr'),('nossl','nossl'),('nosta','nosta'),('nostartofline','nostartofline'),('nostmp','nostmp'),('noswapfile','noswapfile'),('noswf','noswf'),('nota','nota'),('notagbsearch','notagbsearch'),('notagrelative','notagrelative'),('notagstack','notagstack'),('notbi','notbi'),('notbidi','notbidi'),(
'notbs','notbs'),('notermbidi','notermbidi'),('noterse','noterse'),('notextauto','notextauto'),('notextmode','notextmode'),('notf','notf'),('notgst','notgst'),('notildeop','notildeop'),('notimeout','notimeout'),('notitle','notitle'),('noto','noto'),('notop','notop'),('notr','notr'),('nottimeout','nottimeout'),('nottybuiltin','nottybuiltin'),('nottyfast','nottyfast'),('notx','notx'),('novb','novb'),('novisualbell','novisualbell'),('nowa','nowa'),('nowarn','nowarn'),('nowb','nowb'),('noweirdinvert','noweirdinvert'),('nowfh','nowfh'),('nowfw','nowfw'),('nowildignorecase','nowildignorecase'),('nowildmenu','nowildmenu'),('nowinfixheight','nowinfixheight'),('nowinfixwidth','nowinfixwidth'),('nowiv','nowiv'),('nowmnu','nowmnu'),('nowrap','nowrap'),('nowrapscan','nowrapscan'),('nowrite','nowrite'),('nowriteany','nowriteany'),('nowritebackup','nowritebackup'),('nows','nows'),('nrformats','nrformats'),('nu','nu'),('number','number'),('numberwidth','numberwidth'),('nuw','nuw'),('odev','odev'),('oft','oft'),('ofu','ofu'),('omnifunc','omnifunc'),('opendevice','opendevice'),('operatorfunc','operatorfunc'),('opfunc','opfunc'),('osfiletype','osfiletype'),('pa','pa'),('para','para'),('paragraphs','paragraphs'),('paste','paste'),('pastetoggle','pastetoggle'),('patchexpr','patchexpr'),('patchmode','patchmode'),('path','path'),('pdev','pdev'),('penc','penc'),('pex','pex'),('pexpr','pexpr'),('pfn','pfn'),('ph','ph'),('pheader','pheader'),('pi','pi'),('pm','pm'),('pmbcs','pmbcs'),('pmbfn','pmbfn'),('popt','popt'),('preserveindent','preserveindent'),('previewheight','previewheight'),('previewwindow','previewwindow'),('printdevice','printdevice'),('printencoding','printencoding'),('printexpr','printexpr'),('printfont','printfont'),('printheader','printheader'),('printmbcharset','printmbcharset'),('printmbfont','printmbfont'),('printoptions','printoptions'),('prompt','prompt'),('pt','pt'),('pumheight','pumheight'),('pvh','pvh'),('pvw','pvw'),('qe','qe'),('quoteescape','quoteescape'),('rdt','rdt'),('readonly','readonly'),('redrawtime','redrawtime'),('relativenumber','relativenumber'),('remap','remap'),('report','report'),('restorescreen','restorescreen'),('revins','revins'),('ri','ri'),('rightleft','rightleft'),('rightleftcmd','rightleftcmd'),('rl','rl'),('rlc','rlc'),('rnu','rnu'),('ro','ro'),('rs','rs'),('rtp','rtp'),('ru','ru'),('ruf','ruf'),('ruler','ruler'),('rulerformat','rulerformat'),('runtimepath','runtimepath'),('sb','sb'),('sbo','sbo'),('sbr','sbr'),('sc','sc'),('scb','scb'),('scr','scr'),('scroll','scroll'),('scrollbind','scrollbind'),('scrolljump','scrolljump'),('scrolloff','scrolloff'),('scrollopt','scrollopt'),('scs','scs'),('sect','sect'),('sections','sections'),('secure','secure'),('sel','sel'),('selection','selection'),('selectmode','selectmode'),('sessionoptions','sessionoptions'),('sft','sft'),('sh','sh'),('shcf','shcf'),('shell','shell'),('shellcmdflag','shellcmdflag'),('shellpipe','shellpipe'),('shellquote','shellquote'),('shellredir','shellredir'),('shellslash','shellslash'),('shelltemp','shelltemp'),('shelltype','shelltype'),('shellxquote','shellxquote'),('shiftround','shiftround'),('shiftwidth','shiftwidth'),('shm','shm'),('shortmess','shortmess'),('shortname','shortname'),('showbreak','showbreak'),('showcmd','showcmd'),('showfulltag','showfulltag'),('showmatch','showmatch'),('showmode','showmode'),('showtabline','showtabline'),('shq','shq'),('si','si'),('sidescroll','sidescroll'),('sidescrolloff','sidescrolloff'),('siso','siso'),('sj','sj'),('slm','slm'),('sm','sm'),('smartcase','smartcase'
),('smartindent','smartindent'),('smarttab','smarttab'),('smc','smc'),('smd','smd'),('sn','sn'),('so','so'),('softtabstop','softtabstop'),('sol','sol'),('sp','sp'),('spc','spc'),('spell','spell'),('spellcapcheck','spellcapcheck'),('spellfile','spellfile'),('spelllang','spelllang'),('spellsuggest','spellsuggest'),('spf','spf'),('spl','spl'),('splitbelow','splitbelow'),('splitright','splitright'),('spr','spr'),('sps','sps'),('sr','sr'),('srr','srr'),('ss','ss'),('ssl','ssl'),('ssop','ssop'),('st','st'),('sta','sta'),('stal','stal'),('startofline','startofline'),('statusline','statusline'),('stl','stl'),('stmp','stmp'),('sts','sts'),('su','su'),('sua','sua'),('suffixes','suffixes'),('suffixesadd','suffixesadd'),('sw','sw'),('swapfile','swapfile'),('swapsync','swapsync'),('swb','swb'),('swf','swf'),('switchbuf','switchbuf'),('sws','sws'),('sxq','sxq'),('syn','syn'),('synmaxcol','synmaxcol'),('syntax','syntax'),('t_AB','t_AB'),('t_AF','t_AF'),('t_AL','t_AL'),('t_CS','t_CS'),('t_CV','t_CV'),('t_Ce','t_Ce'),('t_Co','t_Co'),('t_Cs','t_Cs'),('t_DL','t_DL'),('t_EI','t_EI'),('t_F1','t_F1'),('t_F2','t_F2'),('t_F3','t_F3'),('t_F4','t_F4'),('t_F5','t_F5'),('t_F6','t_F6'),('t_F7','t_F7'),('t_F8','t_F8'),('t_F9','t_F9'),('t_IE','t_IE'),('t_IS','t_IS'),('t_K1','t_K1'),('t_K3','t_K3'),('t_K4','t_K4'),('t_K5','t_K5'),('t_K6','t_K6'),('t_K7','t_K7'),('t_K8','t_K8'),('t_K9','t_K9'),('t_KA','t_KA'),('t_KB','t_KB'),('t_KC','t_KC'),('t_KD','t_KD'),('t_KE','t_KE'),('t_KF','t_KF'),('t_KG','t_KG'),('t_KH','t_KH'),('t_KI','t_KI'),('t_KJ','t_KJ'),('t_KK','t_KK'),('t_KL','t_KL'),('t_RI','t_RI'),('t_RV','t_RV'),('t_SI','t_SI'),('t_Sb','t_Sb'),('t_Sf','t_Sf'),('t_WP','t_WP'),('t_WS','t_WS'),('t_ZH','t_ZH'),('t_ZR','t_ZR'),('t_al','t_al'),('t_bc','t_bc'),('t_cd','t_cd'),('t_ce','t_ce'),('t_cl','t_cl'),('t_cm','t_cm'),('t_cs','t_cs'),('t_da','t_da'),('t_db','t_db'),('t_dl','t_dl'),('t_fs','t_fs'),('t_k1','t_k1'),('t_k2','t_k2'),('t_k3','t_k3'),('t_k4','t_k4'),('t_k5','t_k5'),('t_k6','t_k6'),('t_k7','t_k7'),('t_k8','t_k8'),('t_k9','t_k9'),('t_kB','t_kB'),('t_kD','t_kD'),('t_kI','t_kI'),('t_kN','t_kN'),('t_kP','t_kP'),('t_kb','t_kb'),('t_kd','t_kd'),('t_ke','t_ke'),('t_kh','t_kh'),('t_kl','t_kl'),('t_kr','t_kr'),('t_ks','t_ks'),('t_ku','t_ku'),('t_le','t_le'),('t_mb','t_mb'),('t_md','t_md'),('t_me','t_me'),('t_mr','t_mr'),('t_ms','t_ms'),('t_nd','t_nd'),('t_op','t_op'),('t_se','t_se'),('t_so','t_so'),('t_sr','t_sr'),('t_te','t_te'),('t_ti','t_ti'),('t_ts','t_ts'),('t_ue','t_ue'),('t_us','t_us'),('t_ut','t_ut'),('t_vb','t_vb'),('t_ve','t_ve'),('t_vi','t_vi'),('t_vs','t_vs'),('t_xs','t_xs'),('ta','ta'),('tabline','tabline'),('tabpagemax','tabpagemax'),('tabstop','tabstop'),('tag','tag'),('tagbsearch','tagbsearch'),('taglength','taglength'),('tagrelative','tagrelative'),('tags','tags'),('tagstack','tagstack'),('tal','tal'),('tb','tb'),('tbi','tbi'),('tbidi','tbidi'),('tbis','tbis'),('tbs','tbs'),('tenc','tenc'),('term','term'),('termbidi','termbidi'),('termencoding','termencoding'),('terse','terse'),('textauto','textauto'),('textmode','textmode'),('textwidth','textwidth'),('tf','tf'),('tgst','tgst'),('thesaurus','thesaurus'),('tildeop','tildeop'),('timeout','timeout'),('timeoutlen','timeoutlen'),('title','title'),('titlelen','titlelen'),('titleold','titleold'),('titlestring','titlestring'),('tl','tl'),('tm','tm'),('to','to'),('toolbar','toolbar'),('toolbariconsize','toolbariconsize'),('top','top'),('tpm','tpm'),('tr','tr'),('ts','ts'),('tsl','tsl'),('tsr','tsr'),('ttimeout','ttimeout'),('ttimeoutlen','ttimeoutlen'),('ttm','ttm')
,('tty','tty'),('ttybuiltin','ttybuiltin'),('ttyfast','ttyfast'),('ttym','ttym'),('ttymouse','ttymouse'),('ttyscroll','ttyscroll'),('ttytype','ttytype'),('tw','tw'),('tx','tx'),('uc','uc'),('udf','udf'),('udir','udir'),('ul','ul'),('undodir','undodir'),('undofile','undofile'),('undolevels','undolevels'),('undoreload','undoreload'),('updatecount','updatecount'),('updatetime','updatetime'),('ur','ur'),('ut','ut'),('vb','vb'),('vbs','vbs'),('vdir','vdir'),('ve','ve'),('verbose','verbose'),('verbosefile','verbosefile'),('vfile','vfile'),('vi','vi'),('viewdir','viewdir'),('viewoptions','viewoptions'),('viminfo','viminfo'),('virtualedit','virtualedit'),('visualbell','visualbell'),('vnoremap','vnoremap'),('vop','vop'),('wa','wa'),('wak','wak'),('warn','warn'),('wb','wb'),('wc','wc'),('wcm','wcm'),('wd','wd'),('weirdinvert','weirdinvert'),('wfh','wfh'),('wfw','wfw'),('wh','wh'),('whichwrap','whichwrap'),('wi','wi'),('wic','wic'),('wig','wig'),('wildchar','wildchar'),('wildcharm','wildcharm'),('wildignore','wildignore'),('wildignorecase','wildignorecase'),('wildmenu','wildmenu'),('wildmode','wildmode'),('wildoptions','wildoptions'),('wim','wim'),('winaltkeys','winaltkeys'),('window','window'),('winfixheight','winfixheight'),('winfixwidth','winfixwidth'),('winheight','winheight'),('winminheight','winminheight'),('winminwidth','winminwidth'),('winwidth','winwidth'),('wiv','wiv'),('wiw','wiw'),('wm','wm'),('wmh','wmh'),('wmnu','wmnu'),('wmw','wmw'),('wop','wop'),('wrap','wrap'),('wrapmargin','wrapmargin'),('wrapscan','wrapscan'),('write','write'),('writeany','writeany'),('writebackup','writebackup'),('writedelay','writedelay'),('ws','ws'),('ww','ww')]
-
-option = _getoption()
-command = _getcommand()
-auto = _getauto()
diff --git a/pygments/lexers/actionscript.py b/pygments/lexers/actionscript.py
new file mode 100644
index 00000000..9c687a57
--- /dev/null
+++ b/pygments/lexers/actionscript.py
@@ -0,0 +1,240 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.actionscript
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for ActionScript and MXML.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, using, this, words, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['ActionScriptLexer', 'ActionScript3Lexer', 'MxmlLexer']
+
+
+class ActionScriptLexer(RegexLexer):
+ """
+ For ActionScript source code.
+
+ .. versionadded:: 0.9
+ """
+
+ name = 'ActionScript'
+ aliases = ['as', 'actionscript']
+ filenames = ['*.as']
+ mimetypes = ['application/x-actionscript', 'text/x-actionscript',
+ 'text/actionscript']
+
+ flags = re.DOTALL
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex),
+ (r'[~^*!%&<>|+=:;,/?\\-]+', Operator),
+ (r'[{}\[\]();.]+', Punctuation),
+ (words((
+ 'case', 'default', 'for', 'each', 'in', 'while', 'do', 'break',
+ 'return', 'continue', 'if', 'else', 'throw', 'try', 'catch',
+ 'var', 'with', 'new', 'typeof', 'arguments', 'instanceof', 'this',
+ 'switch'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'class', 'public', 'final', 'internal', 'native', 'override', 'private',
+ 'protected', 'static', 'import', 'extends', 'implements', 'interface',
+ 'intrinsic', 'return', 'super', 'dynamic', 'function', 'const', 'get',
+ 'namespace', 'package', 'set'), suffix=r'\b'),
+ Keyword.Declaration),
+ (r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
+ Keyword.Constant),
+ (words((
+ 'Accessibility', 'AccessibilityProperties', 'ActionScriptVersion',
+ 'ActivityEvent', 'AntiAliasType', 'ApplicationDomain', 'AsBroadcaster', 'Array',
+ 'AsyncErrorEvent', 'AVM1Movie', 'BevelFilter', 'Bitmap', 'BitmapData',
+ 'BitmapDataChannel', 'BitmapFilter', 'BitmapFilterQuality', 'BitmapFilterType',
+ 'BlendMode', 'BlurFilter', 'Boolean', 'ByteArray', 'Camera', 'Capabilities', 'CapsStyle',
+ 'Class', 'Color', 'ColorMatrixFilter', 'ColorTransform', 'ContextMenu',
+ 'ContextMenuBuiltInItems', 'ContextMenuEvent', 'ContextMenuItem',
+                'ConvolutionFilter', 'CSMSettings', 'DataEvent', 'Date', 'DefinitionError',
+                'DeleteObjectSample', 'Dictionary', 'DisplacementMapFilter', 'DisplayObject',
+                'DisplacementMapFilterMode', 'DisplayObjectContainer', 'DropShadowFilter',
+ 'Endian', 'EOFError', 'Error', 'ErrorEvent', 'EvalError', 'Event', 'EventDispatcher',
+ 'EventPhase', 'ExternalInterface', 'FileFilter', 'FileReference',
+ 'FileReferenceList', 'FocusDirection', 'FocusEvent', 'Font', 'FontStyle', 'FontType',
+ 'FrameLabel', 'FullScreenEvent', 'Function', 'GlowFilter', 'GradientBevelFilter',
+ 'GradientGlowFilter', 'GradientType', 'Graphics', 'GridFitType', 'HTTPStatusEvent',
+                'IBitmapDrawable', 'ID3Info', 'IDataInput', 'IDataOutput', 'IDynamicPropertyOutput',
+ 'IDynamicPropertyWriter', 'IEventDispatcher', 'IExternalizable',
+ 'IllegalOperationError', 'IME', 'IMEConversionMode', 'IMEEvent', 'int',
+ 'InteractiveObject', 'InterpolationMethod', 'InvalidSWFError', 'InvokeEvent',
+ 'IOError', 'IOErrorEvent', 'JointStyle', 'Key', 'Keyboard', 'KeyboardEvent', 'KeyLocation',
+ 'LineScaleMode', 'Loader', 'LoaderContext', 'LoaderInfo', 'LoadVars', 'LocalConnection',
+ 'Locale', 'Math', 'Matrix', 'MemoryError', 'Microphone', 'MorphShape', 'Mouse', 'MouseEvent',
+ 'MovieClip', 'MovieClipLoader', 'Namespace', 'NetConnection', 'NetStatusEvent',
+ 'NetStream', 'NewObjectSample', 'Number', 'Object', 'ObjectEncoding', 'PixelSnapping',
+ 'Point', 'PrintJob', 'PrintJobOptions', 'PrintJobOrientation', 'ProgressEvent', 'Proxy',
+ 'QName', 'RangeError', 'Rectangle', 'ReferenceError', 'RegExp', 'Responder', 'Sample',
+ 'Scene', 'ScriptTimeoutError', 'Security', 'SecurityDomain', 'SecurityError',
+ 'SecurityErrorEvent', 'SecurityPanel', 'Selection', 'Shape', 'SharedObject',
+ 'SharedObjectFlushStatus', 'SimpleButton', 'Socket', 'Sound', 'SoundChannel',
+ 'SoundLoaderContext', 'SoundMixer', 'SoundTransform', 'SpreadMethod', 'Sprite',
+ 'StackFrame', 'StackOverflowError', 'Stage', 'StageAlign', 'StageDisplayState',
+ 'StageQuality', 'StageScaleMode', 'StaticText', 'StatusEvent', 'String', 'StyleSheet',
+ 'SWFVersion', 'SyncEvent', 'SyntaxError', 'System', 'TextColorType', 'TextField',
+ 'TextFieldAutoSize', 'TextFieldType', 'TextFormat', 'TextFormatAlign',
+ 'TextLineMetrics', 'TextRenderer', 'TextSnapshot', 'Timer', 'TimerEvent', 'Transform',
+ 'TypeError', 'uint', 'URIError', 'URLLoader', 'URLLoaderDataFormat', 'URLRequest',
+                'URLRequestHeader', 'URLRequestMethod', 'URLStream', 'URLVariables', 'VerifyError',
+ 'Video', 'XML', 'XMLDocument', 'XMLList', 'XMLNode', 'XMLNodeType', 'XMLSocket',
+ 'XMLUI'), suffix=r'\b'),
+ Name.Builtin),
+ (words((
+ 'decodeURI', 'decodeURIComponent', 'encodeURI', 'escape', 'eval', 'isFinite', 'isNaN',
+ 'isXMLName', 'clearInterval', 'fscommand', 'getTimer', 'getURL', 'getVersion',
+ 'parseFloat', 'parseInt', 'setInterval', 'trace', 'updateAfterEvent',
+ 'unescape'), suffix=r'\b'),
+ Name.Function),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
+
+
+class ActionScript3Lexer(RegexLexer):
+ """
+ For ActionScript 3 source code.
+
+ .. versionadded:: 0.11
+ """
+
+ name = 'ActionScript 3'
+ aliases = ['as3', 'actionscript3']
+ filenames = ['*.as']
+ mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
+ 'text/actionscript3']
+
+ identifier = r'[$a-zA-Z_]\w*'
+    typeidentifier = identifier + r'(?:\.<\w+>)?'
+
+ flags = re.DOTALL | re.MULTILINE
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'(function\s+)(' + identifier + r')(\s*)(\()',
+ bygroups(Keyword.Declaration, Name.Function, Text, Operator),
+ 'funcparams'),
+ (r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' +
+ typeidentifier + r')',
+ bygroups(Keyword.Declaration, Text, Name, Text, Punctuation, Text,
+ Keyword.Type)),
+ (r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)',
+ bygroups(Keyword, Text, Name.Namespace, Text)),
+ (r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()',
+ bygroups(Keyword, Text, Keyword.Type, Text, Operator)),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex),
+ (r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)),
+ (r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
+ r'throw|try|catch|with|new|typeof|arguments|instanceof|this|'
+ r'switch|import|include|as|is)\b',
+ Keyword),
+ (r'(class|public|final|internal|native|override|private|protected|'
+ r'static|import|extends|implements|interface|intrinsic|return|super|'
+ r'dynamic|function|const|get|namespace|package|set)\b',
+ Keyword.Declaration),
+ (r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b',
+ Keyword.Constant),
+ (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
+ r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
+ r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
+ r'unescape)\b', Name.Function),
+ (identifier, Name),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'[~^*!%&<>|+=:;,/?\\{}\[\]().-]+', Operator),
+ ],
+ 'funcparams': [
+ (r'\s+', Text),
+ (r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' +
+ typeidentifier + r'|\*)(\s*)',
+ bygroups(Text, Punctuation, Name, Text, Operator, Text,
+ Keyword.Type, Text), 'defval'),
+ (r'\)', Operator, 'type')
+ ],
+ 'type': [
+ (r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)',
+ bygroups(Text, Operator, Text, Keyword.Type), '#pop:2'),
+ (r'\s+', Text, '#pop:2'),
+ default('#pop:2')
+ ],
+ 'defval': [
+ (r'(=)(\s*)([^(),]+)(\s*)(,?)',
+ bygroups(Operator, Text, using(this), Text, Operator), '#pop'),
+ (r',', Operator, '#pop'),
+ default('#pop')
+ ]
+ }
+
+ def analyse_text(text):
+ if re.match(r'\w+\s*:\s*\w', text):
+ return 0.3
+ return 0
+
+
+class MxmlLexer(RegexLexer):
+ """
+ For MXML markup.
+ Nested AS3 in <script> tags is highlighted by the appropriate lexer.
+
+ .. versionadded:: 1.1
+ """
+ flags = re.MULTILINE | re.DOTALL
+ name = 'MXML'
+ aliases = ['mxml']
+ filenames = ['*.mxml']
+    mimetypes = ['text/xml', 'application/xml']
+
+ tokens = {
+ 'root': [
+ ('[^<&]+', Text),
+ (r'&\S*?;', Name.Entity),
+ (r'(\<\!\[CDATA\[)(.*?)(\]\]\>)',
+ bygroups(String, using(ActionScript3Lexer), String)),
+ ('<!--', Comment, 'comment'),
+ (r'<\?.*?\?>', Comment.Preproc),
+ ('<![^>]*>', Comment.Preproc),
+ (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
+ (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
+ ],
+ 'comment': [
+ ('[^-]+', Comment),
+ ('-->', Comment, '#pop'),
+ ('-', Comment),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
+ (r'/?\s*>', Name.Tag, '#pop'),
+ ],
+ 'attr': [
+            (r'\s+', Text),
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
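
A minimal usage sketch (an editorial aside, not part of the patch): once this module is on the path, the new lexers can be driven through the standard Pygments API. The MXML sample below is made up for illustration; MxmlLexer hands the CDATA body to ActionScript3Lexer via using(), as in the rules above.

# Editorial sketch: highlighting the new ActionScript/MXML lexers.
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.actionscript import ActionScript3Lexer, MxmlLexer

# A made-up MXML snippet; the <![CDATA[ ... ]]> body is re-lexed as AS3.
mxml_src = '''<?xml version="1.0"?>
<mx:Application xmlns:mx="http://www.adobe.com/2006/mxml">
  <mx:Script><![CDATA[
    private function greet(name:String):String {
        return "Hello, " + name;
    }
  ]]></mx:Script>
</mx:Application>'''

print(highlight(mxml_src, MxmlLexer(), TerminalFormatter()))

# Plain ActionScript 3 source can be highlighted directly as well.
print(highlight('var n:int = 42;', ActionScript3Lexer(), TerminalFormatter()))
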
diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py
index 1f81365e..defa7b6e 100644
--- a/pygments/lexers/agile.py
+++ b/pygments/lexers/agile.py
@@ -3,2288 +3,22 @@
pygments.lexers.agile
~~~~~~~~~~~~~~~~~~~~~
- Lexers for agile languages.
+ Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
-
-from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, \
- LexerContext, include, combined, do_insertions, bygroups, using, this
-from pygments.token import Error, Text, Other, \
- Comment, Operator, Keyword, Name, String, Number, Generic, Punctuation
-from pygments.util import get_bool_opt, get_list_opt, shebang_matches
-from pygments import unistring as uni
-
-
-__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
- 'Python3Lexer', 'Python3TracebackLexer', 'RubyLexer',
- 'RubyConsoleLexer', 'PerlLexer', 'LuaLexer', 'MoonScriptLexer',
- 'CrocLexer', 'MiniDLexer', 'IoLexer', 'TclLexer', 'FactorLexer',
- 'FancyLexer', 'DgLexer', 'Perl6Lexer']
-
-# b/w compatibility
-from pygments.lexers.functional import SchemeLexer
+from pygments.lexers.lisp import SchemeLexer
from pygments.lexers.jvm import IokeLexer, ClojureLexer
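
An editorial aside on the rewrite above (a sketch, assuming the rest of the rewritten module re-exports the relocated classes the same way as the visible SchemeLexer import): code that still imports from the old flat module keeps working, because the old path now resolves to the very same class objects exposed by the new per-language modules.

# Old and new import paths yield the identical class object.
from pygments.lexers.agile import PythonLexer as ViaOldPath   # compatibility re-export
from pygments.lexers.python import PythonLexer as ViaNewPath  # new canonical location
assert ViaOldPath is ViaNewPath  # only the import path changed
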
-
-line_re = re.compile('.*?\n')
-
-
-class PythonLexer(RegexLexer):
- """
- For `Python <http://www.python.org>`_ source code.
- """
-
- name = 'Python'
- aliases = ['python', 'py', 'sage']
- filenames = ['*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage']
- mimetypes = ['text/x-python', 'application/x-python']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
- (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
- (r'[^\S\n]+', Text),
- (r'#.*$', Comment),
- (r'[]{}:(),;[]', Punctuation),
- (r'\\\n', Text),
- (r'\\', Text),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
- include('keywords'),
- (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
- (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
- 'fromimport'),
- (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
- 'import'),
- include('builtins'),
- include('backtick'),
- ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
- ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
- ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
- ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
- ('[uU]?"""', String, combined('stringescape', 'tdqs')),
- ("[uU]?'''", String, combined('stringescape', 'tsqs')),
- ('[uU]?"', String, combined('stringescape', 'dqs')),
- ("[uU]?'", String, combined('stringescape', 'sqs')),
- include('name'),
- include('numbers'),
- ],
- 'keywords': [
- (r'(assert|break|continue|del|elif|else|except|exec|'
- r'finally|for|global|if|lambda|pass|print|raise|'
- r'return|try|while|yield(\s+from)?|as|with)\b', Keyword),
- ],
- 'builtins': [
- (r'(?<!\.)(__import__|abs|all|any|apply|basestring|bin|bool|buffer|'
- r'bytearray|bytes|callable|chr|classmethod|cmp|coerce|compile|'
- r'complex|delattr|dict|dir|divmod|enumerate|eval|execfile|exit|'
- r'file|filter|float|frozenset|getattr|globals|hasattr|hash|hex|id|'
- r'input|int|intern|isinstance|issubclass|iter|len|list|locals|'
- r'long|map|max|min|next|object|oct|open|ord|pow|property|range|'
- r'raw_input|reduce|reload|repr|reversed|round|set|setattr|slice|'
- r'sorted|staticmethod|str|sum|super|tuple|type|unichr|unicode|'
- r'vars|xrange|zip)\b', Name.Builtin),
- (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True'
- r')\b', Name.Builtin.Pseudo),
- (r'(?<!\.)(ArithmeticError|AssertionError|AttributeError|'
- r'BaseException|DeprecationWarning|EOFError|EnvironmentError|'
- r'Exception|FloatingPointError|FutureWarning|GeneratorExit|IOError|'
- r'ImportError|ImportWarning|IndentationError|IndexError|KeyError|'
- r'KeyboardInterrupt|LookupError|MemoryError|NameError|'
- r'NotImplemented|NotImplementedError|OSError|OverflowError|'
- r'OverflowWarning|PendingDeprecationWarning|ReferenceError|'
- r'RuntimeError|RuntimeWarning|StandardError|StopIteration|'
- r'SyntaxError|SyntaxWarning|SystemError|SystemExit|TabError|'
- r'TypeError|UnboundLocalError|UnicodeDecodeError|'
- r'UnicodeEncodeError|UnicodeError|UnicodeTranslateError|'
- r'UnicodeWarning|UserWarning|ValueError|VMSError|Warning|'
- r'WindowsError|ZeroDivisionError)\b', Name.Exception),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+j?', Number.Integer)
- ],
- 'backtick': [
- ('`.*?`', String.Backtick),
- ],
- 'name': [
- (r'@[a-zA-Z0-9_.]+', Name.Decorator),
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
- ],
- 'funcname': [
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
- ],
- 'classname': [
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
- ],
- 'import': [
- (r'(?:[ \t]|\\\n)+', Text),
- (r'as\b', Keyword.Namespace),
- (r',', Operator),
- (r'[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace),
- (r'', Text, '#pop') # all else: go back
- ],
- 'fromimport': [
- (r'(?:[ \t]|\\\n)+', Text),
- (r'import\b', Keyword.Namespace, '#pop'),
- # if None occurs here, it's "raise x from None", since None can
- # never be a module name
- (r'None\b', Name.Builtin.Pseudo, '#pop'),
- # sadly, in "raise x from y" y will be highlighted as namespace too
- (r'[a-zA-Z_.][a-zA-Z0-9_.]*', Name.Namespace),
- # anything else here also means "raise x from y" and is therefore
- # not an error
- (r'', Text, '#pop'),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings': [
- (r'%(\([a-zA-Z0-9_]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String)
- # newlines are an error (use "nl" state)
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl')
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- include('strings'),
- include('nl')
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'pythonw?(2(\.\d)?)?') or \
- 'import ' in text[:1000]
-
-
-class Python3Lexer(RegexLexer):
- """
- For `Python <http://www.python.org>`_ source code (version 3.0).
-
- *New in Pygments 0.10.*
- """
-
- name = 'Python 3'
- aliases = ['python3', 'py3']
- filenames = [] # Nothing until Python 3 gets widespread
- mimetypes = ['text/x-python3', 'application/x-python3']
-
- flags = re.MULTILINE | re.UNICODE
-
- uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
-
- tokens = PythonLexer.tokens.copy()
- tokens['keywords'] = [
- (r'(assert|break|continue|del|elif|else|except|'
- r'finally|for|global|if|lambda|pass|raise|nonlocal|'
- r'return|try|while|yield(\s+from)?|as|with|True|False|None)\b',
- Keyword),
- ]
- tokens['builtins'] = [
- (r'(?<!\.)(__import__|abs|all|any|bin|bool|bytearray|bytes|'
- r'chr|classmethod|cmp|compile|complex|delattr|dict|dir|'
- r'divmod|enumerate|eval|filter|float|format|frozenset|getattr|'
- r'globals|hasattr|hash|hex|id|input|int|isinstance|issubclass|'
- r'iter|len|list|locals|map|max|memoryview|min|next|object|oct|'
- r'open|ord|pow|print|property|range|repr|reversed|round|'
- r'set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|'
- r'vars|zip)\b', Name.Builtin),
- (r'(?<!\.)(self|Ellipsis|NotImplemented)\b', Name.Builtin.Pseudo),
- (r'(?<!\.)(ArithmeticError|AssertionError|AttributeError|'
- r'BaseException|BufferError|BytesWarning|DeprecationWarning|'
- r'EOFError|EnvironmentError|Exception|FloatingPointError|'
- r'FutureWarning|GeneratorExit|IOError|ImportError|'
- r'ImportWarning|IndentationError|IndexError|KeyError|'
- r'KeyboardInterrupt|LookupError|MemoryError|NameError|'
- r'NotImplementedError|OSError|OverflowError|'
- r'PendingDeprecationWarning|ReferenceError|'
- r'RuntimeError|RuntimeWarning|StopIteration|'
- r'SyntaxError|SyntaxWarning|SystemError|SystemExit|TabError|'
- r'TypeError|UnboundLocalError|UnicodeDecodeError|'
- r'UnicodeEncodeError|UnicodeError|UnicodeTranslateError|'
- r'UnicodeWarning|UserWarning|ValueError|VMSError|Warning|'
- r'WindowsError|ZeroDivisionError|'
- # new builtin exceptions from PEP 3151
- r'BlockingIOError|ChildProcessError|ConnectionError|'
- r'BrokenPipeError|ConnectionAbortedError|ConnectionRefusedError|'
- r'ConnectionResetError|FileExistsError|FileNotFoundError|'
- r'InterruptedError|IsADirectoryError|NotADirectoryError|'
- r'PermissionError|ProcessLookupError|TimeoutError)\b',
- Name.Exception),
- ]
- tokens['numbers'] = [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[bB][01]+', Number.Bin),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+', Number.Integer)
- ]
- tokens['backtick'] = []
- tokens['name'] = [
- (r'@[a-zA-Z0-9_]+', Name.Decorator),
- (uni_name, Name),
- ]
- tokens['funcname'] = [
- (uni_name, Name.Function, '#pop')
- ]
- tokens['classname'] = [
- (uni_name, Name.Class, '#pop')
- ]
- tokens['import'] = [
- (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
- (r'\.', Name.Namespace),
- (uni_name, Name.Namespace),
- (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
- (r'', Text, '#pop') # all else: go back
- ]
- tokens['fromimport'] = [
- (r'(\s+)(import)\b', bygroups(Text, Keyword), '#pop'),
- (r'\.', Name.Namespace),
- (uni_name, Name.Namespace),
- (r'', Text, '#pop'),
- ]
- # don't highlight "%s" substitutions
- tokens['strings'] = [
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String)
- # newlines are an error (use "nl" state)
- ]
-
- def analyse_text(text):
- return shebang_matches(text, r'pythonw?3(\.\d)?')
-
-
-class PythonConsoleLexer(Lexer):
- """
- For Python console output or doctests, such as:
-
- .. sourcecode:: pycon
-
- >>> a = 'foo'
- >>> print a
- foo
- >>> 1 / 0
- Traceback (most recent call last):
- File "<stdin>", line 1, in <module>
- ZeroDivisionError: integer division or modulo by zero
-
- Additional options:
-
- `python3`
- Use Python 3 lexer for code. Default is ``False``.
- *New in Pygments 1.0.*
- """
- name = 'Python console session'
- aliases = ['pycon']
- mimetypes = ['text/x-python-doctest']
-
- def __init__(self, **options):
- self.python3 = get_bool_opt(options, 'python3', False)
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- if self.python3:
- pylexer = Python3Lexer(**self.options)
- tblexer = Python3TracebackLexer(**self.options)
- else:
- pylexer = PythonLexer(**self.options)
- tblexer = PythonTracebackLexer(**self.options)
-
- curcode = ''
- insertions = []
- curtb = ''
- tbindex = 0
- tb = 0
- for match in line_re.finditer(text):
- line = match.group()
- if line.startswith(u'>>> ') or line.startswith(u'... '):
- tb = 0
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:4])]))
- curcode += line[4:]
- elif line.rstrip() == u'...' and not tb:
- # only a new >>> prompt can end an exception block
- # otherwise an ellipsis in place of the traceback frames
- # will be mishandled
- insertions.append((len(curcode),
- [(0, Generic.Prompt, u'...')]))
- curcode += line[3:]
- else:
- if curcode:
- for item in do_insertions(insertions,
- pylexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- if (line.startswith(u'Traceback (most recent call last):') or
- re.match(ur' File "[^"]+", line \d+\n$', line)):
- tb = 1
- curtb = line
- tbindex = match.start()
- elif line == 'KeyboardInterrupt\n':
- yield match.start(), Name.Class, line
- elif tb:
- curtb += line
- if not (line.startswith(' ') or line.strip() == u'...'):
- tb = 0
- for i, t, v in tblexer.get_tokens_unprocessed(curtb):
- yield tbindex+i, t, v
- else:
- yield match.start(), Generic.Output, line
- if curcode:
- for item in do_insertions(insertions,
- pylexer.get_tokens_unprocessed(curcode)):
- yield item
-
-
-class PythonTracebackLexer(RegexLexer):
- """
- For Python tracebacks.
-
- *New in Pygments 0.7.*
- """
-
- name = 'Python Traceback'
- aliases = ['pytb']
- filenames = ['*.pytb']
- mimetypes = ['text/x-python-traceback']
-
- tokens = {
- 'root': [
- (r'^Traceback \(most recent call last\):\n',
- Generic.Traceback, 'intb'),
- # SyntaxError starts with this.
- (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
- (r'^.*\n', Other),
- ],
- 'intb': [
- (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
- (r'^( File )("[^"]+")(, line )(\d+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text)),
- (r'^( )(.+)(\n)',
- bygroups(Text, using(PythonLexer), Text)),
- (r'^([ \t]*)(\.\.\.)(\n)',
- bygroups(Text, Comment, Text)), # for doctests...
- (r'^([^:]+)(: )(.+)(\n)',
- bygroups(Generic.Error, Text, Name, Text), '#pop'),
- (r'^([a-zA-Z_][a-zA-Z0-9_]*)(:?\n)',
- bygroups(Generic.Error, Text), '#pop')
- ],
- }
-
-
-class Python3TracebackLexer(RegexLexer):
- """
- For Python 3.0 tracebacks, with support for chained exceptions.
-
- *New in Pygments 1.0.*
- """
-
- name = 'Python 3.0 Traceback'
- aliases = ['py3tb']
- filenames = ['*.py3tb']
- mimetypes = ['text/x-python3-traceback']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
- (r'^During handling of the above exception, another '
- r'exception occurred:\n\n', Generic.Traceback),
- (r'^The above exception was the direct cause of the '
- r'following exception:\n\n', Generic.Traceback),
- (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
- ],
- 'intb': [
- (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
- (r'^( File )("[^"]+")(, line )(\d+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text)),
- (r'^( )(.+)(\n)',
- bygroups(Text, using(Python3Lexer), Text)),
- (r'^([ \t]*)(\.\.\.)(\n)',
- bygroups(Text, Comment, Text)), # for doctests...
- (r'^([^:]+)(: )(.+)(\n)',
- bygroups(Generic.Error, Text, Name, Text), '#pop'),
- (r'^([a-zA-Z_][a-zA-Z0-9_]*)(:?\n)',
- bygroups(Generic.Error, Text), '#pop')
- ],
- }
-
-
-class RubyLexer(ExtendedRegexLexer):
- """
- For `Ruby <http://www.ruby-lang.org>`_ source code.
- """
-
- name = 'Ruby'
- aliases = ['rb', 'ruby', 'duby']
- filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
- '*.rbx', '*.duby']
- mimetypes = ['text/x-ruby', 'application/x-ruby']
-
- flags = re.DOTALL | re.MULTILINE
-
- def heredoc_callback(self, match, ctx):
- # okay, this is the hardest part of parsing Ruby...
- # match: 1 = <<-?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
-
- start = match.start(1)
- yield start, Operator, match.group(1) # <<-?
- yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
- yield match.start(3), Name.Constant, match.group(3) # heredoc name
- yield match.start(4), String.Heredoc, match.group(4) # quote again
-
- heredocstack = ctx.__dict__.setdefault('heredocstack', [])
- outermost = not bool(heredocstack)
- heredocstack.append((match.group(1) == '<<-', match.group(3)))
-
- ctx.pos = match.start(5)
- ctx.end = match.end(5)
- # this may find other heredocs
- for i, t, v in self.get_tokens_unprocessed(context=ctx):
- yield i, t, v
- ctx.pos = match.end()
-
- if outermost:
- # this is the outer heredoc again, now we can process them all
- for tolerant, hdname in heredocstack:
- lines = []
- for match in line_re.finditer(ctx.text, ctx.pos):
- if tolerant:
- check = match.group().strip()
- else:
- check = match.group().rstrip()
- if check == hdname:
- for amatch in lines:
- yield amatch.start(), String.Heredoc, amatch.group()
- yield match.start(), Name.Constant, match.group()
- ctx.pos = match.end()
- break
- else:
- lines.append(match)
- else:
- # end of heredoc not found -- error!
- for amatch in lines:
- yield amatch.start(), Error, amatch.group()
- ctx.end = len(ctx.text)
- del heredocstack[:]
-
-
- def gen_rubystrings_rules():
- def intp_regex_callback(self, match, ctx):
- yield match.start(1), String.Regex, match.group(1) # begin
- nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
- for i, t, v in self.get_tokens_unprocessed(context=nctx):
- yield match.start(3)+i, t, v
- yield match.start(4), String.Regex, match.group(4) # end[mixounse]*
- ctx.pos = match.end()
-
- def intp_string_callback(self, match, ctx):
- yield match.start(1), String.Other, match.group(1)
- nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
- for i, t, v in self.get_tokens_unprocessed(context=nctx):
- yield match.start(3)+i, t, v
- yield match.start(4), String.Other, match.group(4) # end
- ctx.pos = match.end()
-
- states = {}
- states['strings'] = [
- # easy ones
- (r'\:@{0,2}([a-zA-Z_]\w*[\!\?]?|\*\*?|[-+]@?|'
- r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)', String.Symbol),
- (r":'(\\\\|\\'|[^'])*'", String.Symbol),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r':"', String.Symbol, 'simple-sym'),
- (r'([a-zA-Z_][a-zA-Z0-9_]*)(:)(?!:)',
- bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9
- (r'"', String.Double, 'simple-string'),
- (r'(?<!\.)`', String.Backtick, 'simple-backtick'),
- ]
-
- # double-quoted string, symbol and backtick
- for name, ttype, end in ('string', String.Double, '"'), \
- ('sym', String.Symbol, '"'), \
- ('backtick', String.Backtick, '`'):
- states['simple-'+name] = [
- include('string-intp-escaped'),
- (r'[^\\%s#]+' % end, ttype),
- (r'[\\#]', ttype),
- (end, ttype, '#pop'),
- ]
-
- # braced quoted strings
- for lbrace, rbrace, name in ('\\{', '\\}', 'cb'), \
- ('\\[', '\\]', 'sb'), \
- ('\\(', '\\)', 'pa'), \
- ('<', '>', 'ab'):
- states[name+'-intp-string'] = [
- (r'\\[\\' + lbrace + rbrace + ']', String.Other),
- (r'(?<!\\)' + lbrace, String.Other, '#push'),
- (r'(?<!\\)' + rbrace, String.Other, '#pop'),
- include('string-intp-escaped'),
- (r'[\\#' + lbrace + rbrace + ']', String.Other),
- (r'[^\\#' + lbrace + rbrace + ']+', String.Other),
- ]
- states['strings'].append((r'%[QWx]?' + lbrace, String.Other,
- name+'-intp-string'))
- states[name+'-string'] = [
- (r'\\[\\' + lbrace + rbrace + ']', String.Other),
- (r'(?<!\\)' + lbrace, String.Other, '#push'),
- (r'(?<!\\)' + rbrace, String.Other, '#pop'),
- (r'[\\#' + lbrace + rbrace + ']', String.Other),
- (r'[^\\#' + lbrace + rbrace + ']+', String.Other),
- ]
- states['strings'].append((r'%[qsw]' + lbrace, String.Other,
- name+'-string'))
- states[name+'-regex'] = [
- (r'\\[\\' + lbrace + rbrace + ']', String.Regex),
- (r'(?<!\\)' + lbrace, String.Regex, '#push'),
- (r'(?<!\\)' + rbrace + '[mixounse]*', String.Regex, '#pop'),
- include('string-intp'),
- (r'[\\#' + lbrace + rbrace + ']', String.Regex),
- (r'[^\\#' + lbrace + rbrace + ']+', String.Regex),
- ]
- states['strings'].append((r'%r' + lbrace, String.Regex,
- name+'-regex'))
-
- # these must come after %<brace>!
- states['strings'] += [
- # %r regex
- (r'(%r([^a-zA-Z0-9]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)',
- intp_regex_callback),
- # regular fancy strings with qsw
- (r'%[qsw]([^a-zA-Z0-9])((?:\\\1|(?!\1).)*)\1', String.Other),
- (r'(%[QWx]([^a-zA-Z0-9]))((?:\\\2|(?!\2).)*)(\2)',
- intp_string_callback),
- # special forms of fancy strings after operators or
- # in method calls with braces
- (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
- bygroups(Text, String.Other, None)),
- # and because of fixed width lookbehinds the whole thing a
- # second time for line startings...
- (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
- bygroups(Text, String.Other, None)),
- # all regular fancy strings without qsw
- (r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)',
- intp_string_callback),
- ]
-
- return states
-
- tokens = {
- 'root': [
- (r'#.*?$', Comment.Single),
- (r'=begin\s.*?\n=end.*?$', Comment.Multiline),
- # keywords
- (r'(BEGIN|END|alias|begin|break|case|defined\?|'
- r'do|else|elsif|end|ensure|for|if|in|next|redo|'
- r'rescue|raise|retry|return|super|then|undef|unless|until|when|'
- r'while|yield)\b', Keyword),
- # start of function, class and module names
- (r'(module)(\s+)([a-zA-Z_][a-zA-Z0-9_]*'
- r'(?:::[a-zA-Z_][a-zA-Z0-9_]*)*)',
- bygroups(Keyword, Text, Name.Namespace)),
- (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
- (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- # special methods
- (r'(initialize|new|loop|include|extend|raise|attr_reader|'
- r'attr_writer|attr_accessor|attr|catch|throw|private|'
- r'module_function|public|protected|true|false|nil)\b',
- Keyword.Pseudo),
- (r'(not|and|or)\b', Operator.Word),
- (r'(autoload|block_given|const_defined|eql|equal|frozen|include|'
- r'instance_of|is_a|iterator|kind_of|method_defined|nil|'
- r'private_method_defined|protected_method_defined|'
- r'public_method_defined|respond_to|tainted)\?', Name.Builtin),
- (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
- (r'(?<!\.)(Array|Float|Integer|String|__id__|__send__|abort|'
- r'ancestors|at_exit|autoload|binding|callcc|caller|'
- r'catch|chomp|chop|class_eval|class_variables|'
- r'clone|const_defined\?|const_get|const_missing|const_set|'
- r'constants|display|dup|eval|exec|exit|extend|fail|fork|'
- r'format|freeze|getc|gets|global_variables|gsub|'
- r'hash|id|included_modules|inspect|instance_eval|'
- r'instance_method|instance_methods|'
- r'instance_variable_get|instance_variable_set|instance_variables|'
- r'lambda|load|local_variables|loop|'
- r'method|method_missing|methods|module_eval|name|'
- r'object_id|open|p|print|printf|private_class_method|'
- r'private_instance_methods|'
- r'private_methods|proc|protected_instance_methods|'
- r'protected_methods|public_class_method|'
- r'public_instance_methods|public_methods|'
- r'putc|puts|raise|rand|readline|readlines|require|'
- r'scan|select|self|send|set_trace_func|singleton_methods|sleep|'
- r'split|sprintf|srand|sub|syscall|system|taint|'
- r'test|throw|to_a|to_s|trace_var|trap|untaint|untrace_var|'
- r'warn)\b', Name.Builtin),
- (r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
- # normal heredocs
- (r'(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
- heredoc_callback),
- # empty string heredocs
- (r'(<<-?)("|\')()(\2)(.*?\n)', heredoc_callback),
- (r'__END__', Comment.Preproc, 'end-part'),
- # multiline regex (after keywords or assignments)
- (r'(?:^|(?<=[=<>~!:])|'
- r'(?<=(?:\s|;)when\s)|'
- r'(?<=(?:\s|;)or\s)|'
- r'(?<=(?:\s|;)and\s)|'
- r'(?<=(?:\s|;|\.)index\s)|'
- r'(?<=(?:\s|;|\.)scan\s)|'
- r'(?<=(?:\s|;|\.)sub\s)|'
- r'(?<=(?:\s|;|\.)sub!\s)|'
- r'(?<=(?:\s|;|\.)gsub\s)|'
- r'(?<=(?:\s|;|\.)gsub!\s)|'
- r'(?<=(?:\s|;|\.)match\s)|'
- r'(?<=(?:\s|;)if\s)|'
- r'(?<=(?:\s|;)elsif\s)|'
- r'(?<=^when\s)|'
- r'(?<=^index\s)|'
- r'(?<=^scan\s)|'
- r'(?<=^sub\s)|'
- r'(?<=^gsub\s)|'
- r'(?<=^sub!\s)|'
- r'(?<=^gsub!\s)|'
- r'(?<=^match\s)|'
- r'(?<=^if\s)|'
- r'(?<=^elsif\s)'
- r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
- # multiline regex (in method calls or subscripts)
- (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
- # multiline regex (this time the funny no whitespace rule)
- (r'(\s+)(/)(?![\s=])', bygroups(Text, String.Regex),
- 'multiline-regex'),
- # lex numbers and ignore a regular expression that follows them,
- # since in that position the slash is really a division operator.
- # since pygments 0.7 we also eat a "?" operator after numbers,
- # so the char literal syntax does not apply there; chars are not
- # allowed in that position, which keeps the ternary operator
- # usable, e.g.:
- # x>=0?n[x]:""
- (r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
- bygroups(Number.Oct, Text, Operator)),
- (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
- bygroups(Number.Hex, Text, Operator)),
- (r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?',
- bygroups(Number.Bin, Text, Operator)),
- (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
- bygroups(Number.Integer, Text, Operator)),
- # Names
- (r'@@[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable.Class),
- (r'@[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable.Instance),
- (r'\$[a-zA-Z0-9_]+', Name.Variable.Global),
- (r'\$[!@&`\'+~=/\\,;.<>_*$?:"]', Name.Variable.Global),
- (r'\$-[0adFiIlpvw]', Name.Variable.Global),
- (r'::', Operator),
- include('strings'),
- # chars
- (r'\?(\\[MC]-)*' # modifiers
- r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
- r'(?!\w)',
- String.Char),
- (r'[A-Z][a-zA-Z0-9_]+', Name.Constant),
- # this is needed because ruby attribute and method names after
- # "." or "::" can look like keywords (class), like operators
- # (e.g. `, <, +), or end in "?" or "!"
- (r'(\.|::)([a-zA-Z_]\w*[\!\?]?|[*%&^`~+-/\[<>=])',
- bygroups(Operator, Name)),
- (r'[a-zA-Z_]\w*[\!\?]?', Name),
- (r'(\[|\]|\*\*|<<?|>>?|>=|<=|<=>|=~|={3}|'
- r'!~|&&?|\|\||\.{1,3})', Operator),
- (r'[-+/*%=<>&!^|~]=?', Operator),
- (r'[(){};,/?:\\]', Punctuation),
- (r'\s+', Text)
- ],
- 'funcname': [
- (r'\(', Punctuation, 'defexpr'),
- (r'(?:([a-zA-Z_][a-zA-Z0-9_]*)(\.))?'
- r'([a-zA-Z_]\w*[\!\?]?|\*\*?|[-+]@?|'
- r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)',
- bygroups(Name.Class, Operator, Name.Function), '#pop'),
- (r'', Text, '#pop')
- ],
- 'classname': [
- (r'\(', Punctuation, 'defexpr'),
- (r'<<', Operator, '#pop'),
- (r'[A-Z_]\w*', Name.Class, '#pop'),
- (r'', Text, '#pop')
- ],
- 'defexpr': [
- (r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'),
- (r'\(', Operator, '#push'),
- include('root')
- ],
- 'in-intp': [
- ('}', String.Interpol, '#pop'),
- include('root'),
- ],
- 'string-intp': [
- (r'#{', String.Interpol, 'in-intp'),
- (r'#@@?[a-zA-Z_][a-zA-Z0-9_]*', String.Interpol),
- (r'#\$[a-zA-Z_][a-zA-Z0-9_]*', String.Interpol)
- ],
- 'string-intp-escaped': [
- include('string-intp'),
- (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})',
- String.Escape)
- ],
- 'interpolated-regex': [
- include('string-intp'),
- (r'[\\#]', String.Regex),
- (r'[^\\#]+', String.Regex),
- ],
- 'interpolated-string': [
- include('string-intp'),
- (r'[\\#]', String.Other),
- (r'[^\\#]+', String.Other),
- ],
- 'multiline-regex': [
- include('string-intp'),
- (r'\\\\', String.Regex),
- (r'\\/', String.Regex),
- (r'[\\#]', String.Regex),
- (r'[^\\/#]+', String.Regex),
- (r'/[mixounse]*', String.Regex, '#pop'),
- ],
- 'end-part': [
- (r'.+', Comment.Preproc, '#pop')
- ]
- }
- tokens.update(gen_rubystrings_rules())
-
- def analyse_text(text):
- return shebang_matches(text, r'ruby(1\.\d)?')
-
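# Editor's sketch (not part of the removed hunk): a minimal way to watch the
# heredoc_callback above at work, using only the public Pygments API
# (get_lexer_by_name, Lexer.get_tokens).  Exact token boundaries may vary
# between Pygments versions; this is an illustration, not a test.
from pygments.lexers import get_lexer_by_name
from pygments.token import String

ruby_snippet = '''text = <<-EOS
  hello from a heredoc
EOS
puts text
'''

for tokentype, value in get_lexer_by_name('rb').get_tokens(ruby_snippet):
    if value and tokentype in String.Heredoc:
        print(repr(value))   # the heredoc body is emitted as String.Heredoc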
-
-class RubyConsoleLexer(Lexer):
- """
- For Ruby interactive console (**irb**) output like:
-
- .. sourcecode:: rbcon
-
- irb(main):001:0> a = 1
- => 1
- irb(main):002:0> puts a
- 1
- => nil
- """
- name = 'Ruby irb session'
- aliases = ['rbcon', 'irb']
- mimetypes = ['text/x-ruby-shellsession']
-
- _prompt_re = re.compile(r'irb\([a-zA-Z_][a-zA-Z0-9_]*\):\d{3}:\d+[>*"\'] '
- r'|>> |\?> ')
-
- def get_tokens_unprocessed(self, text):
- rblexer = RubyLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- m = self._prompt_re.match(line)
- if m is not None:
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
- for item in do_insertions(insertions,
- rblexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- yield match.start(), Generic.Output, line
- if curcode:
- for item in do_insertions(insertions,
- rblexer.get_tokens_unprocessed(curcode)):
- yield item
-
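# Editor's sketch (not part of the removed hunk): the console lexer above
# splits a transcript into Generic.Prompt, highlighted Ruby code (merged in
# via do_insertions) and Generic.Output.  A quick look at that split:
from pygments.lexers import get_lexer_by_name

session = 'irb(main):001:0> a = 1\n=> 1\n'
for tokentype, value in get_lexer_by_name('rbcon').get_tokens(session):
    print(tokentype, repr(value))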
-
-class PerlLexer(RegexLexer):
- """
- For `Perl <http://www.perl.org>`_ source code.
- """
-
- name = 'Perl'
- aliases = ['perl', 'pl']
- filenames = ['*.pl', '*.pm']
- mimetypes = ['text/x-perl', 'application/x-perl']
-
- flags = re.DOTALL | re.MULTILINE
- # TODO: give this to a perl guy who knows how to parse perl...
- tokens = {
- 'balanced-regex': [
- (r'/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*', String.Regex, '#pop'),
- (r'!(\\\\|\\[^\\]|[^\\!])*![egimosx]*', String.Regex, '#pop'),
- (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
- (r'{(\\\\|\\[^\\]|[^\\}])*}[egimosx]*', String.Regex, '#pop'),
- (r'<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*', String.Regex, '#pop'),
- (r'\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*', String.Regex, '#pop'),
- (r'\((\\\\|\\[^\\]|[^\\\)])*\)[egimosx]*', String.Regex, '#pop'),
- (r'@(\\\\|\\[^\\]|[^\\\@])*@[egimosx]*', String.Regex, '#pop'),
- (r'%(\\\\|\\[^\\]|[^\\\%])*%[egimosx]*', String.Regex, '#pop'),
- (r'\$(\\\\|\\[^\\]|[^\\\$])*\$[egimosx]*', String.Regex, '#pop'),
- ],
- 'root': [
- (r'\#.*?$', Comment.Single),
- (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline),
- (r'(case|continue|do|else|elsif|for|foreach|if|last|my|'
- r'next|our|redo|reset|then|unless|until|while|use|'
- r'print|new|BEGIN|CHECK|INIT|END|return)\b', Keyword),
- (r'(format)(\s+)([a-zA-Z0-9_]+)(\s*)(=)(\s*\n)',
- bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'),
- (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
- # common delimiters
- (r's/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*',
- String.Regex),
- (r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex),
- (r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex),
- (r's@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*',
- String.Regex),
- (r's%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*',
- String.Regex),
- # balanced delimiters
- (r's{(\\\\|\\[^\\]|[^\\}])*}\s*', String.Regex, 'balanced-regex'),
- (r's<(\\\\|\\[^\\]|[^\\>])*>\s*', String.Regex, 'balanced-regex'),
- (r's\[(\\\\|\\[^\\]|[^\\\]])*\]\s*', String.Regex,
- 'balanced-regex'),
- (r's\((\\\\|\\[^\\]|[^\\\)])*\)\s*', String.Regex,
- 'balanced-regex'),
-
- (r'm?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*', String.Regex),
- (r'm(?=[/!\\{<\[\(@%\$])', String.Regex, 'balanced-regex'),
- (r'((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*',
- String.Regex),
- (r'\s+', Text),
- (r'(abs|accept|alarm|atan2|bind|binmode|bless|caller|chdir|'
- r'chmod|chomp|chop|chown|chr|chroot|close|closedir|connect|'
- r'continue|cos|crypt|dbmclose|dbmopen|defined|delete|die|'
- r'dump|each|endgrent|endhostent|endnetent|endprotoent|'
- r'endpwent|endservent|eof|eval|exec|exists|exit|exp|fcntl|'
- r'fileno|flock|fork|format|formline|getc|getgrent|getgrgid|'
- r'getgrnam|gethostbyaddr|gethostbyname|gethostent|getlogin|'
- r'getnetbyaddr|getnetbyname|getnetent|getpeername|getpgrp|'
- r'getppid|getpriority|getprotobyname|getprotobynumber|'
- r'getprotoent|getpwent|getpwnam|getpwuid|getservbyname|'
- r'getservbyport|getservent|getsockname|getsockopt|glob|gmtime|'
- r'goto|grep|hex|import|index|int|ioctl|join|keys|kill|last|'
- r'lc|lcfirst|length|link|listen|local|localtime|log|lstat|'
- r'map|mkdir|msgctl|msgget|msgrcv|msgsnd|my|next|no|oct|open|'
- r'opendir|ord|our|pack|package|pipe|pop|pos|printf|'
- r'prototype|push|quotemeta|rand|read|readdir|'
- r'readline|readlink|readpipe|recv|redo|ref|rename|require|'
- r'reverse|rewinddir|rindex|rmdir|scalar|seek|seekdir|'
- r'select|semctl|semget|semop|send|setgrent|sethostent|setnetent|'
- r'setpgrp|setpriority|setprotoent|setpwent|setservent|'
- r'setsockopt|shift|shmctl|shmget|shmread|shmwrite|shutdown|'
- r'sin|sleep|socket|socketpair|sort|splice|split|sprintf|sqrt|'
- r'srand|stat|study|substr|symlink|syscall|sysopen|sysread|'
- r'sysseek|system|syswrite|tell|telldir|tie|tied|time|times|tr|'
- r'truncate|uc|ucfirst|umask|undef|unlink|unpack|unshift|untie|'
- r'utime|values|vec|wait|waitpid|wantarray|warn|write'
- r')\b', Name.Builtin),
- (r'((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b', Name.Builtin.Pseudo),
- (r'<<([\'"]?)([a-zA-Z_][a-zA-Z0-9_]*)\1;?\n.*?\n\2\n', String),
- (r'__END__', Comment.Preproc, 'end-part'),
- (r'\$\^[ADEFHILMOPSTWX]', Name.Variable.Global),
- (r"\$[\\\"\[\]'&`+*.,;=%~?@$!<>(^|/-](?!\w)", Name.Variable.Global),
- (r'[$@%#]+', Name.Variable, 'varname'),
- (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
- (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
- (r'0b[01]+(_[01]+)*', Number.Bin),
- (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
- Number.Float),
- (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
- (r'\d+(_\d+)*', Number.Integer),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r'`(\\\\|\\[^\\]|[^`\\])*`', String.Backtick),
- (r'<([^\s>]+)>', String.Regex),
- (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
- (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
- (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
- (r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'),
- (r'(q|qq|qw|qr|qx)([^a-zA-Z0-9])(.|\n)*?\2', String.Other),
- (r'package\s+', Keyword, 'modulename'),
- (r'sub\s+', Keyword, 'funcname'),
- (r'(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|'
- r'!~|&&?|\|\||\.{1,3})', Operator),
- (r'[-+/*%=<>&^|!\\~]=?', Operator),
- (r'[\(\)\[\]:;,<>/\?\{\}]', Punctuation), # yes, there's no shortage
- # of punctuation in Perl!
- (r'(?=\w)', Name, 'name'),
- ],
- 'format': [
- (r'\.\n', String.Interpol, '#pop'),
- (r'[^\n]*\n', String.Interpol),
- ],
- 'varname': [
- (r'\s+', Text),
- (r'\{', Punctuation, '#pop'), # hash syntax?
- (r'\)|,', Punctuation, '#pop'), # argument specifier
- (r'[a-zA-Z0-9_]+::', Name.Namespace),
- (r'[a-zA-Z0-9_:]+', Name.Variable, '#pop'),
- ],
- 'name': [
- (r'[a-zA-Z0-9_]+::', Name.Namespace),
- (r'[a-zA-Z0-9_:]+', Name, '#pop'),
- (r'[A-Z_]+(?=[^a-zA-Z0-9_])', Name.Constant, '#pop'),
- (r'(?=[^a-zA-Z0-9_])', Text, '#pop'),
- ],
- 'modulename': [
- (r'[a-zA-Z_]\w*', Name.Namespace, '#pop')
- ],
- 'funcname': [
- (r'[a-zA-Z_]\w*[\!\?]?', Name.Function),
- (r'\s+', Text),
- # argument declaration
- (r'(\([$@%]*\))(\s*)', bygroups(Punctuation, Text)),
- (r'.*?{', Punctuation, '#pop'),
- (r';', Punctuation, '#pop'),
- ],
- 'cb-string': [
- (r'\\[\{\}\\]', String.Other),
- (r'\\', String.Other),
- (r'\{', String.Other, 'cb-string'),
- (r'\}', String.Other, '#pop'),
- (r'[^\{\}\\]+', String.Other)
- ],
- 'rb-string': [
- (r'\\[\(\)\\]', String.Other),
- (r'\\', String.Other),
- (r'\(', String.Other, 'rb-string'),
- (r'\)', String.Other, '#pop'),
- (r'[^\(\)]+', String.Other)
- ],
- 'sb-string': [
- (r'\\[\[\]\\]', String.Other),
- (r'\\', String.Other),
- (r'\[', String.Other, 'sb-string'),
- (r'\]', String.Other, '#pop'),
- (r'[^\[\]]+', String.Other)
- ],
- 'lt-string': [
- (r'\\[\<\>\\]', String.Other),
- (r'\\', String.Other),
- (r'\<', String.Other, 'lt-string'),
- (r'\>', String.Other, '#pop'),
- (r'[^\<\>]+', String.Other)
- ],
- 'end-part': [
- (r'.+', Comment.Preproc, '#pop')
- ]
- }
-
- def analyse_text(text):
- if shebang_matches(text, r'perl'):
- return True
- if 'my $' in text:
- return 0.9
- return 0.1 # who knows, might still be perl!
-
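# Editor's sketch (not part of the removed hunk): analyse_text above is the
# hook that guess_lexer consults, so a perl shebang or a "my $" declaration
# should normally be enough to select this lexer.  Other lexers also score
# the text, so the exact result is not guaranteed across Pygments versions.
from pygments.lexers import guess_lexer

print(guess_lexer('#!/usr/bin/perl\nmy $x = 42;\nprint $x;\n').name)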
-
-class LuaLexer(RegexLexer):
- """
- For `Lua <http://www.lua.org>`_ source code.
-
- Additional options accepted:
-
- `func_name_highlighting`
- If given and ``True``, highlight builtin function names
- (default: ``True``).
- `disabled_modules`
- If given, must be a list of module names whose function names
- should not be highlighted. By default all modules are highlighted.
-
- To get a list of allowed modules have a look into the
- `_luabuiltins` module:
-
- .. sourcecode:: pycon
-
- >>> from pygments.lexers._luabuiltins import MODULES
- >>> MODULES.keys()
- ['string', 'coroutine', 'modules', 'io', 'basic', ...]
- """
-
- name = 'Lua'
- aliases = ['lua']
- filenames = ['*.lua', '*.wlua']
- mimetypes = ['text/x-lua', 'application/x-lua']
-
- tokens = {
- 'root': [
- # lua allows a file to start with a shebang
- (r'#!(.*?)$', Comment.Preproc),
- (r'', Text, 'base'),
- ],
- 'base': [
- (r'(?s)--\[(=*)\[.*?\]\1\]', Comment.Multiline),
- ('--.*$', Comment.Single),
-
- (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
- (r'(?i)\d+e[+-]?\d+', Number.Float),
- ('(?i)0x[0-9a-f]*', Number.Hex),
- (r'\d+', Number.Integer),
-
- (r'\n', Text),
- (r'[^\S\n]', Text),
- # multiline strings
- (r'(?s)\[(=*)\[.*?\]\1\]', String),
-
- (r'(==|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#])', Operator),
- (r'[\[\]\{\}\(\)\.,:;]', Punctuation),
- (r'(and|or|not)\b', Operator.Word),
-
- ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|'
- r'while)\b', Keyword),
- (r'(local)\b', Keyword.Declaration),
- (r'(true|false|nil)\b', Keyword.Constant),
-
- (r'(function)\b', Keyword, 'funcname'),
-
- (r'[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)?', Name),
-
- ("'", String.Single, combined('stringescape', 'sqs')),
- ('"', String.Double, combined('stringescape', 'dqs'))
- ],
-
- 'funcname': [
- (r'\s+', Text),
- ('(?:([A-Za-z_][A-Za-z0-9_]*)(\.))?([A-Za-z_][A-Za-z0-9_]*)',
- bygroups(Name.Class, Punctuation, Name.Function), '#pop'),
- # inline function
- ('\(', Punctuation, '#pop'),
- ],
-
- # if I understand correctly, every character is valid in a lua string,
- # so this state is only for later corrections
- 'string': [
- ('.', String)
- ],
-
- 'stringescape': [
- (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
- ],
-
- 'sqs': [
- ("'", String, '#pop'),
- include('string')
- ],
-
- 'dqs': [
- ('"', String, '#pop'),
- include('string')
- ]
- }
-
- def __init__(self, **options):
- self.func_name_highlighting = get_bool_opt(
- options, 'func_name_highlighting', True)
- self.disabled_modules = get_list_opt(options, 'disabled_modules', [])
-
- self._functions = set()
- if self.func_name_highlighting:
- from pygments.lexers._luabuiltins import MODULES
- for mod, func in MODULES.iteritems():
- if mod not in self.disabled_modules:
- self._functions.update(func)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if value in self._functions:
- yield index, Name.Builtin, value
- continue
- elif '.' in value:
- a, b = value.split('.')
- yield index, Name, a
- yield index + len(a), Punctuation, u'.'
- yield index + len(a) + 1, Name, b
- continue
- yield index, token, value
-
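# Editor's sketch (not part of the removed hunk): the two options documented
# in the LuaLexer docstring.  With the 'string' module disabled, string.format
# is no longer treated as a builtin and gets split at the dot instead.
from pygments.lexers import get_lexer_by_name

code = 'print(string.format("%d", 42))\n'
lua = get_lexer_by_name('lua', disabled_modules=['string'])
for tokentype, value in lua.get_tokens(code):
    if 'format' in value:
        print(tokentype, repr(value))
# disabled: Token.Name 'format' (split off after the dot)
# default:  Token.Name.Builtin 'string.format' (kept as one builtin token)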
-
-class MoonScriptLexer(LuaLexer):
- """
- For `MoonScript <http://moonscript.org>`_ source code.
-
- *New in Pygments 1.5.*
- """
-
- name = "MoonScript"
- aliases = ["moon", "moonscript"]
- filenames = ["*.moon"]
- mimetypes = ['text/x-moonscript', 'application/x-moonscript']
-
- tokens = {
- 'root': [
- (r'#!(.*?)$', Comment.Preproc),
- (r'', Text, 'base'),
- ],
- 'base': [
- ('--.*$', Comment.Single),
- (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
- (r'(?i)\d+e[+-]?\d+', Number.Float),
- (r'(?i)0x[0-9a-f]*', Number.Hex),
- (r'\d+', Number.Integer),
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'(?s)\[(=*)\[.*?\]\1\]', String),
- (r'(->|=>)', Name.Function),
- (r':[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),
- (r'(==|!=|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#!.\\:])', Operator),
- (r'[;,]', Punctuation),
- (r'[\[\]\{\}\(\)]', Keyword.Type),
- (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Variable),
- (r"(class|extends|if|then|super|do|with|import|export|"
- r"while|elseif|return|for|in|from|when|using|else|"
- r"and|or|not|switch|break)\b", Keyword),
- (r'(true|false|nil)\b', Keyword.Constant),
- (r'(and|or|not)\b', Operator.Word),
- (r'(self)\b', Name.Builtin.Pseudo),
- (r'@@?([a-zA-Z_][a-zA-Z0-9_]*)?', Name.Variable.Class),
- (r'[A-Z]\w*', Name.Class), # proper name
- (r'[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)?', Name),
- ("'", String.Single, combined('stringescape', 'sqs')),
- ('"', String.Double, combined('stringescape', 'dqs'))
- ],
- 'stringescape': [
- (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
- ],
- 'sqs': [
- ("'", String.Single, '#pop'),
- (".", String)
- ],
- 'dqs': [
- ('"', String.Double, '#pop'),
- (".", String)
- ]
- }
-
- def get_tokens_unprocessed(self, text):
- # set . as Operator instead of Punctuation
- for index, token, value in \
- LuaLexer.get_tokens_unprocessed(self, text):
- if token == Punctuation and value == ".":
- token = Operator
- yield index, token, value
-
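# Editor's sketch (not part of the removed hunk): the override above remaps
# '.' from Punctuation to Operator after the regular regex pass -- a cheap
# post-processing trick instead of reworking the inherited state machine.
from pygments.lexers import get_lexer_by_name
from pygments.token import Operator

for tokentype, value in get_lexer_by_name('moon').get_tokens('tbl.field\n'):
    if value == '.':
        print(tokentype is Operator)   # expected: True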
-
-class CrocLexer(RegexLexer):
- """
- For `Croc <http://jfbillingsley.com/croc>`_ source.
- """
- name = 'Croc'
- filenames = ['*.croc']
- aliases = ['croc']
- mimetypes = ['text/x-crocsrc']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
- # Comments
- (r'//(.*?)\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'nestedcomment'),
- # Keywords
- (r'(as|assert|break|case|catch|class|continue|default'
- r'|do|else|finally|for|foreach|function|global|namespace'
- r'|if|import|in|is|local|module|return|scope|super|switch'
- r'|this|throw|try|vararg|while|with|yield)\b', Keyword),
- (r'(false|true|null)\b', Keyword.Constant),
- # FloatLiteral
- (r'([0-9][0-9_]*)(?=[.eE])(\.[0-9][0-9_]*)?([eE][+\-]?[0-9_]+)?',
- Number.Float),
- # IntegerLiteral
- # -- Binary
- (r'0[bB][01][01_]*', Number),
- # -- Hexadecimal
- (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex),
- # -- Decimal
- (r'([0-9][0-9_]*)(?![.eE])', Number.Integer),
- # CharacterLiteral
- (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-9]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""",
- String.Char
- ),
- # StringLiteral
- # -- WysiwygString
- (r'@"(""|[^"])*"', String),
- (r'@`(``|[^`])*`', String),
- (r"@'(''|[^'])*'", String),
- # -- DoubleQuotedString
- (r'"(\\\\|\\"|[^"])*"', String),
- # Tokens
- (
- r'(~=|\^=|%=|\*=|==|!=|>>>=|>>>|>>=|>>|>=|<=>|\?=|-\>'
- r'|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.|/=)'
- r'|[-/.&$@|\+<>!()\[\]{}?,;:=*%^~#\\]', Punctuation
- ),
- # Identifier
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'nestedcomment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- }
-
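# Editor's sketch (not part of the removed hunk): the 'nestedcomment' state
# above uses '#push'/'#pop' so nested /* ... /* ... */ ... */ comments are
# consumed as one Comment run instead of ending at the first '*/'.
from pygments.lexers import get_lexer_by_name
from pygments.token import Comment

croc = '/* outer /* inner */ still a comment */ x = 1'
tokens = list(get_lexer_by_name('croc').get_tokens(croc))
print(all(t in Comment for t, v in tokens if '*' in v))   # expected: True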
-
-class MiniDLexer(CrocLexer):
- """
- For MiniD source. MiniD is now known as Croc.
- """
- name = 'MiniD'
- filenames = ['*.md']
- aliases = ['minid']
- mimetypes = ['text/x-minidsrc']
-
-
-class IoLexer(RegexLexer):
- """
- For `Io <http://iolanguage.com/>`_ (a small, prototype-based
- programming language) source.
-
- *New in Pygments 0.10.*
- """
- name = 'Io'
- filenames = ['*.io']
- aliases = ['io']
- mimetypes = ['text/x-iosrc']
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
- # Comments
- (r'//(.*?)\n', Comment.Single),
- (r'#(.*?)\n', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'/\+', Comment.Multiline, 'nestedcomment'),
- # DoubleQuotedString
- (r'"(\\\\|\\"|[^"])*"', String),
- # Operators
- (r'::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}',
- Operator),
- # keywords
- (r'(clone|do|doFile|doString|method|for|if|else|elseif|then)\b',
- Keyword),
- # constants
- (r'(nil|false|true)\b', Name.Constant),
- # names
- (r'(Object|list|List|Map|args|Sequence|Coroutine|File)\b',
- Name.Builtin),
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
- # numbers
- (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer)
- ],
- 'nestedcomment': [
- (r'[^+/]+', Comment.Multiline),
- (r'/\+', Comment.Multiline, '#push'),
- (r'\+/', Comment.Multiline, '#pop'),
- (r'[+/]', Comment.Multiline),
- ]
- }
-
-
-class TclLexer(RegexLexer):
- """
- For Tcl source code.
-
- *New in Pygments 0.10.*
- """
-
- keyword_cmds_re = (
- r'\b(after|apply|array|break|catch|continue|elseif|else|error|'
- r'eval|expr|for|foreach|global|if|namespace|proc|rename|return|'
- r'set|switch|then|trace|unset|update|uplevel|upvar|variable|'
- r'vwait|while)\b'
- )
-
- builtin_cmds_re = (
- r'\b(append|bgerror|binary|cd|chan|clock|close|concat|dde|dict|'
- r'encoding|eof|exec|exit|fblocked|fconfigure|fcopy|file|'
- r'fileevent|flush|format|gets|glob|history|http|incr|info|interp|'
- r'join|lappend|lassign|lindex|linsert|list|llength|load|loadTk|'
- r'lrange|lrepeat|lreplace|lreverse|lsearch|lset|lsort|mathfunc|'
- r'mathop|memory|msgcat|open|package|pid|pkg::create|pkg_mkIndex|'
- r'platform|platform::shell|puts|pwd|re_syntax|read|refchan|'
- r'regexp|registry|regsub|scan|seek|socket|source|split|string|'
- r'subst|tell|time|tm|unknown|unload)\b'
- )
-
- name = 'Tcl'
- aliases = ['tcl']
- filenames = ['*.tcl']
- mimetypes = ['text/x-tcl', 'text/x-script.tcl', 'application/x-tcl']
-
- def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""):
- return [
- (keyword_cmds_re, Keyword, 'params' + context),
- (builtin_cmds_re, Name.Builtin, 'params' + context),
- (r'([\w\.\-]+)', Name.Variable, 'params' + context),
- (r'#', Comment, 'comment'),
- ]
-
- tokens = {
- 'root': [
- include('command'),
- include('basic'),
- include('data'),
- (r'}', Keyword), # HACK: somehow we miscounted our braces
- ],
- 'command': _gen_command_rules(keyword_cmds_re, builtin_cmds_re),
- 'command-in-brace': _gen_command_rules(keyword_cmds_re,
- builtin_cmds_re,
- "-in-brace"),
- 'command-in-bracket': _gen_command_rules(keyword_cmds_re,
- builtin_cmds_re,
- "-in-bracket"),
- 'command-in-paren': _gen_command_rules(keyword_cmds_re,
- builtin_cmds_re,
- "-in-paren"),
- 'basic': [
- (r'\(', Keyword, 'paren'),
- (r'\[', Keyword, 'bracket'),
- (r'\{', Keyword, 'brace'),
- (r'"', String.Double, 'string'),
- (r'(eq|ne|in|ni)\b', Operator.Word),
- (r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator),
- ],
- 'data': [
- (r'\s+', Text),
- (r'0x[a-fA-F0-9]+', Number.Hex),
- (r'0[0-7]+', Number.Oct),
- (r'\d+\.\d+', Number.Float),
- (r'\d+', Number.Integer),
- (r'\$([\w\.\-\:]+)', Name.Variable),
- (r'([\w\.\-\:]+)', Text),
- ],
- 'params': [
- (r';', Keyword, '#pop'),
- (r'\n', Text, '#pop'),
- (r'(else|elseif|then)\b', Keyword),
- include('basic'),
- include('data'),
- ],
- 'params-in-brace': [
- (r'}', Keyword, ('#pop', '#pop')),
- include('params')
- ],
- 'params-in-paren': [
- (r'\)', Keyword, ('#pop', '#pop')),
- include('params')
- ],
- 'params-in-bracket': [
- (r'\]', Keyword, ('#pop', '#pop')),
- include('params')
- ],
- 'string': [
- (r'\[', String.Double, 'string-square'),
- (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\])', String.Double),
- (r'"', String.Double, '#pop')
- ],
- 'string-square': [
- (r'\[', String.Double, 'string-square'),
- (r'(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])', String.Double),
- (r'\]', String.Double, '#pop')
- ],
- 'brace': [
- (r'}', Keyword, '#pop'),
- include('command-in-brace'),
- include('basic'),
- include('data'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('command-in-paren'),
- include('basic'),
- include('data'),
- ],
- 'bracket': [
- (r'\]', Keyword, '#pop'),
- include('command-in-bracket'),
- include('basic'),
- include('data'),
- ],
- 'comment': [
- (r'.*[^\\]\n', Comment, '#pop'),
- (r'.*\\\n', Comment),
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'(tcl)')
-
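# Editor's sketch (not part of the removed hunk): _gen_command_rules above
# stamps out one command state per bracketing context so that commands keep
# being recognized inside { }, [ ] and ( ), and the closing delimiter can pop
# two states at once.  A quick smoke test of that behaviour:
from pygments.lexers import get_lexer_by_name
from pygments.token import Name

tcl = 'if {$x > 1} { puts "big" }\n'
for tokentype, value in get_lexer_by_name('tcl').get_tokens(tcl):
    if tokentype is Name.Builtin:
        print(repr(value))   # expected: 'puts', found inside the braced body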
-
-class FactorLexer(RegexLexer):
- """
- Lexer for the `Factor <http://factorcode.org>`_ language.
-
- *New in Pygments 1.4.*
- """
- name = 'Factor'
- aliases = ['factor']
- filenames = ['*.factor']
- mimetypes = ['text/x-factor']
-
- flags = re.MULTILINE | re.UNICODE
-
- builtin_kernel = (
- r'(?:or|2bi|2tri|while|wrapper|nip|4dip|wrapper\\?|bi\\*|'
- r'callstack>array|both\\?|hashcode|die|dupd|callstack|'
- r'callstack\\?|3dup|tri@|pick|curry|build|\\?execute|3bi|'
- r'prepose|>boolean|\\?if|clone|eq\\?|tri\\*|\\?|=|swapd|'
- r'2over|2keep|3keep|clear|2dup|when|not|tuple\\?|dup|2bi\\*|'
- r'2tri\\*|call|tri-curry|object|bi@|do|unless\\*|if\\*|loop|'
- r'bi-curry\\*|drop|when\\*|assert=|retainstack|assert\\?|-rot|'
- r'execute|2bi@|2tri@|boa|with|either\\?|3drop|bi|curry\\?|'
- r'datastack|until|3dip|over|3curry|tri-curry\\*|tri-curry@|swap|'
- r'and|2nip|throw|bi-curry|\\(clone\\)|hashcode\\*|compose|2dip|if|3tri|'
- r'unless|compose\\?|tuple|keep|2curry|equal\\?|assert|tri|2drop|'
- r'most|<wrapper>|boolean\\?|identity-hashcode|identity-tuple\\?|'
- r'null|new|dip|bi-curry@|rot|xor|identity-tuple|boolean)\s'
- )
-
- builtin_assocs = (
- r'(?:\\?at|assoc\\?|assoc-clone-like|assoc=|delete-at\\*|'
- r'assoc-partition|extract-keys|new-assoc|value\\?|assoc-size|'
- r'map>assoc|push-at|assoc-like|key\\?|assoc-intersect|'
- r'assoc-refine|update|assoc-union|assoc-combine|at\\*|'
- r'assoc-empty\\?|at\\+|set-at|assoc-all\\?|assoc-subset\\?|'
- r'assoc-hashcode|change-at|assoc-each|assoc-diff|zip|values|'
- r'value-at|rename-at|inc-at|enum\\?|at|cache|assoc>map|<enum>|'
- r'assoc|assoc-map|enum|value-at\\*|assoc-map-as|>alist|'
- r'assoc-filter-as|clear-assoc|assoc-stack|maybe-set-at|'
- r'substitute|assoc-filter|2cache|delete-at|assoc-find|keys|'
- r'assoc-any\\?|unzip)\s'
- )
-
- builtin_combinators = (
- r'(?:case|execute-effect|no-cond|no-case\\?|3cleave>quot|2cleave|'
- r'cond>quot|wrong-values\\?|no-cond\\?|cleave>quot|no-case|'
- r'case>quot|3cleave|wrong-values|to-fixed-point|alist>quot|'
- r'case-find|cond|cleave|call-effect|2cleave>quot|recursive-hashcode|'
- r'linear-case-quot|spread|spread>quot)\s'
- )
-
- builtin_math = (
- r'(?:number=|if-zero|next-power-of-2|each-integer|\\?1\\+|'
- r'fp-special\\?|imaginary-part|unless-zero|float>bits|number\\?|'
- r'fp-infinity\\?|bignum\\?|fp-snan\\?|denominator|fp-bitwise=|\\*|'
- r'\\+|power-of-2\\?|-|u>=|/|>=|bitand|log2-expects-positive|<|'
- r'log2|>|integer\\?|number|bits>double|2/|zero\\?|(find-integer)|'
- r'bits>float|float\\?|shift|ratio\\?|even\\?|ratio|fp-sign|bitnot|'
- r'>fixnum|complex\\?|/i|/f|byte-array>bignum|when-zero|sgn|>bignum|'
- r'next-float|u<|u>|mod|recip|rational|find-last-integer|>float|'
- r'(all-integers\\?)|2^|times|integer|fixnum\\?|neg|fixnum|sq|'
- r'bignum|(each-integer)|bit\\?|fp-qnan\\?|find-integer|complex|'
- r'<fp-nan>|real|double>bits|bitor|rem|fp-nan-payload|all-integers\\?|'
- r'real-part|log2-expects-positive\\?|prev-float|align|unordered\\?|'
- r'float|fp-nan\\?|abs|bitxor|u<=|odd\\?|<=|/mod|rational\\?|>integer|'
- r'real\\?|numerator)\s'
- )
-
- builtin_sequences = (
- r'(?:member-eq\\?|append|assert-sequence=|find-last-from|trim-head-slice|'
- r'clone-like|3sequence|assert-sequence\\?|map-as|last-index-from|'
- r'reversed|index-from|cut\\*|pad-tail|remove-eq!|concat-as|'
- r'but-last|snip|trim-tail|nths|nth|2selector|sequence|slice\\?|'
- r'<slice>|partition|remove-nth|tail-slice|empty\\?|tail\\*|'
- r'if-empty|find-from|virtual-sequence\\?|member\\?|set-length|'
- r'drop-prefix|unclip|unclip-last-slice|iota|map-sum|'
- r'bounds-error\\?|sequence-hashcode-step|selector-for|'
- r'accumulate-as|map|start|midpoint@|\\(accumulate\\)|rest-slice|'
- r'prepend|fourth|sift|accumulate!|new-sequence|follow|map!|'
- r'like|first4|1sequence|reverse|slice|unless-empty|padding|'
- r'virtual@|repetition\\?|set-last|index|4sequence|max-length|'
- r'set-second|immutable-sequence|first2|first3|replicate-as|'
- r'reduce-index|unclip-slice|supremum|suffix!|insert-nth|'
- r'trim-tail-slice|tail|3append|short|count|suffix|concat|'
- r'flip|filter|sum|immutable\\?|reverse!|2sequence|map-integers|'
- r'delete-all|start\\*|indices|snip-slice|check-slice|sequence\\?|'
- r'head|map-find|filter!|append-as|reduce|sequence=|halves|'
- r'collapse-slice|interleave|2map|filter-as|binary-reduce|'
- r'slice-error\\?|product|bounds-check\\?|bounds-check|harvest|'
- r'immutable|virtual-exemplar|find|produce|remove|pad-head|last|'
- r'replicate|set-fourth|remove-eq|shorten|reversed\\?|'
- r'map-find-last|3map-as|2unclip-slice|shorter\\?|3map|find-last|'
- r'head-slice|pop\\*|2map-as|tail-slice\\*|but-last-slice|'
- r'2map-reduce|iota\\?|collector-for|accumulate|each|selector|'
- r'append!|new-resizable|cut-slice|each-index|head-slice\\*|'
- r'2reverse-each|sequence-hashcode|pop|set-nth|\\?nth|'
- r'<flat-slice>|second|join|when-empty|collector|'
- r'immutable-sequence\\?|<reversed>|all\\?|3append-as|'
- r'virtual-sequence|subseq\\?|remove-nth!|push-either|new-like|'
- r'length|last-index|push-if|2all\\?|lengthen|assert-sequence|'
- r'copy|map-reduce|move|third|first|3each|tail\\?|set-first|'
- r'prefix|bounds-error|any\\?|<repetition>|trim-slice|exchange|'
- r'surround|2reduce|cut|change-nth|min-length|set-third|produce-as|'
- r'push-all|head\\?|delete-slice|rest|sum-lengths|2each|head\\*|'
- r'infimum|remove!|glue|slice-error|subseq|trim|replace-slice|'
- r'push|repetition|map-index|trim-head|unclip-last|mismatch)\s'
- )
-
- builtin_namespaces = (
- r'(?:global|\\+@|change|set-namestack|change-global|init-namespaces|'
- r'on|off|set-global|namespace|set|with-scope|bind|with-variable|'
- r'inc|dec|counter|initialize|namestack|get|get-global|make-assoc)\s'
- )
-
- builtin_arrays = (
- r'(?:<array>|2array|3array|pair|>array|1array|4array|pair\\?|'
- r'array|resize-array|array\\?)\s'
- )
-
- builtin_io = (
- r'(?:\\+character\\+|bad-seek-type\\?|readln|each-morsel|stream-seek|'
- r'read|print|with-output-stream|contents|write1|stream-write1|'
- r'stream-copy|stream-element-type|with-input-stream|'
- r'stream-print|stream-read|stream-contents|stream-tell|'
- r'tell-output|bl|seek-output|bad-seek-type|nl|stream-nl|write|'
- r'flush|stream-lines|\\+byte\\+|stream-flush|read1|'
- r'seek-absolute\\?|stream-read1|lines|stream-readln|'
- r'stream-read-until|each-line|seek-end|with-output-stream\\*|'
- r'seek-absolute|with-streams|seek-input|seek-relative\\?|'
- r'input-stream|stream-write|read-partial|seek-end\\?|'
- r'seek-relative|error-stream|read-until|with-input-stream\\*|'
- r'with-streams\\*|tell-input|each-block|output-stream|'
- r'stream-read-partial|each-stream-block|each-stream-line)\s'
- )
-
- builtin_strings = (
- r'(?:resize-string|>string|<string>|1string|string|string\\?)\s'
- )
-
- builtin_vectors = (
- r'(?:vector\\?|<vector>|\\?push|vector|>vector|1vector)\s'
- )
-
- builtin_continuations = (
- r'(?:with-return|restarts|return-continuation|with-datastack|'
- r'recover|rethrow-restarts|<restart>|ifcc|set-catchstack|'
- r'>continuation<|cleanup|ignore-errors|restart\\?|'
- r'compute-restarts|attempt-all-error|error-thread|continue|'
- r'<continuation>|attempt-all-error\\?|condition\\?|'
- r'<condition>|throw-restarts|error|catchstack|continue-with|'
- r'thread-error-hook|continuation|rethrow|callcc1|'
- r'error-continuation|callcc0|attempt-all|condition|'
- r'continuation\\?|restart|return)\s'
- )
-
- tokens = {
- 'root': [
- # TODO: (( inputs -- outputs ))
- # TODO: << ... >>
-
- # defining words
- (r'(\s*)(:|::|MACRO:|MEMO:)(\s+)(\S+)',
- bygroups(Text, Keyword, Text, Name.Function)),
- (r'(\s*)(M:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Text, Keyword, Text, Name.Class, Text, Name.Function)),
- (r'(\s*)(GENERIC:)(\s+)(\S+)',
- bygroups(Text, Keyword, Text, Name.Function)),
- (r'(\s*)(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Text, Keyword, Text, Name.Function, Text, Name.Function)),
- (r'(\()(\s+)', bygroups(Name.Function, Text), 'stackeffect'),
- (r'\;\s', Keyword),
-
- # imports and namespaces
- (r'(USING:)((?:\s|\\\s)+)',
- bygroups(Keyword.Namespace, Text), 'import'),
- (r'(USE:)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Namespace)),
- (r'(UNUSE:)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Namespace)),
- (r'(QUALIFIED:)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Namespace)),
- (r'(QUALIFIED-WITH:)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Namespace)),
- (r'(FROM:|EXCLUDE:)(\s+)(\S+)(\s+)(=>)',
- bygroups(Keyword.Namespace, Text, Name.Namespace, Text, Text)),
- (r'(IN:)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Namespace)),
- (r'(?:ALIAS|DEFER|FORGET|POSTPONE):', Keyword.Namespace),
-
- # tuples and classes
- (r'(TUPLE:)(\s+)(\S+)(\s+<\s+)(\S+)',
- bygroups(Keyword, Text, Name.Class, Text, Name.Class), 'slots'),
- (r'(TUPLE:)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Class), 'slots'),
- (r'(UNION:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
- (r'(INTERSECTION:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
- (r'(PREDICATE:)(\s+)(\S+)(\s+<\s+)(\S+)',
- bygroups(Keyword, Text, Name.Class, Text, Name.Class)),
- (r'(C:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Function, Text, Name.Class)),
- (r'INSTANCE:', Keyword),
- (r'SLOT:', Keyword),
- (r'MIXIN:', Keyword),
- (r'(?:SINGLETON|SINGLETONS):', Keyword),
-
- # other syntax
- (r'CONSTANT:', Keyword),
- (r'(?:SYMBOL|SYMBOLS):', Keyword),
- (r'ERROR:', Keyword),
- (r'SYNTAX:', Keyword),
- (r'(HELP:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Function)),
- (r'(MAIN:)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Function)),
- (r'(?:ALIEN|TYPEDEF|FUNCTION|STRUCT):', Keyword),
-
- # vocab.private
- # TODO: words inside vocab.private should have red names?
- (r'(?:<PRIVATE|PRIVATE>)', Keyword.Namespace),
-
- # strings
- (r'"""\s+(?:.|\n)*?\s+"""', String),
- (r'"(?:\\\\|\\"|[^"])*"', String),
- (r'CHAR:\s+(\\[\\abfnrstv]*|\S)\s', String.Char),
-
- # comments
- (r'\!\s+.*$', Comment),
- (r'#\!\s+.*$', Comment),
-
- # boolean constants
- (r'(t|f)\s', Name.Constant),
-
- # numbers
- (r'-?\d+\.\d+\s', Number.Float),
- (r'-?\d+\s', Number.Integer),
- (r'HEX:\s+[a-fA-F\d]+\s', Number.Hex),
- (r'BIN:\s+[01]+\s', Number.Integer),
- (r'OCT:\s+[0-7]+\s', Number.Oct),
-
- # operators
- (r'[-+/*=<>^]\s', Operator),
-
- # keywords
- (r'(?:deprecated|final|foldable|flushable|inline|recursive)\s',
- Keyword),
-
- # builtins
- (builtin_kernel, Name.Builtin),
- (builtin_assocs, Name.Builtin),
- (builtin_combinators, Name.Builtin),
- (builtin_math, Name.Builtin),
- (builtin_sequences, Name.Builtin),
- (builtin_namespaces, Name.Builtin),
- (builtin_arrays, Name.Builtin),
- (builtin_io, Name.Builtin),
- (builtin_strings, Name.Builtin),
- (builtin_vectors, Name.Builtin),
- (builtin_continuations, Name.Builtin),
-
- # whitespaces - usually not relevant
- (r'\s+', Text),
-
- # everything else is text
- (r'\S+', Text),
- ],
-
- 'stackeffect': [
- (r'\s*\(', Name.Function, 'stackeffect'),
- (r'\)', Name.Function, '#pop'),
- (r'\-\-', Name.Function),
- (r'\s+', Text),
- (r'\S+', Name.Variable),
- ],
-
- 'slots': [
- (r'\s+', Text),
- (r';\s', Keyword, '#pop'),
- (r'\S+', Name.Variable),
- ],
-
- 'import': [
- (r';', Keyword, '#pop'),
- (r'\S+', Name.Namespace),
- (r'\s+', Text),
- ],
- }
-
-
-class FancyLexer(RegexLexer):
- """
- Pygments Lexer For `Fancy <http://www.fancy-lang.org/>`_.
-
- Fancy is a self-hosted, pure object-oriented, dynamic,
- class-based, concurrent general-purpose programming language
- running on Rubinius, the Ruby VM.
-
- *New in Pygments 1.5.*
- """
- name = 'Fancy'
- filenames = ['*.fy', '*.fancypack']
- aliases = ['fancy', 'fy']
- mimetypes = ['text/x-fancysrc']
-
- tokens = {
- # copied from PerlLexer:
- 'balanced-regex': [
- (r'/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex, '#pop'),
- (r'!(\\\\|\\!|[^!])*![egimosx]*', String.Regex, '#pop'),
- (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
- (r'{(\\\\|\\}|[^}])*}[egimosx]*', String.Regex, '#pop'),
- (r'<(\\\\|\\>|[^>])*>[egimosx]*', String.Regex, '#pop'),
- (r'\[(\\\\|\\\]|[^\]])*\][egimosx]*', String.Regex, '#pop'),
- (r'\((\\\\|\\\)|[^\)])*\)[egimosx]*', String.Regex, '#pop'),
- (r'@(\\\\|\\\@|[^\@])*@[egimosx]*', String.Regex, '#pop'),
- (r'%(\\\\|\\\%|[^\%])*%[egimosx]*', String.Regex, '#pop'),
- (r'\$(\\\\|\\\$|[^\$])*\$[egimosx]*', String.Regex, '#pop'),
- ],
- 'root': [
- (r'\s+', Text),
-
- # balanced delimiters (copied from PerlLexer):
- (r's{(\\\\|\\}|[^}])*}\s*', String.Regex, 'balanced-regex'),
- (r's<(\\\\|\\>|[^>])*>\s*', String.Regex, 'balanced-regex'),
- (r's\[(\\\\|\\\]|[^\]])*\]\s*', String.Regex, 'balanced-regex'),
- (r's\((\\\\|\\\)|[^\)])*\)\s*', String.Regex, 'balanced-regex'),
- (r'm?/(\\\\|\\/|[^/\n])*/[gcimosx]*', String.Regex),
- (r'm(?=[/!\\{<\[\(@%\$])', String.Regex, 'balanced-regex'),
-
- # Comments
- (r'#(.*?)\n', Comment.Single),
- # Symbols
- (r'\'([^\'\s\[\]\(\)\{\}]+|\[\])', String.Symbol),
- # Multi-line DoubleQuotedString
- (r'"""(\\\\|\\"|[^"])*"""', String),
- # DoubleQuotedString
- (r'"(\\\\|\\"|[^"])*"', String),
- # keywords
- (r'(def|class|try|catch|finally|retry|return|return_local|match|'
- r'case|->|=>)\b', Keyword),
- # constants
- (r'(self|super|nil|false|true)\b', Name.Constant),
- (r'[(){};,/?\|:\\]', Punctuation),
- # names
- (r'(Object|Array|Hash|Directory|File|Class|String|Number|'
- r'Enumerable|FancyEnumerable|Block|TrueClass|NilClass|'
- r'FalseClass|Tuple|Symbol|Stack|Set|FancySpec|Method|Package|'
- r'Range)\b', Name.Builtin),
- # functions
- (r'[a-zA-Z]([a-zA-Z0-9_]|[-+?!=*/^><%])*:', Name.Function),
- # operators, must be below functions
- (r'[-+*/~,<>=&!?%^\[\]\.$]+', Operator),
- ('[A-Z][a-zA-Z0-9_]*', Name.Constant),
- ('@[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable.Instance),
- ('@@[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable.Class),
- ('@@?', Operator),
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
- # numbers - / checks are necessary to avoid mismarking regexes,
- # see comment in RubyLexer
- (r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
- bygroups(Number.Oct, Text, Operator)),
- (r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
- bygroups(Number.Hex, Text, Operator)),
- (r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?',
- bygroups(Number.Bin, Text, Operator)),
- (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
- bygroups(Number.Integer, Text, Operator)),
- (r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer)
- ]
- }
-
-
-class DgLexer(RegexLexer):
- """
- Lexer for `dg <http://pyos.github.com/dg>`_,
- a functional and object-oriented programming language
- running on the CPython 3 VM.
-
- *New in Pygments 1.6.*
- """
- name = 'dg'
- aliases = ['dg']
- filenames = ['*.dg']
- mimetypes = ['text/x-dg']
-
- tokens = {
- 'root': [
- # Whitespace:
- (r'\s+', Text),
- (r'#.*?$', Comment.Single),
- # Lexemes:
- # Numbers
- (r'0[bB][01]+', Number.Bin),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'[+-]?\d+\.\d+([eE][+-]?\d+)?[jJ]?', Number.Float),
- (r'[+-]?\d+[eE][+-]?\d+[jJ]?', Number.Float),
- (r'[+-]?\d+[jJ]?', Number.Integer),
- # Character/String Literals
- (r"[br]*'''", String, combined('stringescape', 'tsqs', 'string')),
- (r'[br]*"""', String, combined('stringescape', 'tdqs', 'string')),
- (r"[br]*'", String, combined('stringescape', 'sqs', 'string')),
- (r'[br]*"', String, combined('stringescape', 'dqs', 'string')),
- # Operators
- (r"`\w+'*`", Operator), # Infix links
- # Reserved infix links
- (r'\b(or|and|if|else|where|is|in)\b', Operator.Word),
- (r'[!$%&*+\-./:<-@\\^|~;,]+', Operator),
- # Identifiers
- # Python 3 types
- (r"(?<!\.)(bool|bytearray|bytes|classmethod|complex|dict'?|"
- r"float|frozenset|int|list'?|memoryview|object|property|range|"
- r"set'?|slice|staticmethod|str|super|tuple'?|type)"
- r"(?!['\w])", Name.Builtin),
- # Python 3 builtins + some more
- (r'(?<!\.)(__import__|abs|all|any|bin|bind|chr|cmp|compile|complex|'
- r'delattr|dir|divmod|drop|dropwhile|enumerate|eval|filter|flip|'
- r'foldl1?|format|fst|getattr|globals|hasattr|hash|head|hex|id|'
- r'init|input|isinstance|issubclass|iter|iterate|last|len|locals|'
- r'map|max|min|next|oct|open|ord|pow|print|repr|reversed|round|'
- r'setattr|scanl1?|snd|sorted|sum|tail|take|takewhile|vars|zip)'
- r"(?!['\w])", Name.Builtin),
- (r"(?<!\.)(self|Ellipsis|NotImplemented|None|True|False)(?!['\w])",
- Name.Builtin.Pseudo),
- (r"(?<!\.)[A-Z]\w*(Error|Exception|Warning)'*(?!['\w])",
- Name.Exception),
- (r"(?<!\.)(KeyboardInterrupt|SystemExit|StopIteration|"
- r"GeneratorExit)(?!['\w])", Name.Exception),
- # Compiler-defined identifiers
- (r"(?<![\.\w])(import|inherit|for|while|switch|not|raise|unsafe|"
- r"yield|with)(?!['\w])", Keyword.Reserved),
- # Other links
- (r"[A-Z_']+\b", Name),
- (r"[A-Z][\w']*\b", Keyword.Type),
- (r"\w+'*", Name),
- # Blocks
- (r'[()]', Punctuation),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'string': [
- (r'%(\([a-zA-Z0-9_]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String),
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop')
- ],
- 'sqs': [
- (r"'", String, '#pop')
- ],
- 'tdqs': [
- (r'"""', String, '#pop')
- ],
- 'tsqs': [
- (r"'''", String, '#pop')
- ],
- }
-
-class Perl6Lexer(ExtendedRegexLexer):
- """
- For `Perl 6 <http://www.perl6.org>`_ source code.
-
- *New in Pygments 1.7.*
- """
-
- name = 'Perl6'
- aliases = ['perl6', 'pl6']
- filenames = ['*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6',
- '*.6pm', '*.p6m', '*.pm6']
- mimetypes = ['text/x-perl6', 'application/x-perl6']
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- PERL6_IDENTIFIER_RANGE = "['a-zA-Z0-9_:-]"
-
- PERL6_KEYWORDS = (
- 'BEGIN', 'CATCH', 'CHECK', 'CONTROL', 'END', 'ENTER', 'FIRST', 'INIT',
- 'KEEP', 'LAST', 'LEAVE', 'NEXT', 'POST', 'PRE', 'START', 'TEMP',
- 'UNDO', 'as', 'assoc', 'async', 'augment', 'binary', 'break', 'but',
- 'cached', 'category', 'class', 'constant', 'contend', 'continue',
- 'copy', 'deep', 'default', 'defequiv', 'defer', 'die', 'do', 'else',
- 'elsif', 'enum', 'equiv', 'exit', 'export', 'fail', 'fatal', 'for',
- 'gather', 'given', 'goto', 'grammar', 'handles', 'has', 'if', 'inline',
- 'irs', 'is', 'last', 'leave', 'let', 'lift', 'loop', 'looser', 'macro',
- 'make', 'maybe', 'method', 'module', 'multi', 'my', 'next', 'of',
- 'ofs', 'only', 'oo', 'ors', 'our', 'package', 'parsed', 'prec',
- 'proto', 'readonly', 'redo', 'ref', 'regex', 'reparsed', 'repeat',
- 'require', 'required', 'return', 'returns', 'role', 'rule', 'rw',
- 'self', 'slang', 'state', 'sub', 'submethod', 'subset', 'supersede',
- 'take', 'temp', 'tighter', 'token', 'trusts', 'try', 'unary',
- 'unless', 'until', 'use', 'warn', 'when', 'where', 'while', 'will',
- )
-
- PERL6_BUILTINS = (
- 'ACCEPTS', 'HOW', 'REJECTS', 'VAR', 'WHAT', 'WHENCE', 'WHERE', 'WHICH',
- 'WHO', 'abs', 'acos', 'acosec', 'acosech', 'acosh', 'acotan', 'acotanh',
- 'all', 'any', 'approx', 'arity', 'asec', 'asech', 'asin', 'asinh',
- 'assuming', 'atan', 'atan2', 'atanh', 'attr', 'bless', 'body', 'by',
- 'bytes', 'caller', 'callsame', 'callwith', 'can', 'capitalize', 'cat',
- 'ceiling', 'chars', 'chmod', 'chomp', 'chop', 'chr', 'chroot',
- 'circumfix', 'cis', 'classify', 'clone', 'close', 'cmp_ok', 'codes',
- 'comb', 'connect', 'contains', 'context', 'cos', 'cosec', 'cosech',
- 'cosh', 'cotan', 'cotanh', 'count', 'defined', 'delete', 'diag',
- 'dies_ok', 'does', 'e', 'each', 'eager', 'elems', 'end', 'eof', 'eval',
- 'eval_dies_ok', 'eval_elsewhere', 'eval_lives_ok', 'evalfile', 'exists',
- 'exp', 'first', 'flip', 'floor', 'flunk', 'flush', 'fmt', 'force_todo',
- 'fork', 'from', 'getc', 'gethost', 'getlogin', 'getpeername', 'getpw',
- 'gmtime', 'graphs', 'grep', 'hints', 'hyper', 'im', 'index', 'infix',
- 'invert', 'is_approx', 'is_deeply', 'isa', 'isa_ok', 'isnt', 'iterator',
- 'join', 'key', 'keys', 'kill', 'kv', 'lastcall', 'lazy', 'lc', 'lcfirst',
- 'like', 'lines', 'link', 'lives_ok', 'localtime', 'log', 'log10', 'map',
- 'max', 'min', 'minmax', 'name', 'new', 'nextsame', 'nextwith', 'nfc',
- 'nfd', 'nfkc', 'nfkd', 'nok_error', 'nonce', 'none', 'normalize', 'not',
- 'nothing', 'ok', 'once', 'one', 'open', 'opendir', 'operator', 'ord',
- 'p5chomp', 'p5chop', 'pack', 'pair', 'pairs', 'pass', 'perl', 'pi',
- 'pick', 'plan', 'plan_ok', 'polar', 'pop', 'pos', 'postcircumfix',
- 'postfix', 'pred', 'prefix', 'print', 'printf', 'push', 'quasi',
- 'quotemeta', 'rand', 're', 'read', 'readdir', 'readline', 'reduce',
- 'reverse', 'rewind', 'rewinddir', 'rindex', 'roots', 'round',
- 'roundrobin', 'run', 'runinstead', 'sameaccent', 'samecase', 'say',
- 'sec', 'sech', 'sech', 'seek', 'shape', 'shift', 'sign', 'signature',
- 'sin', 'sinh', 'skip', 'skip_rest', 'sleep', 'slurp', 'sort', 'splice',
- 'split', 'sprintf', 'sqrt', 'srand', 'strand', 'subst', 'substr', 'succ',
- 'sum', 'symlink', 'tan', 'tanh', 'throws_ok', 'time', 'times', 'to',
- 'todo', 'trim', 'trim_end', 'trim_start', 'true', 'truncate', 'uc',
- 'ucfirst', 'undef', 'undefine', 'uniq', 'unlike', 'unlink', 'unpack',
- 'unpolar', 'unshift', 'unwrap', 'use_ok', 'value', 'values', 'vec',
- 'version_lt', 'void', 'wait', 'want', 'wrap', 'write', 'zip',
- )
-
- PERL6_BUILTIN_CLASSES = (
- 'Abstraction', 'Any', 'AnyChar', 'Array', 'Associative', 'Bag', 'Bit',
- 'Blob', 'Block', 'Bool', 'Buf', 'Byte', 'Callable', 'Capture', 'Char', 'Class',
- 'Code', 'Codepoint', 'Comparator', 'Complex', 'Decreasing', 'Exception',
- 'Failure', 'False', 'Grammar', 'Grapheme', 'Hash', 'IO', 'Increasing',
- 'Int', 'Junction', 'KeyBag', 'KeyExtractor', 'KeyHash', 'KeySet',
- 'KitchenSink', 'List', 'Macro', 'Mapping', 'Match', 'Matcher', 'Method',
- 'Module', 'Num', 'Object', 'Ordered', 'Ordering', 'OrderingPair',
- 'Package', 'Pair', 'Positional', 'Proxy', 'Range', 'Rat', 'Regex',
- 'Role', 'Routine', 'Scalar', 'Seq', 'Set', 'Signature', 'Str', 'StrLen',
- 'StrPos', 'Sub', 'Submethod', 'True', 'UInt', 'Undef', 'Version', 'Void',
- 'Whatever', 'bit', 'bool', 'buf', 'buf1', 'buf16', 'buf2', 'buf32',
- 'buf4', 'buf64', 'buf8', 'complex', 'int', 'int1', 'int16', 'int2',
- 'int32', 'int4', 'int64', 'int8', 'num', 'rat', 'rat1', 'rat16', 'rat2',
- 'rat32', 'rat4', 'rat64', 'rat8', 'uint', 'uint1', 'uint16', 'uint2',
- 'uint32', 'uint4', 'uint64', 'uint8', 'utf16', 'utf32', 'utf8',
- )
-
- PERL6_OPERATORS = (
- 'X', 'Z', 'after', 'also', 'and', 'andthen', 'before', 'cmp', 'div',
- 'eq', 'eqv', 'extra', 'ff', 'fff', 'ge', 'gt', 'le', 'leg', 'lt', 'm',
- 'mm', 'mod', 'ne', 'or', 'orelse', 'rx', 's', 'tr', 'x', 'xor', 'xx',
- '++', '--', '**', '!', '+', '-', '~', '?', '|', '||', '+^', '~^', '?^',
- '^', '*', '/', '%', '%%', '+&', '+<', '+>', '~&', '~<', '~>', '?&',
- 'gcd', 'lcm', '+', '-', '+|', '+^', '~|', '~^', '?|', '?^',
- '~', '&', '^', 'but', 'does', '<=>', '..', '..^', '^..', '^..^',
- '!=', '==', '<', '<=', '>', '>=', '~~', '===', '!eqv',
- '&&', '||', '^^', '//', 'min', 'max', '??', '!!', 'ff', 'fff', 'so',
- 'not', '<==', '==>', '<<==', '==>>',
- )
-
- # Perl 6 has a *lot* of possible bracketing characters
- # this list was lifted from STD.pm6 (https://github.com/perl6/std)
- PERL6_BRACKETS = {
- u'\u0028' : u'\u0029', u'\u003c' : u'\u003e', u'\u005b' : u'\u005d', u'\u007b' : u'\u007d',
- u'\u00ab' : u'\u00bb', u'\u0f3a' : u'\u0f3b', u'\u0f3c' : u'\u0f3d', u'\u169b' : u'\u169c',
- u'\u2018' : u'\u2019', u'\u201a' : u'\u2019', u'\u201b' : u'\u2019', u'\u201c' : u'\u201d',
- u'\u201e' : u'\u201d', u'\u201f' : u'\u201d', u'\u2039' : u'\u203a', u'\u2045' : u'\u2046',
- u'\u207d' : u'\u207e', u'\u208d' : u'\u208e', u'\u2208' : u'\u220b', u'\u2209' : u'\u220c',
- u'\u220a' : u'\u220d', u'\u2215' : u'\u29f5', u'\u223c' : u'\u223d', u'\u2243' : u'\u22cd',
- u'\u2252' : u'\u2253', u'\u2254' : u'\u2255', u'\u2264' : u'\u2265', u'\u2266' : u'\u2267',
- u'\u2268' : u'\u2269', u'\u226a' : u'\u226b', u'\u226e' : u'\u226f', u'\u2270' : u'\u2271',
- u'\u2272' : u'\u2273', u'\u2274' : u'\u2275', u'\u2276' : u'\u2277', u'\u2278' : u'\u2279',
- u'\u227a' : u'\u227b', u'\u227c' : u'\u227d', u'\u227e' : u'\u227f', u'\u2280' : u'\u2281',
- u'\u2282' : u'\u2283', u'\u2284' : u'\u2285', u'\u2286' : u'\u2287', u'\u2288' : u'\u2289',
- u'\u228a' : u'\u228b', u'\u228f' : u'\u2290', u'\u2291' : u'\u2292', u'\u2298' : u'\u29b8',
- u'\u22a2' : u'\u22a3', u'\u22a6' : u'\u2ade', u'\u22a8' : u'\u2ae4', u'\u22a9' : u'\u2ae3',
- u'\u22ab' : u'\u2ae5', u'\u22b0' : u'\u22b1', u'\u22b2' : u'\u22b3', u'\u22b4' : u'\u22b5',
- u'\u22b6' : u'\u22b7', u'\u22c9' : u'\u22ca', u'\u22cb' : u'\u22cc', u'\u22d0' : u'\u22d1',
- u'\u22d6' : u'\u22d7', u'\u22d8' : u'\u22d9', u'\u22da' : u'\u22db', u'\u22dc' : u'\u22dd',
- u'\u22de' : u'\u22df', u'\u22e0' : u'\u22e1', u'\u22e2' : u'\u22e3', u'\u22e4' : u'\u22e5',
- u'\u22e6' : u'\u22e7', u'\u22e8' : u'\u22e9', u'\u22ea' : u'\u22eb', u'\u22ec' : u'\u22ed',
- u'\u22f0' : u'\u22f1', u'\u22f2' : u'\u22fa', u'\u22f3' : u'\u22fb', u'\u22f4' : u'\u22fc',
- u'\u22f6' : u'\u22fd', u'\u22f7' : u'\u22fe', u'\u2308' : u'\u2309', u'\u230a' : u'\u230b',
- u'\u2329' : u'\u232a', u'\u23b4' : u'\u23b5', u'\u2768' : u'\u2769', u'\u276a' : u'\u276b',
- u'\u276c' : u'\u276d', u'\u276e' : u'\u276f', u'\u2770' : u'\u2771', u'\u2772' : u'\u2773',
- u'\u2774' : u'\u2775', u'\u27c3' : u'\u27c4', u'\u27c5' : u'\u27c6', u'\u27d5' : u'\u27d6',
- u'\u27dd' : u'\u27de', u'\u27e2' : u'\u27e3', u'\u27e4' : u'\u27e5', u'\u27e6' : u'\u27e7',
- u'\u27e8' : u'\u27e9', u'\u27ea' : u'\u27eb', u'\u2983' : u'\u2984', u'\u2985' : u'\u2986',
- u'\u2987' : u'\u2988', u'\u2989' : u'\u298a', u'\u298b' : u'\u298c', u'\u298d' : u'\u298e',
- u'\u298f' : u'\u2990', u'\u2991' : u'\u2992', u'\u2993' : u'\u2994', u'\u2995' : u'\u2996',
- u'\u2997' : u'\u2998', u'\u29c0' : u'\u29c1', u'\u29c4' : u'\u29c5', u'\u29cf' : u'\u29d0',
- u'\u29d1' : u'\u29d2', u'\u29d4' : u'\u29d5', u'\u29d8' : u'\u29d9', u'\u29da' : u'\u29db',
- u'\u29f8' : u'\u29f9', u'\u29fc' : u'\u29fd', u'\u2a2b' : u'\u2a2c', u'\u2a2d' : u'\u2a2e',
- u'\u2a34' : u'\u2a35', u'\u2a3c' : u'\u2a3d', u'\u2a64' : u'\u2a65', u'\u2a79' : u'\u2a7a',
- u'\u2a7d' : u'\u2a7e', u'\u2a7f' : u'\u2a80', u'\u2a81' : u'\u2a82', u'\u2a83' : u'\u2a84',
- u'\u2a8b' : u'\u2a8c', u'\u2a91' : u'\u2a92', u'\u2a93' : u'\u2a94', u'\u2a95' : u'\u2a96',
- u'\u2a97' : u'\u2a98', u'\u2a99' : u'\u2a9a', u'\u2a9b' : u'\u2a9c', u'\u2aa1' : u'\u2aa2',
- u'\u2aa6' : u'\u2aa7', u'\u2aa8' : u'\u2aa9', u'\u2aaa' : u'\u2aab', u'\u2aac' : u'\u2aad',
- u'\u2aaf' : u'\u2ab0', u'\u2ab3' : u'\u2ab4', u'\u2abb' : u'\u2abc', u'\u2abd' : u'\u2abe',
- u'\u2abf' : u'\u2ac0', u'\u2ac1' : u'\u2ac2', u'\u2ac3' : u'\u2ac4', u'\u2ac5' : u'\u2ac6',
- u'\u2acd' : u'\u2ace', u'\u2acf' : u'\u2ad0', u'\u2ad1' : u'\u2ad2', u'\u2ad3' : u'\u2ad4',
- u'\u2ad5' : u'\u2ad6', u'\u2aec' : u'\u2aed', u'\u2af7' : u'\u2af8', u'\u2af9' : u'\u2afa',
- u'\u2e02' : u'\u2e03', u'\u2e04' : u'\u2e05', u'\u2e09' : u'\u2e0a', u'\u2e0c' : u'\u2e0d',
- u'\u2e1c' : u'\u2e1d', u'\u2e20' : u'\u2e21', u'\u3008' : u'\u3009', u'\u300a' : u'\u300b',
- u'\u300c' : u'\u300d', u'\u300e' : u'\u300f', u'\u3010' : u'\u3011', u'\u3014' : u'\u3015',
- u'\u3016' : u'\u3017', u'\u3018' : u'\u3019', u'\u301a' : u'\u301b', u'\u301d' : u'\u301e',
- u'\ufd3e' : u'\ufd3f', u'\ufe17' : u'\ufe18', u'\ufe35' : u'\ufe36', u'\ufe37' : u'\ufe38',
- u'\ufe39' : u'\ufe3a', u'\ufe3b' : u'\ufe3c', u'\ufe3d' : u'\ufe3e', u'\ufe3f' : u'\ufe40',
- u'\ufe41' : u'\ufe42', u'\ufe43' : u'\ufe44', u'\ufe47' : u'\ufe48', u'\ufe59' : u'\ufe5a',
- u'\ufe5b' : u'\ufe5c', u'\ufe5d' : u'\ufe5e', u'\uff08' : u'\uff09', u'\uff1c' : u'\uff1e',
- u'\uff3b' : u'\uff3d', u'\uff5b' : u'\uff5d', u'\uff5f' : u'\uff60', u'\uff62' : u'\uff63',
- }
-
- def _build_word_match(words, boundary_regex_fragment = None, prefix = '', suffix = ''):
- if boundary_regex_fragment is None:
- return r'\b(' + prefix + r'|'.join([ re.escape(x) for x in words]) + suffix + r')\b'
- else:
- return r'(?<!' + boundary_regex_fragment + ')' + prefix + '(' + \
- r'|'.join([ re.escape(x) for x in words]) + r')' + suffix + '(?!' + boundary_regex_fragment + ')'
-
- def brackets_callback(token_class):
- def callback(lexer, match, context):
- groups = match.groupdict()
- opening_chars = groups['delimiter']
- n_chars = len(opening_chars)
- adverbs = groups.get('adverbs')
-
- closer = Perl6Lexer.PERL6_BRACKETS.get(opening_chars[0])
- text = context.text
-
- if closer is None: # it's not a mirrored character, which means we
- # just need to look for the next occurrence
-
- end_pos = text.find(opening_chars, match.start('delimiter') + n_chars)
- else: # we need to look for the corresponding closing character,
- # keep nesting in mind
- closing_chars = closer * n_chars
- nesting_level = 1
-
- search_pos = match.start('delimiter')
-
- while nesting_level > 0:
- next_open_pos = text.find(opening_chars, search_pos + n_chars)
- next_close_pos = text.find(closing_chars, search_pos + n_chars)
-
- if next_close_pos == -1:
- next_close_pos = len(text)
- nesting_level = 0
- elif next_open_pos != -1 and next_open_pos < next_close_pos:
- nesting_level += 1
- search_pos = next_open_pos
- else: # next_close_pos < next_open_pos
- nesting_level -= 1
- search_pos = next_close_pos
-
- end_pos = next_close_pos
-
- if adverbs is not None and re.search(r':to\b', adverbs):
- heredoc_terminator = text[match.start('delimiter') + n_chars : end_pos]
- end_heredoc = re.search(r'^\s*' + re.escape(heredoc_terminator) + r'\s*$', text[ match.end('delimiter') : ], re.MULTILINE)
-
- if end_heredoc:
- end_pos = match.end('delimiter') + end_heredoc.end()
- else:
- end_pos = len(text)
-
- yield match.start(), token_class, text[match.start() : end_pos + n_chars]
- context.pos = end_pos + n_chars
-
- return callback
-
- def opening_brace_callback(lexer, match, context):
- stack = context.stack
-
- yield match.start(), Text, context.text[match.start() : match.end()]
- context.pos = match.end()
-
- # if we encounter an opening brace and we're one level
- # below a token state, it means we need to increment
- # the nesting level for braces so we know later when
- # we should return to the token rules.
- if len(stack) > 2 and stack[-2] == 'token':
- context.perl6_token_nesting_level += 1
-
- def closing_brace_callback(lexer, match, context):
- stack = context.stack
-
- yield match.start(), Text, context.text[match.start() : match.end()]
- context.pos = match.end()
-
- # if we encounter a free closing brace and we're one level
- # below a token state, it means we need to check the nesting
- # level to see if we need to return to the token state.
- if len(stack) > 2 and stack[-2] == 'token':
- context.perl6_token_nesting_level -= 1
- if context.perl6_token_nesting_level == 0:
- stack.pop()
-
- def embedded_perl6_callback(lexer, match, context):
- context.perl6_token_nesting_level = 1
- yield match.start(), Text, context.text[match.start() : match.end()]
- context.pos = match.end()
- context.stack.append('root')
-
- # If you're modifying these rules, be careful if you need to process '{' or '}' characters.
- # We have special logic for processing these characters (due to the fact that you can nest
- # Perl 6 code in regex blocks), so if you need to process one of them, make sure you also
- # process the corresponding one!
- tokens = {
- 'common' : [
- (r'#[`|=](?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS.keys()) + r'])(?P=first_char)*)', brackets_callback(Comment.Multiline)),
- (r'#[^\n]*$', Comment.Singleline),
- (r'^(\s*)=begin\s+(\w+)\b.*?^\1=end\s+\2', Comment.Multiline),
- (r'^(\s*)=for.*?\n\s*?\n', Comment.Multiline),
- (r'^=.*?\n\s*?\n', Comment.Multiline),
- (r'(regex|token|rule)(\s*' + PERL6_IDENTIFIER_RANGE + '+:sym)', bygroups(Keyword, Name), 'token-sym-brackets'),
- (r'(regex|token|rule)(?!' + PERL6_IDENTIFIER_RANGE + ')(\s*' + PERL6_IDENTIFIER_RANGE + '+)?', bygroups(Keyword, Name), 'pre-token'),
- # deal with a special case in the Perl 6 grammar (role q { ... })
- (r'(role)(\s+)(q)(\s*)', bygroups(Keyword, Text, Name, Text)),
- (_build_word_match(PERL6_KEYWORDS, PERL6_IDENTIFIER_RANGE), Keyword),
- (_build_word_match(PERL6_BUILTIN_CLASSES, PERL6_IDENTIFIER_RANGE, suffix = '(?::[UD])?'), Name.Builtin),
- (_build_word_match(PERL6_BUILTINS, PERL6_IDENTIFIER_RANGE), Name.Builtin),
- # copied from PerlLexer
- (r'[$@%&][.^:?=!~]?' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable),
- (r'\$[!/](?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global),
- (r'::\?\w+', Name.Variable.Global),
- (r'[$@%&]\*' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global),
- (r'\$(?:<.*?>)+', Name.Variable),
- (r'(?:q|qq|Q)[a-zA-Z]?\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^0-9a-zA-Z:\s])(?P=first_char)*)', brackets_callback(String)),
- # copied from PerlLexer
- (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
- (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
- (r'0b[01]+(_[01]+)*', Number.Bin),
- (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?', Number.Float),
- (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
- (r'\d+(_\d+)*', Number.Integer),
- (r'(?<=~~)\s*/(?:\\\\|\\/|.)*?/', String.Regex),
- (r'(?<=[=(,])\s*/(?:\\\\|\\/|.)*?/', String.Regex),
- (r'm\w+(?=\()', Name),
- (r'(?:m|ms|rx)\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^0-9a-zA-Z:\s])(?P=first_char)*)', brackets_callback(String.Regex)),
- (r'(?:s|ss|tr)\s*(?::[\w\s:]+)?\s*/(?:\\\\|\\/|.)*?/(?:\\\\|\\/|.)*?/', String.Regex),
- (r'<[^\s=].*?\S>', String),
- (_build_word_match(PERL6_OPERATORS), Operator),
- (r'[0-9a-zA-Z_]' + PERL6_IDENTIFIER_RANGE + '*', Name),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- ],
- 'root' : [
- include('common'),
- (r'\{', opening_brace_callback),
- (r'\}', closing_brace_callback),
- (r'.+?', Text),
- ],
- 'pre-token' : [
- include('common'),
- (r'\{', Text, ('#pop', 'token')),
- (r'.+?', Text),
- ],
- 'token-sym-brackets' : [
- (r'(?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS.keys()) + '])(?P=first_char)*)', brackets_callback(Name), ('#pop', 'pre-token')),
- (r'', Name, ('#pop', 'pre-token')),
- ],
- 'token': [
- (r'}', Text, '#pop'),
- (r'(?<=:)(?:my|our|state|constant|temp|let).*?;', using(this)),
- # make sure that quotes in character classes aren't treated as strings
- (r'<(?:[-!?+.]\s*)?\[.*?\]>', String.Regex),
- # make sure that '#' characters in quotes aren't treated as comments
- (r"(?<!\\)'(\\\\|\\[^\\]|[^'\\])*'", String.Regex),
- (r'(?<!\\)"(\\\\|\\[^\\]|[^"\\])*"', String.Regex),
- (r'#.*?$', Comment.Singleline),
- (r'\{', embedded_perl6_callback),
- ('.+?', String.Regex),
- ],
- }
-
- def analyse_text(text):
- def strip_pod(lines):
- in_pod = False
- stripped_lines = []
-
- for line in lines:
- if re.match(r'^=(?:end|cut)', line):
- in_pod = False
- elif re.match(r'^=\w+', line):
- in_pod = True
- elif not in_pod:
- stripped_lines.append(line)
-
- return stripped_lines
-
- lines = text.splitlines()
- lines = strip_pod(lines)
- text = '\n'.join(lines)
-
- if shebang_matches(text, r'perl6|rakudo|niecza'):
- return True
-
- if 'use v6' in text:
- return 0.91 # 0.01 greater than Perl says for 'my $'
- if re.search(r'[$@%]\*[A-Z]+', text): # Perl 6-style globals ($*OS)
- return 0.91
- if re.search(r'[$@%]\?[A-Z]+', text): # Perl 6 compiler variables ($?PACKAGE)
- return 0.91
- if re.search(r'[$@%][!.][A-Za-z0-9_-]+', text): # Perl 6 member variables
- return 0.91
-
- for line in text.splitlines():
- if re.match(r'\s*(?:my|our)?\s*module', line): # module declarations
- return 0.91
- if re.match(r'\s*(?:my|our)?\s*role', line): # role declarations
- return 0.91
- if re.match(r'\s*(?:my|our)?\s*class\b', line): # class declarations
- return 0.91
- return False
-
- def __init__(self, **options):
- super(Perl6Lexer, self).__init__(**options)
- self.encoding = options.get('encoding', 'utf-8')
+from pygments.lexers.python import PythonLexer, PythonConsoleLexer, \
+ PythonTracebackLexer, Python3Lexer, Python3TracebackLexer, DgLexer
+from pygments.lexers.ruby import RubyLexer, RubyConsoleLexer, FancyLexer
+from pygments.lexers.perl import PerlLexer, Perl6Lexer
+from pygments.lexers.d import CrocLexer, MiniDLexer
+from pygments.lexers.iolang import IoLexer
+from pygments.lexers.tcl import TclLexer
+from pygments.lexers.factor import FactorLexer
+from pygments.lexers.scripting import LuaLexer, MoonScriptLexer
+
+__all__ = []
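
The hunk above ends with a block of re-imports and an empty __all__, which looks like the backwards-compatibility pattern Pygments uses after splitting lexers into per-language modules: old import sites keep working while new code can use the explicit paths. A minimal sketch of both access routes, assuming the standard registry helper get_lexer_by_name (not part of this hunk):

    from pygments.lexers.perl import Perl6Lexer      # new, explicit module path
    from pygments.lexers import get_lexer_by_name    # registry lookup by alias

    lexer = get_lexer_by_name('perl6')
    assert isinstance(lexer, Perl6Lexer)
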
diff --git a/pygments/lexers/algebra.py b/pygments/lexers/algebra.py
new file mode 100644
index 00000000..873b1bf2
--- /dev/null
+++ b/pygments/lexers/algebra.py
@@ -0,0 +1,187 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.algebra
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for computer algebra systems.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['GAPLexer', 'MathematicaLexer', 'MuPADLexer']
+
+
+class GAPLexer(RegexLexer):
+ """
+ For `GAP <http://www.gap-system.org>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'GAP'
+ aliases = ['gap']
+ filenames = ['*.g', '*.gd', '*.gi', '*.gap']
+
+ tokens = {
+ 'root': [
+ (r'#.*$', Comment.Single),
+ (r'"(?:[^"\\]|\\.)*"', String),
+ (r'\(|\)|\[|\]|\{|\}', Punctuation),
+ (r'''(?x)\b(?:
+ if|then|elif|else|fi|
+ for|while|do|od|
+ repeat|until|
+ break|continue|
+ function|local|return|end|
+ rec|
+ quit|QUIT|
+ IsBound|Unbind|
+ TryNextMethod|
+ Info|Assert
+ )\b''', Keyword),
+ (r'''(?x)\b(?:
+ true|false|fail|infinity
+ )\b''',
+ Name.Constant),
+ (r'''(?x)\b(?:
+ (Declare|Install)([A-Z][A-Za-z]+)|
+ BindGlobal|BIND_GLOBAL
+ )\b''',
+ Name.Builtin),
+ (r'\.|,|:=|;|=|\+|-|\*|/|\^|>|<', Operator),
+ (r'''(?x)\b(?:
+ and|or|not|mod|in
+ )\b''',
+ Operator.Word),
+ (r'''(?x)
+ (?:\w+|`[^`]*`)
+ (?:::\w+|`[^`]*`)*''', Name.Variable),
+ (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
+ (r'\.[0-9]+(?:e[0-9]+)?', Number),
+ (r'.', Text)
+ ]
+ }
+
+
+class MathematicaLexer(RegexLexer):
+ """
+ Lexer for `Mathematica <http://www.wolfram.com/mathematica/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Mathematica'
+ aliases = ['mathematica', 'mma', 'nb']
+ filenames = ['*.nb', '*.cdf', '*.nbp', '*.ma']
+ mimetypes = ['application/mathematica',
+ 'application/vnd.wolfram.mathematica',
+ 'application/vnd.wolfram.mathematica.package',
+ 'application/vnd.wolfram.cdf']
+
+ # http://reference.wolfram.com/mathematica/guide/Syntax.html
+ operators = (
+ ";;", "=", "=.", "!=" "==", ":=", "->", ":>", "/.", "+", "-", "*", "/",
+ "^", "&&", "||", "!", "<>", "|", "/;", "?", "@", "//", "/@", "@@",
+ "@@@", "~~", "===", "&", "<", ">", "<=", ">=",
+ )
+
+ punctuation = (",", ";", "(", ")", "[", "]", "{", "}")
+
+ def _multi_escape(entries):
+ return '(%s)' % ('|'.join(re.escape(entry) for entry in entries))
+
+ tokens = {
+ 'root': [
+ (r'(?s)\(\*.*?\*\)', Comment),
+
+ (r'([a-zA-Z]+[A-Za-z0-9]*`)', Name.Namespace),
+ (r'([A-Za-z0-9]*_+[A-Za-z0-9]*)', Name.Variable),
+ (r'#\d*', Name.Variable),
+ (r'([a-zA-Z]+[a-zA-Z0-9]*)', Name),
+
+ (r'-?[0-9]+\.[0-9]*', Number.Float),
+ (r'-?[0-9]*\.[0-9]+', Number.Float),
+ (r'-?[0-9]+', Number.Integer),
+
+ (words(operators), Operator),
+ (words(punctuation), Punctuation),
+ (r'".*?"', String),
+ (r'\s+', Text.Whitespace),
+ ],
+ }
+
+
+class MuPADLexer(RegexLexer):
+ """
+ A `MuPAD <http://www.mupad.com>`_ lexer.
+ Contributed by Christopher Creutzig <christopher@creutzig.de>.
+
+ .. versionadded:: 0.8
+ """
+ name = 'MuPAD'
+ aliases = ['mupad']
+ filenames = ['*.mu']
+
+ tokens = {
+ 'root': [
+ (r'//.*?$', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'"(?:[^"\\]|\\.)*"', String),
+ (r'\(|\)|\[|\]|\{|\}', Punctuation),
+ (r'''(?x)\b(?:
+ next|break|end|
+ axiom|end_axiom|category|end_category|domain|end_domain|inherits|
+ if|%if|then|elif|else|end_if|
+ case|of|do|otherwise|end_case|
+ while|end_while|
+ repeat|until|end_repeat|
+ for|from|to|downto|step|end_for|
+ proc|local|option|save|begin|end_proc|
+ delete|frame
+ )\b''', Keyword),
+ (r'''(?x)\b(?:
+ DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR|
+ DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT|
+ DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC|
+ DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR
+ )\b''', Name.Class),
+ (r'''(?x)\b(?:
+ PI|EULER|E|CATALAN|
+ NIL|FAIL|undefined|infinity|
+ TRUE|FALSE|UNKNOWN
+ )\b''',
+ Name.Constant),
+ (r'\b(?:dom|procname)\b', Name.Builtin.Pseudo),
+ (r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator),
+ (r'''(?x)\b(?:
+ and|or|not|xor|
+ assuming|
+ div|mod|
+ union|minus|intersect|in|subset
+ )\b''',
+ Operator.Word),
+ (r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number),
+ # (r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin),
+ (r'''(?x)
+ ((?:[a-zA-Z_#][\w#]*|`[^`]*`)
+ (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*)(\s*)([(])''',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'''(?x)
+ (?:[a-zA-Z_#][\w#]*|`[^`]*`)
+ (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*''', Name.Variable),
+ (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
+ (r'\.[0-9]+(?:e[0-9]+)?', Number),
+ (r'.', Text)
+ ],
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ]
+ }
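
To show how the new algebra lexers plug into the normal highlighting pipeline, here is a minimal usage sketch; the GAP snippet is made up and HtmlFormatter is just one formatter choice:

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers.algebra import GAPLexer

    code = 'square := function(n) return n^2; end;;'
    print(highlight(code, GAPLexer(), HtmlFormatter()))
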
diff --git a/pygments/lexers/ambient.py b/pygments/lexers/ambient.py
new file mode 100644
index 00000000..7f622fbc
--- /dev/null
+++ b/pygments/lexers/ambient.py
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.ambient
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for AmbientTalk language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['AmbientTalkLexer']
+
+
+class AmbientTalkLexer(RegexLexer):
+ """
+ Lexer for `AmbientTalk <https://code.google.com/p/ambienttalk>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'AmbientTalk'
+ filenames = ['*.at']
+ aliases = ['at', 'ambienttalk', 'ambienttalk/2']
+ mimetypes = ['text/x-ambienttalk']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ builtin = words(('if:', 'then:', 'else:', 'when:', 'whenever:', 'discovered:',
+ 'disconnected:', 'reconnected:', 'takenOffline:', 'becomes:',
+ 'export:', 'as:', 'object:', 'actor:', 'mirror:', 'taggedAs:',
+ 'mirroredBy:', 'is:'))
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'(def|deftype|import|alias|exclude)\b', Keyword),
+ (builtin, Name.Builtin),
+ (r'(true|false|nil)\b', Keyword.Constant),
+ (r'(~|lobby|jlobby|/)\.', Keyword.Constant, 'namespace'),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r'\|', Punctuation, 'arglist'),
+ (r'<:|[*^!%&<>+=,./?-]|:=', Operator),
+ (r"`[a-zA-Z_]\w*", String.Symbol),
+ (r"[a-zA-Z_]\w*:", Name.Function),
+ (r"[{}()\[\];`]", Punctuation),
+ (r'(self|super)\b', Name.Variable.Instance),
+ (r"[a-zA-Z_]\w*", Name.Variable),
+ (r"@[a-zA-Z_]\w*", Name.Class),
+ (r"@\[", Name.Class, 'annotations'),
+ include('numbers'),
+ ],
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer)
+ ],
+ 'namespace': [
+ (r'[a-zA-Z_]\w*\.', Name.Namespace),
+ (r'[a-zA-Z_]\w*:', Name.Function, '#pop'),
+ (r'[a-zA-Z_]\w*(?!\.)', Name.Function, '#pop')
+ ],
+ 'annotations': [
+ (r"(.*?)\]", Name.Class, '#pop')
+ ],
+ 'arglist': [
+ (r'\|', Punctuation, '#pop'),
+ (r'\s*(,)\s*', Punctuation),
+ (r'[a-zA-Z_]\w*', Name.Variable),
+ ],
+ }
diff --git a/pygments/lexers/apl.py b/pygments/lexers/apl.py
new file mode 100644
index 00000000..61ea4c4b
--- /dev/null
+++ b/pygments/lexers/apl.py
@@ -0,0 +1,101 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.apl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for APL.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['APLLexer']
+
+
+class APLLexer(RegexLexer):
+ """
+ A simple APL lexer.
+
+ .. versionadded:: 2.0
+ """
+ name = 'APL'
+ aliases = ['apl']
+ filenames = ['*.apl']
+
+ tokens = {
+ 'root': [
+ # Whitespace
+ # ==========
+ (r'\s+', Text),
+ #
+ # Comment
+ # =======
+ # '⍝' is traditional; '#' is supported by GNU APL and NGN (but not Dyalog)
+ (u'[⍝#].*$', Comment.Single),
+ #
+ # Strings
+ # =======
+ (r'\'((\'\')|[^\'])*\'', String.Single),
+ (r'"(("")|[^"])*"', String.Double), # supported by NGN APL
+ #
+ # Punctuation
+ # ===========
+ # This token type is used for diamond and parenthesis
+ # but not for bracket and ; (see below)
+ (u'[⋄◇()]', Punctuation),
+ #
+ # Array indexing
+ # ==============
+ # Since this token type is very important in APL, it is not included in
+ # the punctuation token type but rather in the following one
+ (r'[\[\];]', String.Regex),
+ #
+ # Distinguished names
+ # ===================
+ # following IBM APL2 standard
+ (u'⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Function),
+ #
+ # Labels
+ # ======
+ # following IBM APL2 standard
+ # (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*:', Name.Label),
+ #
+ # Variables
+ # =========
+ # following IBM APL2 standard
+ (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Variable),
+ #
+ # Numbers
+ # =======
+ (u'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)'
+ u'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?',
+ Number),
+ #
+ # Operators
+ # ==========
+ (u'[\.\\\/⌿⍀¨⍣⍨⍠⍤∘]', Name.Attribute), # closest token type
+ (u'[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗]',
+ Operator),
+ #
+ # Constant
+ # ========
+ (u'⍬', Name.Constant),
+ #
+ # Quad symbol
+ # ===========
+ (u'[⎕⍞]', Name.Variable.Global),
+ #
+ # Arrows left/right
+ # =================
+ (u'[←→]', Keyword.Declaration),
+ #
+ # D-Fn
+ # ====
+ (u'[⍺⍵⍶⍹∇:]', Name.Builtin.Pseudo),
+ (r'[{}]', Keyword.Type),
+ ],
+ }
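
Because APLLexer declares aliases and filenames, it can also be reached through filename-based guessing once it is registered in the lexer mapping (that registration is not part of this hunk). A minimal sketch:

    from pygments.lexers import guess_lexer_for_filename

    lexer = guess_lexer_for_filename('report.apl', u'⍝ a comment\nx ← ⍳10')
    print(lexer.name)   # 'APL' -- matched via the *.apl filename pattern
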
diff --git a/pygments/lexers/asm.py b/pygments/lexers/asm.py
index 3f67862c..c308f7fc 100644
--- a/pygments/lexers/asm.py
+++ b/pygments/lexers/asm.py
@@ -5,19 +5,21 @@
Lexers for assembly languages.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, using, DelegatingLexer
-from pygments.lexers.compiled import DLexer, CppLexer, CLexer
+from pygments.lexers.c_cpp import CppLexer, CLexer
+from pygments.lexers.d import DLexer
from pygments.token import Text, Name, Number, String, Comment, Punctuation, \
- Other, Keyword, Operator
+ Other, Keyword, Operator
-__all__ = ['GasLexer', 'ObjdumpLexer','DObjdumpLexer', 'CppObjdumpLexer',
- 'CObjdumpLexer', 'LlvmLexer', 'NasmLexer', 'Ca65Lexer']
+__all__ = ['GasLexer', 'ObjdumpLexer', 'DObjdumpLexer', 'CppObjdumpLexer',
+ 'CObjdumpLexer', 'LlvmLexer', 'NasmLexer', 'NasmObjdumpLexer',
+ 'Ca65Lexer']
class GasLexer(RegexLexer):
@@ -31,7 +33,7 @@ class GasLexer(RegexLexer):
#: optional Comment or Whitespace
string = r'"(\\"|[^"])*"'
- char = r'[a-zA-Z$._0-9@-]'
+ char = r'[\w$.@-]'
identifier = r'(?:[a-zA-Z$_]' + char + '*|\.' + char + '+)'
number = r'(?:0[xX][a-zA-Z0-9]+|\d+)'
@@ -96,18 +98,12 @@ class GasLexer(RegexLexer):
return 0.1
-class ObjdumpLexer(RegexLexer):
+def _objdump_lexer_tokens(asm_lexer):
"""
- For the output of 'objdump -dr'
+ Common objdump lexer tokens to wrap an ASM lexer.
"""
- name = 'objdump'
- aliases = ['objdump']
- filenames = ['*.objdump']
- mimetypes = ['text/x-objdump']
-
- hex = r'[0-9A-Za-z]'
-
- tokens = {
+ hex_re = r'[0-9A-Za-z]'
+ return {
'root': [
# File name & format:
('(.*?)(:)( +file format )(.*?)$',
@@ -117,33 +113,33 @@ class ObjdumpLexer(RegexLexer):
bygroups(Text, Name.Label, Punctuation)),
# Function labels
# (With offset)
- ('('+hex+'+)( )(<)(.*?)([-+])(0[xX][A-Za-z0-9]+)(>:)$',
+ ('('+hex_re+'+)( )(<)(.*?)([-+])(0[xX][A-Za-z0-9]+)(>:)$',
bygroups(Number.Hex, Text, Punctuation, Name.Function,
Punctuation, Number.Hex, Punctuation)),
# (Without offset)
- ('('+hex+'+)( )(<)(.*?)(>:)$',
+ ('('+hex_re+'+)( )(<)(.*?)(>:)$',
bygroups(Number.Hex, Text, Punctuation, Name.Function,
Punctuation)),
# Code line with disassembled instructions
- ('( *)('+hex+r'+:)(\t)((?:'+hex+hex+' )+)( *\t)([a-zA-Z].*?)$',
+ ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *\t)([a-zA-Z].*?)$',
bygroups(Text, Name.Label, Text, Number.Hex, Text,
- using(GasLexer))),
+ using(asm_lexer))),
# Code line with ascii
- ('( *)('+hex+r'+:)(\t)((?:'+hex+hex+' )+)( *)(.*?)$',
+ ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *)(.*?)$',
bygroups(Text, Name.Label, Text, Number.Hex, Text, String)),
# Continued code line, only raw opcodes without disassembled
# instruction
- ('( *)('+hex+r'+:)(\t)((?:'+hex+hex+' )+)$',
+ ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)$',
bygroups(Text, Name.Label, Text, Number.Hex)),
# Skipped a few bytes
(r'\t\.\.\.$', Text),
# Relocation line
# (With offset)
- (r'(\t\t\t)('+hex+r'+:)( )([^\t]+)(\t)(.*?)([-+])(0x' + hex + '+)$',
+ (r'(\t\t\t)('+hex_re+r'+:)( )([^\t]+)(\t)(.*?)([-+])(0x'+hex_re+'+)$',
bygroups(Text, Name.Label, Text, Name.Property, Text,
Name.Constant, Punctuation, Number.Hex)),
# (Without offset)
- (r'(\t\t\t)('+hex+r'+:)( )([^\t]+)(\t)(.*?)$',
+ (r'(\t\t\t)('+hex_re+r'+:)( )([^\t]+)(\t)(.*?)$',
bygroups(Text, Name.Label, Text, Name.Property, Text,
Name.Constant)),
(r'[^\n]+\n', Other)
@@ -151,6 +147,18 @@ class ObjdumpLexer(RegexLexer):
}
+class ObjdumpLexer(RegexLexer):
+ """
+ For the output of 'objdump -dr'
+ """
+ name = 'objdump'
+ aliases = ['objdump']
+ filenames = ['*.objdump']
+ mimetypes = ['text/x-objdump']
+
+ tokens = _objdump_lexer_tokens(GasLexer)
+
+
class DObjdumpLexer(DelegatingLexer):
"""
For the output of 'objdump -Sr on compiled D files'
@@ -201,7 +209,7 @@ class LlvmLexer(RegexLexer):
#: optional Comment or Whitespace
string = r'"[^"]*?"'
- identifier = r'([-a-zA-Z$._][-a-zA-Z$._0-9]*|' + string + ')'
+ identifier = r'([-a-zA-Z$._][\w\-$.]*|' + string + ')'
tokens = {
'root': [
@@ -212,10 +220,11 @@ class LlvmLexer(RegexLexer):
include('keyword'),
- (r'%' + identifier, Name.Variable),#Name.Identifier.Local),
- (r'@' + identifier, Name.Variable.Global),#Name.Identifier.Global),
- (r'%\d+', Name.Variable.Anonymous),#Name.Identifier.Anonymous),
- (r'@\d+', Name.Variable.Global),#Name.Identifier.Anonymous),
+ (r'%' + identifier, Name.Variable),
+ (r'@' + identifier, Name.Variable.Global),
+ (r'%\d+', Name.Variable.Anonymous),
+ (r'@\d+', Name.Variable.Global),
+ (r'#\d+', Name.Variable.Global),
(r'!' + identifier, Name.Variable),
(r'!\d+', Name.Variable.Anonymous),
(r'c?' + string, String),
@@ -242,17 +251,24 @@ class LlvmLexer(RegexLexer):
r'|thread_local|zeroinitializer|undef|null|to|tail|target|triple'
r'|datalayout|volatile|nuw|nsw|nnan|ninf|nsz|arcp|fast|exact|inbounds'
r'|align|addrspace|section|alias|module|asm|sideeffect|gc|dbg'
+ r'|linker_private_weak'
+ r'|attributes|blockaddress|initialexec|localdynamic|localexec'
+ r'|prefix|unnamed_addr'
r'|ccc|fastcc|coldcc|x86_stdcallcc|x86_fastcallcc|arm_apcscc'
r'|arm_aapcscc|arm_aapcs_vfpcc|ptx_device|ptx_kernel'
+ r'|intel_ocl_bicc|msp430_intrcc|spir_func|spir_kernel'
+ r'|x86_64_sysvcc|x86_64_win64cc|x86_thiscallcc'
r'|cc|c'
r'|signext|zeroext|inreg|sret|nounwind|noreturn|noalias|nocapture'
r'|byval|nest|readnone|readonly'
-
r'|inlinehint|noinline|alwaysinline|optsize|ssp|sspreq|noredzone'
r'|noimplicitfloat|naked'
+ r'|builtin|cold|nobuiltin|noduplicate|nonlazybind|optnone'
+ r'|returns_twice|sanitize_address|sanitize_memory|sanitize_thread'
+ r'|sspstrong|uwtable|returned'
r'|type|opaque'
@@ -261,24 +277,30 @@ class LlvmLexer(RegexLexer):
r'|oeq|one|olt|ogt|ole'
r'|oge|ord|uno|ueq|une'
r'|x'
+ r'|acq_rel|acquire|alignstack|atomic|catch|cleanup|filter'
+ r'|inteldialect|max|min|monotonic|nand|personality|release'
+ r'|seq_cst|singlethread|umax|umin|unordered|xchg'
# instructions
r'|add|fadd|sub|fsub|mul|fmul|udiv|sdiv|fdiv|urem|srem|frem|shl'
r'|lshr|ashr|and|or|xor|icmp|fcmp'
r'|phi|call|trunc|zext|sext|fptrunc|fpext|uitofp|sitofp|fptoui'
- r'fptosi|inttoptr|ptrtoint|bitcast|select|va_arg|ret|br|switch'
+ r'|fptosi|inttoptr|ptrtoint|bitcast|select|va_arg|ret|br|switch'
r'|invoke|unwind|unreachable'
+ r'|indirectbr|landingpad|resume'
r'|malloc|alloca|free|load|store|getelementptr'
r'|extractelement|insertelement|shufflevector|getresult'
r'|extractvalue|insertvalue'
+ r'|atomicrmw|cmpxchg|fence'
+
r')\b', Keyword),
# Types
- (r'void|float|double|x86_fp80|fp128|ppc_fp128|label|metadata',
+ (r'void|half|float|double|x86_fp80|fp128|ppc_fp128|label|metadata',
Keyword.Type),
# Integer types
@@ -296,8 +318,8 @@ class NasmLexer(RegexLexer):
filenames = ['*.asm', '*.ASM']
mimetypes = ['text/x-nasm']
- identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?#@~]*'
- hexn = r'(?:0[xX][0-9a-fA-F]+|$0[0-9a-fA-F]*|[0-9]+[0-9a-fA-F]*h)'
+ identifier = r'[a-z$._?][\w$.?#@~]*'
+ hexn = r'(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)'
octn = r'[0-7]+q'
binn = r'[01]+b'
decn = r'[0-9]+'
@@ -316,8 +338,8 @@ class NasmLexer(RegexLexer):
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
- include('whitespace'),
(r'^\s*%', Comment.Preproc, 'preproc'),
+ include('whitespace'),
(identifier + ':', Name.Label),
(r'(%s)(\s+)(equ)' % identifier,
bygroups(Name.Constant, Keyword.Declaration, Keyword.Declaration),
@@ -331,7 +353,7 @@ class NasmLexer(RegexLexer):
(string, String),
(hexn, Number.Hex),
(octn, Number.Oct),
- (binn, Number),
+ (binn, Number.Bin),
(floatn, Number.Float),
(decn, Number.Integer),
include('punctuation'),
@@ -360,13 +382,27 @@ class NasmLexer(RegexLexer):
}
+class NasmObjdumpLexer(ObjdumpLexer):
+ """
+ For the output of 'objdump -d -M intel'.
+
+ .. versionadded:: 2.0
+ """
+ name = 'objdump-nasm'
+ aliases = ['objdump-nasm']
+ filenames = ['*.objdump-intel']
+ mimetypes = ['text/x-nasm-objdump']
+
+ tokens = _objdump_lexer_tokens(NasmLexer)
+
+
class Ca65Lexer(RegexLexer):
"""
For ca65 assembler sources.
- *New in Pygments 1.6.*
+ .. versionadded:: 1.6
"""
- name = 'ca65'
+ name = 'ca65 assembler'
aliases = ['ca65']
filenames = ['*.s']
@@ -381,13 +417,14 @@ class Ca65Lexer(RegexLexer):
r'|cl[cvdi]|se[cdi]|jmp|jsr|bne|beq|bpl|bmi|bvc|bvs|bcc|bcs'
r'|p[lh][ap]|rt[is]|brk|nop|ta[xy]|t[xy]a|txs|tsx|and|ora|eor'
r'|bit)\b', Keyword),
- (r'\.[a-z0-9_]+', Keyword.Pseudo),
+ (r'\.\w+', Keyword.Pseudo),
(r'[-+~*/^&|!<>=]', Operator),
(r'"[^"\n]*.', String),
(r"'[^'\n]*.", String.Char),
(r'\$[0-9a-f]+|[0-9a-f]+h\b', Number.Hex),
- (r'\d+|%[01]+', Number.Integer),
- (r'[#,.:()=]', Punctuation),
+ (r'\d+', Number.Integer),
+ (r'%[01]+', Number.Bin),
+ (r'[#,.:()=\[\]]', Punctuation),
(r'[a-z_.@$][\w.@$]*', Name),
]
}
diff --git a/pygments/lexers/automation.py b/pygments/lexers/automation.py
new file mode 100644
index 00000000..2ebc4d24
--- /dev/null
+++ b/pygments/lexers/automation.py
@@ -0,0 +1,374 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.automation
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for automation scripting languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, combined
+from pygments.token import Text, Comment, Operator, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['AutohotkeyLexer', 'AutoItLexer']
+
+
+class AutohotkeyLexer(RegexLexer):
+ """
+ For `autohotkey <http://www.autohotkey.com/>`_ source code.
+
+ .. versionadded:: 1.4
+ """
+ name = 'autohotkey'
+ aliases = ['ahk', 'autohotkey']
+ filenames = ['*.ahk', '*.ahkl']
+ mimetypes = ['text/x-autohotkey']
+
+ tokens = {
+ 'root': [
+ (r'^(\s*)(/\*)', bygroups(Text, Comment.Multiline), 'incomment'),
+ (r'^(\s*)(\()', bygroups(Text, Generic), 'incontinuation'),
+ (r'\s+;.*?$', Comment.Singleline),
+ (r'^;.*?$', Comment.Singleline),
+ (r'[]{}(),;[]', Punctuation),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'\%[a-zA-Z_#@$][\w#@$]*\%', Name.Variable),
+ (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
+ include('commands'),
+ include('labels'),
+ include('builtInFunctions'),
+ include('builtInVariables'),
+ (r'"', String, combined('stringescape', 'dqs')),
+ include('numbers'),
+ (r'[a-zA-Z_#@$][\w#@$]*', Name),
+ (r'\\|\'', Text),
+ (r'\`([,%`abfnrtv\-+;])', String.Escape),
+ include('garbage'),
+ ],
+ 'incomment': [
+ (r'^\s*\*/', Comment.Multiline, '#pop'),
+ (r'[^*/]', Comment.Multiline),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'incontinuation': [
+ (r'^\s*\)', Generic, '#pop'),
+ (r'[^)]', Generic),
+ (r'[)]', Generic),
+ ],
+ 'commands': [
+ (r'(?i)^(\s*)(global|local|static|'
+ r'#AllowSameLineComments|#ClipboardTimeout|#CommentFlag|'
+ r'#ErrorStdOut|#EscapeChar|#HotkeyInterval|#HotkeyModifierTimeout|'
+ r'#Hotstring|#IfWinActive|#IfWinExist|#IfWinNotActive|'
+ r'#IfWinNotExist|#IncludeAgain|#Include|#InstallKeybdHook|'
+ r'#InstallMouseHook|#KeyHistory|#LTrim|#MaxHotkeysPerInterval|'
+ r'#MaxMem|#MaxThreads|#MaxThreadsBuffer|#MaxThreadsPerHotkey|'
+ r'#NoEnv|#NoTrayIcon|#Persistent|#SingleInstance|#UseHook|'
+ r'#WinActivateForce|AutoTrim|BlockInput|Break|Click|ClipWait|'
+ r'Continue|Control|ControlClick|ControlFocus|ControlGetFocus|'
+ r'ControlGetPos|ControlGetText|ControlGet|ControlMove|ControlSend|'
+ r'ControlSendRaw|ControlSetText|CoordMode|Critical|'
+ r'DetectHiddenText|DetectHiddenWindows|Drive|DriveGet|'
+ r'DriveSpaceFree|Edit|Else|EnvAdd|EnvDiv|EnvGet|EnvMult|EnvSet|'
+ r'EnvSub|EnvUpdate|Exit|ExitApp|FileAppend|'
+ r'FileCopy|FileCopyDir|FileCreateDir|FileCreateShortcut|'
+ r'FileDelete|FileGetAttrib|FileGetShortcut|FileGetSize|'
+ r'FileGetTime|FileGetVersion|FileInstall|FileMove|FileMoveDir|'
+ r'FileRead|FileReadLine|FileRecycle|FileRecycleEmpty|'
+ r'FileRemoveDir|FileSelectFile|FileSelectFolder|FileSetAttrib|'
+ r'FileSetTime|FormatTime|GetKeyState|Gosub|Goto|GroupActivate|'
+ r'GroupAdd|GroupClose|GroupDeactivate|Gui|GuiControl|'
+ r'GuiControlGet|Hotkey|IfEqual|IfExist|IfGreaterOrEqual|IfGreater|'
+ r'IfInString|IfLess|IfLessOrEqual|IfMsgBox|IfNotEqual|IfNotExist|'
+ r'IfNotInString|IfWinActive|IfWinExist|IfWinNotActive|'
+ r'IfWinNotExist|If |ImageSearch|IniDelete|IniRead|IniWrite|'
+ r'InputBox|Input|KeyHistory|KeyWait|ListHotkeys|ListLines|'
+ r'ListVars|Loop|Menu|MouseClickDrag|MouseClick|MouseGetPos|'
+ r'MouseMove|MsgBox|OnExit|OutputDebug|Pause|PixelGetColor|'
+ r'PixelSearch|PostMessage|Process|Progress|Random|RegDelete|'
+ r'RegRead|RegWrite|Reload|Repeat|Return|RunAs|RunWait|Run|'
+ r'SendEvent|SendInput|SendMessage|SendMode|SendPlay|SendRaw|Send|'
+ r'SetBatchLines|SetCapslockState|SetControlDelay|'
+ r'SetDefaultMouseSpeed|SetEnv|SetFormat|SetKeyDelay|'
+ r'SetMouseDelay|SetNumlockState|SetScrollLockState|'
+ r'SetStoreCapslockMode|SetTimer|SetTitleMatchMode|'
+ r'SetWinDelay|SetWorkingDir|Shutdown|Sleep|Sort|SoundBeep|'
+ r'SoundGet|SoundGetWaveVolume|SoundPlay|SoundSet|'
+ r'SoundSetWaveVolume|SplashImage|SplashTextOff|SplashTextOn|'
+ r'SplitPath|StatusBarGetText|StatusBarWait|StringCaseSense|'
+ r'StringGetPos|StringLeft|StringLen|StringLower|StringMid|'
+ r'StringReplace|StringRight|StringSplit|StringTrimLeft|'
+ r'StringTrimRight|StringUpper|Suspend|SysGet|Thread|ToolTip|'
+ r'Transform|TrayTip|URLDownloadToFile|While|WinActivate|'
+ r'WinActivateBottom|WinClose|WinGetActiveStats|WinGetActiveTitle|'
+ r'WinGetClass|WinGetPos|WinGetText|WinGetTitle|WinGet|WinHide|'
+ r'WinKill|WinMaximize|WinMenuSelectItem|WinMinimizeAllUndo|'
+ r'WinMinimizeAll|WinMinimize|WinMove|WinRestore|WinSetTitle|'
+ r'WinSet|WinShow|WinWaitActive|WinWaitClose|WinWaitNotActive|'
+ r'WinWait)\b', bygroups(Text, Name.Builtin)),
+ ],
+ 'builtInFunctions': [
+ (r'(?i)(Abs|ACos|Asc|ASin|ATan|Ceil|Chr|Cos|DllCall|Exp|FileExist|'
+ r'Floor|GetKeyState|IL_Add|IL_Create|IL_Destroy|InStr|IsFunc|'
+ r'IsLabel|Ln|Log|LV_Add|LV_Delete|LV_DeleteCol|LV_GetCount|'
+ r'LV_GetNext|LV_GetText|LV_Insert|LV_InsertCol|LV_Modify|'
+ r'LV_ModifyCol|LV_SetImageList|Mod|NumGet|NumPut|OnMessage|'
+ r'RegExMatch|RegExReplace|RegisterCallback|Round|SB_SetIcon|'
+ r'SB_SetParts|SB_SetText|Sin|Sqrt|StrLen|SubStr|Tan|TV_Add|'
+ r'TV_Delete|TV_GetChild|TV_GetCount|TV_GetNext|TV_Get|'
+ r'TV_GetParent|TV_GetPrev|TV_GetSelection|TV_GetText|TV_Modify|'
+ r'VarSetCapacity|WinActive|WinExist|Object|ComObjActive|'
+ r'ComObjArray|ComObjEnwrap|ComObjUnwrap|ComObjParameter|'
+ r'ComObjType|ComObjConnect|ComObjCreate|ComObjGet|ComObjError|'
+ r'ComObjValue|Insert|MinIndex|MaxIndex|Remove|SetCapacity|'
+ r'GetCapacity|GetAddress|_NewEnum|FileOpen|Read|Write|ReadLine|'
+ r'WriteLine|ReadNumType|WriteNumType|RawRead|RawWrite|Seek|Tell|'
+ r'Close|Next|IsObject|StrPut|StrGet|Trim|LTrim|RTrim)\b',
+ Name.Function),
+ ],
+ 'builtInVariables': [
+ (r'(?i)(A_AhkPath|A_AhkVersion|A_AppData|A_AppDataCommon|'
+ r'A_AutoTrim|A_BatchLines|A_CaretX|A_CaretY|A_ComputerName|'
+ r'A_ControlDelay|A_Cursor|A_DDDD|A_DDD|A_DD|A_DefaultMouseSpeed|'
+ r'A_Desktop|A_DesktopCommon|A_DetectHiddenText|'
+ r'A_DetectHiddenWindows|A_EndChar|A_EventInfo|A_ExitReason|'
+ r'A_FormatFloat|A_FormatInteger|A_Gui|A_GuiEvent|A_GuiControl|'
+ r'A_GuiControlEvent|A_GuiHeight|A_GuiWidth|A_GuiX|A_GuiY|A_Hour|'
+ r'A_IconFile|A_IconHidden|A_IconNumber|A_IconTip|A_Index|'
+ r'A_IPAddress1|A_IPAddress2|A_IPAddress3|A_IPAddress4|A_ISAdmin|'
+ r'A_IsCompiled|A_IsCritical|A_IsPaused|A_IsSuspended|A_KeyDelay|'
+ r'A_Language|A_LastError|A_LineFile|A_LineNumber|A_LoopField|'
+ r'A_LoopFileAttrib|A_LoopFileDir|A_LoopFileExt|A_LoopFileFullPath|'
+ r'A_LoopFileLongPath|A_LoopFileName|A_LoopFileShortName|'
+ r'A_LoopFileShortPath|A_LoopFileSize|A_LoopFileSizeKB|'
+ r'A_LoopFileSizeMB|A_LoopFileTimeAccessed|A_LoopFileTimeCreated|'
+ r'A_LoopFileTimeModified|A_LoopReadLine|A_LoopRegKey|'
+ r'A_LoopRegName|A_LoopRegSubkey|A_LoopRegTimeModified|'
+ r'A_LoopRegType|A_MDAY|A_Min|A_MM|A_MMM|A_MMMM|A_Mon|A_MouseDelay|'
+ r'A_MSec|A_MyDocuments|A_Now|A_NowUTC|A_NumBatchLines|A_OSType|'
+ r'A_OSVersion|A_PriorHotkey|A_ProgramFiles|A_Programs|'
+ r'A_ProgramsCommon|A_ScreenHeight|A_ScreenWidth|A_ScriptDir|'
+ r'A_ScriptFullPath|A_ScriptName|A_Sec|A_Space|A_StartMenu|'
+ r'A_StartMenuCommon|A_Startup|A_StartupCommon|A_StringCaseSense|'
+ r'A_Tab|A_Temp|A_ThisFunc|A_ThisHotkey|A_ThisLabel|A_ThisMenu|'
+ r'A_ThisMenuItem|A_ThisMenuItemPos|A_TickCount|A_TimeIdle|'
+ r'A_TimeIdlePhysical|A_TimeSincePriorHotkey|A_TimeSinceThisHotkey|'
+ r'A_TitleMatchMode|A_TitleMatchModeSpeed|A_UserName|A_WDay|'
+ r'A_WinDelay|A_WinDir|A_WorkingDir|A_YDay|A_YEAR|A_YWeek|A_YYYY|'
+ r'Clipboard|ClipboardAll|ComSpec|ErrorLevel|ProgramFiles|True|'
+ r'False|A_IsUnicode|A_FileEncoding|A_OSVersion|A_PtrSize)\b',
+ Name.Variable),
+ ],
+ 'labels': [
+ # hotkeys and labels
+ # technically, hotkey names are limited to named keys and buttons
+ (r'(^\s*)([^:\s("]+?:{1,2})', bygroups(Text, Name.Label)),
+ (r'(^\s*)(::[^:\s]+?::)', bygroups(Text, Name.Label)),
+ ],
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+', Number.Float),
+ (r'0\d+', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+', Number.Integer)
+ ],
+ 'stringescape': [
+ (r'\"\"|\`([,%`abfnrtv])', String.Escape),
+ ],
+ 'strings': [
+ (r'[^"\n]+', String),
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ include('strings')
+ ],
+ 'garbage': [
+ (r'[^\S\n]', Text),
+ # (r'.', Text), # no cheating
+ ],
+ }
+
+
+class AutoItLexer(RegexLexer):
+ """
+ For `AutoIt <http://www.autoitscript.com/site/autoit/>`_ files.
+
+ AutoIt is a freeware BASIC-like scripting language
+ designed for automating the Windows GUI and general scripting
+
+ .. versionadded:: 1.6
+ """
+ name = 'AutoIt'
+ aliases = ['autoit']
+ filenames = ['*.au3']
+ mimetypes = ['text/x-autoit']
+
+ # Keywords, functions, macros from au3.keywords.properties
+ # which can be found in AutoIt installed directory, e.g.
+ # c:\Program Files (x86)\AutoIt3\SciTE\au3.keywords.properties
+
+ keywords = """\
+ #include-once #include #endregion #forcedef #forceref #region
+ and byref case continueloop dim do else elseif endfunc endif
+ endselect exit exitloop for func global
+ if local next not or return select step
+ then to until wend while exit""".split()
+
+ functions = """\
+ abs acos adlibregister adlibunregister asc ascw asin assign atan
+ autoitsetoption autoitwingettitle autoitwinsettitle beep binary binarylen
+ binarymid binarytostring bitand bitnot bitor bitrotate bitshift bitxor
+ blockinput break call cdtray ceiling chr chrw clipget clipput consoleread
+ consolewrite consolewriteerror controlclick controlcommand controldisable
+ controlenable controlfocus controlgetfocus controlgethandle controlgetpos
+ controlgettext controlhide controllistview controlmove controlsend
+ controlsettext controlshow controltreeview cos dec dircopy dircreate
+ dirgetsize dirmove dirremove dllcall dllcalladdress dllcallbackfree
+ dllcallbackgetptr dllcallbackregister dllclose dllopen dllstructcreate
+ dllstructgetdata dllstructgetptr dllstructgetsize dllstructsetdata
+ drivegetdrive drivegetfilesystem drivegetlabel drivegetserial drivegettype
+ drivemapadd drivemapdel drivemapget drivesetlabel drivespacefree
+ drivespacetotal drivestatus envget envset envupdate eval execute exp
+ filechangedir fileclose filecopy filecreatentfslink filecreateshortcut
+ filedelete fileexists filefindfirstfile filefindnextfile fileflush
+ filegetattrib filegetencoding filegetlongname filegetpos filegetshortcut
+ filegetshortname filegetsize filegettime filegetversion fileinstall filemove
+ fileopen fileopendialog fileread filereadline filerecycle filerecycleempty
+ filesavedialog fileselectfolder filesetattrib filesetpos filesettime
+ filewrite filewriteline floor ftpsetproxy guicreate guictrlcreateavi
+ guictrlcreatebutton guictrlcreatecheckbox guictrlcreatecombo
+ guictrlcreatecontextmenu guictrlcreatedate guictrlcreatedummy
+ guictrlcreateedit guictrlcreategraphic guictrlcreategroup guictrlcreateicon
+ guictrlcreateinput guictrlcreatelabel guictrlcreatelist
+ guictrlcreatelistview guictrlcreatelistviewitem guictrlcreatemenu
+ guictrlcreatemenuitem guictrlcreatemonthcal guictrlcreateobj
+ guictrlcreatepic guictrlcreateprogress guictrlcreateradio
+ guictrlcreateslider guictrlcreatetab guictrlcreatetabitem
+ guictrlcreatetreeview guictrlcreatetreeviewitem guictrlcreateupdown
+ guictrldelete guictrlgethandle guictrlgetstate guictrlread guictrlrecvmsg
+ guictrlregisterlistviewsort guictrlsendmsg guictrlsendtodummy
+ guictrlsetbkcolor guictrlsetcolor guictrlsetcursor guictrlsetdata
+ guictrlsetdefbkcolor guictrlsetdefcolor guictrlsetfont guictrlsetgraphic
+ guictrlsetimage guictrlsetlimit guictrlsetonevent guictrlsetpos
+ guictrlsetresizing guictrlsetstate guictrlsetstyle guictrlsettip guidelete
+ guigetcursorinfo guigetmsg guigetstyle guiregistermsg guisetaccelerators
+ guisetbkcolor guisetcoord guisetcursor guisetfont guisethelp guiseticon
+ guisetonevent guisetstate guisetstyle guistartgroup guiswitch hex hotkeyset
+ httpsetproxy httpsetuseragent hwnd inetclose inetget inetgetinfo inetgetsize
+ inetread inidelete iniread inireadsection inireadsectionnames
+ inirenamesection iniwrite iniwritesection inputbox int isadmin isarray
+ isbinary isbool isdeclared isdllstruct isfloat ishwnd isint iskeyword
+ isnumber isobj isptr isstring log memgetstats mod mouseclick mouseclickdrag
+ mousedown mousegetcursor mousegetpos mousemove mouseup mousewheel msgbox
+ number objcreate objcreateinterface objevent objget objname
+ onautoitexitregister onautoitexitunregister opt ping pixelchecksum
+ pixelgetcolor pixelsearch pluginclose pluginopen processclose processexists
+ processgetstats processlist processsetpriority processwait processwaitclose
+ progressoff progresson progressset ptr random regdelete regenumkey
+ regenumval regread regwrite round run runas runaswait runwait send
+ sendkeepactive seterror setextended shellexecute shellexecutewait shutdown
+ sin sleep soundplay soundsetwavevolume splashimageon splashoff splashtexton
+ sqrt srandom statusbargettext stderrread stdinwrite stdioclose stdoutread
+ string stringaddcr stringcompare stringformat stringfromasciiarray
+ stringinstr stringisalnum stringisalpha stringisascii stringisdigit
+ stringisfloat stringisint stringislower stringisspace stringisupper
+ stringisxdigit stringleft stringlen stringlower stringmid stringregexp
+ stringregexpreplace stringreplace stringright stringsplit stringstripcr
+ stringstripws stringtoasciiarray stringtobinary stringtrimleft
+ stringtrimright stringupper tan tcpaccept tcpclosesocket tcpconnect
+ tcplisten tcpnametoip tcprecv tcpsend tcpshutdown tcpstartup timerdiff
+ timerinit tooltip traycreateitem traycreatemenu traygetmsg trayitemdelete
+ trayitemgethandle trayitemgetstate trayitemgettext trayitemsetonevent
+ trayitemsetstate trayitemsettext traysetclick trayseticon traysetonevent
+ traysetpauseicon traysetstate traysettooltip traytip ubound udpbind
+ udpclosesocket udpopen udprecv udpsend udpshutdown udpstartup vargettype
+ winactivate winactive winclose winexists winflash wingetcaretpos
+ wingetclasslist wingetclientsize wingethandle wingetpos wingetprocess
+ wingetstate wingettext wingettitle winkill winlist winmenuselectitem
+ winminimizeall winminimizeallundo winmove winsetontop winsetstate
+ winsettitle winsettrans winwait winwaitactive winwaitclose
+ winwaitnotactive""".split()
+
+ macros = """\
+ @appdatacommondir @appdatadir @autoitexe @autoitpid @autoitversion
+ @autoitx64 @com_eventobj @commonfilesdir @compiled @computername @comspec
+ @cpuarch @cr @crlf @desktopcommondir @desktopdepth @desktopdir
+ @desktopheight @desktoprefresh @desktopwidth @documentscommondir @error
+ @exitcode @exitmethod @extended @favoritescommondir @favoritesdir
+ @gui_ctrlhandle @gui_ctrlid @gui_dragfile @gui_dragid @gui_dropid
+ @gui_winhandle @homedrive @homepath @homeshare @hotkeypressed @hour
+ @ipaddress1 @ipaddress2 @ipaddress3 @ipaddress4 @kblayout @lf
+ @logondnsdomain @logondomain @logonserver @mday @min @mon @msec @muilang
+ @mydocumentsdir @numparams @osarch @osbuild @oslang @osservicepack @ostype
+ @osversion @programfilesdir @programscommondir @programsdir @scriptdir
+ @scriptfullpath @scriptlinenumber @scriptname @sec @startmenucommondir
+ @startmenudir @startupcommondir @startupdir @sw_disable @sw_enable @sw_hide
+ @sw_lock @sw_maximize @sw_minimize @sw_restore @sw_show @sw_showdefault
+ @sw_showmaximized @sw_showminimized @sw_showminnoactive @sw_showna
+ @sw_shownoactivate @sw_shownormal @sw_unlock @systemdir @tab @tempdir
+ @tray_id @trayiconflashing @trayiconvisible @username @userprofiledir @wday
+ @windowsdir @workingdir @yday @year""".split()
+
+ tokens = {
+ 'root': [
+ (r';.*\n', Comment.Single),
+ (r'(#comments-start|#cs)(.|\n)*?(#comments-end|#ce)',
+ Comment.Multiline),
+ (r'[\[\]{}(),;]', Punctuation),
+ (r'(and|or|not)\b', Operator.Word),
+ (r'[$|@][a-zA-Z_]\w*', Name.Variable),
+ (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
+ include('commands'),
+ include('labels'),
+ include('builtInFunctions'),
+ include('builtInMacros'),
+ (r'"', String, combined('stringescape', 'dqs')),
+ include('numbers'),
+ (r'[a-zA-Z_#@$][\w#@$]*', Name),
+ (r'\\|\'', Text),
+ (r'\`([,%`abfnrtv\-+;])', String.Escape),
+ (r'_\n', Text), # Line continuation
+ include('garbage'),
+ ],
+ 'commands': [
+ (r'(?i)(\s*)(%s)\b' % '|'.join(keywords),
+ bygroups(Text, Name.Builtin)),
+ ],
+ 'builtInFunctions': [
+ (r'(?i)(%s)\b' % '|'.join(functions),
+ Name.Function),
+ ],
+ 'builtInMacros': [
+ (r'(?i)(%s)\b' % '|'.join(macros),
+ Name.Variable.Global),
+ ],
+ 'labels': [
+ # sendkeys
+ (r'(^\s*)(\{\S+?\})', bygroups(Text, Name.Label)),
+ ],
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+', Number.Float),
+ (r'0\d+', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+', Number.Integer)
+ ],
+ 'stringescape': [
+ (r'\"\"|\`([,%`abfnrtv])', String.Escape),
+ ],
+ 'strings': [
+ (r'[^"\n]+', String),
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ include('strings')
+ ],
+ 'garbage': [
+ (r'[^\S\n]', Text),
+ ],
+ }
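
Both automation lexers build their keyword rules by joining plain word lists into one case-insensitive alternation, as in (r'(?i)(%s)\b' % '|'.join(functions), Name.Function). A standalone sketch of that pattern, with an illustrative word list:

    import re

    keywords = ['if', 'else', 'endif', 'func', 'endfunc']      # illustrative subset
    pattern = re.compile(r'(?i)\b(%s)\b' % '|'.join(keywords))

    print(bool(pattern.match('EndFunc')))    # True -- matching is case-insensitive
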
diff --git a/pygments/lexers/basic.py b/pygments/lexers/basic.py
new file mode 100644
index 00000000..a73ad8b4
--- /dev/null
+++ b/pygments/lexers/basic.py
@@ -0,0 +1,500 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.basic
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for BASIC like languages (other than VB.net).
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, default, words, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['BlitzBasicLexer', 'BlitzMaxLexer', 'MonkeyLexer', 'CbmBasicV2Lexer',
+ 'QBasicLexer']
+
+
+class BlitzMaxLexer(RegexLexer):
+ """
+ For `BlitzMax <http://blitzbasic.com>`_ source code.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'BlitzMax'
+ aliases = ['blitzmax', 'bmax']
+ filenames = ['*.bmx']
+ mimetypes = ['text/x-bmx']
+
+ bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b'
+ bmax_sktypes = r'@{1,2}|[!#$%]'
+ bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b'
+ bmax_name = r'[a-z_]\w*'
+ bmax_var = (r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)'
+ r'|([ \t]*)(:)([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)') % \
+ (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name)
+ bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])'
+
+ flags = re.MULTILINE | re.IGNORECASE
+ tokens = {
+ 'root': [
+ # Text
+ (r'[ \t]+', Text),
+ (r'\.\.\n', Text), # Line continuation
+ # Comments
+ (r"'.*?\n", Comment.Single),
+ (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline),
+ # Data types
+ ('"', String.Double, 'string'),
+ # Numbers
+ (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
+ (r'\.[0-9]*(?!\.)', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\$[0-9a-f]+', Number.Hex),
+ (r'\%[10]+', Number.Bin),
+ # Other
+ (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' %
+ (bmax_vopwords), Operator),
+ (r'[(),.:\[\]]', Punctuation),
+ (r'(?:#[\w \t]*)', Name.Label),
+ (r'(?:\?[\w \t]*)', Comment.Preproc),
+ # Identifiers
+ (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name),
+ bygroups(Keyword.Reserved, Text, Punctuation, Name.Class)),
+ (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' %
+ (bmax_name, bmax_name),
+ bygroups(Keyword.Reserved, Text, Keyword.Namespace)),
+ (bmax_func, bygroups(Name.Function, Text, Keyword.Type,
+ Operator, Text, Punctuation, Text,
+ Keyword.Type, Name.Class, Text,
+ Keyword.Type, Text, Punctuation)),
+ (bmax_var, bygroups(Name.Variable, Text, Keyword.Type, Operator,
+ Text, Punctuation, Text, Keyword.Type,
+ Name.Class, Text, Keyword.Type)),
+ (r'\b(Type|Extends)([ \t]+)(%s)' % (bmax_name),
+ bygroups(Keyword.Reserved, Text, Name.Class)),
+ # Keywords
+ (r'\b(Ptr)\b', Keyword.Type),
+ (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant),
+ (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration),
+ (words((
+ 'TNullMethodException', 'TNullFunctionException',
+ 'TNullObjectException', 'TArrayBoundsException',
+ 'TRuntimeException'), prefix=r'\b', suffix=r'\b'), Name.Exception),
+ (words((
+ 'Strict', 'SuperStrict', 'Module', 'ModuleInfo',
+ 'End', 'Return', 'Continue', 'Exit', 'Public', 'Private',
+ 'Var', 'VarPtr', 'Chr', 'Len', 'Asc', 'SizeOf', 'Sgn', 'Abs', 'Min', 'Max',
+ 'New', 'Release', 'Delete', 'Incbin', 'IncbinPtr', 'IncbinLen',
+ 'Framework', 'Include', 'Import', 'Extern', 'EndExtern',
+ 'Function', 'EndFunction', 'Type', 'EndType', 'Extends', 'Method', 'EndMethod',
+ 'Abstract', 'Final', 'If', 'Then', 'Else', 'ElseIf', 'EndIf',
+ 'For', 'To', 'Next', 'Step', 'EachIn', 'While', 'Wend', 'EndWhile',
+ 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default', 'EndSelect',
+ 'Try', 'Catch', 'EndTry', 'Throw', 'Assert', 'Goto', 'DefData', 'ReadData',
+ 'RestoreData'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ # Final resolve (for variable names and such)
+ (r'(%s)' % (bmax_name), Name.Variable),
+ ],
+ 'string': [
+ (r'""', String.Double),
+ (r'"C?', String.Double, '#pop'),
+ (r'[^"]+', String.Double),
+ ],
+ }
+
+
+class BlitzBasicLexer(RegexLexer):
+ """
+ For `BlitzBasic <http://blitzbasic.com>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'BlitzBasic'
+ aliases = ['blitzbasic', 'b3d', 'bplus']
+ filenames = ['*.bb', '*.decls']
+ mimetypes = ['text/x-bb']
+
+ bb_sktypes = r'@{1,2}|[#$%]'
+ bb_name = r'[a-z]\w*'
+ bb_var = (r'(%s)(?:([ \t]*)(%s)|([ \t]*)([.])([ \t]*)(?:(%s)))?') % \
+ (bb_name, bb_sktypes, bb_name)
+
+ flags = re.MULTILINE | re.IGNORECASE
+ tokens = {
+ 'root': [
+ # Text
+ (r'[ \t]+', Text),
+ # Comments
+ (r";.*?\n", Comment.Single),
+ # Data types
+ ('"', String.Double, 'string'),
+ # Numbers
+ (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
+ (r'\.[0-9]+(?!\.)', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\$[0-9a-f]+', Number.Hex),
+ (r'\%[10]+', Number.Bin),
+ # Other
+ (words(('Shl', 'Shr', 'Sar', 'Mod', 'Or', 'And', 'Not',
+ 'Abs', 'Sgn', 'Handle', 'Int', 'Float', 'Str',
+ 'First', 'Last', 'Before', 'After'),
+ prefix=r'\b', suffix=r'\b'),
+ Operator),
+ (r'([+\-*/~=<>^])', Operator),
+ (r'[(),:\[\]\\]', Punctuation),
+ (r'\.([ \t]*)(%s)' % bb_name, Name.Label),
+ # Identifiers
+ (r'\b(New)\b([ \t]+)(%s)' % (bb_name),
+ bygroups(Keyword.Reserved, Text, Name.Class)),
+ (r'\b(Gosub|Goto)\b([ \t]+)(%s)' % (bb_name),
+ bygroups(Keyword.Reserved, Text, Name.Label)),
+ (r'\b(Object)\b([ \t]*)([.])([ \t]*)(%s)\b' % (bb_name),
+ bygroups(Operator, Text, Punctuation, Text, Name.Class)),
+ (r'\b%s\b([ \t]*)(\()' % bb_var,
+ bygroups(Name.Function, Text, Keyword.Type, Text, Punctuation,
+ Text, Name.Class, Text, Punctuation)),
+ (r'\b(Function)\b([ \t]+)%s' % bb_var,
+ bygroups(Keyword.Reserved, Text, Name.Function, Text, Keyword.Type,
+ Text, Punctuation, Text, Name.Class)),
+ (r'\b(Type)([ \t]+)(%s)' % (bb_name),
+ bygroups(Keyword.Reserved, Text, Name.Class)),
+ # Keywords
+ (r'\b(Pi|True|False|Null)\b', Keyword.Constant),
+ (r'\b(Local|Global|Const|Field|Dim)\b', Keyword.Declaration),
+ (words((
+ 'End', 'Return', 'Exit', 'Chr', 'Len', 'Asc', 'New', 'Delete', 'Insert',
+ 'Include', 'Function', 'Type', 'If', 'Then', 'Else', 'ElseIf', 'EndIf',
+ 'For', 'To', 'Next', 'Step', 'Each', 'While', 'Wend',
+ 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default',
+ 'Goto', 'Gosub', 'Data', 'Read', 'Restore'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ # Final resolve (for variable names and such)
+ # (r'(%s)' % (bb_name), Name.Variable),
+ (bb_var, bygroups(Name.Variable, Text, Keyword.Type,
+ Text, Punctuation, Text, Name.Class)),
+ ],
+ 'string': [
+ (r'""', String.Double),
+ (r'"C?', String.Double, '#pop'),
+ (r'[^"]+', String.Double),
+ ],
+ }
+
+
+class MonkeyLexer(RegexLexer):
+ """
+ For
+ `Monkey <https://en.wikipedia.org/wiki/Monkey_(programming_language)>`_
+ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Monkey'
+ aliases = ['monkey']
+ filenames = ['*.monkey']
+ mimetypes = ['text/x-monkey']
+
+ name_variable = r'[a-z_]\w*'
+ name_function = r'[A-Z]\w*'
+ name_constant = r'[A-Z_][A-Z0-9_]*'
+ name_class = r'[A-Z]\w*'
+ name_module = r'[a-z0-9_]*'
+
+ keyword_type = r'(?:Int|Float|String|Bool|Object|Array|Void)'
+ # ? == Bool // % == Int // # == Float // $ == String
+ keyword_type_special = r'[?%#$]'
+
+ flags = re.MULTILINE
+
+ tokens = {
+ 'root': [
+ # Text
+ (r'\s+', Text),
+ # Comments
+ (r"'.*", Comment),
+ (r'(?i)^#rem\b', Comment.Multiline, 'comment'),
+ # preprocessor directives
+ (r'(?i)^(?:#If|#ElseIf|#Else|#EndIf|#End|#Print|#Error)\b', Comment.Preproc),
+ # preprocessor variable (any line starting with '#' that is not a directive)
+ (r'^#', Comment.Preproc, 'variables'),
+ # String
+ ('"', String.Double, 'string'),
+ # Numbers
+ (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
+ (r'\.[0-9]+(?!\.)', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\$[0-9a-fA-Z]+', Number.Hex),
+ (r'\%[10]+', Number.Bin),
+ # Native data types
+ (r'\b%s\b' % keyword_type, Keyword.Type),
+ # Exception handling
+ (r'(?i)\b(?:Try|Catch|Throw)\b', Keyword.Reserved),
+ (r'Throwable', Name.Exception),
+ # Builtins
+ (r'(?i)\b(?:Null|True|False)\b', Name.Builtin),
+ (r'(?i)\b(?:Self|Super)\b', Name.Builtin.Pseudo),
+ (r'\b(?:HOST|LANG|TARGET|CONFIG)\b', Name.Constant),
+ # Keywords
+ (r'(?i)^(Import)(\s+)(.*)(\n)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace, Text)),
+ (r'(?i)^Strict\b.*\n', Keyword.Reserved),
+ (r'(?i)(Const|Local|Global|Field)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'variables'),
+ (r'(?i)(New|Class|Interface|Extends|Implements)(\s+)',
+ bygroups(Keyword.Reserved, Text), 'classname'),
+ (r'(?i)(Function|Method)(\s+)',
+ bygroups(Keyword.Reserved, Text), 'funcname'),
+ (r'(?i)(?:End|Return|Public|Private|Extern|Property|'
+ r'Final|Abstract)\b', Keyword.Reserved),
+ # Flow Control stuff
+ (r'(?i)(?:If|Then|Else|ElseIf|EndIf|'
+ r'Select|Case|Default|'
+ r'While|Wend|'
+ r'Repeat|Until|Forever|'
+ r'For|To|Until|Step|EachIn|Next|'
+ r'Exit|Continue)\s+', Keyword.Reserved),
+ # not used yet
+ (r'(?i)\b(?:Module|Inline)\b', Keyword.Reserved),
+ # Array
+ (r'[\[\]]', Punctuation),
+ # Other
+ (r'<=|>=|<>|\*=|/=|\+=|-=|&=|~=|\|=|[-&*/^+=<>|~]', Operator),
+ (r'(?i)(?:Not|Mod|Shl|Shr|And|Or)', Operator.Word),
+ (r'[(){}!#,.:]', Punctuation),
+ # catch the rest
+ (r'%s\b' % name_constant, Name.Constant),
+ (r'%s\b' % name_function, Name.Function),
+ (r'%s\b' % name_variable, Name.Variable),
+ ],
+ 'funcname': [
+ (r'(?i)%s\b' % name_function, Name.Function),
+ (r':', Punctuation, 'classname'),
+ (r'\s+', Text),
+ (r'\(', Punctuation, 'variables'),
+ (r'\)', Punctuation, '#pop')
+ ],
+ 'classname': [
+ (r'%s\.' % name_module, Name.Namespace),
+ (r'%s\b' % keyword_type, Keyword.Type),
+ (r'%s\b' % name_class, Name.Class),
+ # array (of given size)
+ (r'(\[)(\s*)(\d*)(\s*)(\])',
+ bygroups(Punctuation, Text, Number.Integer, Text, Punctuation)),
+ # generics
+ (r'\s+(?!<)', Text, '#pop'),
+ (r'<', Punctuation, '#push'),
+ (r'>', Punctuation, '#pop'),
+ (r'\n', Text, '#pop'),
+ default('#pop')
+ ],
+ 'variables': [
+ (r'%s\b' % name_constant, Name.Constant),
+ (r'%s\b' % name_variable, Name.Variable),
+ (r'%s' % keyword_type_special, Keyword.Type),
+ (r'\s+', Text),
+ (r':', Punctuation, 'classname'),
+ (r',', Punctuation, '#push'),
+ default('#pop')
+ ],
+ 'string': [
+ (r'[^"~]+', String.Double),
+ (r'~q|~n|~r|~t|~z|~~', String.Escape),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'comment': [
+ (r'(?i)^#rem.*?', Comment.Multiline, "#push"),
+ (r'(?i)^#end.*?', Comment.Multiline, "#pop"),
+ (r'\n', Comment.Multiline),
+ (r'.+', Comment.Multiline),
+ ],
+ }
+
+
+class CbmBasicV2Lexer(RegexLexer):
+ """
+ For CBM BASIC V2 sources.
+
+ .. versionadded:: 1.6
+ """
+ name = 'CBM BASIC V2'
+ aliases = ['cbmbas']
+ filenames = ['*.bas']
+
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'rem.*\n', Comment.Single),
+ (r'\s+', Text),
+ (r'new|run|end|for|to|next|step|go(to|sub)?|on|return|stop|cont'
+ r'|if|then|input#?|read|wait|load|save|verify|poke|sys|print#?'
+ r'|list|clr|cmd|open|close|get#?', Keyword.Reserved),
+ (r'data|restore|dim|let|def|fn', Keyword.Declaration),
+ (r'tab|spc|sgn|int|abs|usr|fre|pos|sqr|rnd|log|exp|cos|sin|tan|atn'
+ r'|peek|len|val|asc|(str|chr|left|right|mid)\$', Name.Builtin),
+ (r'[-+*/^<>=]', Operator),
+ (r'not|and|or', Operator.Word),
+ (r'"[^"\n]*.', String),
+ (r'\d+|[-+]?\d*\.\d*(e[-+]?\d+)?', Number.Float),
+ (r'[(),:;]', Punctuation),
+ (r'\w+[$%]?', Name),
+ ]
+ }
+
+ def analyse_text(self, text):
+ # if it starts with a line number, it shouldn't be a "modern" Basic
+ # like VB.net
+ if re.match(r'\d+', text):
+ return 0.2
+
+
+class QBasicLexer(RegexLexer):
+ """
+ For
+ `QBasic <http://en.wikipedia.org/wiki/QBasic>`_
+ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'QBasic'
+ aliases = ['qbasic', 'basic']
+ filenames = ['*.BAS', '*.bas']
+ mimetypes = ['text/basic']
+
+ declarations = ('DATA', 'LET')
+
+ functions = (
+ 'ABS', 'ASC', 'ATN', 'CDBL', 'CHR$', 'CINT', 'CLNG',
+ 'COMMAND$', 'COS', 'CSNG', 'CSRLIN', 'CVD', 'CVDMBF', 'CVI',
+ 'CVL', 'CVS', 'CVSMBF', 'DATE$', 'ENVIRON$', 'EOF', 'ERDEV',
+ 'ERDEV$', 'ERL', 'ERR', 'EXP', 'FILEATTR', 'FIX', 'FRE',
+ 'FREEFILE', 'HEX$', 'INKEY$', 'INP', 'INPUT$', 'INSTR', 'INT',
+ 'IOCTL$', 'LBOUND', 'LCASE$', 'LEFT$', 'LEN', 'LOC', 'LOF',
+ 'LOG', 'LPOS', 'LTRIM$', 'MID$', 'MKD$', 'MKDMBF$', 'MKI$',
+ 'MKL$', 'MKS$', 'MKSMBF$', 'OCT$', 'PEEK', 'PEN', 'PLAY',
+ 'PMAP', 'POINT', 'POS', 'RIGHT$', 'RND', 'RTRIM$', 'SADD',
+ 'SCREEN', 'SEEK', 'SETMEM', 'SGN', 'SIN', 'SPACE$', 'SPC',
+ 'SQR', 'STICK', 'STR$', 'STRIG', 'STRING$', 'TAB', 'TAN',
+ 'TIME$', 'TIMER', 'UBOUND', 'UCASE$', 'VAL', 'VARPTR',
+ 'VARPTR$', 'VARSEG'
+ )
+
+ metacommands = ('$DYNAMIC', '$INCLUDE', '$STATIC')
+
+ operators = ('AND', 'EQV', 'IMP', 'NOT', 'OR', 'XOR')
+
+ statements = (
+ 'BEEP', 'BLOAD', 'BSAVE', 'CALL', 'CALL ABSOLUTE',
+ 'CALL INTERRUPT', 'CALLS', 'CHAIN', 'CHDIR', 'CIRCLE', 'CLEAR',
+ 'CLOSE', 'CLS', 'COLOR', 'COM', 'COMMON', 'CONST', 'DATA',
+ 'DATE$', 'DECLARE', 'DEF FN', 'DEF SEG', 'DEFDBL', 'DEFINT',
+ 'DEFLNG', 'DEFSNG', 'DEFSTR', 'DEF', 'DIM', 'DO', 'LOOP',
+ 'DRAW', 'END', 'ENVIRON', 'ERASE', 'ERROR', 'EXIT', 'FIELD',
+ 'FILES', 'FOR', 'NEXT', 'FUNCTION', 'GET', 'GOSUB', 'GOTO',
+ 'IF', 'THEN', 'INPUT', 'INPUT #', 'IOCTL', 'KEY', 'KEY',
+ 'KILL', 'LET', 'LINE', 'LINE INPUT', 'LINE INPUT #', 'LOCATE',
+ 'LOCK', 'UNLOCK', 'LPRINT', 'LSET', 'MID$', 'MKDIR', 'NAME',
+ 'ON COM', 'ON ERROR', 'ON KEY', 'ON PEN', 'ON PLAY',
+ 'ON STRIG', 'ON TIMER', 'ON UEVENT', 'ON', 'OPEN', 'OPEN COM',
+ 'OPTION BASE', 'OUT', 'PAINT', 'PALETTE', 'PCOPY', 'PEN',
+ 'PLAY', 'POKE', 'PRESET', 'PRINT', 'PRINT #', 'PRINT USING',
+ 'PSET', 'PUT', 'PUT', 'RANDOMIZE', 'READ', 'REDIM', 'REM',
+ 'RESET', 'RESTORE', 'RESUME', 'RETURN', 'RMDIR', 'RSET', 'RUN',
+ 'SCREEN', 'SEEK', 'SELECT CASE', 'SHARED', 'SHELL', 'SLEEP',
+ 'SOUND', 'STATIC', 'STOP', 'STRIG', 'SUB', 'SWAP', 'SYSTEM',
+ 'TIME$', 'TIMER', 'TROFF', 'TRON', 'TYPE', 'UEVENT', 'UNLOCK',
+ 'VIEW', 'WAIT', 'WHILE', 'WEND', 'WIDTH', 'WINDOW', 'WRITE'
+ )
+
+ keywords = (
+ 'ACCESS', 'ALIAS', 'ANY', 'APPEND', 'AS', 'BASE', 'BINARY',
+ 'BYVAL', 'CASE', 'CDECL', 'DOUBLE', 'ELSE', 'ELSEIF', 'ENDIF',
+ 'INTEGER', 'IS', 'LIST', 'LOCAL', 'LONG', 'LOOP', 'MOD',
+ 'NEXT', 'OFF', 'ON', 'OUTPUT', 'RANDOM', 'SIGNAL', 'SINGLE',
+ 'STEP', 'STRING', 'THEN', 'TO', 'UNTIL', 'USING', 'WEND'
+ )
+
+ tokens = {
+ 'root': [
+ (r'\n+', Text),
+ (r'\s+', Text.Whitespace),
+ (r'^(\s*)(\d*)(\s*)(REM .*)$',
+ bygroups(Text.Whitespace, Name.Label, Text.Whitespace,
+ Comment.Single)),
+ (r'^(\s*)(\d+)(\s*)',
+ bygroups(Text.Whitespace, Name.Label, Text.Whitespace)),
+ (r'(?=[\s]*)(\w+)(?=[\s]*=)', Name.Variable.Global),
+ (r'(?=[^"]*)\'.*$', Comment.Single),
+ (r'"[^\n"]*"', String.Double),
+ (r'(END)(\s+)(FUNCTION|IF|SELECT|SUB)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Keyword.Reserved)),
+ (r'(DECLARE)(\s+)([A-Z]+)(\s+)(\S+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
+ Text.Whitespace, Name)),
+ (r'(DIM)(\s+)(SHARED)(\s+)([^\s(]+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
+ Text.Whitespace, Name.Variable.Global)),
+ (r'(DIM)(\s+)([^\s(]+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable.Global)),
+ (r'^(\s*)([a-zA-Z_]+)(\s*)(\=)',
+ bygroups(Text.Whitespace, Name.Variable.Global, Text.Whitespace,
+ Operator)),
+ (r'(GOTO|GOSUB)(\s+)(\w+\:?)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
+ (r'(SUB)(\s+)(\w+\:?)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
+ include('declarations'),
+ include('functions'),
+ include('metacommands'),
+ include('operators'),
+ include('statements'),
+ include('keywords'),
+ (r'[a-zA-Z_]\w*[$@#&!]', Name.Variable.Global),
+ (r'[a-zA-Z_]\w*\:', Name.Label),
+ (r'\-?\d*\.\d+[@|#]?', Number.Float),
+ (r'\-?\d+[@|#]', Number.Float),
+ (r'\-?\d+#?', Number.Integer.Long),
+ (r'\-?\d+#?', Number.Integer),
+ (r'!=|==|:=|\.=|<<|>>|[-~+/\\*%=<>&^|?:!.]', Operator),
+ (r'[\[\]{}(),;]', Punctuation),
+ (r'[\w]+', Name.Variable.Global),
+ ],
+ # can't use regular \b because of X$()
+ # XXX: use words() here
+ 'declarations': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, declarations)),
+ Keyword.Declaration),
+ ],
+ 'functions': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, functions)),
+ Keyword.Reserved),
+ ],
+ 'metacommands': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, metacommands)),
+ Keyword.Constant),
+ ],
+ 'operators': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, operators)), Operator.Word),
+ ],
+ 'statements': [
+ (r'\b(%s)\b' % '|'.join(map(re.escape, statements)),
+ Keyword.Reserved),
+ ],
+ 'keywords': [
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ ],
+ }
+
+ def analyse_text(text):
+ if '$DYNAMIC' in text or '$STATIC' in text:
+ return 0.9
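
The BASIC-family lexers above register the aliases given in their class bodies ('cbmbas', 'qbasic', 'basic', ...), so they can be driven through the usual Pygments entry points. A minimal sketch, assuming the lexer mapping has been regenerated for this branch; the QBasic fragment is invented for illustration:

    # Illustrative only; the snippet is a made-up QBasic fragment.
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    code = '10 REM invented sample\n20 PRINT "HELLO"\n30 GOTO 20\n'
    print(highlight(code, get_lexer_by_name('qbasic'), TerminalFormatter()))
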
diff --git a/pygments/lexers/business.py b/pygments/lexers/business.py
new file mode 100644
index 00000000..c71d9c28
--- /dev/null
+++ b/pygments/lexers/business.py
@@ -0,0 +1,592 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.business
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for "business-oriented" languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, words, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+from pygments.lexers._openedge_builtins import OPENEDGEKEYWORDS
+
+__all__ = ['CobolLexer', 'CobolFreeformatLexer', 'ABAPLexer', 'OpenEdgeLexer',
+ 'GoodDataCLLexer', 'MaqlLexer']
+
+
+class CobolLexer(RegexLexer):
+ """
+ Lexer for OpenCOBOL code.
+
+ .. versionadded:: 1.6
+ """
+ name = 'COBOL'
+ aliases = ['cobol']
+ filenames = ['*.cob', '*.COB', '*.cpy', '*.CPY']
+ mimetypes = ['text/x-cobol']
+ flags = re.IGNORECASE | re.MULTILINE
+
+ # Data Types: by PICTURE and USAGE
+ # Operators: **, *, +, -, /, <, >, <=, >=, =, <>
+ # Logical (?): NOT, AND, OR
+
+ # Reserved words:
+ # http://opencobol.add1tocobol.com/#reserved-words
+ # Intrinsics:
+ # http://opencobol.add1tocobol.com/#does-opencobol-implement-any-intrinsic-functions
+
+ tokens = {
+ 'root': [
+ include('comment'),
+ include('strings'),
+ include('core'),
+ include('nums'),
+ (r'[a-z0-9]([\w\-]*[a-z0-9]+)?', Name.Variable),
+ # (r'[\s]+', Text),
+ (r'[ \t]+', Text),
+ ],
+ 'comment': [
+ (r'(^.{6}[*/].*\n|^.{6}|\*>.*\n)', Comment),
+ ],
+ 'core': [
+ # Figurative constants
+ (r'(^|(?<=[^0-9a-z_\-]))(ALL\s+)?'
+ r'((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)'
+ r'\s*($|(?=[^0-9a-z_\-]))',
+ Name.Constant),
+
+ # Reserved words STATEMENTS and other bolds
+ (words((
+ 'ACCEPT', 'ADD', 'ALLOCATE', 'CALL', 'CANCEL', 'CLOSE', 'COMPUTE',
+ 'CONFIGURATION', 'CONTINUE', 'DATA', 'DELETE', 'DISPLAY', 'DIVIDE',
+ 'DIVISION', 'ELSE', 'END', 'END-ACCEPT',
+ 'END-ADD', 'END-CALL', 'END-COMPUTE', 'END-DELETE', 'END-DISPLAY',
+ 'END-DIVIDE', 'END-EVALUATE', 'END-IF', 'END-MULTIPLY', 'END-OF-PAGE',
+ 'END-PERFORM', 'END-READ', 'END-RETURN', 'END-REWRITE', 'END-SEARCH',
+ 'END-START', 'END-STRING', 'END-SUBTRACT', 'END-UNSTRING', 'END-WRITE',
+ 'ENVIRONMENT', 'EVALUATE', 'EXIT', 'FD', 'FILE', 'FILE-CONTROL', 'FOREVER',
+ 'FREE', 'GENERATE', 'GO', 'GOBACK', 'IDENTIFICATION', 'IF', 'INITIALIZE',
+ 'INITIATE', 'INPUT-OUTPUT', 'INSPECT', 'INVOKE', 'I-O-CONTROL', 'LINKAGE',
+ 'LOCAL-STORAGE', 'MERGE', 'MOVE', 'MULTIPLY', 'OPEN', 'PERFORM',
+ 'PROCEDURE', 'PROGRAM-ID', 'RAISE', 'READ', 'RELEASE', 'RESUME',
+ 'RETURN', 'REWRITE', 'SCREEN', 'SD', 'SEARCH', 'SECTION', 'SET',
+ 'SORT', 'START', 'STOP', 'STRING', 'SUBTRACT', 'SUPPRESS',
+ 'TERMINATE', 'THEN', 'UNLOCK', 'UNSTRING', 'USE', 'VALIDATE',
+ 'WORKING-STORAGE', 'WRITE'), prefix=r'(^|(?<=[^0-9a-z_\-]))',
+ suffix=r'\s*($|(?=[^0-9a-z_\-]))'),
+ Keyword.Reserved),
+
+ # Reserved words
+ (words((
+ 'ACCESS', 'ADDRESS', 'ADVANCING', 'AFTER', 'ALL',
+ 'ALPHABET', 'ALPHABETIC', 'ALPHABETIC-LOWER', 'ALPHABETIC-UPPER',
+                'ALPHANUMERIC', 'ALPHANUMERIC-EDITED', 'ALSO', 'ALTER', 'ALTERNATE',
+ 'ANY', 'ARE', 'AREA', 'AREAS', 'ARGUMENT-NUMBER', 'ARGUMENT-VALUE', 'AS',
+ 'ASCENDING', 'ASSIGN', 'AT', 'AUTO', 'AUTO-SKIP', 'AUTOMATIC', 'AUTOTERMINATE',
+ 'BACKGROUND-COLOR', 'BASED', 'BEEP', 'BEFORE', 'BELL',
+ 'BLANK', 'BLINK', 'BLOCK', 'BOTTOM', 'BY', 'BYTE-LENGTH', 'CHAINING',
+ 'CHARACTER', 'CHARACTERS', 'CLASS', 'CODE', 'CODE-SET', 'COL', 'COLLATING',
+ 'COLS', 'COLUMN', 'COLUMNS', 'COMMA', 'COMMAND-LINE', 'COMMIT', 'COMMON',
+ 'CONSTANT', 'CONTAINS', 'CONTENT', 'CONTROL',
+ 'CONTROLS', 'CONVERTING', 'COPY', 'CORR', 'CORRESPONDING', 'COUNT', 'CRT',
+ 'CURRENCY', 'CURSOR', 'CYCLE', 'DATE', 'DAY', 'DAY-OF-WEEK', 'DE', 'DEBUGGING',
+ 'DECIMAL-POINT', 'DECLARATIVES', 'DEFAULT', 'DELIMITED',
+ 'DELIMITER', 'DEPENDING', 'DESCENDING', 'DETAIL', 'DISK',
+ 'DOWN', 'DUPLICATES', 'DYNAMIC', 'EBCDIC',
+ 'ENTRY', 'ENVIRONMENT-NAME', 'ENVIRONMENT-VALUE', 'EOL', 'EOP',
+ 'EOS', 'ERASE', 'ERROR', 'ESCAPE', 'EXCEPTION',
+ 'EXCLUSIVE', 'EXTEND', 'EXTERNAL',
+ 'FILE-ID', 'FILLER', 'FINAL', 'FIRST', 'FIXED', 'FLOAT-LONG', 'FLOAT-SHORT',
+ 'FOOTING', 'FOR', 'FOREGROUND-COLOR', 'FORMAT', 'FROM', 'FULL', 'FUNCTION',
+ 'FUNCTION-ID', 'GIVING', 'GLOBAL', 'GROUP',
+ 'HEADING', 'HIGHLIGHT', 'I-O', 'ID',
+ 'IGNORE', 'IGNORING', 'IN', 'INDEX', 'INDEXED', 'INDICATE',
+ 'INITIAL', 'INITIALIZED', 'INPUT',
+ 'INTO', 'INTRINSIC', 'INVALID', 'IS', 'JUST', 'JUSTIFIED', 'KEY', 'LABEL',
+ 'LAST', 'LEADING', 'LEFT', 'LENGTH', 'LIMIT', 'LIMITS', 'LINAGE',
+ 'LINAGE-COUNTER', 'LINE', 'LINES', 'LOCALE', 'LOCK',
+ 'LOWLIGHT', 'MANUAL', 'MEMORY', 'MINUS', 'MODE',
+ 'MULTIPLE', 'NATIONAL', 'NATIONAL-EDITED', 'NATIVE',
+ 'NEGATIVE', 'NEXT', 'NO', 'NULL', 'NULLS', 'NUMBER', 'NUMBERS', 'NUMERIC',
+ 'NUMERIC-EDITED', 'OBJECT-COMPUTER', 'OCCURS', 'OF', 'OFF', 'OMITTED', 'ON', 'ONLY',
+ 'OPTIONAL', 'ORDER', 'ORGANIZATION', 'OTHER', 'OUTPUT', 'OVERFLOW',
+ 'OVERLINE', 'PACKED-DECIMAL', 'PADDING', 'PAGE', 'PARAGRAPH',
+ 'PLUS', 'POINTER', 'POSITION', 'POSITIVE', 'PRESENT', 'PREVIOUS',
+ 'PRINTER', 'PRINTING', 'PROCEDURE-POINTER', 'PROCEDURES',
+ 'PROCEED', 'PROGRAM', 'PROGRAM-POINTER', 'PROMPT', 'QUOTE',
+ 'QUOTES', 'RANDOM', 'RD', 'RECORD', 'RECORDING', 'RECORDS', 'RECURSIVE',
+ 'REDEFINES', 'REEL', 'REFERENCE', 'RELATIVE', 'REMAINDER', 'REMOVAL',
+ 'RENAMES', 'REPLACING', 'REPORT', 'REPORTING', 'REPORTS', 'REPOSITORY',
+ 'REQUIRED', 'RESERVE', 'RETURNING', 'REVERSE-VIDEO', 'REWIND',
+ 'RIGHT', 'ROLLBACK', 'ROUNDED', 'RUN', 'SAME', 'SCROLL',
+ 'SECURE', 'SEGMENT-LIMIT', 'SELECT', 'SENTENCE', 'SEPARATE',
+ 'SEQUENCE', 'SEQUENTIAL', 'SHARING', 'SIGN', 'SIGNED', 'SIGNED-INT',
+ 'SIGNED-LONG', 'SIGNED-SHORT', 'SIZE', 'SORT-MERGE', 'SOURCE',
+ 'SOURCE-COMPUTER', 'SPECIAL-NAMES', 'STANDARD',
+ 'STANDARD-1', 'STANDARD-2', 'STATUS', 'SUM',
+ 'SYMBOLIC', 'SYNC', 'SYNCHRONIZED', 'TALLYING', 'TAPE',
+ 'TEST', 'THROUGH', 'THRU', 'TIME', 'TIMES', 'TO', 'TOP', 'TRAILING',
+ 'TRANSFORM', 'TYPE', 'UNDERLINE', 'UNIT', 'UNSIGNED',
+ 'UNSIGNED-INT', 'UNSIGNED-LONG', 'UNSIGNED-SHORT', 'UNTIL', 'UP',
+ 'UPDATE', 'UPON', 'USAGE', 'USING', 'VALUE', 'VALUES', 'VARYING',
+ 'WAIT', 'WHEN', 'WITH', 'WORDS', 'YYYYDDD', 'YYYYMMDD'),
+ prefix=r'(^|(?<=[^0-9a-z_\-]))', suffix=r'\s*($|(?=[^0-9a-z_\-]))'),
+ Keyword.Pseudo),
+
+ # inactive reserved words
+ (words((
+ 'ACTIVE-CLASS', 'ALIGNED', 'ANYCASE', 'ARITHMETIC', 'ATTRIBUTE', 'B-AND',
+ 'B-NOT', 'B-OR', 'B-XOR', 'BIT', 'BOOLEAN', 'CD', 'CENTER', 'CF', 'CH', 'CHAIN', 'CLASS-ID',
+ 'CLASSIFICATION', 'COMMUNICATION', 'CONDITION', 'DATA-POINTER',
+ 'DESTINATION', 'DISABLE', 'EC', 'EGI', 'EMI', 'ENABLE', 'END-RECEIVE',
+ 'ENTRY-CONVENTION', 'EO', 'ESI', 'EXCEPTION-OBJECT', 'EXPANDS', 'FACTORY',
+ 'FLOAT-BINARY-16', 'FLOAT-BINARY-34', 'FLOAT-BINARY-7',
+ 'FLOAT-DECIMAL-16', 'FLOAT-DECIMAL-34', 'FLOAT-EXTENDED', 'FORMAT',
+ 'FUNCTION-POINTER', 'GET', 'GROUP-USAGE', 'IMPLEMENTS', 'INFINITY',
+ 'INHERITS', 'INTERFACE', 'INTERFACE-ID', 'INVOKE', 'LC_ALL', 'LC_COLLATE',
+ 'LC_CTYPE', 'LC_MESSAGES', 'LC_MONETARY', 'LC_NUMERIC', 'LC_TIME',
+ 'LINE-COUNTER', 'MESSAGE', 'METHOD', 'METHOD-ID', 'NESTED', 'NONE', 'NORMAL',
+ 'OBJECT', 'OBJECT-REFERENCE', 'OPTIONS', 'OVERRIDE', 'PAGE-COUNTER', 'PF', 'PH',
+ 'PROPERTY', 'PROTOTYPE', 'PURGE', 'QUEUE', 'RAISE', 'RAISING', 'RECEIVE',
+ 'RELATION', 'REPLACE', 'REPRESENTS-NOT-A-NUMBER', 'RESET', 'RESUME', 'RETRY',
+ 'RF', 'RH', 'SECONDS', 'SEGMENT', 'SELF', 'SEND', 'SOURCES', 'STATEMENT', 'STEP',
+ 'STRONG', 'SUB-QUEUE-1', 'SUB-QUEUE-2', 'SUB-QUEUE-3', 'SUPER', 'SYMBOL',
+ 'SYSTEM-DEFAULT', 'TABLE', 'TERMINAL', 'TEXT', 'TYPEDEF', 'UCS-4', 'UNIVERSAL',
+ 'USER-DEFAULT', 'UTF-16', 'UTF-8', 'VAL-STATUS', 'VALID', 'VALIDATE',
+ 'VALIDATE-STATUS'),
+ prefix=r'(^|(?<=[^0-9a-z_\-]))', suffix=r'\s*($|(?=[^0-9a-z_\-]))'),
+ Error),
+
+ # Data Types
+ (r'(^|(?<=[^0-9a-z_\-]))'
+ r'(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|'
+ r'(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|'
+ r'BINARY-C-LONG|'
+ r'BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|'
+ r'BINARY)\s*($|(?=[^0-9a-z_\-]))', Keyword.Type),
+
+ # Operators
+ (r'(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)', Operator),
+
+ # (r'(::)', Keyword.Declaration),
+
+ (r'([(),;:&%.])', Punctuation),
+
+ # Intrinsics
+ (r'(^|(?<=[^0-9a-z_\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|'
+ r'CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|'
+ r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|'
+ r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|'
+ r'FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|'
+ r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|'
+ r'LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|'
+ r'ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|'
+ r'SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|'
+ r'STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|'
+ r'SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|'
+ r'UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*'
+ r'($|(?=[^0-9a-z_\-]))', Name.Function),
+
+ # Booleans
+ (r'(^|(?<=[^0-9a-z_\-]))(true|false)\s*($|(?=[^0-9a-z_\-]))', Name.Builtin),
+ # Comparing Operators
+ (r'(^|(?<=[^0-9a-z_\-]))(equal|equals|ne|lt|le|gt|ge|'
+ r'greater|less|than|not|and|or)\s*($|(?=[^0-9a-z_\-]))', Operator.Word),
+ ],
+
+ # \"[^\"\n]*\"|\'[^\'\n]*\'
+ 'strings': [
+ # apparently strings can be delimited by EOL if they are continued
+ # in the next line
+ (r'"[^"\n]*("|\n)', String.Double),
+ (r"'[^'\n]*('|\n)", String.Single),
+ ],
+
+ 'nums': [
+ (r'\d+(\s*|\.$|$)', Number.Integer),
+ (r'[+-]?\d*\.\d+(E[-+]?\d+)?', Number.Float),
+ (r'[+-]?\d+\.\d*(E[-+]?\d+)?', Number.Float),
+ ],
+ }
+
+
+class CobolFreeformatLexer(CobolLexer):
+ """
+ Lexer for Free format OpenCOBOL code.
+
+ .. versionadded:: 1.6
+ """
+ name = 'COBOLFree'
+ aliases = ['cobolfree']
+ filenames = ['*.cbl', '*.CBL']
+ mimetypes = []
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'comment': [
+ (r'(\*>.*\n|^\w*\*.*$)', Comment),
+ ],
+ }
+
+
+class ABAPLexer(RegexLexer):
+ """
+ Lexer for ABAP, SAP's integrated language.
+
+ .. versionadded:: 1.1
+ """
+ name = 'ABAP'
+ aliases = ['abap']
+ filenames = ['*.abap']
+ mimetypes = ['text/x-abap']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'common': [
+ (r'\s+', Text),
+ (r'^\*.*$', Comment.Single),
+ (r'\".*?\n', Comment.Single),
+ ],
+ 'variable-names': [
+ (r'<\S+>', Name.Variable),
+ (r'\w[\w~]*(?:(\[\])|->\*)?', Name.Variable),
+ ],
+ 'root': [
+ include('common'),
+ # function calls
+ (r'(CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION))(\s+)(\'?\S+\'?)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|'
+ r'TRANSACTION|TRANSFORMATION))\b',
+ Keyword),
+ (r'(FORM|PERFORM)(\s+)(\w+)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'(PERFORM)(\s+)(\()(\w+)(\))',
+ bygroups(Keyword, Text, Punctuation, Name.Variable, Punctuation)),
+ (r'(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)',
+ bygroups(Keyword, Text, Name.Function, Text, Keyword)),
+
+ # method implementation
+ (r'(METHOD)(\s+)([\w~]+)',
+ bygroups(Keyword, Text, Name.Function)),
+ # method calls
+ (r'(\s+)([\w\-]+)([=\-]>)([\w\-~]+)',
+ bygroups(Text, Name.Variable, Operator, Name.Function)),
+            # method names called in returning style (after -> or =>)
+ (r'(?<=(=|-)>)([\w\-~]+)(?=\()', Name.Function),
+
+ # keywords with dashes in them.
+ # these need to be first, because for instance the -ID part
+ # of MESSAGE-ID wouldn't get highlighted if MESSAGE was
+ # first in the list of keywords.
+ (r'(ADD-CORRESPONDING|AUTHORITY-CHECK|'
+ r'CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|'
+ r'DELETE-ADJACENT|DIVIDE-CORRESPONDING|'
+ r'EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|'
+ r'FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|'
+ r'INTERFACE-POOL|INVERTED-DATE|'
+ r'LOAD-OF-PROGRAM|LOG-POINT|'
+ r'MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|'
+ r'NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|'
+ r'OUTPUT-LENGTH|PRINT-CONTROL|'
+ r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|'
+ r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|'
+ r'TYPE-POOL|TYPE-POOLS'
+ r')\b', Keyword),
+
+            # keyword combinations
+ (r'CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|'
+ r'((PUBLIC|PRIVATE|PROTECTED)\s+SECTION|'
+ r'(TYPE|LIKE)(\s+(LINE\s+OF|REF\s+TO|'
+ r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|'
+ r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|'
+ r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|'
+ r'GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|'
+ r'PF-STATUS|(PROPERTY|REFERENCE)\s+OF|'
+ r'RUN\s+TIME|TIME\s+(STAMP)?)?|'
+ r'SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|'
+ r'HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|'
+ r'LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|'
+ r'RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|'
+             r'TITLEBAR|UPDATE\s+TASK\s+LOCAL|USER-COMMAND)|'
+ r'CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|'
+ r'(CLOSE|OPEN)\s+(DATASET|CURSOR)|'
+ r'(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|'
+ r'DATABASE|SHARED\s+(MEMORY|BUFFER))|'
+ r'DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|'
+ r'FREE\s(MEMORY|OBJECT)?|'
+ r'PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|'
+ r'ON\s+(VALUE-REQUEST|HELP-REQUEST))|'
+ r'AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|'
+ r'AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|'
+ r'END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|'
+ r'SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|'
+ r'SCREEN)|COMMENT|FUNCTION\s+KEY|'
+ r'INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|'
+ r'SKIP|ULINE)|'
+ r'LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|'
+ r'TO LIST-PROCESSING|TO TRANSACTION)'
+ r'(ENDING|STARTING)\s+AT|'
+ r'FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|'
+ r'AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|'
+ r'WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|'
+ r'(BEGIN|END)\s+OF|'
+ r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|'
+ r'COMPARING(\s+ALL\s+FIELDS)?|'
+ r'INSERT(\s+INITIAL\s+LINE\s+INTO|\s+LINES\s+OF)?|'
+ r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|'
+ r'END-OF-(DEFINITION|PAGE|SELECTION)|'
+ r'WITH\s+FRAME(\s+TITLE)|'
+
+             # simple combinations
+ r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|'
+ r'IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|'
+ r'LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|'
+ r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|'
+ r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|'
+ r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|'
+ r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE)\b', Keyword),
+
+ # single word keywords.
+ (r'(^|(?<=(\s|\.)))(ABBREVIATED|ADD|ALIASES|APPEND|ASSERT|'
+ r'ASSIGN(ING)?|AT(\s+FIRST)?|'
+ r'BACK|BLOCK|BREAK-POINT|'
+ r'CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|'
+ r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|'
+ r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|'
+ r'DATA|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|'
+ r'DETAIL|DIRECTORY|DIVIDE|DO|'
+ r'ELSE(IF)?|ENDAT|ENDCASE|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|'
+ r'ENDIF|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|'
+ r'ENHANCEMENT|EVENTS|EXCEPTIONS|EXIT|EXPORT|EXPORTING|EXTRACT|'
+ r'FETCH|FIELDS?|FIND|FOR|FORM|FORMAT|FREE|FROM|'
+ r'HIDE|'
+ r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|'
+ r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|'
+ r'LENGTH|LINES|LOAD|LOCAL|'
+ r'JOIN|'
+ r'KEY|'
+ r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFY|MOVE|MULTIPLY|'
+ r'NODES|'
+ r'OBLIGATORY|OF|OFF|ON|OVERLAY|'
+ r'PACK|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|'
+ r'RAISE|RAISING|RANGES|READ|RECEIVE|REFRESH|REJECT|REPORT|RESERVE|'
+ r'RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|'
+ r'SCROLL|SEARCH|SELECT|SHIFT|SINGLE|SKIP|SORT|SPLIT|STATICS|STOP|'
+ r'SUBMIT|SUBTRACT|SUM|SUMMARY|SUMMING|SUPPLY|'
+ r'TABLE|TABLES|TIMES|TITLE|TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|'
+ r'ULINE|UNDER|UNPACK|UPDATE|USING|'
+ r'VALUE|VALUES|VIA|'
+ r'WAIT|WHEN|WHERE|WHILE|WITH|WINDOW|WRITE)\b', Keyword),
+
+ # builtins
+ (r'(abs|acos|asin|atan|'
+ r'boolc|boolx|bit_set|'
+ r'char_off|charlen|ceil|cmax|cmin|condense|contains|'
+ r'contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|'
+ r'count|count_any_of|count_any_not_of|'
+ r'dbmaxlen|distance|'
+ r'escape|exp|'
+ r'find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|'
+ r'insert|'
+ r'lines|log|log10|'
+ r'match|matches|'
+ r'nmax|nmin|numofchar|'
+ r'repeat|replace|rescale|reverse|round|'
+ r'segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|'
+ r'substring|substring_after|substring_from|substring_before|substring_to|'
+ r'tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|'
+ r'xstrlen)(\()\b', bygroups(Name.Builtin, Punctuation)),
+
+ (r'&[0-9]', Name),
+ (r'[0-9]+', Number.Integer),
+
+ # operators which look like variable names before
+ # parsing variable names.
+ (r'(?<=(\s|.))(AND|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|'
+ r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|'
+ r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator),
+
+ include('variable-names'),
+
+            # standard operators after variable names,
+ # because < and > are part of field symbols.
+ (r'[?*<>=\-+]', Operator),
+ (r"'(''|[^'])*'", String.Single),
+ (r"`([^`])*`", String.Single),
+ (r'[/;:()\[\],.]', Punctuation)
+ ],
+ }
+
+
+class OpenEdgeLexer(RegexLexer):
+ """
+ Lexer for `OpenEdge ABL (formerly Progress)
+ <http://web.progress.com/en/openedge/abl.html>`_ source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'OpenEdge ABL'
+ aliases = ['openedge', 'abl', 'progress']
+ filenames = ['*.p', '*.cls']
+ mimetypes = ['text/x-openedge', 'application/x-openedge']
+
+ types = (r'(?i)(^|(?<=[^0-9a-z_\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|'
+ r'COM-HANDLE|DATE|DATETIME|DATETIME-TZ|'
+ r'DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|'
+ r'INT64|INTEGER|INT|INTE|INTEG|INTEGE|'
+ r'LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^0-9a-z_\-]))')
+
+ keywords = words(OPENEDGEKEYWORDS,
+ prefix=r'(?i)(^|(?<=[^0-9a-z_\-]))',
+ suffix=r'\s*($|(?=[^0-9a-z_\-]))')
+
+ tokens = {
+ 'root': [
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'\{', Comment.Preproc, 'preprocessor'),
+ (r'\s*&.*', Comment.Preproc),
+ (r'0[xX][0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'(?i)(DEFINE|DEF|DEFI|DEFIN)\b', Keyword.Declaration),
+ (types, Keyword.Type),
+ (keywords, Name.Builtin),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\s+', Text),
+ (r'[+*/=-]', Operator),
+ (r'[.:()]', Punctuation),
+ (r'.', Name.Variable), # Lazy catch-all
+ ],
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'preprocessor': [
+ (r'[^{}]', Comment.Preproc),
+ (r'\{', Comment.Preproc, '#push'),
+ (r'\}', Comment.Preproc, '#pop'),
+ ],
+ }
+
+
+class GoodDataCLLexer(RegexLexer):
+ """
+ Lexer for `GoodData-CL
+ <http://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/\
+com/gooddata/processor/COMMANDS.txt>`_
+ script files.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'GoodData-CL'
+ aliases = ['gooddata-cl']
+ filenames = ['*.gdc']
+ mimetypes = ['text/x-gooddata-cl']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ # Comments
+ (r'#.*', Comment.Single),
+ # Function call
+ (r'[a-z]\w*', Name.Function),
+ # Argument list
+ (r'\(', Punctuation, 'args-list'),
+ # Punctuation
+ (r';', Punctuation),
+ # Space is not significant
+ (r'\s+', Text)
+ ],
+ 'args-list': [
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'[a-z]\w*', Name.Variable),
+ (r'=', Operator),
+ (r'"', String, 'string-literal'),
+ (r'[0-9]+(?:\.[0-9]+)?(?:e[+-]?[0-9]{1,3})?', Number),
+ # Space is not significant
+ (r'\s', Text)
+ ],
+ 'string-literal': [
+ (r'\\[tnrfbae"\\]', String.Escape),
+ (r'"', String, '#pop'),
+ (r'[^\\"]+', String)
+ ]
+ }
+
+
+class MaqlLexer(RegexLexer):
+ """
+ Lexer for `GoodData MAQL
+ <https://secure.gooddata.com/docs/html/advanced.metric.tutorial.html>`_
+ scripts.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'MAQL'
+ aliases = ['maql']
+ filenames = ['*.maql']
+ mimetypes = ['text/x-gooddata-maql', 'application/x-gooddata-maql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ # IDENTITY
+ (r'IDENTIFIER\b', Name.Builtin),
+ # IDENTIFIER
+ (r'\{[^}]+\}', Name.Variable),
+ # NUMBER
+ (r'[0-9]+(?:\.[0-9]+)?(?:e[+-]?[0-9]{1,3})?', Number),
+ # STRING
+ (r'"', String, 'string-literal'),
+ # RELATION
+ (r'\<\>|\!\=', Operator),
+ (r'\=|\>\=|\>|\<\=|\<', Operator),
+ # :=
+ (r'\:\=', Operator),
+ # OBJECT
+ (r'\[[^]]+\]', Name.Variable.Class),
+ # keywords
+ (words((
+ 'DIMENSION', 'DIMENSIONS', 'BOTTOM', 'METRIC', 'COUNT', 'OTHER',
+ 'FACT', 'WITH', 'TOP', 'OR', 'ATTRIBUTE', 'CREATE', 'PARENT',
+ 'FALSE', 'ROW', 'ROWS', 'FROM', 'ALL', 'AS', 'PF', 'COLUMN',
+ 'COLUMNS', 'DEFINE', 'REPORT', 'LIMIT', 'TABLE', 'LIKE', 'AND',
+ 'BY', 'BETWEEN', 'EXCEPT', 'SELECT', 'MATCH', 'WHERE', 'TRUE',
+ 'FOR', 'IN', 'WITHOUT', 'FILTER', 'ALIAS', 'WHEN', 'NOT', 'ON',
+ 'KEYS', 'KEY', 'FULLSET', 'PRIMARY', 'LABELS', 'LABEL',
+ 'VISUAL', 'TITLE', 'DESCRIPTION', 'FOLDER', 'ALTER', 'DROP',
+ 'ADD', 'DATASET', 'DATATYPE', 'INT', 'BIGINT', 'DOUBLE', 'DATE',
+ 'VARCHAR', 'DECIMAL', 'SYNCHRONIZE', 'TYPE', 'DEFAULT', 'ORDER',
+ 'ASC', 'DESC', 'HYPERLINK', 'INCLUDE', 'TEMPLATE', 'MODIFY'),
+ suffix=r'\b'),
+ Keyword),
+ # FUNCNAME
+ (r'[a-z]\w*\b', Name.Function),
+ # Comments
+ (r'#.*', Comment.Single),
+ # Punctuation
+ (r'[,;()]', Punctuation),
+ # Space is not significant
+ (r'\s+', Text)
+ ],
+ 'string-literal': [
+ (r'\\[tnrfbae"\\]', String.Escape),
+ (r'"', String, '#pop'),
+ (r'[^\\"]+', String)
+ ],
+ }
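
When reviewing rule changes like the ones above, it is often easier to look at the raw token stream than at rendered output. A small sketch using the long-standing 'abap' alias; the ABAP statement is a made-up fragment:

    # Illustrative only; the ABAP line is invented for this example.
    from pygments import lex
    from pygments.lexers import get_lexer_by_name

    for token_type, value in lex("WRITE 'hello'.\n", get_lexer_by_name('abap')):
        print(token_type, repr(value))
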
diff --git a/pygments/lexers/c_cpp.py b/pygments/lexers/c_cpp.py
new file mode 100644
index 00000000..35ea517f
--- /dev/null
+++ b/pygments/lexers/c_cpp.py
@@ -0,0 +1,235 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.c_cpp
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for C/C++ languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, \
+ this, inherit, default, words
+from pygments.util import get_bool_opt
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['CLexer', 'CppLexer']
+
+
+class CFamilyLexer(RegexLexer):
+ """
+ For C family source code. This is used as a base class to avoid repetitious
+ definitions.
+ """
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ # The trailing ?, rather than *, avoids a geometric performance drop here.
+ #: only one /* */ style comment
+ _ws1 = r'\s*(?:/[*].*?[*]/\s*)?'
+
+ tokens = {
+ 'whitespace': [
+ # preprocessor directives: without whitespace
+ ('^#if\s+0', Comment.Preproc, 'if0'),
+ ('^#', Comment.Preproc, 'macro'),
+ # or with whitespace
+ ('^(' + _ws1 + r')(#if\s+0)',
+ bygroups(using(this), Comment.Preproc), 'if0'),
+ ('^(' + _ws1 + ')(#)',
+ bygroups(using(this), Comment.Preproc), 'macro'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ ],
+ 'statements': [
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'0[0-7]+[LlUu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'\*/', Error),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.]', Punctuation),
+ (words(('auto', 'break', 'case', 'const', 'continue', 'default', 'do',
+ 'else', 'enum', 'extern', 'for', 'goto', 'if', 'register',
+ 'restricted', 'return', 'sizeof', 'static', 'struct',
+ 'switch', 'typedef', 'union', 'volatile', 'while'),
+ suffix=r'\b'), Keyword),
+ (r'(bool|int|long|float|short|double|char|unsigned|signed|void|'
+ r'[a-z_][a-z0-9_]*_t)\b',
+ Keyword.Type),
+ (words(('inline', '_inline', '__inline', 'naked', 'restrict',
+ 'thread', 'typename'), suffix=r'\b'), Keyword.Reserved),
+ # Vector intrinsics
+ (r'(__m(128i|128d|128|64))\b', Keyword.Reserved),
+ # Microsoft-isms
+ (words((
+ 'asm', 'int8', 'based', 'except', 'int16', 'stdcall', 'cdecl',
+ 'fastcall', 'int32', 'declspec', 'finally', 'int64', 'try',
+ 'leave', 'wchar_t', 'w64', 'unaligned', 'raise', 'noop',
+ 'identifier', 'forceinline', 'assume'),
+ prefix=r'__', suffix=r'\b'), Keyword.Reserved),
+ (r'(true|false|NULL)\b', Name.Builtin),
+ (r'([a-zA-Z_]\w*)(\s*)(:)(?!:)', bygroups(Name.Label, Text, Punctuation)),
+ ('[a-zA-Z_]\w*', Name),
+ ],
+ 'root': [
+ include('whitespace'),
+ # functions
+ (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'(' + _ws + r')?(\{)',
+ bygroups(using(this), Name.Function, using(this), using(this),
+ Punctuation),
+ 'function'),
+ # function declarations
+ (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'(' + _ws + r')?(;)',
+ bygroups(using(this), Name.Function, using(this), using(this),
+ Punctuation)),
+ default('statement'),
+ ],
+ 'statement': [
+ include('whitespace'),
+ include('statements'),
+ ('[{}]', Punctuation),
+ (';', Punctuation, '#pop'),
+ ],
+ 'function': [
+ include('whitespace'),
+ include('statements'),
+ (';', Punctuation),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
+ r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
+
+ stdlib_types = ['size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t',
+ 'sig_atomic_t', 'fpos_t', 'clock_t', 'time_t', 'va_list',
+ 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t', 'mbstate_t',
+ 'wctrans_t', 'wint_t', 'wctype_t']
+ c99_types = ['_Bool', '_Complex', 'int8_t', 'int16_t', 'int32_t', 'int64_t',
+ 'uint8_t', 'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t',
+ 'int_least16_t', 'int_least32_t', 'int_least64_t',
+ 'uint_least8_t', 'uint_least16_t', 'uint_least32_t',
+ 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t',
+ 'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t',
+ 'uint_fast64_t', 'intptr_t', 'uintptr_t', 'intmax_t',
+ 'uintmax_t']
+
+ def __init__(self, **options):
+ self.stdlibhighlighting = get_bool_opt(options, 'stdlibhighlighting', True)
+ self.c99highlighting = get_bool_opt(options, 'c99highlighting', True)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if self.stdlibhighlighting and value in self.stdlib_types:
+ token = Keyword.Type
+ elif self.c99highlighting and value in self.c99_types:
+ token = Keyword.Type
+ yield index, token, value
+
+
+class CLexer(CFamilyLexer):
+ """
+ For C source code with preprocessor directives.
+ """
+ name = 'C'
+ aliases = ['c']
+ filenames = ['*.c', '*.h', '*.idc']
+ mimetypes = ['text/x-chdr', 'text/x-csrc']
+ priority = 0.1
+
+ def analyse_text(text):
+ if re.search('^\s*#include [<"]', text, re.MULTILINE):
+ return 0.1
+ if re.search('^\s*#ifdef ', text, re.MULTILINE):
+ return 0.1
+
+
+class CppLexer(CFamilyLexer):
+ """
+ For C++ source code with preprocessor directives.
+ """
+ name = 'C++'
+ aliases = ['cpp', 'c++']
+ filenames = ['*.cpp', '*.hpp', '*.c++', '*.h++',
+ '*.cc', '*.hh', '*.cxx', '*.hxx',
+ '*.C', '*.H', '*.cp', '*.CPP']
+ mimetypes = ['text/x-c++hdr', 'text/x-c++src']
+ priority = 0.1
+
+ tokens = {
+ 'statements': [
+ (words((
+ 'asm', 'catch', 'const_cast', 'delete', 'dynamic_cast', 'explicit',
+ 'export', 'friend', 'mutable', 'namespace', 'new', 'operator',
+ 'private', 'protected', 'public', 'reinterpret_cast',
+ 'restrict', 'static_cast', 'template', 'this', 'throw', 'throws',
+ 'try', 'typeid', 'typename', 'using', 'virtual',
+ 'constexpr', 'nullptr', 'decltype', 'thread_local',
+ 'alignas', 'alignof', 'static_assert', 'noexcept', 'override',
+ 'final'), suffix=r'\b'), Keyword),
+ (r'char(16_t|32_t)\b', Keyword.Type),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ inherit,
+ ],
+ 'root': [
+ inherit,
+ # C++ Microsoft-isms
+ (words(('virtual_inheritance', 'uuidof', 'super', 'single_inheritance',
+ 'multiple_inheritance', 'interface', 'event'),
+ prefix=r'__', suffix=r'\b'), Keyword.Reserved),
+ # Offload C++ extensions, http://offload.codeplay.com/
+ (r'__(offload|blockingoffload|outer)\b', Keyword.Pseudo),
+ ],
+ 'classname': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ # template specification
+ (r'\s*(?=>)', Text, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if re.search('#include <[a-z]+>', text):
+ return 0.2
+ if re.search('using namespace ', text):
+ return 0.4
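
CFamilyLexer.__init__ above reads the boolean options stdlibhighlighting and c99highlighting with get_bool_opt, and get_tokens_unprocessed then promotes matching Name tokens to Keyword.Type. A minimal sketch of that effect, not part of the patch; the C line is a trivial invented example:

    # Illustrative only: FILE is listed in stdlib_types, so the option decides
    # whether it comes back as Keyword.Type or stays a plain Name.
    from pygments import lex
    from pygments.lexers.c_cpp import CLexer

    code = 'FILE *fp;\n'
    print(list(lex(code, CLexer()))[0])                          # (Token.Keyword.Type, 'FILE')
    print(list(lex(code, CLexer(stdlibhighlighting=False)))[0])  # (Token.Name, 'FILE')
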
diff --git a/pygments/lexers/c_like.py b/pygments/lexers/c_like.py
new file mode 100644
index 00000000..27736bff
--- /dev/null
+++ b/pygments/lexers/c_like.py
@@ -0,0 +1,539 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.c_like
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for other C-like languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, inherit, words, \
+ default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+from pygments.lexers.c_cpp import CLexer, CppLexer
+from pygments.lexers import _mql_builtins
+
+__all__ = ['PikeLexer', 'NesCLexer', 'ClayLexer', 'ECLexer', 'ValaLexer',
+ 'CudaLexer', 'SwigLexer', 'MqlLexer', 'ArduinoLexer']
+
+
+class PikeLexer(CppLexer):
+ """
+ For `Pike <http://pike.lysator.liu.se/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Pike'
+ aliases = ['pike']
+ filenames = ['*.pike', '*.pmod']
+ mimetypes = ['text/x-pike']
+
+ tokens = {
+ 'statements': [
+ (words((
+ 'catch', 'new', 'private', 'protected', 'public', 'gauge',
+ 'throw', 'throws', 'class', 'interface', 'implement', 'abstract', 'extends', 'from',
+ 'this', 'super', 'constant', 'final', 'static', 'import', 'use', 'extern',
+ 'inline', 'proto', 'break', 'continue', 'if', 'else', 'for',
+ 'while', 'do', 'switch', 'case', 'as', 'in', 'version', 'return', 'true', 'false', 'null',
+ '__VERSION__', '__MAJOR__', '__MINOR__', '__BUILD__', '__REAL_VERSION__',
+ '__REAL_MAJOR__', '__REAL_MINOR__', '__REAL_BUILD__', '__DATE__', '__TIME__',
+ '__FILE__', '__DIR__', '__LINE__', '__AUTO_BIGNUM__', '__NT__', '__PIKE__',
+ '__amigaos__', '_Pragma', 'static_assert', 'defined', 'sscanf'), suffix=r'\b'),
+ Keyword),
+ (r'(bool|int|long|float|short|double|char|string|object|void|mapping|'
+ r'array|multiset|program|function|lambda|mixed|'
+ r'[a-z_][a-z0-9_]*_t)\b',
+ Keyword.Type),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'[~!%^&*+=|?:<>/@-]', Operator),
+ inherit,
+ ],
+ 'classname': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ # template specification
+ (r'\s*(?=>)', Text, '#pop'),
+ ],
+ }
+
+
+class NesCLexer(CLexer):
+ """
+ For `nesC <https://github.com/tinyos/nesc>`_ source code with preprocessor
+ directives.
+
+ .. versionadded:: 2.0
+ """
+ name = 'nesC'
+ aliases = ['nesc']
+ filenames = ['*.nc']
+ mimetypes = ['text/x-nescsrc']
+
+ tokens = {
+ 'statements': [
+ (words((
+ 'abstract', 'as', 'async', 'atomic', 'call', 'command', 'component',
+ 'components', 'configuration', 'event', 'extends', 'generic',
+ 'implementation', 'includes', 'interface', 'module', 'new', 'norace',
+ 'post', 'provides', 'signal', 'task', 'uses'), suffix=r'\b'),
+ Keyword),
+ (words(('nx_struct', 'nx_union', 'nx_int8_t', 'nx_int16_t', 'nx_int32_t',
+ 'nx_int64_t', 'nx_uint8_t', 'nx_uint16_t', 'nx_uint32_t',
+ 'nx_uint64_t'), suffix=r'\b'),
+ Keyword.Type),
+ inherit,
+ ],
+ }
+
+
+class ClayLexer(RegexLexer):
+ """
+ For `Clay <http://claylabs.com/clay/>`_ source.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Clay'
+ filenames = ['*.clay']
+ aliases = ['clay']
+ mimetypes = ['text/x-clay']
+ tokens = {
+ 'root': [
+ (r'\s', Text),
+ (r'//.*?$', Comment.Singleline),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'\b(public|private|import|as|record|variant|instance'
+ r'|define|overload|default|external|alias'
+ r'|rvalue|ref|forward|inline|noinline|forceinline'
+ r'|enum|var|and|or|not|if|else|goto|return|while'
+ r'|switch|case|break|continue|for|in|true|false|try|catch|throw'
+ r'|finally|onerror|staticassert|eval|when|newtype'
+ r'|__FILE__|__LINE__|__COLUMN__|__ARG__'
+ r')\b', Keyword),
+ (r'[~!%^&*+=|:<>/-]', Operator),
+ (r'[#(){}\[\],;.]', Punctuation),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'\b(true|false)\b', Name.Builtin),
+ (r'(?i)[a-z_?][\w?]*', Name),
+ (r'"""', String, 'tdqs'),
+ (r'"', String, 'dqs'),
+ ],
+ 'strings': [
+ (r'(?i)\\(x[0-9a-f]{2}|.)', String.Escape),
+ (r'.', String),
+ ],
+ 'nl': [
+ (r'\n', String),
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ include('strings'),
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ include('nl'),
+ ],
+ }
+
+
+class ECLexer(CLexer):
+ """
+ For eC source code with preprocessor directives.
+
+ .. versionadded:: 1.5
+ """
+ name = 'eC'
+ aliases = ['ec']
+ filenames = ['*.ec', '*.eh']
+ mimetypes = ['text/x-echdr', 'text/x-ecsrc']
+
+ tokens = {
+ 'statements': [
+ (words((
+ 'virtual', 'class', 'private', 'public', 'property', 'import',
+ 'delete', 'new', 'new0', 'renew', 'renew0', 'define', 'get',
+ 'set', 'remote', 'dllexport', 'dllimport', 'stdcall', 'subclass',
+ '__on_register_module', 'namespace', 'using', 'typed_object',
+ 'any_object', 'incref', 'register', 'watch', 'stopwatching', 'firewatchers',
+ 'watchable', 'class_designer', 'class_fixed', 'class_no_expansion', 'isset',
+ 'class_default_property', 'property_category', 'class_data',
+ 'class_property', 'thisclass', 'dbtable', 'dbindex',
+ 'database_open', 'dbfield'), suffix=r'\b'), Keyword),
+ (words(('uint', 'uint16', 'uint32', 'uint64', 'bool', 'byte',
+ 'unichar', 'int64'), suffix=r'\b'),
+ Keyword.Type),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'(null|value|this)\b', Name.Builtin),
+ inherit,
+ ],
+ 'classname': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ # template specification
+ (r'\s*(?=>)', Text, '#pop'),
+ ],
+ }
+
+
+class ValaLexer(RegexLexer):
+ """
+ For Vala source code with preprocessor directives.
+
+ .. versionadded:: 1.1
+ """
+ name = 'Vala'
+ aliases = ['vala', 'vapi']
+ filenames = ['*.vala', '*.vapi']
+ mimetypes = ['text/x-vala']
+
+ tokens = {
+ 'whitespace': [
+ (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ ],
+ 'statements': [
+ (r'[L@]?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
+ String.Char),
+ (r'(?s)""".*?"""', String), # verbatim strings
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'0[0-7]+[Ll]?', Number.Oct),
+ (r'\d+[Ll]?', Number.Integer),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'(\[)(Compact|Immutable|(?:Boolean|Simple)Type)(\])',
+ bygroups(Punctuation, Name.Decorator, Punctuation)),
+ # TODO: "correctly" parse complex code attributes
+ (r'(\[)(CCode|(?:Integer|Floating)Type)',
+ bygroups(Punctuation, Name.Decorator)),
+ (r'[()\[\],.]', Punctuation),
+ (words((
+ 'as', 'base', 'break', 'case', 'catch', 'construct', 'continue',
+ 'default', 'delete', 'do', 'else', 'enum', 'finally', 'for',
+ 'foreach', 'get', 'if', 'in', 'is', 'lock', 'new', 'out', 'params',
+ 'return', 'set', 'sizeof', 'switch', 'this', 'throw', 'try',
+ 'typeof', 'while', 'yield'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'abstract', 'const', 'delegate', 'dynamic', 'ensures', 'extern',
+ 'inline', 'internal', 'override', 'owned', 'private', 'protected',
+ 'public', 'ref', 'requires', 'signal', 'static', 'throws', 'unowned',
+ 'var', 'virtual', 'volatile', 'weak', 'yields'), suffix=r'\b'),
+ Keyword.Declaration),
+ (r'(namespace|using)(\s+)', bygroups(Keyword.Namespace, Text),
+ 'namespace'),
+ (r'(class|errordomain|interface|struct)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'class'),
+ (r'(\.)([a-zA-Z_]\w*)',
+ bygroups(Operator, Name.Attribute)),
+ # void is an actual keyword, others are in glib-2.0.vapi
+ (words((
+ 'void', 'bool', 'char', 'double', 'float', 'int', 'int8', 'int16',
+ 'int32', 'int64', 'long', 'short', 'size_t', 'ssize_t', 'string',
+ 'time_t', 'uchar', 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
+ 'ulong', 'unichar', 'ushort'), suffix=r'\b'),
+ Keyword.Type),
+ (r'(true|false|null)\b', Name.Builtin),
+ ('[a-zA-Z_]\w*', Name),
+ ],
+ 'root': [
+ include('whitespace'),
+ default('statement'),
+ ],
+ 'statement': [
+ include('whitespace'),
+ include('statements'),
+ ('[{}]', Punctuation),
+ (';', Punctuation, '#pop'),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ],
+ 'class': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'namespace': [
+ (r'[a-zA-Z_][\w.]*', Name.Namespace, '#pop')
+ ],
+ }
+
+
+class CudaLexer(CLexer):
+ """
+ For NVIDIA `CUDA™ <http://developer.nvidia.com/category/zone/cuda-zone>`_
+ source.
+
+ .. versionadded:: 1.6
+ """
+ name = 'CUDA'
+ filenames = ['*.cu', '*.cuh']
+ aliases = ['cuda', 'cu']
+ mimetypes = ['text/x-cuda']
+
+ function_qualifiers = set(('__device__', '__global__', '__host__',
+ '__noinline__', '__forceinline__'))
+ variable_qualifiers = set(('__device__', '__constant__', '__shared__',
+ '__restrict__'))
+ vector_types = set(('char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3',
+ 'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2',
+ 'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1',
+ 'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1',
+ 'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4',
+ 'ulong4', 'longlong1', 'ulonglong1', 'longlong2',
+ 'ulonglong2', 'float1', 'float2', 'float3', 'float4',
+ 'double1', 'double2', 'dim3'))
+ variables = set(('gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize'))
+ functions = set(('__threadfence_block', '__threadfence', '__threadfence_system',
+ '__syncthreads', '__syncthreads_count', '__syncthreads_and',
+ '__syncthreads_or'))
+ execution_confs = set(('<<<', '>>>'))
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in CLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if value in self.variable_qualifiers:
+ token = Keyword.Type
+ elif value in self.vector_types:
+ token = Keyword.Type
+ elif value in self.variables:
+ token = Name.Builtin
+ elif value in self.execution_confs:
+ token = Keyword.Pseudo
+ elif value in self.function_qualifiers:
+ token = Keyword.Reserved
+ elif value in self.functions:
+ token = Name.Function
+ yield index, token, value
+
+
+class SwigLexer(CppLexer):
+ """
+ For `SWIG <http://www.swig.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'SWIG'
+ aliases = ['swig']
+ filenames = ['*.swg', '*.i']
+ mimetypes = ['text/swig']
+ priority = 0.04 # Lower than C/C++ and Objective C/C++
+
+ tokens = {
+ 'statements': [
+ # SWIG directives
+ (r'(%[a-z_][a-z0-9_]*)', Name.Function),
+ # Special variables
+ ('\$\**\&?\w+', Name),
+ # Stringification / additional preprocessor directives
+ (r'##*[a-zA-Z_]\w*', Comment.Preproc),
+ inherit,
+ ],
+ }
+
+ # This is a far from complete set of SWIG directives
+ swig_directives = set((
+ # Most common directives
+ '%apply', '%define', '%director', '%enddef', '%exception', '%extend',
+ '%feature', '%fragment', '%ignore', '%immutable', '%import', '%include',
+ '%inline', '%insert', '%module', '%newobject', '%nspace', '%pragma',
+ '%rename', '%shared_ptr', '%template', '%typecheck', '%typemap',
+ # Less common directives
+ '%arg', '%attribute', '%bang', '%begin', '%callback', '%catches', '%clear',
+ '%constant', '%copyctor', '%csconst', '%csconstvalue', '%csenum',
+ '%csmethodmodifiers', '%csnothrowexception', '%default', '%defaultctor',
+ '%defaultdtor', '%defined', '%delete', '%delobject', '%descriptor',
+ '%exceptionclass', '%exceptionvar', '%extend_smart_pointer', '%fragments',
+ '%header', '%ifcplusplus', '%ignorewarn', '%implicit', '%implicitconv',
+ '%init', '%javaconst', '%javaconstvalue', '%javaenum', '%javaexception',
+ '%javamethodmodifiers', '%kwargs', '%luacode', '%mutable', '%naturalvar',
+ '%nestedworkaround', '%perlcode', '%pythonabc', '%pythonappend',
+ '%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall',
+ '%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof',
+ '%trackobjects', '%types', '%unrefobject', '%varargs', '%warn',
+ '%warnfilter'))
+
+ def analyse_text(text):
+ rv = 0
+ # Search for SWIG directives, which are conventionally at the beginning of
+ # a line. The probability of them being within a line is low, so let another
+ # lexer win in this case.
+ matches = re.findall(r'^\s*(%[a-z_][a-z0-9_]*)', text, re.M)
+ for m in matches:
+ if m in SwigLexer.swig_directives:
+ rv = 0.98
+ break
+ else:
+ rv = 0.91 # Fraction higher than MatlabLexer
+ return rv
+
+
+class MqlLexer(CppLexer):
+ """
+ For `MQL4 <http://docs.mql4.com/>`_ and
+ `MQL5 <http://www.mql5.com/en/docs>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'MQL'
+ aliases = ['mql', 'mq4', 'mq5', 'mql4', 'mql5']
+ filenames = ['*.mq4', '*.mq5', '*.mqh']
+ mimetypes = ['text/x-mql']
+
+ tokens = {
+ 'statements': [
+ (words(_mql_builtins.keywords, suffix=r'\b'), Keyword),
+ (words(_mql_builtins.c_types, suffix=r'\b'), Keyword.Type),
+ (words(_mql_builtins.types, suffix=r'\b'), Name.Function),
+ (words(_mql_builtins.constants, suffix=r'\b'), Name.Constant),
+ (words(_mql_builtins.colors, prefix='(clr)?', suffix=r'\b'),
+ Name.Constant),
+ inherit,
+ ],
+ }
+
+class ArduinoLexer(CppLexer):
+ """
+ For `Arduino(tm) <https://arduino.cc/>`_ source.
+
+    This is an extension of the CppLexer, as the Arduino(tm) language is a
+    superset of C++.
+ """
+
+ name = 'Arduino'
+ aliases = ['arduino']
+ filenames = ['*.ino']
+ mimetypes = ['text/x-arduino']
+
+ # Language constants
+ constants = set(( 'DIGITAL_MESSAGE', 'FIRMATA_STRING', 'ANALOG_MESSAGE',
+ 'REPORT_DIGITAL', 'REPORT_ANALOG', 'INPUT_PULLUP',
+ 'SET_PIN_MODE', 'INTERNAL2V56', 'SYSTEM_RESET', 'LED_BUILTIN',
+ 'INTERNAL1V1', 'SYSEX_START', 'INTERNAL', 'EXTERNAL',
+ 'DEFAULT', 'OUTPUT', 'INPUT', 'HIGH', 'LOW' ))
+
+ # Language sketch main structure functions
+    structure = set(('setup', 'loop'))
+
+ # Language variable types
+    storage = set(('boolean', 'const', 'byte', 'word', 'string', 'String', 'array'))
+
+    # Language shipped functions and classes
+ functions = set(( 'KeyboardController', 'MouseController', 'SoftwareSerial',
+ 'EthernetServer', 'EthernetClient', 'LiquidCrystal',
+ 'RobotControl', 'GSMVoiceCall', 'EthernetUDP', 'EsploraTFT',
+ 'HttpClient', 'RobotMotor', 'WiFiClient', 'GSMScanner',
+ 'FileSystem', 'Scheduler', 'GSMServer', 'YunClient', 'YunServer',
+ 'IPAddress', 'GSMClient', 'GSMModem', 'Keyboard', 'Ethernet',
+ 'Console', 'GSMBand', 'Esplora', 'Stepper', 'Process',
+ 'WiFiUDP', 'GSM_SMS', 'Mailbox', 'USBHost', 'Firmata', 'PImage',
+ 'Client', 'Server', 'GSMPIN', 'FileIO', 'Bridge', 'Serial',
+ 'EEPROM', 'Stream', 'Mouse', 'Audio', 'Servo', 'File', 'Task',
+ 'GPRS', 'WiFi', 'Wire', 'TFT', 'GSM', 'SPI', 'SD',
+ 'runShellCommandAsynchronously', 'analogWriteResolution',
+ 'retrieveCallingNumber', 'printFirmwareVersion',
+ 'analogReadResolution', 'sendDigitalPortPair',
+ 'noListenOnLocalhost', 'readJoystickButton', 'setFirmwareVersion',
+ 'readJoystickSwitch', 'scrollDisplayRight', 'getVoiceCallStatus',
+ 'scrollDisplayLeft', 'writeMicroseconds', 'delayMicroseconds',
+ 'beginTransmission', 'getSignalStrength', 'runAsynchronously',
+ 'getAsynchronously', 'listenOnLocalhost', 'getCurrentCarrier',
+ 'readAccelerometer', 'messageAvailable', 'sendDigitalPorts',
+ 'lineFollowConfig', 'countryNameWrite', 'runShellCommand',
+ 'readStringUntil', 'rewindDirectory', 'readTemperature',
+ 'setClockDivider', 'readLightSensor', 'endTransmission',
+ 'analogReference', 'detachInterrupt', 'countryNameRead',
+ 'attachInterrupt', 'encryptionType', 'readBytesUntil',
+ 'robotNameWrite', 'readMicrophone', 'robotNameRead', 'cityNameWrite',
+ 'userNameWrite', 'readJoystickY', 'readJoystickX', 'mouseReleased',
+ 'openNextFile', 'scanNetworks', 'noInterrupts', 'digitalWrite',
+ 'beginSpeaker', 'mousePressed', 'isActionDone', 'mouseDragged',
+ 'displayLogos', 'noAutoscroll', 'addParameter', 'remoteNumber',
+ 'getModifiers', 'keyboardRead', 'userNameRead', 'waitContinue',
+ 'processInput', 'parseCommand', 'printVersion', 'readNetworks',
+ 'writeMessage', 'blinkVersion', 'cityNameRead', 'readMessage',
+ 'setDataMode', 'parsePacket', 'isListening', 'setBitOrder',
+ 'beginPacket', 'isDirectory', 'motorsWrite', 'drawCompass',
+ 'digitalRead', 'clearScreen', 'serialEvent', 'rightToLeft',
+ 'setTextSize', 'leftToRight', 'requestFrom', 'keyReleased',
+ 'compassRead', 'analogWrite', 'interrupts', 'WiFiServer',
+ 'disconnect', 'playMelody', 'parseFloat', 'autoscroll',
+ 'getPINUsed', 'setPINUsed', 'setTimeout', 'sendAnalog',
+ 'readSlider', 'analogRead', 'beginWrite', 'createChar',
+ 'motorsStop', 'keyPressed', 'tempoWrite', 'readButton',
+ 'subnetMask', 'debugPrint', 'macAddress', 'writeGreen',
+ 'randomSeed', 'attachGPRS', 'readString', 'sendString',
+ 'remotePort', 'releaseAll', 'mouseMoved', 'background',
+ 'getXChange', 'getYChange', 'answerCall', 'getResult',
+ 'voiceCall', 'endPacket', 'constrain', 'getSocket', 'writeJSON',
+ 'getButton', 'available', 'connected', 'findUntil', 'readBytes',
+ 'exitValue', 'readGreen', 'writeBlue', 'startLoop', 'IPAddress',
+ 'isPressed', 'sendSysex', 'pauseMode', 'gatewayIP', 'setCursor',
+ 'getOemKey', 'tuneWrite', 'noDisplay', 'loadImage', 'switchPIN',
+ 'onRequest', 'onReceive', 'changePIN', 'playFile', 'noBuffer',
+ 'parseInt', 'overflow', 'checkPIN', 'knobRead', 'beginTFT',
+ 'bitClear', 'updateIR', 'bitWrite', 'position', 'writeRGB',
+ 'highByte', 'writeRed', 'setSpeed', 'readBlue', 'noStroke',
+ 'remoteIP', 'transfer', 'shutdown', 'hangCall', 'beginSMS',
+ 'endWrite', 'attached', 'maintain', 'noCursor', 'checkReg',
+ 'checkPUK', 'shiftOut', 'isValid', 'shiftIn', 'pulseIn',
+ 'connect', 'println', 'localIP', 'pinMode', 'getIMEI',
+ 'display', 'noBlink', 'process', 'getBand', 'running', 'beginSD',
+ 'drawBMP', 'lowByte', 'setBand', 'release', 'bitRead', 'prepare',
+ 'pointTo', 'readRed', 'setMode', 'noFill', 'remove', 'listen',
+ 'stroke', 'detach', 'attach', 'noTone', 'exists', 'buffer',
+ 'height', 'bitSet', 'circle', 'config', 'cursor', 'random',
+ 'IRread', 'sizeof', 'setDNS', 'endSMS', 'getKey', 'micros',
+ 'millis', 'begin', 'print', 'write', 'ready', 'flush', 'width',
+ 'isPIN', 'blink', 'clear', 'press', 'mkdir', 'rmdir', 'close',
+ 'point', 'yield', 'image', 'float', 'BSSID', 'click', 'delay',
+ 'read', 'text', 'move', 'peek', 'beep', 'rect', 'line', 'open',
+ 'seek', 'fill', 'size', 'turn', 'stop', 'home', 'find', 'char',
+ 'byte', 'step', 'word', 'long', 'tone', 'sqrt', 'RSSI', 'SSID',
+ 'end', 'bit', 'tan', 'cos', 'sin', 'pow', 'map', 'abs', 'max',
+ 'min', 'int', 'get', 'run', 'put' ))
+
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if value in self.constants:
+ yield index, Keyword.Constant, value
+ elif value in self.functions:
+ yield index, Name.Function, value
+ elif value in self.storage:
+ yield index, Keyword.Type, value
+ else:
+ yield index, token, value
+ elif token is Name.Function:
+ if value in self.structure:
+ yield index, Name.Other, value
+ else:
+ yield index, token, value
+ elif token is Keyword:
+ if value in self.storage:
+ yield index, Keyword.Type, value
+ else:
+ yield index, token, value
+ else:
+ yield index, token, value
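
CudaLexer and ArduinoLexer above share one technique: subclass a C-family lexer and override get_tokens_unprocessed to re-tag plain Name tokens that appear in known-word sets, leaving the base grammar untouched. A hedged sketch of the same pattern for a hypothetical dialect (the class name and identifier set are invented):

    # Sketch only; 'my_vec4'/'my_mat4' are invented identifiers.
    from pygments.lexers.c_cpp import CLexer
    from pygments.token import Keyword, Name

    class HypotheticalDialectLexer(CLexer):
        EXTRA_TYPES = {'my_vec4', 'my_mat4'}

        def get_tokens_unprocessed(self, text):
            for index, token, value in CLexer.get_tokens_unprocessed(self, text):
                if token is Name and value in self.EXTRA_TYPES:
                    yield index, Keyword.Type, value
                else:
                    yield index, token, value

    for _, token, value in HypotheticalDialectLexer().get_tokens_unprocessed('my_vec4 v;\n'):
        print(token, repr(value))
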
diff --git a/pygments/lexers/chapel.py b/pygments/lexers/chapel.py
new file mode 100644
index 00000000..6fb6920c
--- /dev/null
+++ b/pygments/lexers/chapel.py
@@ -0,0 +1,99 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.chapel
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Chapel language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['ChapelLexer']
+
+
+class ChapelLexer(RegexLexer):
+ """
+ For `Chapel <http://chapel.cray.com/>`_ source.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Chapel'
+ filenames = ['*.chpl']
+ aliases = ['chapel', 'chpl']
+ # mimetypes = ['text/x-chapel']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text),
+
+ (r'//(.*?)\n', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+
+ (r'(config|const|in|inout|out|param|ref|type|var)\b',
+ Keyword.Declaration),
+ (r'(false|nil|true)\b', Keyword.Constant),
+ (r'(bool|complex|imag|int|opaque|range|real|string|uint)\b',
+ Keyword.Type),
+ (words((
+ 'align', 'atomic', 'begin', 'break', 'by', 'cobegin', 'coforall',
+ 'continue', 'delete', 'dmapped', 'do', 'domain', 'else', 'enum',
+ 'export', 'extern', 'for', 'forall', 'if', 'index', 'inline',
+ 'iter', 'label', 'lambda', 'let', 'local', 'new', 'noinit', 'on',
+ 'otherwise', 'pragma', 'private', 'public', 'reduce', 'return',
+ 'scan', 'select', 'serial', 'single', 'sparse', 'subdomain',
+ 'sync', 'then', 'use', 'when', 'where', 'while', 'with', 'yield',
+ 'zip'), suffix=r'\b'),
+ Keyword),
+ (r'(proc)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'procname'),
+ (r'(class|module|record|union)(\s+)', bygroups(Keyword, Text),
+ 'classname'),
+
+ # imaginary integers
+ (r'\d+i', Number),
+ (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
+ (r'\.\d+([Ee][-+]\d+)?i', Number),
+ (r'\d+[Ee][-+]\d+i', Number),
+
+ # reals cannot end with a period due to lexical ambiguity with
+            # the '..' operator; see the language reference for the rationale.
+ (r'(\d*\.\d+)([eE][+-]?[0-9]+)?i?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+i?', Number.Float),
+
+ # integer literals
+ # -- binary
+ (r'0[bB][01]+', Number.Bin),
+ # -- hex
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ # -- octal
+ (r'0[oO][0-7]+', Number.Oct),
+ # -- decimal
+ (r'[0-9]+', Number.Integer),
+
+ # strings
+ (r'["\'](\\\\|\\"|[^"\'])*["\']', String),
+
+ # tokens
+ (r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|'
+ r'<=>|<~>|\.\.|by|#|\.\.\.|'
+ r'&&|\|\||!|&|\||\^|~|<<|>>|'
+ r'==|!=|<=|>=|<|>|'
+ r'[+\-*/%]|\*\*)', Operator),
+ (r'[:;,.?()\[\]{}]', Punctuation),
+
+ # identifiers
+ (r'[a-zA-Z_][\w$]*', Name.Other),
+ ],
+ 'classname': [
+ (r'[a-zA-Z_][\w$]*', Name.Class, '#pop'),
+ ],
+ 'procname': [
+ (r'[a-zA-Z_][\w$]*', Name.Function, '#pop'),
+ ],
+ }
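
For reference, a short usage sketch for the new lexer once it is registered in the lexer mapping; the Chapel snippet itself is made up for illustration:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    code = 'proc main() { writeln("hello"); }'  # illustrative Chapel snippet
    lexer = get_lexer_by_name('chapel')         # resolves to ChapelLexer via its aliases
    print(highlight(code, lexer, TerminalFormatter()))
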
diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py
index 7c0a551a..1cf83d7f 100644
--- a/pygments/lexers/compiled.py
+++ b/pygments/lexers/compiled.py
@@ -3,3763 +3,31 @@
pygments.lexers.compiled
~~~~~~~~~~~~~~~~~~~~~~~~
- Lexers for compiled languages.
+ Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
-from string import Template
-
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
- this, combined, inherit, do_insertions
-from pygments.util import get_bool_opt, get_list_opt
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error, Literal, Generic
-from pygments.scanner import Scanner
-
-# backwards compatibility
-from pygments.lexers.functional import OcamlLexer
from pygments.lexers.jvm import JavaLexer, ScalaLexer
-
-__all__ = ['CLexer', 'CppLexer', 'DLexer', 'DelphiLexer', 'ECLexer',
- 'NesCLexer', 'DylanLexer', 'ObjectiveCLexer', 'ObjectiveCppLexer',
- 'FortranLexer', 'FortranFixedLexer', 'GLShaderLexer',
- 'PrologLexer', 'CythonLexer',
- 'ValaLexer', 'OocLexer', 'GoLexer', 'FelixLexer', 'AdaLexer',
- 'Modula2Lexer', 'BlitzMaxLexer', 'BlitzBasicLexer', 'NimrodLexer',
- 'FantomLexer', 'RustLexer', 'CudaLexer', 'MonkeyLexer', 'SwigLexer',
- 'DylanLidLexer', 'DylanConsoleLexer', 'CobolLexer',
- 'CobolFreeformatLexer', 'LogosLexer', 'ClayLexer']
-
-
-class CFamilyLexer(RegexLexer):
- """
- For C family source code. This is used as a base class to avoid repetitious
- definitions.
- """
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
- #: only one /* */ style comment
- _ws1 = r':\s*/[*].*?[*]/\s*'
-
- tokens = {
- 'whitespace': [
- # preprocessor directives: without whitespace
- ('^#if\s+0', Comment.Preproc, 'if0'),
- ('^#', Comment.Preproc, 'macro'),
- # or with whitespace
- ('^(' + _ws1 + r')(#if\s+0)',
- bygroups(using(this), Comment.Preproc), 'if0'),
- ('^(' + _ws1 + ')(#)',
- bygroups(using(this), Comment.Preproc), 'macro'),
- (r'^(\s*)([a-zA-Z_][a-zA-Z0-9_]*:(?!:))',
- bygroups(Text, Name.Label)),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- ],
- 'statements': [
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'\*/', Error),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.]', Punctuation),
- (r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)),
- (r'(auto|break|case|const|continue|default|do|else|enum|extern|'
- r'for|goto|if|register|restricted|return|sizeof|static|struct|'
- r'switch|typedef|union|volatile|while)\b', Keyword),
- (r'(bool|int|long|float|short|double|char|unsigned|signed|void|'
- r'[a-z_][a-z0-9_]*_t)\b',
- Keyword.Type),
- (r'(_{0,2}inline|naked|restrict|thread|typename)\b', Keyword.Reserved),
- # Vector intrinsics
- (r'(__(m128i|m128d|m128|m64))\b', Keyword.Reserved),
- # Microsoft-isms
- (r'__(asm|int8|based|except|int16|stdcall|cdecl|fastcall|int32|'
- r'declspec|finally|int64|try|leave|wchar_t|w64|unaligned|'
- r'raise|noop|identifier|forceinline|assume)\b', Keyword.Reserved),
- (r'(true|false|NULL)\b', Name.Builtin),
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
- ],
- 'root': [
- include('whitespace'),
- # functions
- (r'((?:[a-zA-Z0-9_*\s])+?(?:\s|[*]))' # return arguments
- r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
- r'(\s*\([^;]*?\))' # signature
- r'(' + _ws + r')?({)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation),
- 'function'),
- # function declarations
- (r'((?:[a-zA-Z0-9_*\s])+?(?:\s|[*]))' # return arguments
- r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
- r'(\s*\([^;]*?\))' # signature
- r'(' + _ws + r')?(;)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation)),
- ('', Text, 'statement'),
- ],
- 'statement' : [
- include('whitespace'),
- include('statements'),
- ('[{}]', Punctuation),
- (';', Punctuation, '#pop'),
- ],
- 'function': [
- include('whitespace'),
- include('statements'),
- (';', Punctuation),
- ('{', Punctuation, '#push'),
- ('}', Punctuation, '#pop'),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
- r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ]
- }
-
- stdlib_types = ['size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t',
- 'sig_atomic_t', 'fpos_t', 'clock_t', 'time_t', 'va_list',
- 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t', 'mbstate_t',
- 'wctrans_t', 'wint_t', 'wctype_t']
- c99_types = ['_Bool', '_Complex', 'int8_t', 'int16_t', 'int32_t', 'int64_t',
- 'uint8_t', 'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t',
- 'int_least16_t', 'int_least32_t', 'int_least64_t',
- 'uint_least8_t', 'uint_least16_t', 'uint_least32_t',
- 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t',
- 'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t',
- 'uint_fast64_t', 'intptr_t', 'uintptr_t', 'intmax_t',
- 'uintmax_t']
-
- def __init__(self, **options):
- self.stdlibhighlighting = get_bool_opt(options,
- 'stdlibhighlighting', True)
- self.c99highlighting = get_bool_opt(options,
- 'c99highlighting', True)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if self.stdlibhighlighting and value in self.stdlib_types:
- token = Keyword.Type
- elif self.c99highlighting and value in self.c99_types:
- token = Keyword.Type
- yield index, token, value
-
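
The two boolean options read in __init__ above are ordinary lexer options, so callers can switch the extra type highlighting off; a hedged sketch:

    from pygments.lexers import CLexer

    # Both options default to True; with False, names such as size_t or
    # uint8_t stay plain Name tokens instead of becoming Keyword.Type.
    plain = CLexer(stdlibhighlighting=False, c99highlighting=False)
    for _, token, value in plain.get_tokens_unprocessed('size_t n;'):
        print(token, repr(value))
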
-
-class CLexer(CFamilyLexer):
- """
- For C source code with preprocessor directives.
- """
- name = 'C'
- aliases = ['c']
- filenames = ['*.c', '*.h', '*.idc']
- mimetypes = ['text/x-chdr', 'text/x-csrc']
- priority = 0.1
-
- def analyse_text(text):
- return 0.1
-
-
-class CppLexer(CFamilyLexer):
- """
- For C++ source code with preprocessor directives.
- """
- name = 'C++'
- aliases = ['cpp', 'c++']
- filenames = ['*.cpp', '*.hpp', '*.c++', '*.h++',
- '*.cc', '*.hh', '*.cxx', '*.hxx',
- '*.C', '*.H', '*.cp', '*.CPP']
- mimetypes = ['text/x-c++hdr', 'text/x-c++src']
- priority = 0.1
-
- tokens = {
- 'statements': [
- (r'(asm|catch|const_cast|delete|dynamic_cast|explicit|'
- r'export|friend|mutable|namespace|new|operator|'
- r'private|protected|public|reinterpret_cast|'
- r'restrict|static_cast|template|this|throw|throws|'
- r'typeid|typename|using|virtual)\b', Keyword),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- inherit,
- ],
- 'root': [
- inherit,
- # C++ Microsoft-isms
- (r'__(virtual_inheritance|uuidof|super|single_inheritance|'
- r'multiple_inheritance|interface|event)\b', Keyword.Reserved),
- # Offload C++ extensions, http://offload.codeplay.com/
- (r'(__offload|__blockingoffload|__outer)\b', Keyword.Pseudo),
- ],
- 'classname': [
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop'),
- # template specification
- (r'\s*(?=>)', Text, '#pop'),
- ],
- }
-
- def analyse_text(text):
- return 0.1
-
-
-class SwigLexer(CppLexer):
- """
- For `SWIG <http://www.swig.org/>`_ source code.
-
- *New in Pygments 1.7.*
- """
- name = 'SWIG'
- aliases = ['Swig', 'swig']
- filenames = ['*.swg', '*.i']
- mimetypes = ['text/swig']
- priority = 0.04 # Lower than C/C++ and Objective C/C++
-
- tokens = {
- 'statements': [
- (r'(%[a-z_][a-z0-9_]*)', Name.Function), # SWIG directives
- ('\$\**\&?[a-zA-Z0-9_]+', Name), # Special variables
- (r'##*[a-zA-Z_][a-zA-Z0-9_]*', Comment.Preproc), # Stringification / additional preprocessor directives
- inherit,
- ],
- }
-
- # This is a far from complete set of SWIG directives
- swig_directives = (
- # Most common directives
- '%apply', '%define', '%director', '%enddef', '%exception', '%extend',
- '%feature', '%fragment', '%ignore', '%immutable', '%import', '%include',
- '%inline', '%insert', '%module', '%newobject', '%nspace', '%pragma',
- '%rename', '%shared_ptr', '%template', '%typecheck', '%typemap',
- # Less common directives
- '%arg', '%attribute', '%bang', '%begin', '%callback', '%catches', '%clear',
- '%constant', '%copyctor', '%csconst', '%csconstvalue', '%csenum',
- '%csmethodmodifiers', '%csnothrowexception', '%default', '%defaultctor',
- '%defaultdtor', '%defined', '%delete', '%delobject', '%descriptor',
- '%exceptionclass', '%exceptionvar', '%extend_smart_pointer', '%fragments',
- '%header', '%ifcplusplus', '%ignorewarn', '%implicit', '%implicitconv',
- '%init', '%javaconst', '%javaconstvalue', '%javaenum', '%javaexception',
- '%javamethodmodifiers', '%kwargs', '%luacode', '%mutable', '%naturalvar',
- '%nestedworkaround', '%perlcode', '%pythonabc', '%pythonappend',
- '%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall',
- '%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof',
- '%trackobjects', '%types', '%unrefobject', '%varargs', '%warn', '%warnfilter')
-
- def analyse_text(text):
- rv = 0.1 # Same as C/C++
- # Search for SWIG directives, which are conventionally at the beginning of
- # a line. The probability of them being within a line is low, so let another
- # lexer win in this case.
- matches = re.findall(r'^\s*(%[a-z_][a-z0-9_]*)', text, re.M)
- for m in matches:
- if m in SwigLexer.swig_directives:
- rv = 0.98
- break
- else:
- rv = 0.91 # Fraction higher than MatlabLexer
- return rv
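
The scores returned here feed Pygments' content-based guessing, which picks the lexer whose analyse_text reports the highest confidence. A small sketch, assuming the snippet below is representative SWIG input:

    from pygments.lexers import guess_lexer

    # A %module directive at the start of a line should push the SWIG
    # score to 0.98, well above the plain C/C++ lexers.
    print(guess_lexer('%module example\n%include "stdint.i"\n').name)
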
-
-
-class ECLexer(CLexer):
- """
- For eC source code with preprocessor directives.
-
- *New in Pygments 1.5.*
- """
- name = 'eC'
- aliases = ['ec']
- filenames = ['*.ec', '*.eh']
- mimetypes = ['text/x-echdr', 'text/x-ecsrc']
-
- tokens = {
- 'statements': [
- (r'(virtual|class|private|public|property|import|delete|new|new0|'
- r'renew|renew0|define|get|set|remote|dllexport|dllimport|stdcall|'
- r'subclass|__on_register_module|namespace|using|typed_object|'
- r'any_object|incref|register|watch|stopwatching|firewatchers|'
- r'watchable|class_designer|class_fixed|class_no_expansion|isset|'
- r'class_default_property|property_category|class_data|'
- r'class_property|virtual|thisclass|'
- r'dbtable|dbindex|database_open|dbfield)\b', Keyword),
- (r'(uint|uint16|uint32|uint64|bool|byte|unichar|int64)\b',
- Keyword.Type),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(null|value|this)\b', Name.Builtin),
- inherit,
- ],
- 'classname': [
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop'),
- # template specification
- (r'\s*(?=>)', Text, '#pop'),
- ],
- }
-
-
-class NesCLexer(CLexer):
- """
- For `nesC <https://github.com/tinyos/nesc>`_ source code with preprocessor
- directives.
-
- *New in Pygments 1.7.*
- """
- name = 'nesC'
- aliases = ['nesc']
- filenames = ['*.nc']
- mimetypes = ['text/x-nescsrc']
-
- tokens = {
- 'statements': [
- (r'(abstract|as|async|atomic|call|command|component|components|'
- r'configuration|event|extends|generic|implementation|includes|'
- r'interface|module|new|norace|post|provides|signal|task|uses)\b',
- Keyword),
- (r'(nx_struct|nx_union|nx_int8_t|nx_int16_t|nx_int32_t|nx_int64_t|'
- r'nx_uint8_t|nx_uint16_t|nx_uint32_t|nx_uint64_t)\b',
- Keyword.Type),
- inherit,
- ],
- }
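
Like the eC and C++ lexers above, this dialect reuses the base C rules through the inherit placeholder, which splices the parent state's rules in at that position. A minimal sketch of the same pattern with made-up dialect keywords:

    from pygments.lexer import inherit
    from pygments.lexers import CLexer
    from pygments.token import Keyword

    class MiniDialectLexer(CLexer):
        # hypothetical dialect, for illustration only
        tokens = {
            'statements': [
                (r'(spawn|await)\b', Keyword),  # dialect-only keywords (invented)
                inherit,                        # then fall back to CLexer's rules
            ],
        }
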
-
-
-class ClayLexer(RegexLexer):
- """
- For `Clay <http://claylabs.com/clay/>`_ source.
-
- *New in Pygments 1.7.*
- """
- name = 'Clay'
- filenames = ['*.clay']
- aliases = ['clay']
- mimetypes = ['text/x-clay']
- tokens = {
- 'root': [
- (r'\s', Text),
- (r'//.*?$', Comment.Singleline),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'\b(public|private|import|as|record|variant|instance'
- r'|define|overload|default|external|alias'
- r'|rvalue|ref|forward|inline|noinline|forceinline'
- r'|enum|var|and|or|not|if|else|goto|return|while'
- r'|switch|case|break|continue|for|in|true|false|try|catch|throw'
- r'|finally|onerror|staticassert|eval|when|newtype'
- r'|__FILE__|__LINE__|__COLUMN__|__ARG__'
- r')\b', Keyword),
- (r'[~!%^&*+=|:<>/-]', Operator),
- (r'[#(){}\[\],;.]', Punctuation),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'\d+[LlUu]*', Number.Integer),
- (r'\b(true|false)\b', Name.Builtin),
- (r'(?i)[a-z_?][a-z_?0-9]*', Name),
- (r'"""', String, 'tdqs'),
- (r'"', String, 'dqs'),
- ],
- 'strings': [
- (r'(?i)\\(x[0-9a-f]{2}|.)', String.Escape),
- (r'.', String),
- ],
- 'nl': [
- (r'\n', String),
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- include('strings'),
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl'),
- ],
- }
-
-
-class DLexer(RegexLexer):
- """
- For D source.
-
- *New in Pygments 1.2.*
- """
- name = 'D'
- filenames = ['*.d', '*.di']
- aliases = ['d']
- mimetypes = ['text/x-dsrc']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
- #(r'\\\n', Text), # line continuations
- # Comments
- (r'//(.*?)\n', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'/\+', Comment.Multiline, 'nested_comment'),
- # Keywords
- (r'(abstract|alias|align|asm|assert|auto|body|break|case|cast'
- r'|catch|class|const|continue|debug|default|delegate|delete'
- r'|deprecated|do|else|enum|export|extern|finally|final'
- r'|foreach_reverse|foreach|for|function|goto|if|import|inout'
- r'|interface|invariant|in|is|lazy|mixin|module|new|nothrow|out'
- r'|override|package|pragma|private|protected|public|pure|ref|return'
- r'|scope|static|struct|super|switch|synchronized|template|this'
- r'|throw|try|typedef|typeid|typeof|union|unittest|version|volatile'
- r'|while|with|__traits)\b', Keyword
- ),
- (r'(bool|byte|cdouble|cent|cfloat|char|creal|dchar|double|float'
- r'|idouble|ifloat|int|ireal|long|real|short|ubyte|ucent|uint|ulong'
- r'|ushort|void|wchar)\b', Keyword.Type
- ),
- (r'(false|true|null)\b', Keyword.Constant),
- (r'macro\b', Keyword.Reserved),
- (r'(string|wstring|dstring)\b', Name.Builtin),
- # FloatLiteral
- # -- HexFloat
- (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
- r'[pP][+\-]?[0-9_]+[fFL]?[i]?', Number.Float),
- # -- DecimalFloat
- (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[fFL]?[i]?', Number.Float),
- (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[fFL]?[i]?', Number.Float),
- # IntegerLiteral
- # -- Binary
- (r'0[Bb][01_]+', Number),
- # -- Octal
- (r'0[0-7_]+', Number.Oct),
- # -- Hexadecimal
- (r'0[xX][0-9a-fA-F_]+', Number.Hex),
- # -- Decimal
- (r'(0|[1-9][0-9_]*)([LUu]|Lu|LU|uL|UL)?', Number.Integer),
- # CharacterLiteral
- (r"""'(\\['"?\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\&\w+;|.)'""",
- String.Char
- ),
- # StringLiteral
- # -- WysiwygString
- (r'r"[^"]*"[cwd]?', String),
- # -- AlternateWysiwygString
- (r'`[^`]*`[cwd]?', String),
- # -- DoubleQuotedString
- (r'"(\\\\|\\"|[^"])*"[cwd]?', String),
- # -- EscapeSequence
- (r"\\(['\"?\\abfnrtv]|x[0-9a-fA-F]{2}|[0-7]{1,3}"
- r"|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}|&\w+;)",
- String
- ),
- # -- HexString
- (r'x"[0-9a-fA-F_\s]*"[cwd]?', String),
- # -- DelimitedString
- (r'q"\[', String, 'delimited_bracket'),
- (r'q"\(', String, 'delimited_parenthesis'),
- (r'q"<', String, 'delimited_angle'),
- (r'q"{', String, 'delimited_curly'),
- (r'q"([a-zA-Z_]\w*)\n.*?\n\1"', String),
- (r'q"(.).*?\1"', String),
- # -- TokenString
- (r'q{', String, 'token_string'),
- # Tokens
- (r'(~=|\^=|%=|\*=|==|!>=|!<=|!<>=|!<>|!<|!>|!=|>>>=|>>>|>>=|>>|>='
- r'|<>=|<>|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.\.|\.\.|/=)'
- r'|[/.&|\-+<>!()\[\]{}?,;:$=*%^~]', Punctuation
- ),
- # Identifier
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'nested_comment': [
- (r'[^+/]+', Comment.Multiline),
- (r'/\+', Comment.Multiline, '#push'),
- (r'\+/', Comment.Multiline, '#pop'),
- (r'[+/]', Comment.Multiline),
- ],
- 'token_string': [
- (r'{', Punctuation, 'token_string_nest'),
- (r'}', String, '#pop'),
- include('root'),
- ],
- 'token_string_nest': [
- (r'{', Punctuation, '#push'),
- (r'}', Punctuation, '#pop'),
- include('root'),
- ],
- 'delimited_bracket': [
- (r'[^\[\]]+', String),
- (r'\[', String, 'delimited_inside_bracket'),
- (r'\]"', String, '#pop'),
- ],
- 'delimited_inside_bracket': [
- (r'[^\[\]]+', String),
- (r'\[', String, '#push'),
- (r'\]', String, '#pop'),
- ],
- 'delimited_parenthesis': [
- (r'[^\(\)]+', String),
- (r'\(', String, 'delimited_inside_parenthesis'),
- (r'\)"', String, '#pop'),
- ],
- 'delimited_inside_parenthesis': [
- (r'[^\(\)]+', String),
- (r'\(', String, '#push'),
- (r'\)', String, '#pop'),
- ],
- 'delimited_angle': [
- (r'[^<>]+', String),
- (r'<', String, 'delimited_inside_angle'),
- (r'>"', String, '#pop'),
- ],
- 'delimited_inside_angle': [
- (r'[^<>]+', String),
- (r'<', String, '#push'),
- (r'>', String, '#pop'),
- ],
- 'delimited_curly': [
- (r'[^{}]+', String),
- (r'{', String, 'delimited_inside_curly'),
- (r'}"', String, '#pop'),
- ],
- 'delimited_inside_curly': [
- (r'[^{}]+', String),
- (r'{', String, '#push'),
- (r'}', String, '#pop'),
- ],
- }
-
-
-class DelphiLexer(Lexer):
- """
- For `Delphi <http://www.borland.com/delphi/>`_ (Borland Object Pascal),
- Turbo Pascal and Free Pascal source code.
-
- Additional options accepted:
-
- `turbopascal`
- Highlight Turbo Pascal specific keywords (default: ``True``).
- `delphi`
- Highlight Borland Delphi specific keywords (default: ``True``).
- `freepascal`
- Highlight Free Pascal specific keywords (default: ``True``).
- `units`
- A list of units that should be considered builtin, supported are
- ``System``, ``SysUtils``, ``Classes`` and ``Math``.
- Default is to consider all of them builtin.
- """
- name = 'Delphi'
- aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
- filenames = ['*.pas']
- mimetypes = ['text/x-pascal']
-
- TURBO_PASCAL_KEYWORDS = [
- 'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case',
- 'const', 'constructor', 'continue', 'destructor', 'div', 'do',
- 'downto', 'else', 'end', 'file', 'for', 'function', 'goto',
- 'if', 'implementation', 'in', 'inherited', 'inline', 'interface',
- 'label', 'mod', 'nil', 'not', 'object', 'of', 'on', 'operator',
- 'or', 'packed', 'procedure', 'program', 'record', 'reintroduce',
- 'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to',
- 'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor'
- ]
-
- DELPHI_KEYWORDS = [
- 'as', 'class', 'except', 'exports', 'finalization', 'finally',
- 'initialization', 'is', 'library', 'on', 'property', 'raise',
- 'threadvar', 'try'
- ]
-
- FREE_PASCAL_KEYWORDS = [
- 'dispose', 'exit', 'false', 'new', 'true'
- ]
-
- BLOCK_KEYWORDS = set([
- 'begin', 'class', 'const', 'constructor', 'destructor', 'end',
- 'finalization', 'function', 'implementation', 'initialization',
- 'label', 'library', 'operator', 'procedure', 'program', 'property',
- 'record', 'threadvar', 'type', 'unit', 'uses', 'var'
- ])
-
- FUNCTION_MODIFIERS = set([
- 'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
- 'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
- 'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
- 'override', 'assembler'
- ])
-
-    # XXX: these aren't global, but currently we know of no way to define
-    # them just for the type context.
- DIRECTIVES = set([
- 'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
- 'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
- 'published', 'public'
- ])
-
- BUILTIN_TYPES = set([
- 'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
- 'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
- 'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
- 'longword', 'pansichar', 'pansistring', 'pbool', 'pboolean',
- 'pbyte', 'pbytearray', 'pcardinal', 'pchar', 'pcomp', 'pcurrency',
- 'pdate', 'pdatetime', 'pdouble', 'pdword', 'pextended', 'phandle',
- 'pint64', 'pinteger', 'plongint', 'plongword', 'pointer',
- 'ppointer', 'pshortint', 'pshortstring', 'psingle', 'psmallint',
- 'pstring', 'pvariant', 'pwidechar', 'pwidestring', 'pword',
- 'pwordarray', 'pwordbool', 'real', 'real48', 'shortint',
- 'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
- 'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
- 'widechar', 'widestring', 'word', 'wordbool'
- ])
-
- BUILTIN_UNITS = {
- 'System': [
- 'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8',
- 'append', 'arctan', 'assert', 'assigned', 'assignfile',
- 'beginthread', 'blockread', 'blockwrite', 'break', 'chdir',
- 'chr', 'close', 'closefile', 'comptocurrency', 'comptodouble',
- 'concat', 'continue', 'copy', 'cos', 'dec', 'delete',
- 'dispose', 'doubletocomp', 'endthread', 'enummodules',
- 'enumresourcemodules', 'eof', 'eoln', 'erase', 'exceptaddr',
- 'exceptobject', 'exclude', 'exit', 'exp', 'filepos', 'filesize',
- 'fillchar', 'finalize', 'findclasshinstance', 'findhinstance',
- 'findresourcehinstance', 'flush', 'frac', 'freemem',
- 'get8087cw', 'getdir', 'getlasterror', 'getmem',
- 'getmemorymanager', 'getmodulefilename', 'getvariantmanager',
- 'halt', 'hi', 'high', 'inc', 'include', 'initialize', 'insert',
- 'int', 'ioresult', 'ismemorymanagerset', 'isvariantmanagerset',
- 'length', 'ln', 'lo', 'low', 'mkdir', 'move', 'new', 'odd',
- 'olestrtostring', 'olestrtostrvar', 'ord', 'paramcount',
- 'paramstr', 'pi', 'pos', 'pred', 'ptr', 'pucs4chars', 'random',
- 'randomize', 'read', 'readln', 'reallocmem',
- 'releaseexceptionobject', 'rename', 'reset', 'rewrite', 'rmdir',
- 'round', 'runerror', 'seek', 'seekeof', 'seekeoln',
- 'set8087cw', 'setlength', 'setlinebreakstyle',
- 'setmemorymanager', 'setstring', 'settextbuf',
- 'setvariantmanager', 'sin', 'sizeof', 'slice', 'sqr', 'sqrt',
- 'str', 'stringofchar', 'stringtoolestr', 'stringtowidechar',
- 'succ', 'swap', 'trunc', 'truncate', 'typeinfo',
- 'ucs4stringtowidestring', 'unicodetoutf8', 'uniquestring',
- 'upcase', 'utf8decode', 'utf8encode', 'utf8toansi',
- 'utf8tounicode', 'val', 'vararrayredim', 'varclear',
- 'widecharlentostring', 'widecharlentostrvar',
- 'widechartostring', 'widechartostrvar',
- 'widestringtoucs4string', 'write', 'writeln'
- ],
- 'SysUtils': [
- 'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks',
- 'allocmem', 'ansicomparefilename', 'ansicomparestr',
- 'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr',
- 'ansilastchar', 'ansilowercase', 'ansilowercasefilename',
- 'ansipos', 'ansiquotedstr', 'ansisamestr', 'ansisametext',
- 'ansistrcomp', 'ansistricomp', 'ansistrlastchar', 'ansistrlcomp',
- 'ansistrlicomp', 'ansistrlower', 'ansistrpos', 'ansistrrscan',
- 'ansistrscan', 'ansistrupper', 'ansiuppercase',
- 'ansiuppercasefilename', 'appendstr', 'assignstr', 'beep',
- 'booltostr', 'bytetocharindex', 'bytetocharlen', 'bytetype',
- 'callterminateprocs', 'changefileext', 'charlength',
- 'chartobyteindex', 'chartobytelen', 'comparemem', 'comparestr',
- 'comparetext', 'createdir', 'createguid', 'currentyear',
- 'currtostr', 'currtostrf', 'date', 'datetimetofiledate',
- 'datetimetostr', 'datetimetostring', 'datetimetosystemtime',
- 'datetimetotimestamp', 'datetostr', 'dayofweek', 'decodedate',
- 'decodedatefully', 'decodetime', 'deletefile', 'directoryexists',
- 'diskfree', 'disksize', 'disposestr', 'encodedate', 'encodetime',
- 'exceptionerrormessage', 'excludetrailingbackslash',
- 'excludetrailingpathdelimiter', 'expandfilename',
- 'expandfilenamecase', 'expanduncfilename', 'extractfiledir',
- 'extractfiledrive', 'extractfileext', 'extractfilename',
- 'extractfilepath', 'extractrelativepath', 'extractshortpathname',
- 'fileage', 'fileclose', 'filecreate', 'filedatetodatetime',
- 'fileexists', 'filegetattr', 'filegetdate', 'fileisreadonly',
- 'fileopen', 'fileread', 'filesearch', 'fileseek', 'filesetattr',
- 'filesetdate', 'filesetreadonly', 'filewrite', 'finalizepackage',
- 'findclose', 'findcmdlineswitch', 'findfirst', 'findnext',
- 'floattocurr', 'floattodatetime', 'floattodecimal', 'floattostr',
- 'floattostrf', 'floattotext', 'floattotextfmt', 'fmtloadstr',
- 'fmtstr', 'forcedirectories', 'format', 'formatbuf', 'formatcurr',
- 'formatdatetime', 'formatfloat', 'freeandnil', 'getcurrentdir',
- 'getenvironmentvariable', 'getfileversion', 'getformatsettings',
- 'getlocaleformatsettings', 'getmodulename', 'getpackagedescription',
- 'getpackageinfo', 'gettime', 'guidtostring', 'incamonth',
- 'includetrailingbackslash', 'includetrailingpathdelimiter',
- 'incmonth', 'initializepackage', 'interlockeddecrement',
- 'interlockedexchange', 'interlockedexchangeadd',
- 'interlockedincrement', 'inttohex', 'inttostr', 'isdelimiter',
- 'isequalguid', 'isleapyear', 'ispathdelimiter', 'isvalidident',
- 'languages', 'lastdelimiter', 'loadpackage', 'loadstr',
- 'lowercase', 'msecstotimestamp', 'newstr', 'nextcharindex', 'now',
- 'outofmemoryerror', 'quotedstr', 'raiselastoserror',
- 'raiselastwin32error', 'removedir', 'renamefile', 'replacedate',
- 'replacetime', 'safeloadlibrary', 'samefilename', 'sametext',
- 'setcurrentdir', 'showexception', 'sleep', 'stralloc', 'strbufsize',
- 'strbytetype', 'strcat', 'strcharlength', 'strcomp', 'strcopy',
- 'strdispose', 'strecopy', 'strend', 'strfmt', 'stricomp',
- 'stringreplace', 'stringtoguid', 'strlcat', 'strlcomp', 'strlcopy',
- 'strlen', 'strlfmt', 'strlicomp', 'strlower', 'strmove', 'strnew',
- 'strnextchar', 'strpas', 'strpcopy', 'strplcopy', 'strpos',
- 'strrscan', 'strscan', 'strtobool', 'strtobooldef', 'strtocurr',
- 'strtocurrdef', 'strtodate', 'strtodatedef', 'strtodatetime',
- 'strtodatetimedef', 'strtofloat', 'strtofloatdef', 'strtoint',
- 'strtoint64', 'strtoint64def', 'strtointdef', 'strtotime',
- 'strtotimedef', 'strupper', 'supports', 'syserrormessage',
- 'systemtimetodatetime', 'texttofloat', 'time', 'timestamptodatetime',
- 'timestamptomsecs', 'timetostr', 'trim', 'trimleft', 'trimright',
- 'tryencodedate', 'tryencodetime', 'tryfloattocurr', 'tryfloattodatetime',
- 'trystrtobool', 'trystrtocurr', 'trystrtodate', 'trystrtodatetime',
- 'trystrtofloat', 'trystrtoint', 'trystrtoint64', 'trystrtotime',
- 'unloadpackage', 'uppercase', 'widecomparestr', 'widecomparetext',
- 'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase',
- 'widesamestr', 'widesametext', 'wideuppercase', 'win32check',
- 'wraptext'
- ],
- 'Classes': [
- 'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize',
- 'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect',
- 'extractstrings', 'findclass', 'findglobalcomponent', 'getclass',
- 'groupdescendantswith', 'hextobin', 'identtoint',
- 'initinheritedcomponent', 'inttoident', 'invalidpoint',
- 'isuniqueglobalcomponentname', 'linestart', 'objectbinarytotext',
- 'objectresourcetotext', 'objecttexttobinary', 'objecttexttoresource',
- 'pointsequal', 'readcomponentres', 'readcomponentresex',
- 'readcomponentresfile', 'rect', 'registerclass', 'registerclassalias',
- 'registerclasses', 'registercomponents', 'registerintegerconsts',
- 'registernoicon', 'registernonactivex', 'smallpoint', 'startclassgroup',
- 'teststreamformat', 'unregisterclass', 'unregisterclasses',
- 'unregisterintegerconsts', 'unregistermoduleclasses',
- 'writecomponentresfile'
- ],
- 'Math': [
- 'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec',
- 'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil',
- 'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc',
- 'csch', 'cycletodeg', 'cycletograd', 'cycletorad', 'degtocycle',
- 'degtograd', 'degtorad', 'divmod', 'doubledecliningbalance',
- 'ensurerange', 'floor', 'frexp', 'futurevalue', 'getexceptionmask',
- 'getprecisionmode', 'getroundmode', 'gradtocycle', 'gradtodeg',
- 'gradtorad', 'hypot', 'inrange', 'interestpayment', 'interestrate',
- 'internalrateofreturn', 'intpower', 'isinfinite', 'isnan', 'iszero',
- 'ldexp', 'lnxp1', 'log10', 'log2', 'logn', 'max', 'maxintvalue',
- 'maxvalue', 'mean', 'meanandstddev', 'min', 'minintvalue', 'minvalue',
- 'momentskewkurtosis', 'netpresentvalue', 'norm', 'numberofperiods',
- 'payment', 'periodpayment', 'poly', 'popnstddev', 'popnvariance',
- 'power', 'presentvalue', 'radtocycle', 'radtodeg', 'radtograd',
- 'randg', 'randomrange', 'roundto', 'samevalue', 'sec', 'secant',
- 'sech', 'setexceptionmask', 'setprecisionmode', 'setroundmode',
- 'sign', 'simpleroundto', 'sincos', 'sinh', 'slndepreciation', 'stddev',
- 'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation',
- 'tan', 'tanh', 'totalvariance', 'variance'
- ]
- }
-
- ASM_REGISTERS = set([
- 'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
- 'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
- 'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
- 'eax', 'ebp', 'ebx', 'ecx', 'edi', 'edx', 'es', 'esi', 'esp',
- 'fs', 'gs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6',
- 'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
- 'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
- 'xmm6', 'xmm7'
- ])
-
- ASM_INSTRUCTIONS = set([
- 'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
- 'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
- 'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
- 'cmovb', 'cmovbe', 'cmovc', 'cmovcxz', 'cmove', 'cmovg',
- 'cmovge', 'cmovl', 'cmovle', 'cmovna', 'cmovnae', 'cmovnb',
- 'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl',
- 'cmovnle', 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo',
- 'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', 'cmp', 'cmpsb',
- 'cmpsd', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', 'cpuid',
- 'cwd', 'cwde', 'daa', 'das', 'dec', 'div', 'emms', 'enter', 'hlt',
- 'ibts', 'icebp', 'idiv', 'imul', 'in', 'inc', 'insb', 'insd',
- 'insw', 'int', 'int01', 'int03', 'int1', 'int3', 'into', 'invd',
- 'invlpg', 'iret', 'iretd', 'iretw', 'ja', 'jae', 'jb', 'jbe',
- 'jc', 'jcxz', 'jcxz', 'je', 'jecxz', 'jg', 'jge', 'jl', 'jle',
- 'jmp', 'jna', 'jnae', 'jnb', 'jnbe', 'jnc', 'jne', 'jng', 'jnge',
- 'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe',
- 'jpo', 'js', 'jz', 'lahf', 'lar', 'lcall', 'lds', 'lea', 'leave',
- 'les', 'lfs', 'lgdt', 'lgs', 'lidt', 'ljmp', 'lldt', 'lmsw',
- 'loadall', 'loadall286', 'lock', 'lodsb', 'lodsd', 'lodsw',
- 'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', 'ltr',
- 'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsw', 'movsx',
- 'movzx', 'mul', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd',
- 'outsw', 'pop', 'popa', 'popad', 'popaw', 'popf', 'popfd', 'popfw',
- 'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfw',
- 'rcl', 'rcr', 'rdmsr', 'rdpmc', 'rdshr', 'rdtsc', 'rep', 'repe',
- 'repne', 'repnz', 'repz', 'ret', 'retf', 'retn', 'rol', 'ror',
- 'rsdc', 'rsldt', 'rsm', 'sahf', 'sal', 'salc', 'sar', 'sbb',
- 'scasb', 'scasd', 'scasw', 'seta', 'setae', 'setb', 'setbe',
- 'setc', 'setcxz', 'sete', 'setg', 'setge', 'setl', 'setle',
- 'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng',
- 'setnge', 'setnl', 'setnle', 'setno', 'setnp', 'setns', 'setnz',
- 'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', 'sgdt', 'shl',
- 'shld', 'shr', 'shrd', 'sidt', 'sldt', 'smi', 'smint', 'smintold',
- 'smsw', 'stc', 'std', 'sti', 'stosb', 'stosd', 'stosw', 'str',
- 'sub', 'svdc', 'svldt', 'svts', 'syscall', 'sysenter', 'sysexit',
- 'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
- 'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
- 'xlatb', 'xor'
- ])
-
- def __init__(self, **options):
- Lexer.__init__(self, **options)
- self.keywords = set()
- if get_bool_opt(options, 'turbopascal', True):
- self.keywords.update(self.TURBO_PASCAL_KEYWORDS)
- if get_bool_opt(options, 'delphi', True):
- self.keywords.update(self.DELPHI_KEYWORDS)
- if get_bool_opt(options, 'freepascal', True):
- self.keywords.update(self.FREE_PASCAL_KEYWORDS)
- self.builtins = set()
- for unit in get_list_opt(options, 'units', self.BUILTIN_UNITS.keys()):
- self.builtins.update(self.BUILTIN_UNITS[unit])
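
The options documented in the class docstring are consumed here via get_bool_opt and get_list_opt; a hedged sketch of how a caller would set them:

    from pygments.lexers import DelphiLexer

    # Drop the Free Pascal keyword set and restrict builtin highlighting
    # to the System unit; both knobs come from the docstring above.
    lexer = DelphiLexer(freepascal=False, units=['System'])
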
-
- def get_tokens_unprocessed(self, text):
- scanner = Scanner(text, re.DOTALL | re.MULTILINE | re.IGNORECASE)
- stack = ['initial']
- in_function_block = False
- in_property_block = False
- was_dot = False
- next_token_is_function = False
- next_token_is_property = False
- collect_labels = False
- block_labels = set()
- brace_balance = [0, 0]
-
- while not scanner.eos:
- token = Error
-
- if stack[-1] == 'initial':
- if scanner.scan(r'\s+'):
- token = Text
- elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
- if scanner.match.startswith('$'):
- token = Comment.Preproc
- else:
- token = Comment.Multiline
- elif scanner.scan(r'//.*?$'):
- token = Comment.Single
- elif scanner.scan(r'[-+*\/=<>:;,.@\^]'):
- token = Operator
- # stop label highlighting on next ";"
- if collect_labels and scanner.match == ';':
- collect_labels = False
- elif scanner.scan(r'[\(\)\[\]]+'):
- token = Punctuation
- # abort function naming ``foo = Function(...)``
- next_token_is_function = False
- # if we are in a function block we count the open
-                    # braces because otherwise it's impossible to
- # determine the end of the modifier context
- if in_function_block or in_property_block:
- if scanner.match == '(':
- brace_balance[0] += 1
- elif scanner.match == ')':
- brace_balance[0] -= 1
- elif scanner.match == '[':
- brace_balance[1] += 1
- elif scanner.match == ']':
- brace_balance[1] -= 1
- elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
- lowercase_name = scanner.match.lower()
- if lowercase_name == 'result':
- token = Name.Builtin.Pseudo
- elif lowercase_name in self.keywords:
- token = Keyword
- # if we are in a special block and a
-                        # block ending keyword occurs (and the parentheses
-                        # are balanced) we end the current block context
- if (in_function_block or in_property_block) and \
- lowercase_name in self.BLOCK_KEYWORDS and \
- brace_balance[0] <= 0 and \
- brace_balance[1] <= 0:
- in_function_block = False
- in_property_block = False
- brace_balance = [0, 0]
- block_labels = set()
- if lowercase_name in ('label', 'goto'):
- collect_labels = True
- elif lowercase_name == 'asm':
- stack.append('asm')
- elif lowercase_name == 'property':
- in_property_block = True
- next_token_is_property = True
- elif lowercase_name in ('procedure', 'operator',
- 'function', 'constructor',
- 'destructor'):
- in_function_block = True
- next_token_is_function = True
- # we are in a function block and the current name
- # is in the set of registered modifiers. highlight
- # it as pseudo keyword
- elif in_function_block and \
- lowercase_name in self.FUNCTION_MODIFIERS:
- token = Keyword.Pseudo
- # if we are in a property highlight some more
- # modifiers
- elif in_property_block and \
- lowercase_name in ('read', 'write'):
- token = Keyword.Pseudo
- next_token_is_function = True
-                    # if the last iteration set next_token_is_function
-                    # to true, we now want this name highlighted as a
-                    # function, so do that and reset the state
- elif next_token_is_function:
-                        # Check whether the next token is a dot. If so, this
-                        # is not a function but a class name, and the part
-                        # after the dot is the function name
- if scanner.test(r'\s*\.\s*'):
- token = Name.Class
- # it's not a dot, our job is done
- else:
- token = Name.Function
- next_token_is_function = False
- # same for properties
- elif next_token_is_property:
- token = Name.Property
- next_token_is_property = False
- # Highlight this token as label and add it
- # to the list of known labels
- elif collect_labels:
- token = Name.Label
- block_labels.add(scanner.match.lower())
- # name is in list of known labels
- elif lowercase_name in block_labels:
- token = Name.Label
- elif lowercase_name in self.BUILTIN_TYPES:
- token = Keyword.Type
- elif lowercase_name in self.DIRECTIVES:
- token = Keyword.Pseudo
- # builtins are just builtins if the token
- # before isn't a dot
- elif not was_dot and lowercase_name in self.builtins:
- token = Name.Builtin
- else:
- token = Name
- elif scanner.scan(r"'"):
- token = String
- stack.append('string')
- elif scanner.scan(r'\#(\d+|\$[0-9A-Fa-f]+)'):
- token = String.Char
- elif scanner.scan(r'\$[0-9A-Fa-f]+'):
- token = Number.Hex
- elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
- token = Number.Integer
- elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
- token = Number.Float
- else:
-                    # if the stack is more than one level deep, pop
- if len(stack) > 1:
- stack.pop()
- scanner.get_char()
-
- elif stack[-1] == 'string':
- if scanner.scan(r"''"):
- token = String.Escape
- elif scanner.scan(r"'"):
- token = String
- stack.pop()
- elif scanner.scan(r"[^']*"):
- token = String
- else:
- scanner.get_char()
- stack.pop()
-
- elif stack[-1] == 'asm':
- if scanner.scan(r'\s+'):
- token = Text
- elif scanner.scan(r'end'):
- token = Keyword
- stack.pop()
- elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
- if scanner.match.startswith('$'):
- token = Comment.Preproc
- else:
- token = Comment.Multiline
- elif scanner.scan(r'//.*?$'):
- token = Comment.Single
- elif scanner.scan(r"'"):
- token = String
- stack.append('string')
- elif scanner.scan(r'@@[A-Za-z_][A-Za-z_0-9]*'):
- token = Name.Label
- elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
- lowercase_name = scanner.match.lower()
- if lowercase_name in self.ASM_INSTRUCTIONS:
- token = Keyword
- elif lowercase_name in self.ASM_REGISTERS:
- token = Name.Builtin
- else:
- token = Name
- elif scanner.scan(r'[-+*\/=<>:;,.@\^]+'):
- token = Operator
- elif scanner.scan(r'[\(\)\[\]]+'):
- token = Punctuation
- elif scanner.scan(r'\$[0-9A-Fa-f]+'):
- token = Number.Hex
- elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
- token = Number.Integer
- elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
- token = Number.Float
- else:
- scanner.get_char()
- stack.pop()
-
-            # remember whether this token was a dot
- if scanner.match.strip():
- was_dot = scanner.match == '.'
- yield scanner.start_pos, token, scanner.match or ''
-
-
-class DylanLexer(RegexLexer):
- """
- For the `Dylan <http://www.opendylan.org/>`_ language.
-
- *New in Pygments 0.7.*
- """
-
- name = 'Dylan'
- aliases = ['dylan']
- filenames = ['*.dylan', '*.dyl', '*.intr']
- mimetypes = ['text/x-dylan']
-
- flags = re.IGNORECASE
-
- builtins = set([
- 'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
- 'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
- 'each-subclass', 'exception', 'exclude', 'function', 'generic',
- 'handler', 'inherited', 'inline', 'inline-only', 'instance',
- 'interface', 'import', 'keyword', 'library', 'macro', 'method',
- 'module', 'open', 'primary', 'required', 'sealed', 'sideways',
- 'singleton', 'slot', 'thread', 'variable', 'virtual'])
-
- keywords = set([
- 'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
- 'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
- 'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
- 'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
- 'while'])
-
- operators = set([
- '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
- '>', '>=', '&', '|'])
-
- functions = set([
- 'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
- 'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
- 'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
- 'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol',
- 'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose',
- 'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as',
- 'condition-format-arguments', 'condition-format-string', 'conjoin',
- 'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions',
- 'direct-subclasses', 'direct-superclasses', 'disjoin', 'do',
- 'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?',
- 'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first',
- 'first-setter', 'floor', 'floor/', 'forward-iteration-protocol',
- 'function-arguments', 'function-return-values',
- 'function-specializers', 'gcd', 'generic-function-mandatory-keywords',
- 'generic-function-methods', 'head', 'head-setter', 'identity',
- 'initialize', 'instance?', 'integral?', 'intersection',
- 'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited',
- 'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make',
- 'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes',
- 'min', 'modulo', 'negative', 'negative?', 'next-method',
- 'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop',
- 'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank',
- 'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!',
- 'remove-duplicates', 'remove-duplicates!', 'remove-key!',
- 'remove-method', 'replace-elements!', 'replace-subsequence!',
- 'restart-query', 'return-allowed?', 'return-description',
- 'return-query', 'reverse', 'reverse!', 'round', 'round/',
- 'row-major-index', 'second', 'second-setter', 'shallow-copy',
- 'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?',
- 'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position',
- 'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
- 'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
- 'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
- 'vector', 'zero?'])
-
- valid_name = '\\\\?[a-zA-Z0-9' + re.escape('!&*<>|^$%@_-+~?/=') + ']+'
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- lowercase_value = value.lower()
- if lowercase_value in self.builtins:
- yield index, Name.Builtin, value
- continue
- if lowercase_value in self.keywords:
- yield index, Keyword, value
- continue
- if lowercase_value in self.functions:
- yield index, Name.Builtin, value
- continue
- if lowercase_value in self.operators:
- yield index, Operator, value
- continue
- yield index, token, value
-
- tokens = {
- 'root': [
- # Whitespace
- (r'\s+', Text),
-
- # single line comment
- (r'//.*?\n', Comment.Single),
-
- # lid header
- (r'([A-Za-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
- bygroups(Name.Attribute, Operator, Text, String)),
-
- ('', Text, 'code') # no header match, switch to code
- ],
- 'code': [
- # Whitespace
- (r'\s+', Text),
-
- # single line comment
- (r'//.*?\n', Comment.Single),
-
- # multi-line comment
- (r'/\*', Comment.Multiline, 'comment'),
-
- # strings and characters
- (r'"', String, 'string'),
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
-
- # binary integer
- (r'#[bB][01]+', Number),
-
- # octal integer
- (r'#[oO][0-7]+', Number.Oct),
-
- # floating point
- (r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float),
-
- # decimal integer
- (r'[-+]?\d+', Number.Integer),
-
- # hex integer
- (r'#[xX][0-9a-fA-F]+', Number.Hex),
-
- # Macro parameters
- (r'(\?' + valid_name + ')(:)'
- r'(token|name|variable|expression|body|case-body|\*)',
- bygroups(Name.Tag, Operator, Name.Builtin)),
- (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)',
- bygroups(Name.Tag, Operator, Name.Builtin)),
- (r'\?' + valid_name, Name.Tag),
-
- # Punctuation
- (r'(=>|::|#\(|#\[|##|\?|\?\?|\?=|[(){}\[\],\.;])', Punctuation),
-
- # Most operators are picked up as names and then re-flagged.
- # This one isn't valid in a name though, so we pick it up now.
- (r':=', Operator),
-
- # Pick up #t / #f before we match other stuff with #.
- (r'#[tf]', Literal),
-
- # #"foo" style keywords
- (r'#"', String.Symbol, 'keyword'),
-
- # #rest, #key, #all-keys, etc.
- (r'#[a-zA-Z0-9-]+', Keyword),
-
- # required-init-keyword: style keywords.
- (valid_name + ':', Keyword),
-
- # class names
- (r'<' + valid_name + '>', Name.Class),
-
- # define variable forms.
- (r'\*' + valid_name + '\*', Name.Variable.Global),
-
- # define constant forms.
- (r'\$' + valid_name, Name.Constant),
-
- # everything else. We re-flag some of these in the method above.
- (valid_name, Name),
- ],
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'keyword': [
- (r'"', String.Symbol, '#pop'),
- (r'[^\\"]+', String.Symbol), # all other characters
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ]
- }
-
-
-class DylanLidLexer(RegexLexer):
- """
- For Dylan LID (Library Interchange Definition) files.
-
- *New in Pygments 1.6.*
- """
-
- name = 'DylanLID'
- aliases = ['dylan-lid', 'lid']
- filenames = ['*.lid', '*.hdp']
- mimetypes = ['text/x-dylan-lid']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- # Whitespace
- (r'\s+', Text),
-
- # single line comment
- (r'//.*?\n', Comment.Single),
-
- # lid header
- (r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
- bygroups(Name.Attribute, Operator, Text, String)),
- ]
- }
-
-
-class DylanConsoleLexer(Lexer):
- """
- For Dylan interactive console output like:
-
- .. sourcecode:: dylan-console
-
- ? let a = 1;
- => 1
- ? a
- => 1
-
- This is based on a copy of the RubyConsoleLexer.
-
- *New in Pygments 1.6.*
- """
- name = 'Dylan session'
- aliases = ['dylan-console', 'dylan-repl']
- filenames = ['*.dylan-console']
- mimetypes = ['text/x-dylan-console']
-
- _line_re = re.compile('.*?\n')
- _prompt_re = re.compile('\?| ')
-
- def get_tokens_unprocessed(self, text):
- dylexer = DylanLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in self._line_re.finditer(text):
- line = match.group()
- m = self._prompt_re.match(line)
- if m is not None:
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
- for item in do_insertions(insertions,
- dylexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- yield match.start(), Generic.Output, line
- if curcode:
- for item in do_insertions(insertions,
- dylexer.get_tokens_unprocessed(curcode)):
- yield item
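
The docstring's session format is handled by splitting prompt lines from output lines and re-joining the code via do_insertions; a short sketch of feeding such a session through the lexer:

    from pygments.lexers import DylanConsoleLexer

    session = '? let a = 1;\n=> 1\n'  # format shown in the docstring above
    for token, value in DylanConsoleLexer().get_tokens(session):
        print(token, repr(value))
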
-
-
-def objective(baselexer):
- """
- Generate a subclass of baselexer that accepts the Objective-C syntax
- extensions.
- """
-
- # Have to be careful not to accidentally match JavaDoc/Doxygen syntax here,
- # since that's quite common in ordinary C/C++ files. It's OK to match
- # JavaDoc/Doxygen keywords that only apply to Objective-C, mind.
- #
- # The upshot of this is that we CANNOT match @class or @interface
- _oc_keywords = re.compile(r'@(?:end|implementation|protocol)')
-
- # Matches [ <ws>? identifier <ws> ( identifier <ws>? ] | identifier? : )
- # (note the identifier is *optional* when there is a ':'!)
- _oc_message = re.compile(r'\[\s*[a-zA-Z_][a-zA-Z0-9_]*\s+'
- r'(?:[a-zA-Z_][a-zA-Z0-9_]*\s*\]|'
- r'(?:[a-zA-Z_][a-zA-Z0-9_]*)?:)')
-
- class GeneratedObjectiveCVariant(baselexer):
- """
- Implements Objective-C syntax on top of an existing C family lexer.
- """
-
- tokens = {
- 'statements': [
- (r'@"', String, 'string'),
- (r"@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
- String.Char),
- (r'@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'@(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'@0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'@0[0-7]+[Ll]?', Number.Oct),
- (r'@\d+[Ll]?', Number.Integer),
- (r'(in|@selector|@private|@protected|@public|@encode|'
- r'@synchronized|@try|@throw|@catch|@finally|@end|@property|'
- r'@synthesize|@dynamic|@optional)\b', Keyword),
- (r'(id|Class|IMP|SEL|BOOL|IBOutlet|IBAction|unichar)\b',
- Keyword.Type),
- (r'@(true|false|YES|NO)\n', Name.Builtin),
- (r'(YES|NO|nil)\b', Name.Builtin),
- (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
- ('#pop', 'oc_classname')),
- (r'(@class|@protocol)(\s+)', bygroups(Keyword, Text),
- ('#pop', 'oc_forward_classname')),
- # @ can also prefix other expressions like @{...} or @(...)
- (r'@', Punctuation),
- inherit,
- ],
- 'oc_classname' : [
- # interface definition that inherits
- ('([a-zA-Z$_][a-zA-Z0-9$_]*)(\s*:\s*)([a-zA-Z$_][a-zA-Z0-9$_]*)?',
- bygroups(Name.Class, Text, Name.Class), '#pop'),
- # interface definition for a category
- ('([a-zA-Z$_][a-zA-Z0-9$_]*)(\s*)(\([a-zA-Z$_][a-zA-Z0-9$_]*\))',
- bygroups(Name.Class, Text, Name.Label), '#pop'),
- # simple interface / implementation
- ('([a-zA-Z$_][a-zA-Z0-9$_]*)', Name.Class, '#pop')
- ],
- 'oc_forward_classname' : [
- ('([a-zA-Z$_][a-zA-Z0-9$_]*)(\s*,\s*)',
- bygroups(Name.Class, Text), 'oc_forward_classname'),
- ('([a-zA-Z$_][a-zA-Z0-9$_]*)(\s*;?)',
- bygroups(Name.Class, Text), '#pop')
- ],
- 'root': [
- # methods
- (r'^([-+])(\s*)' # method marker
- r'(\(.*?\))?(\s*)' # return type
- r'([a-zA-Z$_][a-zA-Z0-9$_]*:?)', # begin of method name
- bygroups(Keyword, Text, using(this),
- Text, Name.Function),
- 'method'),
- inherit,
- ],
- 'method': [
- include('whitespace'),
- # TODO unsure if ellipses are allowed elsewhere, see
- # discussion in Issue 789
- (r',', Punctuation),
- (r'\.\.\.', Punctuation),
- (r'(\(.*?\))([a-zA-Z$_][a-zA-Z0-9$_]*)', bygroups(using(this),
- Name.Variable)),
- (r'[a-zA-Z$_][a-zA-Z0-9$_]*:', Name.Function),
- (';', Punctuation, '#pop'),
- ('{', Punctuation, 'function'),
- ('', Text, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if _oc_keywords.search(text):
- return 1.0
- elif '@"' in text: # strings
- return 0.8
- elif _oc_message.search(text):
- return 0.8
- return 0
-
- return GeneratedObjectiveCVariant
-
-
-class ObjectiveCLexer(objective(CLexer)):
- """
- For Objective-C source code with preprocessor directives.
- """
-
- name = 'Objective-C'
- aliases = ['objective-c', 'objectivec', 'obj-c', 'objc']
- filenames = ['*.m', '*.h']
- mimetypes = ['text/x-objective-c']
- priority = 0.05 # Lower than C
-
-
-class ObjectiveCppLexer(objective(CppLexer)):
- """
- For Objective-C++ source code with preprocessor directives.
- """
-
- name = 'Objective-C++'
- aliases = ['objective-c++', 'objectivec++', 'obj-c++', 'objc++']
- filenames = ['*.mm', '*.hh']
- mimetypes = ['text/x-objective-c++']
- priority = 0.05 # Lower than C++
-
-
-class FortranLexer(RegexLexer):
- """
- Lexer for FORTRAN 90 code.
-
- *New in Pygments 0.10.*
- """
- name = 'Fortran'
- aliases = ['fortran']
- filenames = ['*.f90', '*.F90', '*.f03', '*.F03']
- mimetypes = ['text/x-fortran']
- flags = re.IGNORECASE
-
- # Data Types: INTEGER, REAL, COMPLEX, LOGICAL, CHARACTER and DOUBLE PRECISION
- # Operators: **, *, +, -, /, <, >, <=, >=, ==, /=
- # Logical (?): NOT, AND, OR, EQV, NEQV
-
- # Builtins:
- # http://gcc.gnu.org/onlinedocs/gcc-3.4.6/g77/Table-of-Intrinsic-Functions.html
-
- tokens = {
- 'root': [
- (r'!.*\n', Comment),
- include('strings'),
- include('core'),
- (r'[a-z][a-z0-9_]*', Name.Variable),
- include('nums'),
- (r'[\s]+', Text),
- ],
- 'core': [
- # Statements
- (r'\b(ABSTRACT|ACCEPT|ALLOCATABLE|ALLOCATE|ARRAY|ASSIGN|ASYNCHRONOUS|'
- r'BACKSPACE|BIND|BLOCK( DATA)?|BYTE|CALL|CASE|CLASS|CLOSE|COMMON|CONTAINS|'
- r'CONTINUE|CYCLE|DATA|DEALLOCATE|DECODE|DEFERRED|DIMENSION|DO|'
- r'ELEMENTAL|ELSE|ENCODE|END( FILE)?|ENDIF|ENTRY|ENUMERATOR|EQUIVALENCE|'
- r'EXIT|EXTERNAL|EXTRINSIC|FINAL|FORALL|FORMAT|FUNCTION|GENERIC|'
- r'GOTO|IF|IMPLICIT|IMPORT|INCLUDE|INQUIRE|INTENT|INTERFACE|'
- r'INTRINSIC|MODULE|NAMELIST|NULLIFY|NONE|NON_INTRINSIC|'
- r'NON_OVERRIDABLE|NOPASS|OPEN|OPTIONAL|OPTIONS|PARAMETER|PASS|'
- r'PAUSE|POINTER|PRINT|PRIVATE|PROGRAM|PROTECTED|PUBLIC|PURE|READ|'
- r'RECURSIVE|RESULT|RETURN|REWIND|SAVE|SELECT|SEQUENCE|STOP|SUBROUTINE|'
- r'TARGET|THEN|TYPE|USE|VALUE|VOLATILE|WHERE|WRITE|WHILE)\s*\b',
- Keyword),
-
- # Data Types
- (r'\b(CHARACTER|COMPLEX|DOUBLE PRECISION|DOUBLE COMPLEX|INTEGER|'
- r'LOGICAL|REAL|C_INT|C_SHORT|C_LONG|C_LONG_LONG|C_SIGNED_CHAR|'
- r'C_SIZE_T|C_INT8_T|C_INT16_T|C_INT32_T|C_INT64_T|C_INT_LEAST8_T|'
- r'C_INT_LEAST16_T|C_INT_LEAST32_T|C_INT_LEAST64_T|C_INT_FAST8_T|'
- r'C_INT_FAST16_T|C_INT_FAST32_T|C_INT_FAST64_T|C_INTMAX_T|'
- r'C_INTPTR_T|C_FLOAT|C_DOUBLE|C_LONG_DOUBLE|C_FLOAT_COMPLEX|'
- r'C_DOUBLE_COMPLEX|C_LONG_DOUBLE_COMPLEX|C_BOOL|C_CHAR|C_PTR|'
- r'C_FUNPTR)\s*\b',
- Keyword.Type),
-
- # Operators
- (r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)', Operator),
-
- (r'(::)', Keyword.Declaration),
-
- (r'[(),:&%;]', Punctuation),
-
- # Intrinsics
- (r'\b(Abort|Abs|Access|AChar|ACos|AdjustL|AdjustR|AImag|AInt|Alarm|'
- r'All|Allocated|ALog|AMax|AMin|AMod|And|ANInt|Any|ASin|Associated|'
- r'ATan|BesJ|BesJN|BesY|BesYN|Bit_Size|BTest|CAbs|CCos|Ceiling|'
- r'CExp|Char|ChDir|ChMod|CLog|Cmplx|Command_Argument_Count|Complex|'
- r'Conjg|Cos|CosH|Count|CPU_Time|CShift|CSin|CSqRt|CTime|C_Funloc|'
- r'C_Loc|C_Associated|C_Null_Ptr|C_Null_Funptr|C_F_Pointer|'
- r'C_Null_Char|C_Alert|C_Backspace|C_Form_Feed|C_New_Line|'
- r'C_Carriage_Return|C_Horizontal_Tab|C_Vertical_Tab|'
- r'DAbs|DACos|DASin|DATan|Date_and_Time|DbesJ|'
- r'DbesJ|DbesJN|DbesY|DbesY|DbesYN|Dble|DCos|DCosH|DDiM|DErF|DErFC|'
- r'DExp|Digits|DiM|DInt|DLog|DLog|DMax|DMin|DMod|DNInt|Dot_Product|'
- r'DProd|DSign|DSinH|DSin|DSqRt|DTanH|DTan|DTime|EOShift|Epsilon|'
- r'ErF|ErFC|ETime|Exit|Exp|Exponent|Extends_Type_Of|FDate|FGet|'
- r'FGetC|Float|Floor|Flush|FNum|FPutC|FPut|Fraction|FSeek|FStat|'
- r'FTell|GError|GetArg|Get_Command|Get_Command_Argument|'
- r'Get_Environment_Variable|GetCWD|GetEnv|GetGId|GetLog|GetPId|'
- r'GetUId|GMTime|HostNm|Huge|IAbs|IAChar|IAnd|IArgC|IBClr|IBits|'
- r'IBSet|IChar|IDate|IDiM|IDInt|IDNInt|IEOr|IErrNo|IFix|Imag|'
- r'ImagPart|Index|Int|IOr|IRand|IsaTty|IShft|IShftC|ISign|'
- r'Iso_C_Binding|Is_Iostat_End|Is_Iostat_Eor|ITime|Kill|Kind|'
- r'LBound|Len|Len_Trim|LGe|LGt|Link|LLe|LLt|LnBlnk|Loc|Log|'
- r'Logical|Long|LShift|LStat|LTime|MatMul|Max|MaxExponent|MaxLoc|'
- r'MaxVal|MClock|Merge|Move_Alloc|Min|MinExponent|MinLoc|MinVal|'
- r'Mod|Modulo|MvBits|Nearest|New_Line|NInt|Not|Or|Pack|PError|'
- r'Precision|Present|Product|Radix|Rand|Random_Number|Random_Seed|'
- r'Range|Real|RealPart|Rename|Repeat|Reshape|RRSpacing|RShift|'
- r'Same_Type_As|Scale|Scan|Second|Selected_Int_Kind|'
- r'Selected_Real_Kind|Set_Exponent|Shape|Short|Sign|Signal|SinH|'
- r'Sin|Sleep|Sngl|Spacing|Spread|SqRt|SRand|Stat|Sum|SymLnk|'
- r'System|System_Clock|Tan|TanH|Time|Tiny|Transfer|Transpose|Trim|'
- r'TtyNam|UBound|UMask|Unlink|Unpack|Verify|XOr|ZAbs|ZCos|ZExp|'
- r'ZLog|ZSin|ZSqRt)\s*\b',
- Name.Builtin),
-
- # Booleans
- (r'\.(true|false)\.', Name.Builtin),
- # Comparing Operators
- (r'\.(eq|ne|lt|le|gt|ge|not|and|or|eqv|neqv)\.', Operator.Word),
- ],
-
- 'strings': [
- (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- ],
-
- 'nums': [
- (r'\d+(?![.Ee])', Number.Integer),
- (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
- (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
- ],
- }
-
-class FortranFixedLexer(RegexLexer):
- """
- Lexer for fixed format Fortran.
- """
- name = 'FortranFixed'
- aliases = ['fortranfixed']
- filenames = ['*.f', '*.F']
-
- flags = re.IGNORECASE
-
- def _lex_fortran(self, match, ctx=None):
-        """Lex a line just like free-form Fortran, without the line break."""
- lexer = FortranLexer()
- text = match.group(0) + "\n"
- for index, token, value in lexer.get_tokens_unprocessed(text):
- value = value.replace('\n','')
- if value != '':
- yield index, token, value
-
- tokens = {
- 'root': [
- (r'[C*].*\n', Comment),
- (r'#.*\n', Comment.Preproc),
- (r' {0,4}!.*\n', Comment),
- (r'(.{5})', Name.Label, 'cont-char'),
- (r'.*\n', using(FortranLexer)),
- ],
-
- 'cont-char': [
- (' ', Text, 'code'),
- ('0', Comment, 'code'),
- ('.', Generic.Strong, 'code')
- ],
-
- 'code' : [
- (r'(.{66})(.*)(\n)',
- bygroups(_lex_fortran, Comment, Text), 'root'),
- (r'(.*)(\n)', bygroups(_lex_fortran, Text), 'root'),
- (r'', Text, 'root')]
- }
-
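A minimal usage sketch, not taken from the patched sources: the FortranFixedLexer above treats columns 1-5 as the statement-label field, column 6 as the continuation character, and re-lexes columns 7-72 through the free-form FortranLexer. The two-line source string below is a made-up placeholder used only to show that split.

    from pygments.lexers import FortranFixedLexer

    src = "C     comment card\n      PROGRAM HELLO\n"
    # Comment cards come back whole; statement lines are split into label
    # field, continuation column and free-form-lexed statement text.
    for pos, token, value in FortranFixedLexer().get_tokens_unprocessed(src):
        print("%d %s %r" % (pos, token, value))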
-
-class GLShaderLexer(RegexLexer):
- """
- GLSL (OpenGL Shader) lexer.
-
- *New in Pygments 1.1.*
- """
- name = 'GLSL'
- aliases = ['glsl']
- filenames = ['*.vert', '*.frag', '*.geo']
- mimetypes = ['text/x-glslsrc']
-
- tokens = {
- 'root': [
- (r'^#.*', Comment.Preproc),
- (r'//.*', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?',
- Operator),
- (r'[?:]', Operator), # quick hack for ternary
- (r'\bdefined\b', Operator),
- (r'[;{}(),\[\]]', Punctuation),
- # FIXME: when an exponent is present, no decimal point is needed
- (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
- (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
- (r'0[xX][0-9a-fA-F]*', Number.Hex),
- (r'0[0-7]*', Number.Oct),
- (r'[1-9][0-9]*', Number.Integer),
- (r'\b(attribute|const|uniform|varying|centroid|break|continue|'
- r'do|for|while|if|else|in|out|inout|float|int|void|bool|true|'
- r'false|invariant|discard|return|mat[234]|mat[234]x[234]|'
- r'vec[234]|[ib]vec[234]|sampler[123]D|samplerCube|'
- r'sampler[12]DShadow|struct)\b', Keyword),
- (r'\b(asm|class|union|enum|typedef|template|this|packed|goto|'
- r'switch|default|inline|noinline|volatile|public|static|extern|'
- r'external|interface|long|short|double|half|fixed|unsigned|'
- r'lowp|mediump|highp|precision|input|output|hvec[234]|'
- r'[df]vec[234]|sampler[23]DRect|sampler2DRectShadow|sizeof|'
- r'cast|namespace|using)\b', Keyword), #future use
- (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
- (r'\.', Punctuation),
- (r'\s+', Text),
- ],
- }
-
-
-class PrologLexer(RegexLexer):
- """
- Lexer for Prolog files.
- """
- name = 'Prolog'
- aliases = ['prolog']
- filenames = ['*.prolog', '*.pro', '*.pl']
- mimetypes = ['text/x-prolog']
-
- flags = re.UNICODE
-
- tokens = {
- 'root': [
- (r'^#.*', Comment.Single),
- (r'/\*', Comment.Multiline, 'nested-comment'),
- (r'%.*', Comment.Single),
- # character literal
- (r'0\'.', String.Char),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- # literal with prepended base
- (r'\d\d?\'[a-zA-Z0-9]+', Number.Integer),
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer),
- (r'[\[\](){}|.,;!]', Punctuation),
- (r':-|-->', Punctuation),
- (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
- r'\\[0-7]+\\|\\[\w\W]|[^"])*"', String.Double),
- (r"'(?:''|[^'])*'", String.Atom), # quoted atom
- # Needs to not be followed by an atom.
- #(r'=(?=\s|[a-zA-Z\[])', Operator),
- (r'is\b', Operator),
- (r'(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])',
- Operator),
- (r'(mod|div|not)\b', Operator),
- (r'_', Keyword), # The don't-care variable
- (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)),
- (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
- u'[a-zA-Z0-9_$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
- u'(\\s*)(:-|-->)',
- bygroups(Name.Function, Text, Operator)), # function defn
- (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
- u'[a-zA-Z0-9_$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
- u'(\\s*)(\\()',
- bygroups(Name.Function, Text, Punctuation)),
- (u'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
- u'[a-zA-Z0-9_$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*',
- String.Atom), # atom, characters
- # This one includes !
- (u'[#&*+\\-./:<=>?@\\\\^~\u00a1-\u00bf\u2010-\u303f]+',
- String.Atom), # atom, graphics
- (r'[A-Z_][A-Za-z0-9_]*', Name.Variable),
- (u'\\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text),
- ],
- 'nested-comment': [
- (r'\*/', Comment.Multiline, '#pop'),
- (r'/\*', Comment.Multiline, '#push'),
- (r'[^*/]+', Comment.Multiline),
- (r'[*/]', Comment.Multiline),
- ],
- }
-
- def analyse_text(text):
- return ':-' in text
-
-
-class CythonLexer(RegexLexer):
- """
- For Pyrex and `Cython <http://cython.org>`_ source code.
-
- *New in Pygments 1.1.*
- """
-
- name = 'Cython'
- aliases = ['cython', 'pyx', 'pyrex']
- filenames = ['*.pyx', '*.pxd', '*.pxi']
- mimetypes = ['text/x-cython', 'application/x-cython']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
- (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
- (r'[^\S\n]+', Text),
- (r'#.*$', Comment),
- (r'[]{}:(),;[]', Punctuation),
- (r'\\\n', Text),
- (r'\\', Text),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'(<)([a-zA-Z0-9.?]+)(>)',
- bygroups(Punctuation, Keyword.Type, Punctuation)),
- (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator),
- (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)',
- bygroups(Keyword, Number.Integer, Operator, Name, Operator,
- Name, Punctuation)),
- include('keywords'),
- (r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'),
- (r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'),
- (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'),
- (r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'),
- include('builtins'),
- include('backtick'),
- ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
- ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
- ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
- ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
- ('[uU]?"""', String, combined('stringescape', 'tdqs')),
- ("[uU]?'''", String, combined('stringescape', 'tsqs')),
- ('[uU]?"', String, combined('stringescape', 'dqs')),
- ("[uU]?'", String, combined('stringescape', 'sqs')),
- include('name'),
- include('numbers'),
- ],
- 'keywords': [
- (r'(assert|break|by|continue|ctypedef|del|elif|else|except\??|exec|'
- r'finally|for|gil|global|if|include|lambda|nogil|pass|print|raise|'
- r'return|try|while|yield|as|with)\b', Keyword),
- (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc),
- ],
- 'builtins': [
- (r'(?<!\.)(__import__|abs|all|any|apply|basestring|bin|bool|buffer|'
- r'bytearray|bytes|callable|chr|classmethod|cmp|coerce|compile|'
- r'complex|delattr|dict|dir|divmod|enumerate|eval|execfile|exit|'
- r'file|filter|float|frozenset|getattr|globals|hasattr|hash|hex|id|'
- r'input|int|intern|isinstance|issubclass|iter|len|list|locals|'
- r'long|map|max|min|next|object|oct|open|ord|pow|property|range|'
- r'raw_input|reduce|reload|repr|reversed|round|set|setattr|slice|'
- r'sorted|staticmethod|str|sum|super|tuple|type|unichr|unicode|'
- r'vars|xrange|zip)\b', Name.Builtin),
- (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|NULL'
- r')\b', Name.Builtin.Pseudo),
- (r'(?<!\.)(ArithmeticError|AssertionError|AttributeError|'
- r'BaseException|DeprecationWarning|EOFError|EnvironmentError|'
- r'Exception|FloatingPointError|FutureWarning|GeneratorExit|IOError|'
- r'ImportError|ImportWarning|IndentationError|IndexError|KeyError|'
- r'KeyboardInterrupt|LookupError|MemoryError|NameError|'
- r'NotImplemented|NotImplementedError|OSError|OverflowError|'
- r'OverflowWarning|PendingDeprecationWarning|ReferenceError|'
- r'RuntimeError|RuntimeWarning|StandardError|StopIteration|'
- r'SyntaxError|SyntaxWarning|SystemError|SystemExit|TabError|'
- r'TypeError|UnboundLocalError|UnicodeDecodeError|'
- r'UnicodeEncodeError|UnicodeError|UnicodeTranslateError|'
- r'UnicodeWarning|UserWarning|ValueError|Warning|ZeroDivisionError'
- r')\b', Name.Exception),
- ],
- 'numbers': [
- (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'0\d+', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer)
- ],
- 'backtick': [
- ('`.*?`', String.Backtick),
- ],
- 'name': [
- (r'@[a-zA-Z0-9_]+', Name.Decorator),
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
- ],
- 'funcname': [
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
- ],
- 'cdef': [
- (r'(public|readonly|extern|api|inline)\b', Keyword.Reserved),
- (r'(struct|enum|union|class)\b', Keyword),
- (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(?=[(:#=]|$)',
- bygroups(Name.Function, Text), '#pop'),
- (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(,)',
- bygroups(Name.Function, Text, Punctuation)),
- (r'from\b', Keyword, '#pop'),
- (r'as\b', Keyword),
- (r':', Punctuation, '#pop'),
- (r'(?=["\'])', Text, '#pop'),
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Keyword.Type),
- (r'.', Text),
- ],
- 'classname': [
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
- ],
- 'import': [
- (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
- (r'[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace),
- (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
- (r'', Text, '#pop') # all else: go back
- ],
- 'fromimport': [
- (r'(\s+)(c?import)\b', bygroups(Text, Keyword), '#pop'),
- (r'[a-zA-Z_.][a-zA-Z0-9_.]*', Name.Namespace),
- # ``cdef foo from "header"``, or ``for foo from 0 < i < 10``
- (r'', Text, '#pop'),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings': [
- (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String)
- # newlines are an error (use "nl" state)
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here again for raw strings
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here again for raw strings
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl')
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- include('strings'),
- include('nl')
- ],
- }
-
-
-class ValaLexer(RegexLexer):
- """
- For Vala source code with preprocessor directives.
-
- *New in Pygments 1.1.*
- """
- name = 'Vala'
- aliases = ['vala', 'vapi']
- filenames = ['*.vala', '*.vapi']
- mimetypes = ['text/x-vala']
-
- tokens = {
- 'whitespace': [
- (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- ],
- 'statements': [
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
- String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'0[0-7]+[Ll]?', Number.Oct),
- (r'\d+[Ll]?', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'(\[)(Compact|Immutable|(?:Boolean|Simple)Type)(\])',
- bygroups(Punctuation, Name.Decorator, Punctuation)),
- # TODO: "correctly" parse complex code attributes
- (r'(\[)(CCode|(?:Integer|Floating)Type)',
- bygroups(Punctuation, Name.Decorator)),
- (r'[()\[\],.]', Punctuation),
- (r'(as|base|break|case|catch|construct|continue|default|delete|do|'
- r'else|enum|finally|for|foreach|get|if|in|is|lock|new|out|params|'
- r'return|set|sizeof|switch|this|throw|try|typeof|while|yield)\b',
- Keyword),
- (r'(abstract|const|delegate|dynamic|ensures|extern|inline|internal|'
- r'override|owned|private|protected|public|ref|requires|signal|'
- r'static|throws|unowned|var|virtual|volatile|weak|yields)\b',
- Keyword.Declaration),
- (r'(namespace|using)(\s+)', bygroups(Keyword.Namespace, Text),
- 'namespace'),
- (r'(class|errordomain|interface|struct)(\s+)',
- bygroups(Keyword.Declaration, Text), 'class'),
- (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)',
- bygroups(Operator, Name.Attribute)),
- # void is an actual keyword, others are in glib-2.0.vapi
- (r'(void|bool|char|double|float|int|int8|int16|int32|int64|long|'
- r'short|size_t|ssize_t|string|time_t|uchar|uint|uint8|uint16|'
- r'uint32|uint64|ulong|unichar|ushort)\b', Keyword.Type),
- (r'(true|false|null)\b', Name.Builtin),
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
- ],
- 'root': [
- include('whitespace'),
- ('', Text, 'statement'),
- ],
- 'statement' : [
- include('whitespace'),
- include('statements'),
- ('[{}]', Punctuation),
- (';', Punctuation, '#pop'),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ],
- 'class': [
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
- ],
- 'namespace': [
- (r'[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace, '#pop')
- ],
- }
-
-
-class OocLexer(RegexLexer):
- """
- For `Ooc <http://ooc-lang.org/>`_ source code.
-
- *New in Pygments 1.2.*
- """
- name = 'Ooc'
- aliases = ['ooc']
- filenames = ['*.ooc']
- mimetypes = ['text/x-ooc']
-
- tokens = {
- 'root': [
- (r'\b(class|interface|implement|abstract|extends|from|'
- r'this|super|new|const|final|static|import|use|extern|'
- r'inline|proto|break|continue|fallthrough|operator|if|else|for|'
- r'while|do|switch|case|as|in|version|return|true|false|null)\b',
- Keyword),
- (r'include\b', Keyword, 'include'),
- (r'(cover)([ \t]+)(from)([ \t]+)([a-zA-Z0-9_]+[*@]?)',
- bygroups(Keyword, Text, Keyword, Text, Name.Class)),
- (r'(func)((?:[ \t]|\\\n)+)(~[a-z_][a-zA-Z0-9_]*)',
- bygroups(Keyword, Text, Name.Function)),
- (r'\bfunc\b', Keyword),
- # Note: %= and ^= not listed on http://ooc-lang.org/syntax
- (r'//.*', Comment),
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- (r'(==?|\+=?|-[=>]?|\*=?|/=?|:=|!=?|%=?|\?|>{1,3}=?|<{1,3}=?|\.\.|'
- r'&&?|\|\|?|\^=?)', Operator),
- (r'(\.)([ \t]*)([a-z]\w*)', bygroups(Operator, Text,
- Name.Function)),
- (r'[A-Z][A-Z0-9_]+', Name.Constant),
- (r'[A-Z][a-zA-Z0-9_]*([@*]|\[[ \t]*\])?', Name.Class),
-
- (r'([a-z][a-zA-Z0-9_]*(?:~[a-z][a-zA-Z0-9_]*)?)((?:[ \t]|\\\n)*)(?=\()',
- bygroups(Name.Function, Text)),
- (r'[a-z][a-zA-Z0-9_]*', Name.Variable),
-
- # : introduces types
- (r'[:(){}\[\];,]', Punctuation),
-
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'0c[0-9]+', Number.Oct),
- (r'0b[01]+', Number.Binary),
- (r'[0-9_]+\.[0-9_]*(?!\.)', Number.Float),
- (r'[0-9_]+', Number.Decimal),
-
- (r'"(?:\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\"])*"',
- String.Double),
- (r"'(?:\\.|\\[0-9]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
- String.Char),
- (r'@', Punctuation), # pointer dereference
- (r'\.', Punctuation), # imports or chain operator
-
- (r'\\[ \t\n]', Text),
- (r'[ \t]+', Text),
- ],
- 'include': [
- (r'[\w/]+', Name),
- (r',', Punctuation),
- (r'[ \t]', Text),
- (r'[;\n]', Text, '#pop'),
- ],
- }
-
-
-class GoLexer(RegexLexer):
- """
- For `Go <http://golang.org>`_ source.
- """
- name = 'Go'
- filenames = ['*.go']
- aliases = ['go']
- mimetypes = ['text/x-gosrc']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuations
- (r'//(.*?)\n', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'(import|package)\b', Keyword.Namespace),
- (r'(var|func|struct|map|chan|type|interface|const)\b', Keyword.Declaration),
- (r'(break|default|select|case|defer|go'
- r'|else|goto|switch|fallthrough|if|range'
- r'|continue|for|return)\b', Keyword),
- (r'(true|false|iota|nil)\b', Keyword.Constant),
- # It seems the builtin types aren't actually keywords, but
- # can be used as functions. So we need two declarations.
- (r'(uint|uint8|uint16|uint32|uint64'
- r'|int|int8|int16|int32|int64'
- r'|float|float32|float64'
- r'|complex64|complex128|byte|rune'
- r'|string|bool|error|uintptr'
- r'|print|println|panic|recover|close|complex|real|imag'
- r'|len|cap|append|copy|delete|new|make)\b(\()',
- bygroups(Name.Builtin, Punctuation)),
- (r'(uint|uint8|uint16|uint32|uint64'
- r'|int|int8|int16|int32|int64'
- r'|float|float32|float64'
- r'|complex64|complex128|byte|rune'
- r'|string|bool|error|uintptr)\b', Keyword.Type),
- # imaginary_lit
- (r'\d+i', Number),
- (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
- (r'\.\d+([Ee][-+]\d+)?i', Number),
- (r'\d+[Ee][-+]\d+i', Number),
- # float_lit
- (r'\d+(\.\d+[eE][+\-]?\d+|'
- r'\.\d*|[eE][+\-]?\d+)', Number.Float),
- (r'\.\d+([eE][+\-]?\d+)?', Number.Float),
- # int_lit
- # -- octal_lit
- (r'0[0-7]+', Number.Oct),
- # -- hex_lit
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- # -- decimal_lit
- (r'(0|[1-9][0-9]*)', Number.Integer),
- # char_lit
- (r"""'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'""",
- String.Char
- ),
- # StringLiteral
- # -- raw_string_lit
- (r'`[^`]*`', String),
- # -- interpreted_string_lit
- (r'"(\\\\|\\"|[^"])*"', String),
- # Tokens
- (r'(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\|'
- r'|<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])', Operator),
- (r'[|^<>=!()\[\]{}.,;:]', Punctuation),
- # identifier
- (r'[a-zA-Z_]\w*', Name.Other),
- ]
- }
-
-
-class FelixLexer(RegexLexer):
- """
- For `Felix <http://www.felix-lang.org>`_ source code.
-
- *New in Pygments 1.2.*
- """
-
- name = 'Felix'
- aliases = ['felix', 'flx']
- filenames = ['*.flx', '*.flxh']
- mimetypes = ['text/x-felix']
-
- preproc = [
- 'elif', 'else', 'endif', 'if', 'ifdef', 'ifndef',
- ]
-
- keywords = [
- '_', '_deref', 'all', 'as',
- 'assert', 'attempt', 'call', 'callback', 'case', 'caseno', 'cclass',
- 'code', 'compound', 'ctypes', 'do', 'done', 'downto', 'elif', 'else',
- 'endattempt', 'endcase', 'endif', 'endmatch', 'enum', 'except',
- 'exceptions', 'expect', 'finally', 'for', 'forall', 'forget', 'fork',
- 'functor', 'goto', 'ident', 'if', 'incomplete', 'inherit', 'instance',
- 'interface', 'jump', 'lambda', 'loop', 'match', 'module', 'namespace',
- 'new', 'noexpand', 'nonterm', 'obj', 'of', 'open', 'parse', 'raise',
- 'regexp', 'reglex', 'regmatch', 'rename', 'return', 'the', 'then',
- 'to', 'type', 'typecase', 'typedef', 'typematch', 'typeof', 'upto',
- 'when', 'whilst', 'with', 'yield',
- ]
-
- keyword_directives = [
- '_gc_pointer', '_gc_type', 'body', 'comment', 'const', 'export',
- 'header', 'inline', 'lval', 'macro', 'noinline', 'noreturn',
- 'package', 'private', 'pod', 'property', 'public', 'publish',
- 'requires', 'todo', 'virtual', 'use',
- ]
-
- keyword_declarations = [
- 'def', 'let', 'ref', 'val', 'var',
- ]
-
- keyword_types = [
- 'unit', 'void', 'any', 'bool',
- 'byte', 'offset',
- 'address', 'caddress', 'cvaddress', 'vaddress',
- 'tiny', 'short', 'int', 'long', 'vlong',
- 'utiny', 'ushort', 'vshort', 'uint', 'ulong', 'uvlong',
- 'int8', 'int16', 'int32', 'int64',
- 'uint8', 'uint16', 'uint32', 'uint64',
- 'float', 'double', 'ldouble',
- 'complex', 'dcomplex', 'lcomplex',
- 'imaginary', 'dimaginary', 'limaginary',
- 'char', 'wchar', 'uchar',
- 'charp', 'charcp', 'ucharp', 'ucharcp',
- 'string', 'wstring', 'ustring',
- 'cont',
- 'array', 'varray', 'list',
- 'lvalue', 'opt', 'slice',
- ]
-
- keyword_constants = [
- 'false', 'true',
- ]
-
- operator_words = [
- 'and', 'not', 'in', 'is', 'isin', 'or', 'xor',
- ]
-
- name_builtins = [
- '_svc', 'while',
- ]
-
- name_pseudo = [
- 'root', 'self', 'this',
- ]
-
- decimal_suffixes = '([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?'
-
- tokens = {
- 'root': [
- include('whitespace'),
-
- # Keywords
- (r'(axiom|ctor|fun|gen|proc|reduce|union)\b', Keyword,
- 'funcname'),
- (r'(class|cclass|cstruct|obj|struct)\b', Keyword, 'classname'),
- (r'(instance|module|typeclass)\b', Keyword, 'modulename'),
-
- (r'(%s)\b' % '|'.join(keywords), Keyword),
- (r'(%s)\b' % '|'.join(keyword_directives), Name.Decorator),
- (r'(%s)\b' % '|'.join(keyword_declarations), Keyword.Declaration),
- (r'(%s)\b' % '|'.join(keyword_types), Keyword.Type),
- (r'(%s)\b' % '|'.join(keyword_constants), Keyword.Constant),
-
- # Operators
- include('operators'),
-
- # Float Literal
- # -- Hex Float
- (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
- r'[pP][+\-]?[0-9_]+[lLfFdD]?', Number.Float),
- # -- DecimalFloat
- (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[lLfFdD]?', Number.Float),
- (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[lLfFdD]?',
- Number.Float),
-
- # IntegerLiteral
- # -- Binary
- (r'0[Bb][01_]+%s' % decimal_suffixes, Number),
- # -- Octal
- (r'0[0-7_]+%s' % decimal_suffixes, Number.Oct),
- # -- Hexadecimal
- (r'0[xX][0-9a-fA-F_]+%s' % decimal_suffixes, Number.Hex),
- # -- Decimal
- (r'(0|[1-9][0-9_]*)%s' % decimal_suffixes, Number.Integer),
-
- # Strings
- ('([rR][cC]?|[cC][rR])"""', String, 'tdqs'),
- ("([rR][cC]?|[cC][rR])'''", String, 'tsqs'),
- ('([rR][cC]?|[cC][rR])"', String, 'dqs'),
- ("([rR][cC]?|[cC][rR])'", String, 'sqs'),
- ('[cCfFqQwWuU]?"""', String, combined('stringescape', 'tdqs')),
- ("[cCfFqQwWuU]?'''", String, combined('stringescape', 'tsqs')),
- ('[cCfFqQwWuU]?"', String, combined('stringescape', 'dqs')),
- ("[cCfFqQwWuU]?'", String, combined('stringescape', 'sqs')),
-
- # Punctuation
- (r'[\[\]{}:(),;?]', Punctuation),
-
- # Labels
- (r'[a-zA-Z_]\w*:>', Name.Label),
-
- # Identifiers
- (r'(%s)\b' % '|'.join(name_builtins), Name.Builtin),
- (r'(%s)\b' % '|'.join(name_pseudo), Name.Builtin.Pseudo),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'whitespace': [
- (r'\n', Text),
- (r'\s+', Text),
-
- include('comment'),
-
- # Preprocessor
- (r'#\s*if\s+0', Comment.Preproc, 'if0'),
- (r'#', Comment.Preproc, 'macro'),
- ],
- 'operators': [
- (r'(%s)\b' % '|'.join(operator_words), Operator.Word),
- (r'!=|==|<<|>>|\|\||&&|[-~+/*%=<>&^|.$]', Operator),
- ],
- 'comment': [
- (r'//(.*?)\n', Comment.Single),
- (r'/[*]', Comment.Multiline, 'comment2'),
- ],
- 'comment2': [
- (r'[^\/*]', Comment.Multiline),
- (r'/[*]', Comment.Multiline, '#push'),
- (r'[*]/', Comment.Multiline, '#pop'),
- (r'[\/*]', Comment.Multiline),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment, '#pop'),
- (r'.*?\n', Comment),
- ],
- 'macro': [
- include('comment'),
- (r'(import|include)(\s+)(<[^>]*?>)',
- bygroups(Comment.Preproc, Text, String), '#pop'),
- (r'(import|include)(\s+)("[^"]*?")',
- bygroups(Comment.Preproc, Text, String), '#pop'),
- (r"(import|include)(\s+)('[^']*?')",
- bygroups(Comment.Preproc, Text, String), '#pop'),
- (r'[^/\n]+', Comment.Preproc),
- ##(r'/[*](.|\n)*?[*]/', Comment),
- ##(r'//.*?\n', Comment, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'funcname': [
- include('whitespace'),
- (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
- # anonymous functions
- (r'(?=\()', Text, '#pop'),
- ],
- 'classname': [
- include('whitespace'),
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- # anonymous classes
- (r'(?=\{)', Text, '#pop'),
- ],
- 'modulename': [
- include('whitespace'),
- (r'\[', Punctuation, ('modulename2', 'tvarlist')),
- (r'', Error, 'modulename2'),
- ],
- 'modulename2': [
- include('whitespace'),
- (r'([a-zA-Z_]\w*)', Name.Namespace, '#pop:2'),
- ],
- 'tvarlist': [
- include('whitespace'),
- include('operators'),
- (r'\[', Punctuation, '#push'),
- (r'\]', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'(with|where)\b', Keyword),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings': [
- (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String)
- # newlines are an error (use "nl" state)
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- # included here again for raw strings
- (r'\\\\|\\"|\\\n', String.Escape),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- # included here again for raw strings
- (r"\\\\|\\'|\\\n", String.Escape),
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl')
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- include('strings'),
- include('nl')
- ],
- }
-
-
-class AdaLexer(RegexLexer):
- """
- For Ada source code.
-
- *New in Pygments 1.3.*
- """
-
- name = 'Ada'
- aliases = ['ada', 'ada95', 'ada2005']
- filenames = ['*.adb', '*.ads', '*.ada']
- mimetypes = ['text/x-ada']
-
- flags = re.MULTILINE | re.I # Ignore case
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
- (r'--.*?\n', Comment.Single),
- (r'[^\S\n]+', Text),
- (r'function|procedure|entry', Keyword.Declaration, 'subprogram'),
- (r'(subtype|type)(\s+)([a-z0-9_]+)',
- bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
- (r'task|protected', Keyword.Declaration),
- (r'(subtype)(\s+)', bygroups(Keyword.Declaration, Text)),
- (r'(end)(\s+)', bygroups(Keyword.Reserved, Text), 'end'),
- (r'(pragma)(\s+)([a-zA-Z0-9_]+)', bygroups(Keyword.Reserved, Text,
- Comment.Preproc)),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(Address|Byte|Boolean|Character|Controlled|Count|Cursor|'
- r'Duration|File_Mode|File_Type|Float|Generator|Integer|Long_Float|'
- r'Long_Integer|Long_Long_Float|Long_Long_Integer|Natural|Positive|'
- r'Reference_Type|Short_Float|Short_Integer|Short_Short_Float|'
- r'Short_Short_Integer|String|Wide_Character|Wide_String)\b',
- Keyword.Type),
- (r'(and(\s+then)?|in|mod|not|or(\s+else)?|rem)\b', Operator.Word),
- (r'generic|private', Keyword.Declaration),
- (r'package', Keyword.Declaration, 'package'),
- (r'array\b', Keyword.Reserved, 'array_def'),
- (r'(with|use)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'([a-z0-9_]+)(\s*)(:)(\s*)(constant)',
- bygroups(Name.Constant, Text, Punctuation, Text,
- Keyword.Reserved)),
- (r'<<[a-z0-9_]+>>', Name.Label),
- (r'([a-z0-9_]+)(\s*)(:)(\s*)(declare|begin|loop|for|while)',
- bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)),
- (r'\b(abort|abs|abstract|accept|access|aliased|all|array|at|begin|'
- r'body|case|constant|declare|delay|delta|digits|do|else|elsif|end|'
- r'entry|exception|exit|interface|for|goto|if|is|limited|loop|new|'
- r'null|of|or|others|out|overriding|pragma|protected|raise|range|'
- r'record|renames|requeue|return|reverse|select|separate|subtype|'
- r'synchronized|task|tagged|terminate|then|type|until|when|while|'
- r'xor)\b',
- Keyword.Reserved),
- (r'"[^"]*"', String),
- include('attribute'),
- include('numbers'),
- (r"'[^']'", String.Character),
- (r'([a-z0-9_]+)(\s*|[(,])', bygroups(Name, using(this))),
- (r"(<>|=>|:=|[()|:;,.'])", Punctuation),
- (r'[*<>+=/&-]', Operator),
- (r'\n+', Text),
- ],
- 'numbers' : [
- (r'[0-9_]+#[0-9a-f]+#', Number.Hex),
- (r'[0-9_]+\.[0-9_]*', Number.Float),
- (r'[0-9_]+', Number.Integer),
- ],
- 'attribute' : [
- (r"(')([a-zA-Z0-9_]+)", bygroups(Punctuation, Name.Attribute)),
- ],
- 'subprogram' : [
- (r'\(', Punctuation, ('#pop', 'formal_part')),
- (r';', Punctuation, '#pop'),
- (r'is\b', Keyword.Reserved, '#pop'),
- (r'"[^"]+"|[a-z0-9_]+', Name.Function),
- include('root'),
- ],
- 'end' : [
- ('(if|case|record|loop|select)', Keyword.Reserved),
- ('"[^"]+"|[a-zA-Z0-9_.]+', Name.Function),
- ('\s+', Text),
- (';', Punctuation, '#pop'),
- ],
- 'type_def': [
- (r';', Punctuation, '#pop'),
- (r'\(', Punctuation, 'formal_part'),
- (r'with|and|use', Keyword.Reserved),
- (r'array\b', Keyword.Reserved, ('#pop', 'array_def')),
- (r'record\b', Keyword.Reserved, ('record_def')),
- (r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation), '#pop'),
- include('root'),
- ],
- 'array_def' : [
- (r';', Punctuation, '#pop'),
- (r'([a-z0-9_]+)(\s+)(range)', bygroups(Keyword.Type, Text,
- Keyword.Reserved)),
- include('root'),
- ],
- 'record_def' : [
- (r'end record', Keyword.Reserved, '#pop'),
- include('root'),
- ],
- 'import': [
- (r'[a-z0-9_.]+', Name.Namespace, '#pop'),
- (r'', Text, '#pop'),
- ],
- 'formal_part' : [
- (r'\)', Punctuation, '#pop'),
- (r'[a-z0-9_]+', Name.Variable),
- (r',|:[^=]', Punctuation),
- (r'(in|not|null|out|access)\b', Keyword.Reserved),
- include('root'),
- ],
- 'package': [
- ('body', Keyword.Declaration),
- ('is\s+new|renames', Keyword.Reserved),
- ('is', Keyword.Reserved, '#pop'),
- (';', Punctuation, '#pop'),
- ('\(', Punctuation, 'package_instantiation'),
- ('([a-zA-Z0-9_.]+)', Name.Class),
- include('root'),
- ],
- 'package_instantiation': [
- (r'("[^"]+"|[a-z0-9_]+)(\s+)(=>)', bygroups(Name.Variable,
- Text, Punctuation)),
- (r'[a-z0-9._\'"]', Text),
- (r'\)', Punctuation, '#pop'),
- include('root'),
- ],
- }
-
-
-class Modula2Lexer(RegexLexer):
- """
- For `Modula-2 <http://www.modula2.org/>`_ source code.
-
- Additional options that determine which keywords are highlighted:
-
- `pim`
- Select PIM Modula-2 dialect (default: True).
- `iso`
- Select ISO Modula-2 dialect (default: False).
- `objm2`
- Select Objective Modula-2 dialect (default: False).
- `gm2ext`
- Also highlight GNU extensions (default: False).
-
- *New in Pygments 1.3.*
- """
- name = 'Modula-2'
- aliases = ['modula2', 'm2']
- filenames = ['*.def', '*.mod']
- mimetypes = ['text/x-modula2']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'whitespace': [
- (r'\n+', Text), # blank lines
- (r'\s+', Text), # whitespace
- ],
- 'identifiers': [
- (r'([a-zA-Z_\$][a-zA-Z0-9_\$]*)', Name),
- ],
- 'numliterals': [
- (r'[01]+B', Number.Binary), # binary number (ObjM2)
- (r'[0-7]+B', Number.Oct), # octal number (PIM + ISO)
- (r'[0-7]+C', Number.Oct), # char code (PIM + ISO)
- (r'[0-9A-F]+C', Number.Hex), # char code (ObjM2)
- (r'[0-9A-F]+H', Number.Hex), # hexadecimal number
- (r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float), # real number
- (r'[0-9]+\.[0-9]+', Number.Float), # real number
- (r'[0-9]+', Number.Integer), # decimal whole number
- ],
- 'strings': [
- (r"'(\\\\|\\'|[^'])*'", String), # single quoted string
- (r'"(\\\\|\\"|[^"])*"', String), # double quoted string
- ],
- 'operators': [
- (r'[*/+=#~&<>\^-]', Operator),
- (r':=', Operator), # assignment
- (r'@', Operator), # pointer deref (ISO)
- (r'\.\.', Operator), # ellipsis or range
- (r'`', Operator), # Smalltalk message (ObjM2)
- (r'::', Operator), # type conversion (ObjM2)
- ],
- 'punctuation': [
- (r'[\(\)\[\]{},.:;|]', Punctuation),
- ],
- 'comments': [
- (r'//.*?\n', Comment.Single), # ObjM2
- (r'/\*(.*?)\*/', Comment.Multiline), # ObjM2
- (r'\(\*([^\$].*?)\*\)', Comment.Multiline),
- # TODO: nesting of (* ... *) comments
- ],
- 'pragmas': [
- (r'\(\*\$(.*?)\*\)', Comment.Preproc), # PIM
- (r'<\*(.*?)\*>', Comment.Preproc), # ISO + ObjM2
- ],
- 'root': [
- include('whitespace'),
- include('comments'),
- include('pragmas'),
- include('identifiers'),
- include('numliterals'),
- include('strings'),
- include('operators'),
- include('punctuation'),
- ]
- }
-
- pim_reserved_words = [
- # 40 reserved words
- 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION',
- 'DIV', 'DO', 'ELSE', 'ELSIF', 'END', 'EXIT', 'EXPORT', 'FOR',
- 'FROM', 'IF', 'IMPLEMENTATION', 'IMPORT', 'IN', 'LOOP', 'MOD',
- 'MODULE', 'NOT', 'OF', 'OR', 'POINTER', 'PROCEDURE', 'QUALIFIED',
- 'RECORD', 'REPEAT', 'RETURN', 'SET', 'THEN', 'TO', 'TYPE',
- 'UNTIL', 'VAR', 'WHILE', 'WITH',
- ]
-
- pim_pervasives = [
- # 31 pervasives
- 'ABS', 'BITSET', 'BOOLEAN', 'CAP', 'CARDINAL', 'CHAR', 'CHR', 'DEC',
- 'DISPOSE', 'EXCL', 'FALSE', 'FLOAT', 'HALT', 'HIGH', 'INC', 'INCL',
- 'INTEGER', 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEW', 'NIL', 'ODD',
- 'ORD', 'PROC', 'REAL', 'SIZE', 'TRUE', 'TRUNC', 'VAL',
- ]
-
- iso_reserved_words = [
- # 46 reserved words
- 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
- 'DO', 'ELSE', 'ELSIF', 'END', 'EXCEPT', 'EXIT', 'EXPORT', 'FINALLY',
- 'FOR', 'FORWARD', 'FROM', 'IF', 'IMPLEMENTATION', 'IMPORT', 'IN',
- 'LOOP', 'MOD', 'MODULE', 'NOT', 'OF', 'OR', 'PACKEDSET', 'POINTER',
- 'PROCEDURE', 'QUALIFIED', 'RECORD', 'REPEAT', 'REM', 'RETRY',
- 'RETURN', 'SET', 'THEN', 'TO', 'TYPE', 'UNTIL', 'VAR', 'WHILE',
- 'WITH',
- ]
-
- iso_pervasives = [
- # 42 pervasives
- 'ABS', 'BITSET', 'BOOLEAN', 'CAP', 'CARDINAL', 'CHAR', 'CHR', 'CMPLX',
- 'COMPLEX', 'DEC', 'DISPOSE', 'EXCL', 'FALSE', 'FLOAT', 'HALT', 'HIGH',
- 'IM', 'INC', 'INCL', 'INT', 'INTEGER', 'INTERRUPTIBLE', 'LENGTH',
- 'LFLOAT', 'LONGCOMPLEX', 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEW',
- 'NIL', 'ODD', 'ORD', 'PROC', 'PROTECTION', 'RE', 'REAL', 'SIZE',
- 'TRUE', 'TRUNC', 'UNINTERRUPTIBLE', 'VAL',
- ]
-
- objm2_reserved_words = [
- # base language, 42 reserved words
- 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
- 'DO', 'ELSE', 'ELSIF', 'END', 'ENUM', 'EXIT', 'FOR', 'FROM', 'IF',
- 'IMMUTABLE', 'IMPLEMENTATION', 'IMPORT', 'IN', 'IS', 'LOOP', 'MOD',
- 'MODULE', 'NOT', 'OF', 'OPAQUE', 'OR', 'POINTER', 'PROCEDURE',
- 'RECORD', 'REPEAT', 'RETURN', 'SET', 'THEN', 'TO', 'TYPE',
- 'UNTIL', 'VAR', 'VARIADIC', 'WHILE',
- # OO extensions, 16 reserved words
- 'BYCOPY', 'BYREF', 'CLASS', 'CONTINUE', 'CRITICAL', 'INOUT', 'METHOD',
- 'ON', 'OPTIONAL', 'OUT', 'PRIVATE', 'PROTECTED', 'PROTOCOL', 'PUBLIC',
- 'SUPER', 'TRY',
- ]
-
- objm2_pervasives = [
- # base language, 38 pervasives
- 'ABS', 'BITSET', 'BOOLEAN', 'CARDINAL', 'CHAR', 'CHR', 'DISPOSE',
- 'FALSE', 'HALT', 'HIGH', 'INTEGER', 'INRANGE', 'LENGTH', 'LONGCARD',
- 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEG', 'NEW', 'NEXTV', 'NIL',
- 'OCTET', 'ODD', 'ORD', 'PRED', 'PROC', 'READ', 'REAL', 'SUCC', 'TMAX',
- 'TMIN', 'TRUE', 'TSIZE', 'UNICHAR', 'VAL', 'WRITE', 'WRITEF',
- # OO extensions, 3 pervasives
- 'OBJECT', 'NO', 'YES',
- ]
-
- gnu_reserved_words = [
- # 10 additional reserved words
- 'ASM', '__ATTRIBUTE__', '__BUILTIN__', '__COLUMN__', '__DATE__',
- '__FILE__', '__FUNCTION__', '__LINE__', '__MODULE__', 'VOLATILE',
- ]
-
- gnu_pervasives = [
- # 21 identifiers, actually from pseudo-module SYSTEM
- # but we will highlight them as if they were pervasives
- 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
- 'CARDINAL32', 'CARDINAL64', 'COMPLEX32', 'COMPLEX64', 'COMPLEX96',
- 'COMPLEX128', 'INTEGER8', 'INTEGER16', 'INTEGER32', 'INTEGER64',
- 'REAL8', 'REAL16', 'REAL32', 'REAL96', 'REAL128', 'THROW',
- ]
-
- def __init__(self, **options):
- self.reserved_words = set()
- self.pervasives = set()
- # ISO Modula-2
- if get_bool_opt(options, 'iso', False):
- self.reserved_words.update(self.iso_reserved_words)
- self.pervasives.update(self.iso_pervasives)
- # Objective Modula-2
- elif get_bool_opt(options, 'objm2', False):
- self.reserved_words.update(self.objm2_reserved_words)
- self.pervasives.update(self.objm2_pervasives)
- # PIM Modula-2 (DEFAULT)
- else:
- self.reserved_words.update(self.pim_reserved_words)
- self.pervasives.update(self.pim_pervasives)
- # GNU extensions
- if get_bool_opt(options, 'gm2ext', False):
- self.reserved_words.update(self.gnu_reserved_words)
- self.pervasives.update(self.gnu_pervasives)
- # initialise
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- # check for reserved words and pervasives
- if token is Name:
- if value in self.reserved_words:
- token = Keyword.Reserved
- elif value in self.pervasives:
- token = Keyword.Pervasive
- # return result
- yield index, token, value
-
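A minimal usage sketch, not taken from the patched sources: the dialect switches documented in the Modula2Lexer docstring are ordinary lexer options, so they are passed as keyword arguments when the lexer is instantiated. The source string and choice of formatter below are throwaway placeholders.

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import Modula2Lexer

    code = "MODULE Hello;\nBEGIN\nEND Hello.\n"
    # iso=True selects the ISO dialect; gm2ext=True also highlights GNU extensions
    print(highlight(code, Modula2Lexer(iso=True, gm2ext=True), TerminalFormatter()))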
-
-class BlitzMaxLexer(RegexLexer):
- """
- For `BlitzMax <http://blitzbasic.com>`_ source code.
-
- *New in Pygments 1.4.*
- """
-
- name = 'BlitzMax'
- aliases = ['blitzmax', 'bmax']
- filenames = ['*.bmx']
- mimetypes = ['text/x-bmx']
-
- bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b'
- bmax_sktypes = r'@{1,2}|[!#$%]'
- bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b'
- bmax_name = r'[a-z_][a-z0-9_]*'
- bmax_var = (r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)'
- r'|([ \t]*)([:])([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)') % \
- (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name)
- bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])'
-
- flags = re.MULTILINE | re.IGNORECASE
- tokens = {
- 'root': [
- # Text
- (r'[ \t]+', Text),
- (r'\.\.\n', Text), # Line continuation
- # Comments
- (r"'.*?\n", Comment.Single),
- (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline),
- # Data types
- ('"', String.Double, 'string'),
- # Numbers
- (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
- (r'\.[0-9]*(?!\.)', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\$[0-9a-f]+', Number.Hex),
- (r'\%[10]+', Number), # Binary
- # Other
- (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' %
- (bmax_vopwords), Operator),
- (r'[(),.:\[\]]', Punctuation),
- (r'(?:#[\w \t]*)', Name.Label),
- (r'(?:\?[\w \t]*)', Comment.Preproc),
- # Identifiers
- (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name),
- bygroups(Keyword.Reserved, Text, Punctuation, Name.Class)),
- (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' %
- (bmax_name, bmax_name),
- bygroups(Keyword.Reserved, Text, Keyword.Namespace)),
- (bmax_func, bygroups(Name.Function, Text, Keyword.Type,
- Operator, Text, Punctuation, Text,
- Keyword.Type, Name.Class, Text,
- Keyword.Type, Text, Punctuation)),
- (bmax_var, bygroups(Name.Variable, Text, Keyword.Type, Operator,
- Text, Punctuation, Text, Keyword.Type,
- Name.Class, Text, Keyword.Type)),
- (r'\b(Type|Extends)([ \t]+)(%s)' % (bmax_name),
- bygroups(Keyword.Reserved, Text, Name.Class)),
- # Keywords
- (r'\b(Ptr)\b', Keyword.Type),
- (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant),
- (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration),
- (r'\b(TNullMethodException|TNullFunctionException|'
- r'TNullObjectException|TArrayBoundsException|'
- r'TRuntimeException)\b', Name.Exception),
- (r'\b(Strict|SuperStrict|Module|ModuleInfo|'
- r'End|Return|Continue|Exit|Public|Private|'
- r'Var|VarPtr|Chr|Len|Asc|SizeOf|Sgn|Abs|Min|Max|'
- r'New|Release|Delete|'
- r'Incbin|IncbinPtr|IncbinLen|'
- r'Framework|Include|Import|Extern|EndExtern|'
- r'Function|EndFunction|'
- r'Type|EndType|Extends|'
- r'Method|EndMethod|'
- r'Abstract|Final|'
- r'If|Then|Else|ElseIf|EndIf|'
- r'For|To|Next|Step|EachIn|'
- r'While|Wend|EndWhile|'
- r'Repeat|Until|Forever|'
- r'Select|Case|Default|EndSelect|'
- r'Try|Catch|EndTry|Throw|Assert|'
- r'Goto|DefData|ReadData|RestoreData)\b', Keyword.Reserved),
- # Final resolve (for variable names and such)
- (r'(%s)' % (bmax_name), Name.Variable),
- ],
- 'string': [
- (r'""', String.Double),
- (r'"C?', String.Double, '#pop'),
- (r'[^"]+', String.Double),
- ],
- }
-
-
-class BlitzBasicLexer(RegexLexer):
- """
- For `BlitzBasic <http://blitzbasic.com>`_ source code.
-
- *New in Pygments 1.7.*
- """
-
- name = 'BlitzBasic'
- aliases = ['blitzbasic', 'b3d', 'bplus']
- filenames = ['*.bb', '*.decls']
- mimetypes = ['text/x-bb']
-
- bb_vopwords = (r'\b(Shl|Shr|Sar|Mod|Or|And|Not|'
- r'Abs|Sgn|Handle|Int|Float|Str|'
- r'First|Last|Before|After)\b')
- bb_sktypes = r'@{1,2}|[#$%]'
- bb_name = r'[a-z][a-z0-9_]*'
- bb_var = (r'(%s)(?:([ \t]*)(%s)|([ \t]*)([.])([ \t]*)(?:(%s)))?') % \
- (bb_name, bb_sktypes, bb_name)
-
- flags = re.MULTILINE | re.IGNORECASE
- tokens = {
- 'root': [
- # Text
- (r'[ \t]+', Text),
- # Comments
- (r";.*?\n", Comment.Single),
- # Data types
- ('"', String.Double, 'string'),
- # Numbers
- (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
- (r'\.[0-9]+(?!\.)', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\$[0-9a-f]+', Number.Hex),
- (r'\%[10]+', Number), # Binary
- # Other
- (r'(?:%s|([+\-*/~=<>^]))' % (bb_vopwords), Operator),
- (r'[(),:\[\]\\]', Punctuation),
- (r'\.([ \t]*)(%s)' % bb_name, Name.Label),
- # Identifiers
- (r'\b(New)\b([ \t]+)(%s)' % (bb_name),
- bygroups(Keyword.Reserved, Text, Name.Class)),
- (r'\b(Gosub|Goto)\b([ \t]+)(%s)' % (bb_name),
- bygroups(Keyword.Reserved, Text, Name.Label)),
- (r'\b(Object)\b([ \t]*)([.])([ \t]*)(%s)\b' % (bb_name),
- bygroups(Operator, Text, Punctuation, Text, Name.Class)),
- (r'\b%s\b([ \t]*)(\()' % bb_var,
- bygroups(Name.Function, Text, Keyword.Type, Text, Punctuation,
- Text, Name.Class, Text, Punctuation)),
- (r'\b(Function)\b([ \t]+)%s' % bb_var,
- bygroups(Keyword.Reserved, Text, Name.Function, Text, Keyword.Type,
- Text, Punctuation, Text, Name.Class)),
- (r'\b(Type)([ \t]+)(%s)' % (bb_name),
- bygroups(Keyword.Reserved, Text, Name.Class)),
- # Keywords
- (r'\b(Pi|True|False|Null)\b', Keyword.Constant),
- (r'\b(Local|Global|Const|Field|Dim)\b', Keyword.Declaration),
- (r'\b(End|Return|Exit|'
- r'Chr|Len|Asc|'
- r'New|Delete|Insert|'
- r'Include|'
- r'Function|'
- r'Type|'
- r'If|Then|Else|ElseIf|EndIf|'
- r'For|To|Next|Step|Each|'
- r'While|Wend|'
- r'Repeat|Until|Forever|'
- r'Select|Case|Default|'
- r'Goto|Gosub|Data|Read|Restore)\b', Keyword.Reserved),
- # Final resolve (for variable names and such)
-# (r'(%s)' % (bb_name), Name.Variable),
- (bb_var, bygroups(Name.Variable, Text, Keyword.Type,
- Text, Punctuation, Text, Name.Class)),
- ],
- 'string': [
- (r'""', String.Double),
- (r'"C?', String.Double, '#pop'),
- (r'[^"]+', String.Double),
- ],
- }
-
-
-class NimrodLexer(RegexLexer):
- """
- For `Nimrod <http://nimrod-code.org/>`_ source code.
-
- *New in Pygments 1.5.*
- """
-
- name = 'Nimrod'
- aliases = ['nimrod', 'nim']
- filenames = ['*.nim', '*.nimrod']
- mimetypes = ['text/x-nimrod']
-
- flags = re.MULTILINE | re.IGNORECASE | re.UNICODE
-
- def underscorize(words):
- newWords = []
- new = ""
- for word in words:
- for ch in word:
- new += (ch + "_?")
- newWords.append(new)
- new = ""
- return "|".join(newWords)
-
- keywords = [
- 'addr', 'and', 'as', 'asm', 'atomic', 'bind', 'block', 'break',
- 'case', 'cast', 'const', 'continue', 'converter', 'discard',
- 'distinct', 'div', 'elif', 'else', 'end', 'enum', 'except', 'finally',
- 'for', 'generic', 'if', 'implies', 'in', 'yield',
- 'is', 'isnot', 'iterator', 'lambda', 'let', 'macro', 'method',
- 'mod', 'not', 'notin', 'object', 'of', 'or', 'out', 'proc',
- 'ptr', 'raise', 'ref', 'return', 'shl', 'shr', 'template', 'try',
- 'tuple', 'type' , 'when', 'while', 'with', 'without', 'xor'
- ]
-
- keywordsPseudo = [
- 'nil', 'true', 'false'
- ]
-
- opWords = [
- 'and', 'or', 'not', 'xor', 'shl', 'shr', 'div', 'mod', 'in',
- 'notin', 'is', 'isnot'
- ]
-
- types = [
- 'int', 'int8', 'int16', 'int32', 'int64', 'float', 'float32', 'float64',
- 'bool', 'char', 'range', 'array', 'seq', 'set', 'string'
- ]
-
- tokens = {
- 'root': [
- (r'##.*$', String.Doc),
- (r'#.*$', Comment),
- (r'\*|=|>|<|\+|-|/|@|\$|~|&|%|\!|\?|\||\\|\[|\]', Operator),
- (r'\.\.|\.|,|\[\.|\.\]|{\.|\.}|\(\.|\.\)|{|}|\(|\)|:|\^|`|;',
- Punctuation),
-
- # Strings
- (r'(?:[\w]+)"', String, 'rdqs'),
- (r'"""', String, 'tdqs'),
- ('"', String, 'dqs'),
-
- # Char
- ("'", String.Char, 'chars'),
-
- # Keywords
- (r'(%s)\b' % underscorize(opWords), Operator.Word),
- (r'(p_?r_?o_?c_?\s)(?![\(\[\]])', Keyword, 'funcname'),
- (r'(%s)\b' % underscorize(keywords), Keyword),
- (r'(%s)\b' % underscorize(['from', 'import', 'include']),
- Keyword.Namespace),
- (r'(v_?a_?r)\b', Keyword.Declaration),
- (r'(%s)\b' % underscorize(types), Keyword.Type),
- (r'(%s)\b' % underscorize(keywordsPseudo), Keyword.Pseudo),
- # Identifiers
- (r'\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*', Name),
- # Numbers
- (r'[0-9][0-9_]*(?=([eE.]|\'[fF](32|64)))',
- Number.Float, ('float-suffix', 'float-number')),
- (r'0[xX][a-fA-F0-9][a-fA-F0-9_]*', Number.Hex, 'int-suffix'),
- (r'0[bB][01][01_]*', Number, 'int-suffix'),
- (r'0o[0-7][0-7_]*', Number.Oct, 'int-suffix'),
- (r'[0-9][0-9_]*', Number.Integer, 'int-suffix'),
- # Whitespace
- (r'\s+', Text),
- (r'.+$', Error),
- ],
- 'chars': [
- (r'\\([\\abcefnrtvl"\']|x[a-fA-F0-9]{2}|[0-9]{1,3})', String.Escape),
- (r"'", String.Char, '#pop'),
- (r".", String.Char)
- ],
- 'strings': [
- (r'(?<!\$)\$(\d+|#|\w+)+', String.Interpol),
- (r'[^\\\'"\$\n]+', String),
- # quotes, dollars and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'\$', String)
- # newlines are an error (use "nl" state)
- ],
- 'dqs': [
- (r'\\([\\abcefnrtvl"\']|\n|x[a-fA-F0-9]{2}|[0-9]{1,3})',
- String.Escape),
- (r'"', String, '#pop'),
- include('strings')
- ],
- 'rdqs': [
- (r'"(?!")', String, '#pop'),
- (r'""', String.Escape),
- include('strings')
- ],
- 'tdqs': [
- (r'"""(?!")', String, '#pop'),
- include('strings'),
- include('nl')
- ],
- 'funcname': [
- (r'((?![\d_])\w)(((?!_)\w)|(_(?!_)\w))*', Name.Function, '#pop'),
- (r'`.+`', Name.Function, '#pop')
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'float-number': [
- (r'\.(?!\.)[0-9_]*', Number.Float),
- (r'[eE][+-]?[0-9][0-9_]*', Number.Float),
- (r'', Text, '#pop')
- ],
- 'float-suffix': [
- (r'\'[fF](32|64)', Number.Float),
- (r'', Text, '#pop')
- ],
- 'int-suffix': [
- (r'\'[iI](32|64)', Number.Integer.Long),
- (r'\'[iI](8|16)', Number.Integer),
- (r'', Text, '#pop')
- ],
- }
-
-
-class FantomLexer(RegexLexer):
- """
- For Fantom source code.
-
- *New in Pygments 1.5.*
- """
- name = 'Fantom'
- aliases = ['fan']
- filenames = ['*.fan']
- mimetypes = ['application/x-fantom']
-
- # often used regexes
- def s(str):
- return Template(str).substitute(
- dict (
- pod = r'[\"\w\.]+',
- eos = r'\n|;',
- id = r'[a-zA-Z_][a-zA-Z0-9_]*',
- # all chars which can be part of type definition. Starts with
- # either letter, or [ (maps), or | (funcs)
- type = r'(?:\[|[a-zA-Z_]|\|)[:\w\[\]\|\->\?]*?',
- )
- )
-
-
- tokens = {
- 'comments': [
- (r'(?s)/\*.*?\*/', Comment.Multiline), #Multiline
- (r'//.*?\n', Comment.Single), #Single line
- #todo: highlight references in fandocs
- (r'\*\*.*?\n', Comment.Special), #Fandoc
- (r'#.*\n', Comment.Single) #Shell-style
- ],
- 'literals': [
- (r'\b-?[\d_]+(ns|ms|sec|min|hr|day)', Number), #Duration
- (r'\b-?[\d_]*\.[\d_]+(ns|ms|sec|min|hr|day)', Number),
- #Duration with dot
- (r'\b-?(\d+)?\.\d+(f|F|d|D)?', Number.Float), #Float/Decimal
- (r'\b-?0x[0-9a-fA-F_]+', Number.Hex), #Hex
- (r'\b-?[\d_]+', Number.Integer), #Int
- (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), #Char
- (r'"', Punctuation, 'insideStr'), #Opening quote
- (r'`', Punctuation, 'insideUri'), #Opening accent
- (r'\b(true|false|null)\b', Keyword.Constant), #Bool & null
- (r'(?:(\w+)(::))?(\w+)(<\|)(.*?)(\|>)', #DSL
- bygroups(Name.Namespace, Punctuation, Name.Class,
- Punctuation, String, Punctuation)),
- (r'(?:(\w+)(::))?(\w+)?(#)(\w+)?', #Type/slot literal
- bygroups(Name.Namespace, Punctuation, Name.Class,
- Punctuation, Name.Function)),
- (r'\[,\]', Literal), # Empty list
- (s(r'($type)(\[,\])'), # Typed empty list
- bygroups(using(this, state = 'inType'), Literal)),
- (r'\[:\]', Literal), # Empty Map
- (s(r'($type)(\[:\])'),
- bygroups(using(this, state = 'inType'), Literal)),
- ],
- 'insideStr': [
- (r'\\\\', String.Escape), #Escaped backslash
- (r'\\"', String.Escape), #Escaped "
- (r'\\`', String.Escape), #Escaped `
- (r'\$\w+', String.Interpol), #Subst var
- (r'\${.*?}', String.Interpol), #Subst expr
- (r'"', Punctuation, '#pop'), #Closing quot
- (r'.', String) #String content
- ],
- 'insideUri': [ #TODO: remove copy/paste str/uri
- (r'\\\\', String.Escape), #Escaped backslash
- (r'\\"', String.Escape), #Escaped "
- (r'\\`', String.Escape), #Escaped `
- (r'\$\w+', String.Interpol), #Subst var
- (r'\${.*?}', String.Interpol), #Subst expr
- (r'`', Punctuation, '#pop'), #Closing tick
- (r'.', String.Backtick) #URI content
- ],
- 'protectionKeywords': [
- (r'\b(public|protected|private|internal)\b', Keyword),
- ],
- 'typeKeywords': [
- (r'\b(abstract|final|const|native|facet|enum)\b', Keyword),
- ],
- 'methodKeywords': [
- (r'\b(abstract|native|once|override|static|virtual|final)\b',
- Keyword),
- ],
- 'fieldKeywords': [
- (r'\b(abstract|const|final|native|override|static|virtual|'
- r'readonly)\b', Keyword)
- ],
- 'otherKeywords': [
- (r'\b(try|catch|throw|finally|for|if|else|while|as|is|isnot|'
- r'switch|case|default|continue|break|do|return|get|set)\b',
- Keyword),
- (r'\b(it|this|super)\b', Name.Builtin.Pseudo),
- ],
- 'operators': [
- (r'\+\+|\-\-|\+|\-|\*|/|\|\||&&|<=>|<=|<|>=|>|=|!|\[|\]', Operator)
- ],
- 'inType': [
- (r'[\[\]\|\->:\?]', Punctuation),
- (s(r'$id'), Name.Class),
- (r'', Text, '#pop'),
-
- ],
- 'root': [
- include('comments'),
- include('protectionKeywords'),
- include('typeKeywords'),
- include('methodKeywords'),
- include('fieldKeywords'),
- include('literals'),
- include('otherKeywords'),
- include('operators'),
- (r'using\b', Keyword.Namespace, 'using'), # Using stmt
- (r'@\w+', Name.Decorator, 'facet'), # Symbol
- (r'(class|mixin)(\s+)(\w+)', bygroups(Keyword, Text, Name.Class),
- 'inheritance'), # Inheritance list
-
-
- ### Type var := val
- (s(r'($type)([ \t]+)($id)(\s*)(:=)'),
- bygroups(using(this, state = 'inType'), Text,
- Name.Variable, Text, Operator)),
-
- ### var := val
- (s(r'($id)(\s*)(:=)'),
- bygroups(Name.Variable, Text, Operator)),
-
- ### .someId( or ->someId( ###
- (s(r'(\.|(?:\->))($id)(\s*)(\()'),
- bygroups(Operator, Name.Function, Text, Punctuation),
- 'insideParen'),
-
- ### .someId or ->someId
- (s(r'(\.|(?:\->))($id)'),
- bygroups(Operator, Name.Function)),
-
- ### new makeXXX ( ####
- (r'(new)(\s+)(make\w*)(\s*)(\()',
- bygroups(Keyword, Text, Name.Function, Text, Punctuation),
- 'insideMethodDeclArgs'),
-
- ### Type name ( ####
- (s(r'($type)([ \t]+)' #Return type and whitespace
- r'($id)(\s*)(\()'), #method name + open brace
- bygroups(using(this, state = 'inType'), Text,
- Name.Function, Text, Punctuation),
- 'insideMethodDeclArgs'),
-
- ### ArgType argName, #####
- (s(r'($type)(\s+)($id)(\s*)(,)'),
- bygroups(using(this, state='inType'), Text, Name.Variable,
- Text, Punctuation)),
-
- #### ArgType argName) ####
- ## Covered in 'insideParen' state
-
- ### ArgType argName -> ArgType| ###
- (s(r'($type)(\s+)($id)(\s*)(\->)(\s*)($type)(\|)'),
- bygroups(using(this, state='inType'), Text, Name.Variable,
- Text, Punctuation, Text, using(this, state = 'inType'),
- Punctuation)),
-
- ### ArgType argName| ###
- (s(r'($type)(\s+)($id)(\s*)(\|)'),
- bygroups(using(this, state='inType'), Text, Name.Variable,
- Text, Punctuation)),
-
- ### Type var
- (s(r'($type)([ \t]+)($id)'),
- bygroups(using(this, state='inType'), Text,
- Name.Variable)),
-
- (r'\(', Punctuation, 'insideParen'),
- (r'\{', Punctuation, 'insideBrace'),
- (r'.', Text)
- ],
- 'insideParen': [
- (r'\)', Punctuation, '#pop'),
- include('root'),
- ],
- 'insideMethodDeclArgs': [
- (r'\)', Punctuation, '#pop'),
- (s(r'($type)(\s+)($id)(\s*)(\))'),
- bygroups(using(this, state='inType'), Text, Name.Variable,
- Text, Punctuation), '#pop'),
- include('root'),
- ],
- 'insideBrace': [
- (r'\}', Punctuation, '#pop'),
- include('root'),
- ],
- 'inheritance': [
- (r'\s+', Text), #Whitespace
- (r':|,', Punctuation),
- (r'(?:(\w+)(::))?(\w+)',
- bygroups(Name.Namespace, Punctuation, Name.Class)),
- (r'{', Punctuation, '#pop')
- ],
- 'using': [
- (r'[ \t]+', Text), # consume whitespaces
- (r'(\[)(\w+)(\])',
- bygroups(Punctuation, Comment.Special, Punctuation)), #ffi
- (r'(\")?([\w\.]+)(\")?',
- bygroups(Punctuation, Name.Namespace, Punctuation)), #podname
- (r'::', Punctuation, 'usingClass'),
- (r'', Text, '#pop')
- ],
- 'usingClass': [
- (r'[ \t]+', Text), # consume whitespaces
- (r'(as)(\s+)(\w+)',
- bygroups(Keyword.Declaration, Text, Name.Class), '#pop:2'),
- (r'[\w\$]+', Name.Class),
- (r'', Text, '#pop:2') # jump out to root state
- ],
- 'facet': [
- (r'\s+', Text),
- (r'{', Punctuation, 'facetFields'),
- (r'', Text, '#pop')
- ],
- 'facetFields': [
- include('comments'),
- include('literals'),
- include('operators'),
- (r'\s+', Text),
- (r'(\s*)(\w+)(\s*)(=)', bygroups(Text, Name, Text, Operator)),
- (r'}', Punctuation, '#pop'),
- (r'.', Text)
- ],
- }
-
-
-class RustLexer(RegexLexer):
- """
- Lexer for Mozilla's Rust programming language.
-
- *New in Pygments 1.6.*
- """
- name = 'Rust'
- filenames = ['*.rs', '*.rc']
- aliases = ['rust']
- mimetypes = ['text/x-rustsrc']
-
- tokens = {
- 'root': [
- # Whitespace and Comments
- (r'\n', Text),
- (r'\s+', Text),
- (r'//(.*?)\n', Comment.Single),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
-
- # Keywords
- (r'(as|assert|break|const'
- r'|copy|do|else|enum|extern|fail'
- r'|false|fn|for|if|impl|let|log'
- r'|loop|match|mod|move|mut|once|priv|pub|pure'
- r'|ref|return|static|struct|trait|true|type|unsafe|use|while'
- r'|u8|u16|u32|u64|i8|i16|i32|i64|uint'
- r'|int|float|f32|f64|str)\b', Keyword),
-
- # Character Literal
- (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""",
- String.Char),
- # Binary Literal
- (r'0[Bb][01_]+', Number, 'number_lit'),
- # Octal Literal
- (r'0[0-7_]+', Number.Oct, 'number_lit'),
- # Hexadecimal Literal
- (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
- # Decimal Literal
- (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?'
- r'[0-9_]+|\.[0-9_]*|[eE][+\-]?[0-9_]+)?', Number, 'number_lit'),
- # String Literal
- (r'"', String, 'string'),
-
- # Operators and Punctuation
- (r'[{}()\[\],.;]', Punctuation),
- (r'[+\-*/%&|<>^!~@=:?]', Operator),
-
- # Identifier
- (r'[a-zA-Z_$][a-zA-Z0-9_]*', Name),
-
- # Attributes
- (r'#\[', Comment.Preproc, 'attribute['),
- (r'#\(', Comment.Preproc, 'attribute('),
- # Macros
- (r'[A-Za-z_][A-Za-z0-9_]*!\[', Comment.Preproc, 'attribute['),
- (r'[A-Za-z_][A-Za-z0-9_]*!\(', Comment.Preproc, 'attribute('),
- ],
- 'number_lit': [
- (r'(([ui](8|16|32|64)?)|(f(32|64)?))?', Keyword, '#pop'),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape),
- (r'[^\\"]+', String),
- (r'\\', String),
- ],
- 'attribute_common': [
- (r'"', String, 'string'),
- (r'\[', Comment.Preproc, 'attribute['),
- (r'\(', Comment.Preproc, 'attribute('),
- ],
- 'attribute[': [
- include('attribute_common'),
- (r'\];?', Comment.Preproc, '#pop'),
- (r'[^"\]]+', Comment.Preproc),
- ],
- 'attribute(': [
- include('attribute_common'),
- (r'\);?', Comment.Preproc, '#pop'),
- (r'[^"\)]+', Comment.Preproc),
- ],
- }
-
-
-class CudaLexer(CLexer):
- """
- For NVIDIA `CUDA™ <http://developer.nvidia.com/category/zone/cuda-zone>`_
- source.
-
- *New in Pygments 1.6.*
- """
- name = 'CUDA'
- filenames = ['*.cu', '*.cuh']
- aliases = ['cuda', 'cu']
- mimetypes = ['text/x-cuda']
-
- function_qualifiers = ['__device__', '__global__', '__host__',
- '__noinline__', '__forceinline__']
- variable_qualifiers = ['__device__', '__constant__', '__shared__',
- '__restrict__']
- vector_types = ['char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3',
- 'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2',
- 'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1',
- 'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1',
- 'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4',
- 'ulong4', 'longlong1', 'ulonglong1', 'longlong2',
- 'ulonglong2', 'float1', 'float2', 'float3', 'float4',
- 'double1', 'double2', 'dim3']
- variables = ['gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize']
- functions = ['__threadfence_block', '__threadfence', '__threadfence_system',
- '__syncthreads', '__syncthreads_count', '__syncthreads_and',
- '__syncthreads_or']
- execution_confs = ['<<<', '>>>']
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- CLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if value in self.variable_qualifiers:
- token = Keyword.Type
- elif value in self.vector_types:
- token = Keyword.Type
- elif value in self.variables:
- token = Name.Builtin
- elif value in self.execution_confs:
- token = Keyword.Pseudo
- elif value in self.function_qualifiers:
- token = Keyword.Reserved
- elif value in self.functions:
- token = Name.Function
- yield index, token, value
-
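A minimal usage sketch, not taken from the patched sources: CudaLexer does no regex work of its own; it only re-labels Name tokens coming out of CLexer.get_tokens_unprocessed using the word lists above. The CUDA snippet below is a made-up placeholder to make that remapping visible.

    from pygments.lexers import CudaLexer

    code = "__global__ void add(int *a) { a[threadIdx.x] += 1; }"
    for pos, token, value in CudaLexer().get_tokens_unprocessed(code):
        # e.g. '__global__' comes back as Keyword.Reserved, 'threadIdx' as Name.Builtin
        print("%d %s %r" % (pos, token, value))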
-
-class MonkeyLexer(RegexLexer):
- """
- For
- `Monkey <https://en.wikipedia.org/wiki/Monkey_(programming_language)>`_
- source code.
-
- *New in Pygments 1.6.*
- """
-
- name = 'Monkey'
- aliases = ['monkey']
- filenames = ['*.monkey']
- mimetypes = ['text/x-monkey']
-
- name_variable = r'[a-z_][a-zA-Z0-9_]*'
- name_function = r'[A-Z][a-zA-Z0-9_]*'
- name_constant = r'[A-Z_][A-Z0-9_]*'
- name_class = r'[A-Z][a-zA-Z0-9_]*'
- name_module = r'[a-z0-9_]*'
-
- keyword_type = r'(?:Int|Float|String|Bool|Object|Array|Void)'
- # ? == Bool // % == Int // # == Float // $ == String
- keyword_type_special = r'[?%#$]'
-
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- #Text
- (r'\s+', Text),
- # Comments
- (r"'.*", Comment),
- (r'(?i)^#rem\b', Comment.Multiline, 'comment'),
- # preprocessor directives
- (r'(?i)^(?:#If|#ElseIf|#Else|#EndIf|#End|#Print|#Error)\b', Comment.Preproc),
- # preprocessor variable (any line starting with '#' that is not a directive)
- (r'^#', Comment.Preproc, 'variables'),
- # String
- ('"', String.Double, 'string'),
- # Numbers
- (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
- (r'\.[0-9]+(?!\.)', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\$[0-9a-fA-F]+', Number.Hex),
- (r'\%[10]+', Number), # Binary
- # Native data types
- (r'\b%s\b' % keyword_type, Keyword.Type),
- # Exception handling
- (r'(?i)\b(?:Try|Catch|Throw)\b', Keyword.Reserved),
- (r'Throwable', Name.Exception),
- # Builtins
- (r'(?i)\b(?:Null|True|False)\b', Name.Builtin),
- (r'(?i)\b(?:Self|Super)\b', Name.Builtin.Pseudo),
- (r'\b(?:HOST|LANG|TARGET|CONFIG)\b', Name.Constant),
- # Keywords
- (r'(?i)^(Import)(\s+)(.*)(\n)',
- bygroups(Keyword.Namespace, Text, Name.Namespace, Text)),
- (r'(?i)^Strict\b.*\n', Keyword.Reserved),
- (r'(?i)(Const|Local|Global|Field)(\s+)',
- bygroups(Keyword.Declaration, Text), 'variables'),
- (r'(?i)(New|Class|Interface|Extends|Implements)(\s+)',
- bygroups(Keyword.Reserved, Text), 'classname'),
- (r'(?i)(Function|Method)(\s+)',
- bygroups(Keyword.Reserved, Text), 'funcname'),
- (r'(?i)(?:End|Return|Public|Private|Extern|Property|'
- r'Final|Abstract)\b', Keyword.Reserved),
- # Flow Control stuff
- (r'(?i)(?:If|Then|Else|ElseIf|EndIf|'
- r'Select|Case|Default|'
- r'While|Wend|'
- r'Repeat|Until|Forever|'
- r'For|To|Until|Step|EachIn|Next|'
- r'Exit|Continue)\s+', Keyword.Reserved),
- # not used yet
- (r'(?i)\b(?:Module|Inline)\b', Keyword.Reserved),
- # Array
- (r'[\[\]]', Punctuation),
- # Other
- (r'<=|>=|<>|\*=|/=|\+=|-=|&=|~=|\|=|[-&*/^+=<>|~]', Operator),
- (r'(?i)(?:Not|Mod|Shl|Shr|And|Or)', Operator.Word),
- (r'[\(\){}!#,.:]', Punctuation),
- # catch the rest
- (r'%s\b' % name_constant, Name.Constant),
- (r'%s\b' % name_function, Name.Function),
- (r'%s\b' % name_variable, Name.Variable),
- ],
- 'funcname': [
- (r'(?i)%s\b' % name_function, Name.Function),
- (r':', Punctuation, 'classname'),
- (r'\s+', Text),
- (r'\(', Punctuation, 'variables'),
- (r'\)', Punctuation, '#pop')
- ],
- 'classname': [
- (r'%s\.' % name_module, Name.Namespace),
- (r'%s\b' % keyword_type, Keyword.Type),
- (r'%s\b' % name_class, Name.Class),
- # array (of given size)
- (r'(\[)(\s*)(\d*)(\s*)(\])',
- bygroups(Punctuation, Text, Number.Integer, Text, Punctuation)),
- # generics
- (r'\s+(?!<)', Text, '#pop'),
- (r'<', Punctuation, '#push'),
- (r'>', Punctuation, '#pop'),
- (r'\n', Text, '#pop'),
- (r'', Text, '#pop')
- ],
- 'variables': [
- (r'%s\b' % name_constant, Name.Constant),
- (r'%s\b' % name_variable, Name.Variable),
- (r'%s' % keyword_type_special, Keyword.Type),
- (r'\s+', Text),
- (r':', Punctuation, 'classname'),
- (r',', Punctuation, '#push'),
- (r'', Text, '#pop')
- ],
- 'string': [
- (r'[^"~]+', String.Double),
- (r'~q|~n|~r|~t|~z|~~', String.Escape),
- (r'"', String.Double, '#pop'),
- ],
- 'comment' : [
- (r'(?i)^#rem.*?', Comment.Multiline, "#push"),
- (r'(?i)^#end.*?', Comment.Multiline, "#pop"),
- (r'\n', Comment.Multiline),
- (r'.+', Comment.Multiline),
- ],
- }
-
-
-class CobolLexer(RegexLexer):
- """
- Lexer for OpenCOBOL code.
-
- *New in Pygments 1.6.*
- """
- name = 'COBOL'
- aliases = ['cobol']
- filenames = ['*.cob', '*.COB', '*.cpy', '*.CPY']
- mimetypes = ['text/x-cobol']
- flags = re.IGNORECASE | re.MULTILINE
-
- # Data Types: by PICTURE and USAGE
- # Operators: **, *, +, -, /, <, >, <=, >=, =, <>
- # Logical (?): NOT, AND, OR
-
- # Reserved words:
- # http://opencobol.add1tocobol.com/#reserved-words
- # Intrinsics:
- # http://opencobol.add1tocobol.com/#does-opencobol-implement-any-intrinsic-functions
-
- tokens = {
- 'root': [
- include('comment'),
- include('strings'),
- include('core'),
- include('nums'),
- (r'[a-z0-9]([_a-z0-9\-]*[a-z0-9]+)?', Name.Variable),
- # (r'[\s]+', Text),
- (r'[ \t]+', Text),
- ],
- 'comment': [
- (r'(^.{6}[*/].*\n|^.{6}|\*>.*\n)', Comment),
- ],
- 'core': [
- # Figurative constants
- (r'(^|(?<=[^0-9a-z_\-]))(ALL\s+)?'
- r'((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)'
- r'\s*($|(?=[^0-9a-z_\-]))',
- Name.Constant),
-
- # Reserved words STATEMENTS and other bolds
- (r'(^|(?<=[^0-9a-z_\-]))'
- r'(ACCEPT|ADD|ALLOCATE|CALL|CANCEL|CLOSE|COMPUTE|'
- r'CONFIGURATION|CONTINUE|'
- r'DATA|DELETE|DISPLAY|DIVIDE|DIVISION|ELSE|END|END-ACCEPT|'
- r'END-ADD|END-CALL|END-COMPUTE|END-DELETE|END-DISPLAY|'
- r'END-DIVIDE|END-EVALUATE|END-IF|END-MULTIPLY|END-OF-PAGE|'
- r'END-PERFORM|END-READ|END-RETURN|END-REWRITE|END-SEARCH|'
- r'END-START|END-STRING|END-SUBTRACT|END-UNSTRING|END-WRITE|'
- r'ENVIRONMENT|EVALUATE|EXIT|FD|FILE|FILE-CONTROL|FOREVER|'
- r'FREE|GENERATE|GO|GOBACK|'
- r'IDENTIFICATION|IF|INITIALIZE|'
- r'INITIATE|INPUT-OUTPUT|INSPECT|INVOKE|I-O-CONTROL|LINKAGE|'
- r'LOCAL-STORAGE|MERGE|MOVE|MULTIPLY|OPEN|'
- r'PERFORM|PROCEDURE|PROGRAM-ID|RAISE|READ|RELEASE|RESUME|'
- r'RETURN|REWRITE|SCREEN|'
- r'SD|SEARCH|SECTION|SET|SORT|START|STOP|STRING|SUBTRACT|'
- r'SUPPRESS|TERMINATE|THEN|UNLOCK|UNSTRING|USE|VALIDATE|'
- r'WORKING-STORAGE|WRITE)'
- r'\s*($|(?=[^0-9a-z_\-]))', Keyword.Reserved),
-
- # Reserved words
- (r'(^|(?<=[^0-9a-z_\-]))'
- r'(ACCESS|ADDRESS|ADVANCING|AFTER|ALL|'
- r'ALPHABET|ALPHABETIC|ALPHABETIC-LOWER|ALPHABETIC-UPPER|'
- r'ALPHANUMERIC|ALPHANUMERIC-EDITED|ALSO|ALTER|ALTERNATE'
- r'ANY|ARE|AREA|AREAS|ARGUMENT-NUMBER|ARGUMENT-VALUE|AS|'
- r'ASCENDING|ASSIGN|AT|AUTO|AUTO-SKIP|AUTOMATIC|AUTOTERMINATE|'
- r'BACKGROUND-COLOR|BASED|BEEP|BEFORE|BELL|'
- r'BLANK|'
- r'BLINK|BLOCK|BOTTOM|BY|BYTE-LENGTH|CHAINING|'
- r'CHARACTER|CHARACTERS|CLASS|CODE|CODE-SET|COL|COLLATING|'
- r'COLS|COLUMN|COLUMNS|COMMA|COMMAND-LINE|COMMIT|COMMON|'
- r'CONSTANT|CONTAINS|CONTENT|CONTROL|'
- r'CONTROLS|CONVERTING|COPY|CORR|CORRESPONDING|COUNT|CRT|'
- r'CURRENCY|CURSOR|CYCLE|DATE|DAY|DAY-OF-WEEK|DE|DEBUGGING|'
- r'DECIMAL-POINT|DECLARATIVES|DEFAULT|DELIMITED|'
- r'DELIMITER|DEPENDING|DESCENDING|DETAIL|DISK|'
- r'DOWN|DUPLICATES|DYNAMIC|EBCDIC|'
- r'ENTRY|ENVIRONMENT-NAME|ENVIRONMENT-VALUE|EOL|EOP|'
- r'EOS|ERASE|ERROR|ESCAPE|EXCEPTION|'
- r'EXCLUSIVE|EXTEND|EXTERNAL|'
- r'FILE-ID|FILLER|FINAL|FIRST|FIXED|FLOAT-LONG|FLOAT-SHORT|'
- r'FOOTING|FOR|FOREGROUND-COLOR|FORMAT|FROM|FULL|FUNCTION|'
- r'FUNCTION-ID|GIVING|GLOBAL|GROUP|'
- r'HEADING|HIGHLIGHT|I-O|ID|'
- r'IGNORE|IGNORING|IN|INDEX|INDEXED|INDICATE|'
- r'INITIAL|INITIALIZED|INPUT|'
- r'INTO|INTRINSIC|INVALID|IS|JUST|JUSTIFIED|KEY|LABEL|'
- r'LAST|LEADING|LEFT|LENGTH|LIMIT|LIMITS|LINAGE|'
- r'LINAGE-COUNTER|LINE|LINES|LOCALE|LOCK|'
- r'LOWLIGHT|MANUAL|MEMORY|MINUS|MODE|'
- r'MULTIPLE|NATIONAL|NATIONAL-EDITED|NATIVE|'
- r'NEGATIVE|NEXT|NO|NULL|NULLS|NUMBER|NUMBERS|NUMERIC|'
- r'NUMERIC-EDITED|OBJECT-COMPUTER|OCCURS|OF|OFF|OMITTED|ON|ONLY|'
- r'OPTIONAL|ORDER|ORGANIZATION|OTHER|OUTPUT|OVERFLOW|'
- r'OVERLINE|PACKED-DECIMAL|PADDING|PAGE|PARAGRAPH|'
- r'PLUS|POINTER|POSITION|POSITIVE|PRESENT|PREVIOUS|'
- r'PRINTER|PRINTING|PROCEDURE-POINTER|PROCEDURES|'
- r'PROCEED|PROGRAM|PROGRAM-POINTER|PROMPT|QUOTE|'
- r'QUOTES|RANDOM|RD|RECORD|RECORDING|RECORDS|RECURSIVE|'
- r'REDEFINES|REEL|REFERENCE|RELATIVE|REMAINDER|REMOVAL|'
- r'RENAMES|REPLACING|REPORT|REPORTING|REPORTS|REPOSITORY|'
- r'REQUIRED|RESERVE|RETURNING|REVERSE-VIDEO|REWIND|'
- r'RIGHT|ROLLBACK|ROUNDED|RUN|SAME|SCROLL|'
- r'SECURE|SEGMENT-LIMIT|SELECT|SENTENCE|SEPARATE|'
- r'SEQUENCE|SEQUENTIAL|SHARING|SIGN|SIGNED|SIGNED-INT|'
- r'SIGNED-LONG|SIGNED-SHORT|SIZE|SORT-MERGE|SOURCE|'
- r'SOURCE-COMPUTER|SPECIAL-NAMES|STANDARD|'
- r'STANDARD-1|STANDARD-2|STATUS|SUM|'
- r'SYMBOLIC|SYNC|SYNCHRONIZED|TALLYING|TAPE|'
- r'TEST|THROUGH|THRU|TIME|TIMES|TO|TOP|TRAILING|'
- r'TRANSFORM|TYPE|UNDERLINE|UNIT|UNSIGNED|'
- r'UNSIGNED-INT|UNSIGNED-LONG|UNSIGNED-SHORT|UNTIL|UP|'
- r'UPDATE|UPON|USAGE|USING|VALUE|VALUES|VARYING|WAIT|WHEN|'
- r'WITH|WORDS|YYYYDDD|YYYYMMDD)'
- r'\s*($|(?=[^0-9a-z_\-]))', Keyword.Pseudo),
-
- # inactive reserved words
- (r'(^|(?<=[^0-9a-z_\-]))'
- r'(ACTIVE-CLASS|ALIGNED|ANYCASE|ARITHMETIC|ATTRIBUTE|B-AND|'
- r'B-NOT|B-OR|B-XOR|BIT|BOOLEAN|CD|CENTER|CF|CH|CHAIN|CLASS-ID|'
- r'CLASSIFICATION|COMMUNICATION|CONDITION|DATA-POINTER|'
- r'DESTINATION|DISABLE|EC|EGI|EMI|ENABLE|END-RECEIVE|'
- r'ENTRY-CONVENTION|EO|ESI|EXCEPTION-OBJECT|EXPANDS|FACTORY|'
- r'FLOAT-BINARY-16|FLOAT-BINARY-34|FLOAT-BINARY-7|'
- r'FLOAT-DECIMAL-16|FLOAT-DECIMAL-34|FLOAT-EXTENDED|FORMAT|'
- r'FUNCTION-POINTER|GET|GROUP-USAGE|IMPLEMENTS|INFINITY|'
- r'INHERITS|INTERFACE|INTERFACE-ID|INVOKE|LC_ALL|LC_COLLATE|'
- r'LC_CTYPE|LC_MESSAGES|LC_MONETARY|LC_NUMERIC|LC_TIME|'
- r'LINE-COUNTER|MESSAGE|METHOD|METHOD-ID|NESTED|NONE|NORMAL|'
- r'OBJECT|OBJECT-REFERENCE|OPTIONS|OVERRIDE|PAGE-COUNTER|PF|PH|'
- r'PROPERTY|PROTOTYPE|PURGE|QUEUE|RAISE|RAISING|RECEIVE|'
- r'RELATION|REPLACE|REPRESENTS-NOT-A-NUMBER|RESET|RESUME|RETRY|'
- r'RF|RH|SECONDS|SEGMENT|SELF|SEND|SOURCES|STATEMENT|STEP|'
- r'STRONG|SUB-QUEUE-1|SUB-QUEUE-2|SUB-QUEUE-3|SUPER|SYMBOL|'
- r'SYSTEM-DEFAULT|TABLE|TERMINAL|TEXT|TYPEDEF|UCS-4|UNIVERSAL|'
- r'USER-DEFAULT|UTF-16|UTF-8|VAL-STATUS|VALID|VALIDATE|'
- r'VALIDATE-STATUS)\s*($|(?=[^0-9a-z_\-]))', Error),
-
- # Data Types
- (r'(^|(?<=[^0-9a-z_\-]))'
- r'(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|'
- r'(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|'
- r'BINARY-C-LONG|'
- r'BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|'
- r'BINARY)\s*($|(?=[^0-9a-z_\-]))', Keyword.Type),
-
- # Operators
- (r'(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)', Operator),
-
- # (r'(::)', Keyword.Declaration),
-
- (r'([(),;:&%.])', Punctuation),
-
- # Intrinsics
- (r'(^|(?<=[^0-9a-z_\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|'
- r'CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|'
- r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|'
- r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|'
- r'FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|'
- r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG10|LOG|'
- r'LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|'
- r'ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|'
- r'SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|'
- r'STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|'
- r'SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|'
- r'UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*'
- r'($|(?=[^0-9a-z_\-]))', Name.Function),
-
- # Booleans
- (r'(^|(?<=[^0-9a-z_\-]))(true|false)\s*($|(?=[^0-9a-z_\-]))', Name.Builtin),
- # Comparing Operators
- (r'(^|(?<=[^0-9a-z_\-]))(equal|equals|ne|lt|le|gt|ge|'
- r'greater|less|than|not|and|or)\s*($|(?=[^0-9a-z_\-]))', Operator.Word),
- ],
-
- # \"[^\"\n]*\"|\'[^\'\n]*\'
- 'strings': [
- # apparently strings can be delimited by EOL if they are continued
- # in the next line
- (r'"[^"\n]*("|\n)', String.Double),
- (r"'[^'\n]*('|\n)", String.Single),
- ],
-
- 'nums': [
- (r'\d+(\s*|\.$|$)', Number.Integer),
- (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
- (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
- ],
- }
-
-
-class CobolFreeformatLexer(CobolLexer):
- """
- Lexer for Free format OpenCOBOL code.
-
- *New in Pygments 1.6.*
- """
- name = 'COBOLFree'
- aliases = ['cobolfree']
- filenames = ['*.cbl', '*.CBL']
- mimetypes = []
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'comment': [
- (r'(\*>.*\n|^\w*\*.*$)', Comment),
- ],
- }
-
-
-class LogosLexer(ObjectiveCppLexer):
- """
- For Logos + Objective-C source code with preprocessor directives.
-
- *New in Pygments 1.6.*
- """
-
- name = 'Logos'
- aliases = ['logos']
- filenames = ['*.x', '*.xi', '*.xm', '*.xmi']
- mimetypes = ['text/x-logos']
- priority = 0.25
-
- tokens = {
- 'statements': [
- (r'(%orig|%log)\b', Keyword),
- (r'(%c)\b(\()(\s*)([a-zA-Z$_][a-zA-Z0-9$_]*)(\s*)(\))',
- bygroups(Keyword, Punctuation, Text, Name.Class, Text, Punctuation)),
- (r'(%init)\b(\()',
- bygroups(Keyword, Punctuation), 'logos_init_directive'),
- (r'(%init)(?=\s*;)', bygroups(Keyword)),
- (r'(%hook|%group)(\s+)([a-zA-Z$_][a-zA-Z0-9$_]+)',
- bygroups(Keyword, Text, Name.Class), '#pop'),
- (r'(%subclass)(\s+)', bygroups(Keyword, Text),
- ('#pop', 'logos_classname')),
- inherit,
- ],
- 'logos_init_directive' : [
- ('\s+', Text),
- (',', Punctuation, ('logos_init_directive', '#pop')),
- ('([a-zA-Z$_][a-zA-Z0-9$_]*)(\s*)(=)(\s*)([^);]*)',
- bygroups(Name.Class, Text, Punctuation, Text, Text)),
- ('([a-zA-Z$_][a-zA-Z0-9$_]*)', Name.Class),
- ('\)', Punctuation, '#pop'),
- ],
- 'logos_classname' : [
- ('([a-zA-Z$_][a-zA-Z0-9$_]*)(\s*:\s*)([a-zA-Z$_][a-zA-Z0-9$_]*)?',
- bygroups(Name.Class, Text, Name.Class), '#pop'),
- ('([a-zA-Z$_][a-zA-Z0-9$_]*)', Name.Class, '#pop')
- ],
- 'root': [
- (r'(%subclass)(\s+)', bygroups(Keyword, Text),
- 'logos_classname'),
- (r'(%hook|%group)(\s+)([a-zA-Z$_][a-zA-Z0-9$_]+)',
- bygroups(Keyword, Text, Name.Class)),
- (r'(%config)(\s*\(\s*)(\w+)(\s*=\s*)(.*?)(\s*\)\s*)',
- bygroups(Keyword, Text, Name.Variable, Text, String, Text)),
- (r'(%ctor)(\s*)({)', bygroups(Keyword, Text, Punctuation),
- 'function'),
- (r'(%new)(\s*)(\()(\s*.*?\s*)(\))',
- bygroups(Keyword, Text, Keyword, String, Keyword)),
- (r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)),
- inherit,
- ],
- }
-
- _logos_keywords = re.compile(r'%(?:hook|ctor|init|c\()')
-
- def analyse_text(text):
- if LogosLexer._logos_keywords.search(text):
- return 1.0
- return 0
+from pygments.lexers.c_cpp import CLexer, CppLexer
+from pygments.lexers.d import DLexer
+from pygments.lexers.objective import ObjectiveCLexer, \
+ ObjectiveCppLexer, LogosLexer
+from pygments.lexers.go import GoLexer
+from pygments.lexers.rust import RustLexer
+from pygments.lexers.c_like import ECLexer, ValaLexer, CudaLexer
+from pygments.lexers.pascal import DelphiLexer, Modula2Lexer, AdaLexer
+from pygments.lexers.business import CobolLexer, CobolFreeformatLexer
+from pygments.lexers.fortran import FortranLexer
+from pygments.lexers.prolog import PrologLexer
+from pygments.lexers.python import CythonLexer
+from pygments.lexers.graphics import GLShaderLexer
+from pygments.lexers.ml import OcamlLexer
+from pygments.lexers.basic import BlitzBasicLexer, BlitzMaxLexer, MonkeyLexer
+from pygments.lexers.dylan import DylanLexer, DylanLidLexer, DylanConsoleLexer
+from pygments.lexers.ooc import OocLexer
+from pygments.lexers.felix import FelixLexer
+from pygments.lexers.nimrod import NimrodLexer
+
+__all__ = []
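+
+# The re-exports above keep this module importable under its historical name;
+# the lexer classes themselves now live in the topic-specific modules
+# (c_cpp, d, objective, business, ...), and ``__all__`` is left empty so the
+# module no longer advertises any names of its own.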
diff --git a/pygments/lexers/configs.py b/pygments/lexers/configs.py
new file mode 100644
index 00000000..1bd8f55a
--- /dev/null
+++ b/pygments/lexers/configs.py
@@ -0,0 +1,546 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.configs
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for configuration file formats.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, default, words, bygroups, include, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Whitespace
+from pygments.lexers.shell import BashLexer
+
+__all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer',
+ 'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer',
+ 'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer']
+
+
+class IniLexer(RegexLexer):
+ """
+ Lexer for configuration files in INI style.
+ """
+
+ name = 'INI'
+ aliases = ['ini', 'cfg', 'dosini']
+ filenames = ['*.ini', '*.cfg']
+ mimetypes = ['text/x-ini']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'[;#].*', Comment.Single),
+ (r'\[.*?\]$', Keyword),
+ (r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)',
+ bygroups(Name.Attribute, Text, Operator, Text, String))
+ ]
+ }
+
+ def analyse_text(text):
+ npos = text.find('\n')
+ if npos < 3:
+ return False
+ return text[0] == '[' and text[npos-1] == ']'
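+    # Illustration of the guess above: only the first line is inspected, so
+    #   IniLexer.analyse_text("[section]\nkey = value\n")   -> True
+    #   IniLexer.analyse_text("key = value\n")              -> False
+    # and anything shorter than "[x]" before the first newline is rejected.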
+
+
+class RegeditLexer(RegexLexer):
+ """
+ Lexer for `Windows Registry
+ <http://en.wikipedia.org/wiki/Windows_Registry#.REG_files>`_ files produced
+ by regedit.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'reg'
+ aliases = ['registry']
+ filenames = ['*.reg']
+ mimetypes = ['text/x-windows-registry']
+
+ tokens = {
+ 'root': [
+ (r'Windows Registry Editor.*', Text),
+ (r'\s+', Text),
+ (r'[;#].*', Comment.Single),
+ (r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$',
+ bygroups(Keyword, Operator, Name.Builtin, Keyword)),
+ # String keys, which obey somewhat normal escaping
+ (r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)',
+ bygroups(Name.Attribute, Text, Operator, Text),
+ 'value'),
+ # Bare keys (includes @)
+ (r'(.*?)([ \t]*)(=)([ \t]*)',
+ bygroups(Name.Attribute, Text, Operator, Text),
+ 'value'),
+ ],
+ 'value': [
+ (r'-', Operator, '#pop'), # delete value
+ (r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)',
+ bygroups(Name.Variable, Punctuation, Number), '#pop'),
+ # As far as I know, .reg files do not support line continuation.
+ (r'.+', String, '#pop'),
+ default('#pop'),
+ ]
+ }
+
+ def analyse_text(text):
+ return text.startswith('Windows Registry Editor')
+
+
+class PropertiesLexer(RegexLexer):
+ """
+ Lexer for configuration files in Java's properties format.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Properties'
+ aliases = ['properties', 'jproperties']
+ filenames = ['*.properties']
+ mimetypes = ['text/x-java-properties']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'(?:[;#]|//).*$', Comment),
+ (r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
+ bygroups(Name.Attribute, Text, Operator, Text, String)),
+ ],
+ }
+
+
+def _rx_indent(level):
+ # Kconfig *always* interprets a tab as 8 spaces, so this is the default.
+ # Edit this if you are in an environment where KconfigLexer gets expanded
+ # input (tabs expanded to spaces) and the expansion tab width is != 8,
+ # e.g. in connection with Trac (trac.ini, [mimeviewer], tab_width).
+ # Value range here is 2 <= {tab_width} <= 8.
+ tab_width = 8
+ # Regex matching a given indentation {level}, assuming that indentation is
+ # a multiple of {tab_width}. In other cases there might be problems.
+ if tab_width == 2:
+ space_repeat = '+'
+ else:
+ space_repeat = '{1,%d}' % (tab_width - 1)
+ if level == 1:
+ level_repeat = ''
+ else:
+ level_repeat = '{%s}' % level
+ return r'(?:\t| %s\t| {%s})%s.*\n' % (space_repeat, tab_width, level_repeat)
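+
+# For illustration, with the default tab_width of 8 the helper yields:
+#   _rx_indent(1) == r'(?:\t| {1,7}\t| {8}).*\n'
+#   _rx_indent(2) == r'(?:\t| {1,7}\t| {8}){2}.*\n'
+# i.e. one indentation unit is a tab, one to seven spaces followed by a tab,
+# or a full run of eight spaces, repeated {level} times.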
+
+
+class KconfigLexer(RegexLexer):
+ """
+ For Linux-style Kconfig files.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Kconfig'
+ aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config']
+ # Adjust this if new kconfig file names appear in your environment
+ filenames = ['Kconfig', '*Config.in*', 'external.in*',
+ 'standard-modules.in']
+ mimetypes = ['text/x-kconfig']
+ # No re.MULTILINE, indentation-aware help text needs line-by-line handling
+ flags = 0
+
+ def call_indent(level):
+ # If indentation >= {level} is detected, enter state 'indent{level}'
+ return (_rx_indent(level), String.Doc, 'indent%s' % level)
+
+ def do_indent(level):
+ # Print paragraphs of indentation level >= {level} as String.Doc,
+ # ignoring blank lines. Then return to 'root' state.
+ return [
+ (_rx_indent(level), String.Doc),
+ (r'\s*\n', Text),
+ default('#pop:2')
+ ]
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'#.*?\n', Comment.Single),
+ (words((
+ 'mainmenu', 'config', 'menuconfig', 'choice', 'endchoice',
+ 'comment', 'menu', 'endmenu', 'visible if', 'if', 'endif',
+ 'source', 'prompt', 'select', 'depends on', 'default',
+ 'range', 'option'), suffix=r'\b'),
+ Keyword),
+ (r'(---help---|help)[\t ]*\n', Keyword, 'help'),
+ (r'(bool|tristate|string|hex|int|defconfig_list|modules|env)\b',
+ Name.Builtin),
+ (r'[!=&|]', Operator),
+ (r'[()]', Punctuation),
+ (r'[0-9]+', Number.Integer),
+ (r"'(''|[^'])*'", String.Single),
+ (r'"(""|[^"])*"', String.Double),
+ (r'\S+', Text),
+ ],
+ # Help text is indented, multi-line and ends when a lower indentation
+ # level is detected.
+ 'help': [
+ # Skip blank lines after help token, if any
+ (r'\s*\n', Text),
+ # Determine the first help line's indentation level heuristically(!).
+ # Attention: this is not perfect, but works for 99% of "normal"
+ # indentation schemes up to a max. indentation level of 7.
+ call_indent(7),
+ call_indent(6),
+ call_indent(5),
+ call_indent(4),
+ call_indent(3),
+ call_indent(2),
+ call_indent(1),
+ default('#pop'), # for incomplete help sections without text
+ ],
+ # Handle text for indentation levels 7 to 1
+ 'indent7': do_indent(7),
+ 'indent6': do_indent(6),
+ 'indent5': do_indent(5),
+ 'indent4': do_indent(4),
+ 'indent3': do_indent(3),
+ 'indent2': do_indent(2),
+ 'indent1': do_indent(1),
+ }
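+
+# Illustrative walk-through of the help handling above: for a block such as
+#
+#   help
+#     Say Y here to enable this driver.
+#
+#     If unsure, say N.
+#
+# the first non-blank line after "help" is tried against call_indent(7) down to
+# call_indent(1), and the deepest matching pattern selects the corresponding
+# 'indentN' state; do_indent() then emits String.Doc for every line indented at
+# least that far, passes blank lines through as Text, and pops back to 'root'
+# at the first line with less indentation.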
+
+
+class Cfengine3Lexer(RegexLexer):
+ """
+ Lexer for `CFEngine3 <http://cfengine.org>`_ policy files.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'CFEngine3'
+ aliases = ['cfengine3', 'cf3']
+ filenames = ['*.cf']
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'#.*?\n', Comment),
+ (r'(body)(\s+)(\S+)(\s+)(control)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword)),
+ (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()',
+ bygroups(Keyword, Text, Keyword, Text, Name.Function, Punctuation),
+ 'arglist'),
+ (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)',
+ bygroups(Keyword, Text, Keyword, Text, Name.Function)),
+ (r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)',
+ bygroups(Punctuation, Name.Variable, Punctuation,
+ Text, Keyword.Type, Text, Operator, Text)),
+ (r'(\S+)(\s*)(=>)(\s*)',
+ bygroups(Keyword.Reserved, Text, Operator, Text)),
+ (r'"', String, 'string'),
+ (r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
+ (r'([\w.!&|()]+)(::)', bygroups(Name.Class, Punctuation)),
+ (r'(\w+)(:)', bygroups(Keyword.Declaration, Punctuation)),
+ (r'@[{(][^)}]+[})]', Name.Variable),
+ (r'[(){},;]', Punctuation),
+ (r'=>', Operator),
+ (r'->', Operator),
+ (r'\d+\.\d+', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'\w+', Name.Function),
+ (r'\s+', Text),
+ ],
+ 'string': [
+ (r'\$[{(]', String.Interpol, 'interpol'),
+ (r'\\.', String.Escape),
+ (r'"', String, '#pop'),
+ (r'\n', String),
+ (r'.', String),
+ ],
+ 'interpol': [
+ (r'\$[{(]', String.Interpol, '#push'),
+ (r'[})]', String.Interpol, '#pop'),
+ (r'[^${()}]+', String.Interpol),
+ ],
+ 'arglist': [
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'\w+', Name.Variable),
+ (r'\s+', Text),
+ ],
+ }
+
+
+class ApacheConfLexer(RegexLexer):
+ """
+ Lexer for configuration files following the Apache config file
+ format.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'ApacheConf'
+ aliases = ['apacheconf', 'aconf', 'apache']
+ filenames = ['.htaccess', 'apache.conf', 'apache2.conf']
+ mimetypes = ['text/x-apacheconf']
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'(#.*?)$', Comment),
+ (r'(<[^\s>]+)(?:(\s+)(.*?))?(>)',
+ bygroups(Name.Tag, Text, String, Name.Tag)),
+ (r'([a-z]\w*)(\s+)',
+ bygroups(Name.Builtin, Text), 'value'),
+ (r'\.+', Text),
+ ],
+ 'value': [
+ (r'\\\n', Text),
+ (r'$', Text, '#pop'),
+ (r'\\', Text),
+ (r'[^\S\n]+', Text),
+ (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
+ (r'\d+', Number),
+ (r'/([a-z0-9][\w./-]+)', String.Other),
+ (r'(on|off|none|any|all|double|email|dns|min|minimal|'
+ r'os|productonly|full|emerg|alert|crit|error|warn|'
+ r'notice|info|debug|registry|script|inetd|standalone|'
+ r'user|group)\b', Keyword),
+ (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
+ (r'[^\s"\\]+', Text)
+ ],
+ }
+
+
+class SquidConfLexer(RegexLexer):
+ """
+ Lexer for `squid <http://www.squid-cache.org/>`_ configuration files.
+
+ .. versionadded:: 0.9
+ """
+
+ name = 'SquidConf'
+ aliases = ['squidconf', 'squid.conf', 'squid']
+ filenames = ['squid.conf']
+ mimetypes = ['text/x-squidconf']
+ flags = re.IGNORECASE
+
+ keywords = (
+ "access_log", "acl", "always_direct", "announce_host",
+ "announce_period", "announce_port", "announce_to", "anonymize_headers",
+ "append_domain", "as_whois_server", "auth_param_basic",
+ "authenticate_children", "authenticate_program", "authenticate_ttl",
+ "broken_posts", "buffered_logs", "cache_access_log", "cache_announce",
+ "cache_dir", "cache_dns_program", "cache_effective_group",
+ "cache_effective_user", "cache_host", "cache_host_acl",
+ "cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
+ "cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
+        "cache_peer_access", "cache_replacement_policy", "cache_stoplist",
+ "cache_stoplist_pattern", "cache_store_log", "cache_swap",
+ "cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
+ "client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
+ "dead_peer_timeout", "debug_options", "delay_access", "delay_class",
+ "delay_initial_bucket_level", "delay_parameters", "delay_pools",
+ "deny_info", "dns_children", "dns_defnames", "dns_nameservers",
+ "dns_testnames", "emulate_httpd_log", "err_html_text",
+ "fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port",
+ "fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width",
+ "ftp_passive", "ftp_user", "half_closed_clients", "header_access",
+ "header_replace", "hierarchy_stoplist", "high_response_time_warning",
+ "high_page_fault_warning", "hosts_file", "htcp_port", "http_access",
+ "http_anonymizer", "httpd_accel", "httpd_accel_host",
+ "httpd_accel_port", "httpd_accel_uses_host_header",
+ "httpd_accel_with_proxy", "http_port", "http_reply_access",
+ "icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout",
+ "ident_lookup", "ident_lookup_access", "ident_timeout",
+ "incoming_http_average", "incoming_icp_average", "inside_firewall",
+ "ipcache_high", "ipcache_low", "ipcache_size", "local_domain",
+ "local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries",
+ "log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries",
+ "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
+ "mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
+ "memory_pools_limit", "memory_replacement_policy", "mime_table",
+ "min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops",
+ "minimum_object_size", "minimum_retry_timeout", "miss_access",
+ "negative_dns_ttl", "negative_ttl", "neighbor_timeout",
+ "neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period",
+ "netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy",
+ "pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl",
+ "prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp",
+ "quick_abort", "quick_abort_max", "quick_abort_min",
+ "quick_abort_pct", "range_offset_limit", "read_timeout",
+ "redirect_children", "redirect_program",
+ "redirect_rewrites_host_header", "reference_age",
+ "refresh_pattern", "reload_into_ims", "request_body_max_size",
+ "request_size", "request_timeout", "shutdown_lifetime",
+ "single_parent_bypass", "siteselect_timeout", "snmp_access",
+ "snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy",
+ "store_avg_object_size", "store_objects_per_bucket",
+ "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
+ "tcp_incoming_address", "tcp_outgoing_address", "tcp_recv_bufsize",
+ "test_reachability", "udp_hit_obj", "udp_hit_obj_size",
+ "udp_incoming_address", "udp_outgoing_address", "unique_hostname",
+ "unlinkd_program", "uri_whitespace", "useragent_log",
+ "visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port",
+ )
+
+ opts = (
+ "proxy-only", "weight", "ttl", "no-query", "default", "round-robin",
+ "multicast-responder", "on", "off", "all", "deny", "allow", "via",
+ "parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2",
+ "credentialsttl", "none", "disable", "offline_toggle", "diskd",
+ )
+
+ actions = (
+ "shutdown", "info", "parameter", "server_list", "client_list",
+ r'squid.conf',
+ )
+
+ actions_stats = (
+ "objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns",
+ "redirector", "io", "reply_headers", "filedescriptors", "netdb",
+ )
+
+ actions_log = ("status", "enable", "disable", "clear")
+
+ acls = (
+ "url_regex", "urlpath_regex", "referer_regex", "port", "proto",
+ "req_mime_type", "rep_mime_type", "method", "browser", "user", "src",
+ "dst", "time", "dstdomain", "ident", "snmp_community",
+ )
+
+ ip_re = (
+ r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|'
+ r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|'
+ r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|'
+ r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}'
+ r'(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|'
+ r'(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|'
+ r'[1-9]?\d)){3}))'
+ )
+
+ tokens = {
+ 'root': [
+ (r'\s+', Whitespace),
+ (r'#', Comment, 'comment'),
+ (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(opts, prefix=r'\b', suffix=r'\b'), Name.Constant),
+ # Actions
+ (words(actions, prefix=r'\b', suffix=r'\b'), String),
+ (words(actions_stats, prefix=r'stats/', suffix=r'\b'), String),
+ (words(actions_log, prefix=r'log/', suffix=r'='), String),
+ (words(acls, prefix=r'\b', suffix=r'\b'), Keyword),
+ (ip_re + r'(?:/(?:' + ip_re + r'|\b\d+\b))?', Number.Float),
+ (r'(?:\b\d+\b(?:-\b\d+|%)?)', Number),
+ (r'\S+', Text),
+ ],
+ 'comment': [
+ (r'\s*TAG:.*', String.Escape, '#pop'),
+ (r'.+', Comment, '#pop'),
+ default('#pop'),
+ ],
+ }
+
+
+class NginxConfLexer(RegexLexer):
+ """
+ Lexer for `Nginx <http://nginx.net/>`_ configuration files.
+
+ .. versionadded:: 0.11
+ """
+ name = 'Nginx configuration file'
+ aliases = ['nginx']
+ filenames = []
+ mimetypes = ['text/x-nginx-conf']
+
+ tokens = {
+ 'root': [
+ (r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Text, Name)),
+ (r'[^\s;#]+', Keyword, 'stmt'),
+ include('base'),
+ ],
+ 'block': [
+ (r'\}', Punctuation, '#pop:2'),
+ (r'[^\s;#]+', Keyword.Namespace, 'stmt'),
+ include('base'),
+ ],
+ 'stmt': [
+ (r'\{', Punctuation, 'block'),
+ (r';', Punctuation, '#pop'),
+ include('base'),
+ ],
+ 'base': [
+ (r'#.*\n', Comment.Single),
+ (r'on|off', Name.Constant),
+ (r'\$[^\s;#()]+', Name.Variable),
+ (r'([a-z0-9.-]+)(:)([0-9]+)',
+ bygroups(Name, Punctuation, Number.Integer)),
+ (r'[a-z-]+/[a-z-+]+', String), # mimetype
+ # (r'[a-zA-Z._-]+', Keyword),
+ (r'[0-9]+[km]?\b', Number.Integer),
+ (r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Text, String.Regex)),
+ (r'[:=~]', Punctuation),
+ (r'[^\s;#{}$]+', String), # catch all
+ (r'/[^\s;#]*', Name), # pathname
+ (r'\s+', Text),
+ (r'[$;]', Text), # leftover characters
+ ],
+ }
+
+
+class LighttpdConfLexer(RegexLexer):
+ """
+ Lexer for `Lighttpd <http://lighttpd.net/>`_ configuration files.
+
+ .. versionadded:: 0.11
+ """
+ name = 'Lighttpd configuration file'
+ aliases = ['lighty', 'lighttpd']
+ filenames = []
+ mimetypes = ['text/x-lighttpd-conf']
+
+ tokens = {
+ 'root': [
+ (r'#.*\n', Comment.Single),
+ (r'/\S*', Name), # pathname
+ (r'[a-zA-Z._-]+', Keyword),
+ (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
+ (r'[0-9]+', Number),
+ (r'=>|=~|\+=|==|=|\+', Operator),
+ (r'\$[A-Z]+', Name.Builtin),
+ (r'[(){}\[\],]', Punctuation),
+ (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
+ (r'\s+', Text),
+ ],
+
+ }
+
+
+class DockerLexer(RegexLexer):
+ """
+ Lexer for `Docker <http://docker.io>`_ configuration files.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Docker'
+ aliases = ['docker', 'dockerfile']
+ filenames = ['Dockerfile', '*.docker']
+ mimetypes = ['text/x-dockerfile-config']
+
+ _keywords = (r'(?:FROM|MAINTAINER|CMD|EXPOSE|ENV|ADD|ENTRYPOINT|'
+ r'VOLUME|WORKDIR)')
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'^(ONBUILD)(\s+)(%s)\b' % (_keywords,),
+ bygroups(Name.Keyword, Whitespace, Keyword)),
+ (r'^(%s)\b(.*)' % (_keywords,), bygroups(Keyword, String)),
+ (r'#.*', Comment),
+ (r'RUN', Keyword), # Rest of line falls through
+ (r'(.*\\\n)*.+', using(BashLexer)),
+ ],
+ }
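+
+# Usage sketch (illustrative): the lexers above plug into the regular Pygments
+# API like any other lexer, e.g.
+#
+#   from pygments import highlight
+#   from pygments.formatters import TerminalFormatter
+#
+#   code = 'FROM debian:stable\nRUN apt-get update\n'
+#   print(highlight(code, DockerLexer(), TerminalFormatter()))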
diff --git a/pygments/lexers/console.py b/pygments/lexers/console.py
new file mode 100644
index 00000000..1d89b770
--- /dev/null
+++ b/pygments/lexers/console.py
@@ -0,0 +1,114 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.console
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for misc console output.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Generic, Comment, String, Text, Keyword, Name, \
+ Punctuation, Number
+
+__all__ = ['VCTreeStatusLexer', 'PyPyLogLexer']
+
+
+class VCTreeStatusLexer(RegexLexer):
+ """
+ For colorizing output of version control status commands, like "hg
+ status" or "svn status".
+
+ .. versionadded:: 2.0
+ """
+ name = 'VCTreeStatus'
+ aliases = ['vctreestatus']
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'^A \+ C\s+', Generic.Error),
+ (r'^A\s+\+?\s+', String),
+ (r'^M\s+', Generic.Inserted),
+ (r'^C\s+', Generic.Error),
+ (r'^D\s+', Generic.Deleted),
+ (r'^[?!]\s+', Comment.Preproc),
+ (r' >\s+.*\n', Comment.Preproc),
+ (r'.*\n', Text)
+ ]
+ }
+
+
+class PyPyLogLexer(RegexLexer):
+ """
+ Lexer for PyPy log files.
+
+ .. versionadded:: 1.5
+ """
+ name = "PyPy Log"
+ aliases = ["pypylog", "pypy"]
+ filenames = ["*.pypylog"]
+ mimetypes = ['application/x-pypylog']
+
+ tokens = {
+ "root": [
+ (r"\[\w+\] \{jit-log-.*?$", Keyword, "jit-log"),
+ (r"\[\w+\] \{jit-backend-counts$", Keyword, "jit-backend-counts"),
+ include("extra-stuff"),
+ ],
+ "jit-log": [
+ (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"),
+ (r"^\+\d+: ", Comment),
+ (r"--end of the loop--", Comment),
+ (r"[ifp]\d+", Name),
+ (r"ptr\d+", Name),
+ (r"(\()(\w+(?:\.\w+)?)(\))",
+ bygroups(Punctuation, Name.Builtin, Punctuation)),
+ (r"[\[\]=,()]", Punctuation),
+ (r"(\d+\.\d+|inf|-inf)", Number.Float),
+ (r"-?\d+", Number.Integer),
+ (r"'.*'", String),
+ (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name),
+ (r"<.*?>+", Name.Builtin),
+ (r"(label|debug_merge_point|jump|finish)", Name.Class),
+ (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|"
+ r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|"
+ r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|"
+ r"int_is_true|"
+ r"uint_floordiv|uint_ge|uint_lt|"
+ r"float_add|float_sub|float_mul|float_truediv|float_neg|"
+ r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|"
+ r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|"
+ r"cast_int_to_float|cast_float_to_int|"
+ r"force_token|quasiimmut_field|same_as|virtual_ref_finish|"
+ r"virtual_ref|mark_opaque_ptr|"
+ r"call_may_force|call_assembler|call_loopinvariant|"
+ r"call_release_gil|call_pure|call|"
+ r"new_with_vtable|new_array|newstr|newunicode|new|"
+ r"arraylen_gc|"
+ r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|"
+ r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|"
+ r"getfield_gc|getinteriorfield_gc|setinteriorfield_gc|"
+ r"getfield_raw|setfield_gc|setfield_raw|"
+ r"strgetitem|strsetitem|strlen|copystrcontent|"
+ r"unicodegetitem|unicodesetitem|unicodelen|"
+ r"guard_true|guard_false|guard_value|guard_isnull|"
+ r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|"
+ r"guard_not_forced|guard_no_exception|guard_not_invalidated)",
+ Name.Builtin),
+ include("extra-stuff"),
+ ],
+ "jit-backend-counts": [
+ (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"),
+ (r":", Punctuation),
+ (r"\d+", Number),
+ include("extra-stuff"),
+ ],
+ "extra-stuff": [
+ (r"\s+", Text),
+ (r"#.*?$", Comment),
+ ],
+ }
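+
+# Illustrative input for PyPyLogLexer: a jit log section is delimited by marker
+# lines such as
+#
+#   [2b99f32ae32] {jit-log-opt-loop
+#   ...
+#   [2b99f32d501] jit-log-opt-loop}
+#
+# The opening marker pushes the 'jit-log' state and the closing one pops it;
+# "{jit-backend-counts" sections are handled the same way by the
+# 'jit-backend-counts' state.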
diff --git a/pygments/lexers/css.py b/pygments/lexers/css.py
new file mode 100644
index 00000000..6f27d63c
--- /dev/null
+++ b/pygments/lexers/css.py
@@ -0,0 +1,499 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.css
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for CSS and related stylesheet formats.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+import copy
+
+from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \
+ default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+from pygments.util import iteritems
+
+__all__ = ['CssLexer', 'SassLexer', 'ScssLexer']
+
+
+class CssLexer(RegexLexer):
+ """
+ For CSS (Cascading Style Sheets).
+ """
+
+ name = 'CSS'
+ aliases = ['css']
+ filenames = ['*.css']
+ mimetypes = ['text/css']
+
+ tokens = {
+ 'root': [
+ include('basics'),
+ ],
+ 'basics': [
+ (r'\s+', Text),
+ (r'/\*(?:.|\n)*?\*/', Comment),
+ (r'\{', Punctuation, 'content'),
+ (r'\:[\w-]+', Name.Decorator),
+ (r'\.[\w-]+', Name.Class),
+ (r'\#[\w-]+', Name.Function),
+ (r'@[\w-]+', Keyword, 'atrule'),
+ (r'[\w-]+', Name.Tag),
+ (r'[~^*!%&$\[\]()<>|+=@:;,./?-]', Operator),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single)
+ ],
+ 'atrule': [
+ (r'\{', Punctuation, 'atcontent'),
+ (r';', Punctuation, '#pop'),
+ include('basics'),
+ ],
+ 'atcontent': [
+ include('basics'),
+ (r'\}', Punctuation, '#pop:2'),
+ ],
+ 'content': [
+ (r'\s+', Text),
+ (r'\}', Punctuation, '#pop'),
+ (r'url\(.*?\)', String.Other),
+ (r'^@.*?$', Comment.Preproc),
+ (words((
+ 'azimuth', 'background-attachment', 'background-color',
+ 'background-image', 'background-position', 'background-repeat',
+ 'background', 'border-bottom-color', 'border-bottom-style',
+ 'border-bottom-width', 'border-left-color', 'border-left-style',
+ 'border-left-width', 'border-right', 'border-right-color',
+ 'border-right-style', 'border-right-width', 'border-top-color',
+ 'border-top-style', 'border-top-width', 'border-bottom',
+ 'border-collapse', 'border-left', 'border-width', 'border-color',
+ 'border-spacing', 'border-style', 'border-top', 'border', 'caption-side',
+ 'clear', 'clip', 'color', 'content', 'counter-increment', 'counter-reset',
+ 'cue-after', 'cue-before', 'cue', 'cursor', 'direction', 'display',
+ 'elevation', 'empty-cells', 'float', 'font-family', 'font-size',
+ 'font-size-adjust', 'font-stretch', 'font-style', 'font-variant',
+ 'font-weight', 'font', 'height', 'letter-spacing', 'line-height',
+ 'list-style-type', 'list-style-image', 'list-style-position',
+ 'list-style', 'margin-bottom', 'margin-left', 'margin-right',
+ 'margin-top', 'margin', 'marker-offset', 'marks', 'max-height', 'max-width',
+ 'min-height', 'min-width', 'opacity', 'orphans', 'outline-color',
+ 'outline-style', 'outline-width', 'outline', 'overflow', 'overflow-x',
+ 'overflow-y', 'padding-bottom', 'padding-left', 'padding-right', 'padding-top',
+ 'padding', 'page', 'page-break-after', 'page-break-before', 'page-break-inside',
+ 'pause-after', 'pause-before', 'pause', 'pitch-range', 'pitch',
+ 'play-during', 'position', 'quotes', 'richness', 'right', 'size',
+ 'speak-header', 'speak-numeral', 'speak-punctuation', 'speak',
+ 'speech-rate', 'stress', 'table-layout', 'text-align', 'text-decoration',
+ 'text-indent', 'text-shadow', 'text-transform', 'top', 'unicode-bidi',
+ 'vertical-align', 'visibility', 'voice-family', 'volume', 'white-space',
+ 'widows', 'width', 'word-spacing', 'z-index', 'bottom',
+ 'above', 'absolute', 'always', 'armenian', 'aural', 'auto', 'avoid', 'baseline',
+ 'behind', 'below', 'bidi-override', 'blink', 'block', 'bolder', 'bold', 'both',
+ 'capitalize', 'center-left', 'center-right', 'center', 'circle',
+ 'cjk-ideographic', 'close-quote', 'collapse', 'condensed', 'continuous',
+ 'crop', 'crosshair', 'cross', 'cursive', 'dashed', 'decimal-leading-zero',
+ 'decimal', 'default', 'digits', 'disc', 'dotted', 'double', 'e-resize', 'embed',
+ 'extra-condensed', 'extra-expanded', 'expanded', 'fantasy', 'far-left',
+ 'far-right', 'faster', 'fast', 'fixed', 'georgian', 'groove', 'hebrew', 'help',
+ 'hidden', 'hide', 'higher', 'high', 'hiragana-iroha', 'hiragana', 'icon',
+ 'inherit', 'inline-table', 'inline', 'inset', 'inside', 'invert', 'italic',
+ 'justify', 'katakana-iroha', 'katakana', 'landscape', 'larger', 'large',
+ 'left-side', 'leftwards', 'left', 'level', 'lighter', 'line-through', 'list-item',
+ 'loud', 'lower-alpha', 'lower-greek', 'lower-roman', 'lowercase', 'ltr',
+ 'lower', 'low', 'medium', 'message-box', 'middle', 'mix', 'monospace',
+ 'n-resize', 'narrower', 'ne-resize', 'no-close-quote', 'no-open-quote',
+ 'no-repeat', 'none', 'normal', 'nowrap', 'nw-resize', 'oblique', 'once',
+ 'open-quote', 'outset', 'outside', 'overline', 'pointer', 'portrait', 'px',
+ 'relative', 'repeat-x', 'repeat-y', 'repeat', 'rgb', 'ridge', 'right-side',
+ 'rightwards', 's-resize', 'sans-serif', 'scroll', 'se-resize',
+ 'semi-condensed', 'semi-expanded', 'separate', 'serif', 'show', 'silent',
+ 'slower', 'slow', 'small-caps', 'small-caption', 'smaller', 'soft', 'solid',
+ 'spell-out', 'square', 'static', 'status-bar', 'super', 'sw-resize',
+ 'table-caption', 'table-cell', 'table-column', 'table-column-group',
+ 'table-footer-group', 'table-header-group', 'table-row',
+ 'table-row-group', 'text-bottom', 'text-top', 'text', 'thick', 'thin',
+ 'transparent', 'ultra-condensed', 'ultra-expanded', 'underline',
+ 'upper-alpha', 'upper-latin', 'upper-roman', 'uppercase', 'url',
+ 'visible', 'w-resize', 'wait', 'wider', 'x-fast', 'x-high', 'x-large', 'x-loud',
+ 'x-low', 'x-small', 'x-soft', 'xx-large', 'xx-small', 'yes'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'indigo', 'gold', 'firebrick', 'indianred', 'yellow', 'darkolivegreen',
+ 'darkseagreen', 'mediumvioletred', 'mediumorchid', 'chartreuse',
+ 'mediumslateblue', 'black', 'springgreen', 'crimson', 'lightsalmon', 'brown',
+ 'turquoise', 'olivedrab', 'cyan', 'silver', 'skyblue', 'gray', 'darkturquoise',
+ 'goldenrod', 'darkgreen', 'darkviolet', 'darkgray', 'lightpink', 'teal',
+ 'darkmagenta', 'lightgoldenrodyellow', 'lavender', 'yellowgreen', 'thistle',
+ 'violet', 'navy', 'orchid', 'blue', 'ghostwhite', 'honeydew', 'cornflowerblue',
+ 'darkblue', 'darkkhaki', 'mediumpurple', 'cornsilk', 'red', 'bisque', 'slategray',
+ 'darkcyan', 'khaki', 'wheat', 'deepskyblue', 'darkred', 'steelblue', 'aliceblue',
+ 'gainsboro', 'mediumturquoise', 'floralwhite', 'coral', 'purple', 'lightgrey',
+ 'lightcyan', 'darksalmon', 'beige', 'azure', 'lightsteelblue', 'oldlace',
+ 'greenyellow', 'royalblue', 'lightseagreen', 'mistyrose', 'sienna',
+ 'lightcoral', 'orangered', 'navajowhite', 'lime', 'palegreen', 'burlywood',
+ 'seashell', 'mediumspringgreen', 'fuchsia', 'papayawhip', 'blanchedalmond',
+ 'peru', 'aquamarine', 'white', 'darkslategray', 'ivory', 'dodgerblue',
+ 'lemonchiffon', 'chocolate', 'orange', 'forestgreen', 'slateblue', 'olive',
+ 'mintcream', 'antiquewhite', 'darkorange', 'cadetblue', 'moccasin',
+ 'limegreen', 'saddlebrown', 'darkslateblue', 'lightskyblue', 'deeppink',
+ 'plum', 'aqua', 'darkgoldenrod', 'maroon', 'sandybrown', 'magenta', 'tan',
+ 'rosybrown', 'pink', 'lightblue', 'palevioletred', 'mediumseagreen',
+ 'dimgray', 'powderblue', 'seagreen', 'snow', 'mediumblue', 'midnightblue',
+ 'paleturquoise', 'palegoldenrod', 'whitesmoke', 'darkorchid', 'salmon',
+ 'lightslategray', 'lawngreen', 'lightgreen', 'tomato', 'hotpink',
+ 'lightyellow', 'lavenderblush', 'linen', 'mediumaquamarine', 'green',
+ 'blueviolet', 'peachpuff'), suffix=r'\b'),
+ Name.Builtin),
+ (r'\!important', Comment.Preproc),
+ (r'/\*(?:.|\n)*?\*/', Comment),
+ (r'\#[a-zA-Z0-9]{1,6}', Number),
+ (r'[.-]?[0-9]*[.]?[0-9]+(em|px|pt|pc|in|mm|cm|ex|s)\b', Number),
+ # Separate regex for percentages, as can't do word boundaries with %
+ (r'[.-]?[0-9]*[.]?[0-9]+%', Number),
+ (r'-?[0-9]+', Number),
+ (r'[~^*!%&<>|+=@:,./?-]+', Operator),
+ (r'[\[\]();]+', Punctuation),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'[a-zA-Z_]\w*', Name)
+ ]
+ }
+
+
+common_sass_tokens = {
+ 'value': [
+ (r'[ \t]+', Text),
+ (r'[!$][\w-]+', Name.Variable),
+ (r'url\(', String.Other, 'string-url'),
+ (r'[a-z_-][\w-]*(?=\()', Name.Function),
+ (words((
+ 'azimuth', 'background-attachment', 'background-color',
+ 'background-image', 'background-position', 'background-repeat',
+ 'background', 'border-bottom-color', 'border-bottom-style',
+ 'border-bottom-width', 'border-left-color', 'border-left-style',
+ 'border-left-width', 'border-right', 'border-right-color',
+ 'border-right-style', 'border-right-width', 'border-top-color',
+ 'border-top-style', 'border-top-width', 'border-bottom',
+ 'border-collapse', 'border-left', 'border-width', 'border-color',
+ 'border-spacing', 'border-style', 'border-top', 'border', 'caption-side',
+ 'clear', 'clip', 'color', 'content', 'counter-increment', 'counter-reset',
+ 'cue-after', 'cue-before', 'cue', 'cursor', 'direction', 'display',
+ 'elevation', 'empty-cells', 'float', 'font-family', 'font-size',
+ 'font-size-adjust', 'font-stretch', 'font-style', 'font-variant',
+ 'font-weight', 'font', 'height', 'letter-spacing', 'line-height',
+ 'list-style-type', 'list-style-image', 'list-style-position',
+ 'list-style', 'margin-bottom', 'margin-left', 'margin-right',
+ 'margin-top', 'margin', 'marker-offset', 'marks', 'max-height', 'max-width',
+ 'min-height', 'min-width', 'opacity', 'orphans', 'outline', 'outline-color',
+ 'outline-style', 'outline-width', 'overflow', 'padding-bottom',
+ 'padding-left', 'padding-right', 'padding-top', 'padding', 'page',
+ 'page-break-after', 'page-break-before', 'page-break-inside',
+ 'pause-after', 'pause-before', 'pause', 'pitch', 'pitch-range',
+ 'play-during', 'position', 'quotes', 'richness', 'right', 'size',
+ 'speak-header', 'speak-numeral', 'speak-punctuation', 'speak',
+ 'speech-rate', 'stress', 'table-layout', 'text-align', 'text-decoration',
+ 'text-indent', 'text-shadow', 'text-transform', 'top', 'unicode-bidi',
+ 'vertical-align', 'visibility', 'voice-family', 'volume', 'white-space',
+ 'widows', 'width', 'word-spacing', 'z-index', 'bottom', 'left',
+ 'above', 'absolute', 'always', 'armenian', 'aural', 'auto', 'avoid', 'baseline',
+ 'behind', 'below', 'bidi-override', 'blink', 'block', 'bold', 'bolder', 'both',
+ 'capitalize', 'center-left', 'center-right', 'center', 'circle',
+ 'cjk-ideographic', 'close-quote', 'collapse', 'condensed', 'continuous',
+ 'crop', 'crosshair', 'cross', 'cursive', 'dashed', 'decimal-leading-zero',
+ 'decimal', 'default', 'digits', 'disc', 'dotted', 'double', 'e-resize', 'embed',
+ 'extra-condensed', 'extra-expanded', 'expanded', 'fantasy', 'far-left',
+ 'far-right', 'faster', 'fast', 'fixed', 'georgian', 'groove', 'hebrew', 'help',
+ 'hidden', 'hide', 'higher', 'high', 'hiragana-iroha', 'hiragana', 'icon',
+ 'inherit', 'inline-table', 'inline', 'inset', 'inside', 'invert', 'italic',
+ 'justify', 'katakana-iroha', 'katakana', 'landscape', 'larger', 'large',
+ 'left-side', 'leftwards', 'level', 'lighter', 'line-through', 'list-item',
+ 'loud', 'lower-alpha', 'lower-greek', 'lower-roman', 'lowercase', 'ltr',
+ 'lower', 'low', 'medium', 'message-box', 'middle', 'mix', 'monospace',
+ 'n-resize', 'narrower', 'ne-resize', 'no-close-quote', 'no-open-quote',
+ 'no-repeat', 'none', 'normal', 'nowrap', 'nw-resize', 'oblique', 'once',
+ 'open-quote', 'outset', 'outside', 'overline', 'pointer', 'portrait', 'px',
+ 'relative', 'repeat-x', 'repeat-y', 'repeat', 'rgb', 'ridge', 'right-side',
+ 'rightwards', 's-resize', 'sans-serif', 'scroll', 'se-resize',
+ 'semi-condensed', 'semi-expanded', 'separate', 'serif', 'show', 'silent',
+ 'slow', 'slower', 'small-caps', 'small-caption', 'smaller', 'soft', 'solid',
+ 'spell-out', 'square', 'static', 'status-bar', 'super', 'sw-resize',
+ 'table-caption', 'table-cell', 'table-column', 'table-column-group',
+ 'table-footer-group', 'table-header-group', 'table-row',
+ 'table-row-group', 'text', 'text-bottom', 'text-top', 'thick', 'thin',
+ 'transparent', 'ultra-condensed', 'ultra-expanded', 'underline',
+ 'upper-alpha', 'upper-latin', 'upper-roman', 'uppercase', 'url',
+ 'visible', 'w-resize', 'wait', 'wider', 'x-fast', 'x-high', 'x-large', 'x-loud',
+ 'x-low', 'x-small', 'x-soft', 'xx-large', 'xx-small', 'yes'), suffix=r'\b'),
+ Name.Constant),
+ (words((
+ 'indigo', 'gold', 'firebrick', 'indianred', 'darkolivegreen',
+ 'darkseagreen', 'mediumvioletred', 'mediumorchid', 'chartreuse',
+ 'mediumslateblue', 'springgreen', 'crimson', 'lightsalmon', 'brown',
+ 'turquoise', 'olivedrab', 'cyan', 'skyblue', 'darkturquoise',
+ 'goldenrod', 'darkgreen', 'darkviolet', 'darkgray', 'lightpink',
+ 'darkmagenta', 'lightgoldenrodyellow', 'lavender', 'yellowgreen', 'thistle',
+ 'violet', 'orchid', 'ghostwhite', 'honeydew', 'cornflowerblue',
+ 'darkblue', 'darkkhaki', 'mediumpurple', 'cornsilk', 'bisque', 'slategray',
+ 'darkcyan', 'khaki', 'wheat', 'deepskyblue', 'darkred', 'steelblue', 'aliceblue',
+ 'gainsboro', 'mediumturquoise', 'floralwhite', 'coral', 'lightgrey',
+ 'lightcyan', 'darksalmon', 'beige', 'azure', 'lightsteelblue', 'oldlace',
+ 'greenyellow', 'royalblue', 'lightseagreen', 'mistyrose', 'sienna',
+ 'lightcoral', 'orangered', 'navajowhite', 'palegreen', 'burlywood',
+ 'seashell', 'mediumspringgreen', 'papayawhip', 'blanchedalmond',
+ 'peru', 'aquamarine', 'darkslategray', 'ivory', 'dodgerblue',
+ 'lemonchiffon', 'chocolate', 'orange', 'forestgreen', 'slateblue',
+ 'mintcream', 'antiquewhite', 'darkorange', 'cadetblue', 'moccasin',
+ 'limegreen', 'saddlebrown', 'darkslateblue', 'lightskyblue', 'deeppink',
+ 'plum', 'darkgoldenrod', 'sandybrown', 'magenta', 'tan',
+ 'rosybrown', 'pink', 'lightblue', 'palevioletred', 'mediumseagreen',
+ 'dimgray', 'powderblue', 'seagreen', 'snow', 'mediumblue', 'midnightblue',
+ 'paleturquoise', 'palegoldenrod', 'whitesmoke', 'darkorchid', 'salmon',
+ 'lightslategray', 'lawngreen', 'lightgreen', 'tomato', 'hotpink',
+ 'lightyellow', 'lavenderblush', 'linen', 'mediumaquamarine',
+ 'blueviolet', 'peachpuff'), suffix=r'\b'),
+ Name.Entity),
+ (words((
+ 'black', 'silver', 'gray', 'white', 'maroon', 'red', 'purple', 'fuchsia', 'green',
+ 'lime', 'olive', 'yellow', 'navy', 'blue', 'teal', 'aqua'), suffix=r'\b'),
+ Name.Builtin),
+ (r'\!(important|default)', Name.Exception),
+ (r'(true|false)', Name.Pseudo),
+ (r'(and|or|not)', Operator.Word),
+ (r'/\*', Comment.Multiline, 'inline-comment'),
+ (r'//[^\n]*', Comment.Single),
+ (r'\#[a-z0-9]{1,6}', Number.Hex),
+ (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
+ (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'[~^*!&%<>|+=@:,./?-]+', Operator),
+ (r'[\[\]()]+', Punctuation),
+ (r'"', String.Double, 'string-double'),
+ (r"'", String.Single, 'string-single'),
+ (r'[a-z_-][\w-]*', Name),
+ ],
+
+ 'interpolation': [
+ (r'\}', String.Interpol, '#pop'),
+ include('value'),
+ ],
+
+ 'selector': [
+ (r'[ \t]+', Text),
+ (r'\:', Name.Decorator, 'pseudo-class'),
+ (r'\.', Name.Class, 'class'),
+ (r'\#', Name.Namespace, 'id'),
+ (r'[\w-]+', Name.Tag),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'&', Keyword),
+ (r'[~^*!&\[\]()<>|+=@:;,./?-]', Operator),
+ (r'"', String.Double, 'string-double'),
+ (r"'", String.Single, 'string-single'),
+ ],
+
+ 'string-double': [
+ (r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'"', String.Double, '#pop'),
+ ],
+
+ 'string-single': [
+        (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Single),
+        (r'#\{', String.Interpol, 'interpolation'),
+        (r"'", String.Single, '#pop'),
+ ],
+
+ 'string-url': [
+ (r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'\)', String.Other, '#pop'),
+ ],
+
+ 'pseudo-class': [
+ (r'[\w-]+', Name.Decorator),
+ (r'#\{', String.Interpol, 'interpolation'),
+ default('#pop'),
+ ],
+
+ 'class': [
+ (r'[\w-]+', Name.Class),
+ (r'#\{', String.Interpol, 'interpolation'),
+ default('#pop'),
+ ],
+
+ 'id': [
+ (r'[\w-]+', Name.Namespace),
+ (r'#\{', String.Interpol, 'interpolation'),
+ default('#pop'),
+ ],
+
+ 'for': [
+ (r'(from|to|through)', Operator.Word),
+ include('value'),
+ ],
+}
+
+
+def _indentation(lexer, match, ctx):
+ indentation = match.group(0)
+ yield match.start(), Text, indentation
+ ctx.last_indentation = indentation
+ ctx.pos = match.end()
+
+ if hasattr(ctx, 'block_state') and ctx.block_state and \
+ indentation.startswith(ctx.block_indentation) and \
+ indentation != ctx.block_indentation:
+ ctx.stack.append(ctx.block_state)
+ else:
+ ctx.block_state = None
+ ctx.block_indentation = None
+ ctx.stack.append('content')
+
+
+def _starts_block(token, state):
+ def callback(lexer, match, ctx):
+ yield match.start(), token, match.group(0)
+
+ if hasattr(ctx, 'last_indentation'):
+ ctx.block_indentation = ctx.last_indentation
+ else:
+ ctx.block_indentation = ''
+
+ ctx.block_state = state
+ ctx.pos = match.end()
+
+ return callback
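+
+# Sketch of how the two callbacks above cooperate: _starts_block() remembers the
+# indentation of the line that opened a comment block together with the state to
+# re-enter, and _indentation() re-enters that state for every following line
+# that is indented more deeply, e.g.
+#
+#   // a single-line comment block
+#      still part of the comment, because it is indented further
+#   width: 10px
+#
+# keeps the second line in 'single-comment' while the third, back at the
+# original indentation, is lexed as regular content again.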
+
+
+class SassLexer(ExtendedRegexLexer):
+ """
+ For Sass stylesheets.
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Sass'
+ aliases = ['sass']
+ filenames = ['*.sass']
+ mimetypes = ['text/x-sass']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'content': [
+ (r'//[^\n]*', _starts_block(Comment.Single, 'single-comment'),
+ 'root'),
+ (r'/\*[^\n]*', _starts_block(Comment.Multiline, 'multi-comment'),
+ 'root'),
+ (r'@import', Keyword, 'import'),
+ (r'@for', Keyword, 'for'),
+ (r'@(debug|warn|if|while)', Keyword, 'value'),
+ (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
+ (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
+ (r'@extend', Keyword, 'selector'),
+ (r'@[\w-]+', Keyword, 'selector'),
+ (r'=[\w-]+', Name.Function, 'value'),
+ (r'\+[\w-]+', Name.Decorator, 'value'),
+ (r'([!$][\w-]\w*)([ \t]*(?:(?:\|\|)?=|:))',
+ bygroups(Name.Variable, Operator), 'value'),
+ (r':', Name.Attribute, 'old-style-attr'),
+ (r'(?=.+?[=:]([^a-z]|$))', Name.Attribute, 'new-style-attr'),
+ default('selector'),
+ ],
+
+ 'single-comment': [
+ (r'.+', Comment.Single),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'multi-comment': [
+ (r'.+', Comment.Multiline),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'import': [
+ (r'[ \t]+', Text),
+ (r'\S+', String),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'old-style-attr': [
+ (r'[^\s:="\[]+', Name.Attribute),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'[ \t]*=', Operator, 'value'),
+ default('value'),
+ ],
+
+ 'new-style-attr': [
+ (r'[^\s:="\[]+', Name.Attribute),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'[ \t]*[=:]', Operator, 'value'),
+ ],
+
+ 'inline-comment': [
+ (r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment.Multiline),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r"\*/", Comment, '#pop'),
+ ],
+ }
+ for group, common in iteritems(common_sass_tokens):
+ tokens[group] = copy.copy(common)
+ tokens['value'].append((r'\n', Text, 'root'))
+ tokens['selector'].append((r'\n', Text, 'root'))
+
+
+class ScssLexer(RegexLexer):
+ """
+ For SCSS stylesheets.
+ """
+
+ name = 'SCSS'
+ aliases = ['scss']
+ filenames = ['*.scss']
+ mimetypes = ['text/x-scss']
+
+ flags = re.IGNORECASE | re.DOTALL
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@import', Keyword, 'value'),
+ (r'@for', Keyword, 'for'),
+ (r'@(debug|warn|if|while)', Keyword, 'value'),
+ (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
+ (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
+ (r'@extend', Keyword, 'selector'),
+ (r'(@media)(\s+)', bygroups(Keyword, Text), 'value'),
+ (r'@[\w-]+', Keyword, 'selector'),
+ (r'(\$[\w-]*\w)([ \t]*:)', bygroups(Name.Variable, Operator), 'value'),
+ (r'(?=[^;{}][;}])', Name.Attribute, 'attr'),
+ (r'(?=[^;{}:]+:[^a-z])', Name.Attribute, 'attr'),
+ default('selector'),
+ ],
+
+ 'attr': [
+ (r'[^\s:="\[]+', Name.Attribute),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'[ \t]*:', Operator, 'value'),
+ default('#pop'),
+ ],
+
+ 'inline-comment': [
+ (r"(\\#|#(?=[^{])|\*(?=[^/])|[^#*])+", Comment.Multiline),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r"\*/", Comment, '#pop'),
+ ],
+ }
+ for group, common in iteritems(common_sass_tokens):
+ tokens[group] = copy.copy(common)
+ tokens['value'].extend([(r'\n', Text), (r'[;{}]', Punctuation, '#pop')])
+ tokens['selector'].extend([(r'\n', Text), (r'[;{}]', Punctuation, '#pop')])
diff --git a/pygments/lexers/d.py b/pygments/lexers/d.py
new file mode 100644
index 00000000..98e01dcf
--- /dev/null
+++ b/pygments/lexers/d.py
@@ -0,0 +1,251 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.d
+ ~~~~~~~~~~~~~~~~~
+
+ Lexers for D languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['DLexer', 'CrocLexer', 'MiniDLexer']
+
+
+class DLexer(RegexLexer):
+ """
+ For D source.
+
+ .. versionadded:: 1.2
+ """
+ name = 'D'
+ filenames = ['*.d', '*.di']
+ aliases = ['d']
+ mimetypes = ['text/x-dsrc']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ # (r'\\\n', Text), # line continuations
+ # Comments
+ (r'//(.*?)\n', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'/\+', Comment.Multiline, 'nested_comment'),
+ # Keywords
+ (words((
+ 'abstract', 'alias', 'align', 'asm', 'assert', 'auto', 'body',
+ 'break', 'case', 'cast', 'catch', 'class', 'const', 'continue',
+ 'debug', 'default', 'delegate', 'delete', 'deprecated', 'do', 'else',
+ 'enum', 'export', 'extern', 'finally', 'final', 'foreach_reverse',
+ 'foreach', 'for', 'function', 'goto', 'if', 'immutable', 'import',
+ 'interface', 'invariant', 'inout', 'in', 'is', 'lazy', 'mixin',
+ 'module', 'new', 'nothrow', 'out', 'override', 'package', 'pragma',
+ 'private', 'protected', 'public', 'pure', 'ref', 'return', 'scope',
+ 'shared', 'static', 'struct', 'super', 'switch', 'synchronized',
+ 'template', 'this', 'throw', 'try', 'typedef', 'typeid', 'typeof',
+ 'union', 'unittest', 'version', 'volatile', 'while', 'with',
+ '__gshared', '__traits', '__vector', '__parameters'),
+ suffix=r'\b'),
+ Keyword),
+ (words((
+ 'bool', 'byte', 'cdouble', 'cent', 'cfloat', 'char', 'creal',
+ 'dchar', 'double', 'float', 'idouble', 'ifloat', 'int', 'ireal',
+ 'long', 'real', 'short', 'ubyte', 'ucent', 'uint', 'ulong',
+ 'ushort', 'void', 'wchar'), suffix=r'\b'),
+ Keyword.Type),
+ (r'(false|true|null)\b', Keyword.Constant),
+ (words((
+ '__FILE__', '__MODULE__', '__LINE__', '__FUNCTION__', '__PRETTY_FUNCTION__',
+ '__DATE__', '__EOF__', '__TIME__', '__TIMESTAMP__', '__VENDOR__',
+ '__VERSION__'), suffix=r'\b'),
+ Keyword.Pseudo),
+ (r'macro\b', Keyword.Reserved),
+ (r'(string|wstring|dstring|size_t|ptrdiff_t)\b', Name.Builtin),
+ # FloatLiteral
+ # -- HexFloat
+ (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
+ r'[pP][+\-]?[0-9_]+[fFL]?[i]?', Number.Float),
+ # -- DecimalFloat
+ (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+ r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[fFL]?[i]?', Number.Float),
+ (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[fFL]?[i]?', Number.Float),
+ # IntegerLiteral
+ # -- Binary
+ (r'0[Bb][01_]+', Number.Bin),
+ # -- Octal
+ (r'0[0-7_]+', Number.Oct),
+ # -- Hexadecimal
+ (r'0[xX][0-9a-fA-F_]+', Number.Hex),
+ # -- Decimal
+ (r'(0|[1-9][0-9_]*)([LUu]|Lu|LU|uL|UL)?', Number.Integer),
+ # CharacterLiteral
+ (r"""'(\\['"?\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\&\w+;|.)'""",
+ String.Char),
+ # StringLiteral
+ # -- WysiwygString
+ (r'r"[^"]*"[cwd]?', String),
+ # -- AlternateWysiwygString
+ (r'`[^`]*`[cwd]?', String),
+ # -- DoubleQuotedString
+ (r'"(\\\\|\\"|[^"])*"[cwd]?', String),
+ # -- EscapeSequence
+ (r"\\(['\"?\\abfnrtv]|x[0-9a-fA-F]{2}|[0-7]{1,3}"
+ r"|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}|&\w+;)",
+ String),
+ # -- HexString
+ (r'x"[0-9a-fA-F_\s]*"[cwd]?', String),
+ # -- DelimitedString
+ (r'q"\[', String, 'delimited_bracket'),
+ (r'q"\(', String, 'delimited_parenthesis'),
+ (r'q"<', String, 'delimited_angle'),
+ (r'q"\{', String, 'delimited_curly'),
+ (r'q"([a-zA-Z_]\w*)\n.*?\n\1"', String),
+ (r'q"(.).*?\1"', String),
+ # -- TokenString
+ (r'q\{', String, 'token_string'),
+ # Attributes
+ (r'@([a-zA-Z_]\w*)?', Name.Decorator),
+ # Tokens
+ (r'(~=|\^=|%=|\*=|==|!>=|!<=|!<>=|!<>|!<|!>|!=|>>>=|>>>|>>=|>>|>='
+ r'|<>=|<>|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.\.|\.\.|/=)'
+ r'|[/.&|\-+<>!()\[\]{}?,;:$=*%^~]', Punctuation),
+ # Identifier
+ (r'[a-zA-Z_]\w*', Name),
+ # Line
+ (r'#line\s.*\n', Comment.Special),
+ ],
+ 'nested_comment': [
+ (r'[^+/]+', Comment.Multiline),
+ (r'/\+', Comment.Multiline, '#push'),
+ (r'\+/', Comment.Multiline, '#pop'),
+ (r'[+/]', Comment.Multiline),
+ ],
+ 'token_string': [
+ (r'\{', Punctuation, 'token_string_nest'),
+ (r'\}', String, '#pop'),
+ include('root'),
+ ],
+ 'token_string_nest': [
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'delimited_bracket': [
+ (r'[^\[\]]+', String),
+ (r'\[', String, 'delimited_inside_bracket'),
+ (r'\]"', String, '#pop'),
+ ],
+ 'delimited_inside_bracket': [
+ (r'[^\[\]]+', String),
+ (r'\[', String, '#push'),
+ (r'\]', String, '#pop'),
+ ],
+ 'delimited_parenthesis': [
+ (r'[^()]+', String),
+ (r'\(', String, 'delimited_inside_parenthesis'),
+ (r'\)"', String, '#pop'),
+ ],
+ 'delimited_inside_parenthesis': [
+ (r'[^()]+', String),
+ (r'\(', String, '#push'),
+ (r'\)', String, '#pop'),
+ ],
+ 'delimited_angle': [
+ (r'[^<>]+', String),
+ (r'<', String, 'delimited_inside_angle'),
+ (r'>"', String, '#pop'),
+ ],
+ 'delimited_inside_angle': [
+ (r'[^<>]+', String),
+ (r'<', String, '#push'),
+ (r'>', String, '#pop'),
+ ],
+ 'delimited_curly': [
+ (r'[^{}]+', String),
+ (r'\{', String, 'delimited_inside_curly'),
+ (r'\}"', String, '#pop'),
+ ],
+ 'delimited_inside_curly': [
+ (r'[^{}]+', String),
+ (r'\{', String, '#push'),
+ (r'\}', String, '#pop'),
+ ],
+ }
+
+
+class CrocLexer(RegexLexer):
+ """
+ For `Croc <http://jfbillingsley.com/croc>`_ source.
+ """
+ name = 'Croc'
+ filenames = ['*.croc']
+ aliases = ['croc']
+ mimetypes = ['text/x-crocsrc']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ # Comments
+ (r'//(.*?)\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'nestedcomment'),
+ # Keywords
+ (words((
+ 'as', 'assert', 'break', 'case', 'catch', 'class', 'continue',
+ 'default', 'do', 'else', 'finally', 'for', 'foreach', 'function',
+ 'global', 'namespace', 'if', 'import', 'in', 'is', 'local',
+ 'module', 'return', 'scope', 'super', 'switch', 'this', 'throw',
+ 'try', 'vararg', 'while', 'with', 'yield'), suffix=r'\b'),
+ Keyword),
+ (r'(false|true|null)\b', Keyword.Constant),
+ # FloatLiteral
+ (r'([0-9][0-9_]*)(?=[.eE])(\.[0-9][0-9_]*)?([eE][+\-]?[0-9_]+)?',
+ Number.Float),
+ # IntegerLiteral
+ # -- Binary
+ (r'0[bB][01][01_]*', Number.Bin),
+ # -- Hexadecimal
+ (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex),
+ # -- Decimal
+ (r'([0-9][0-9_]*)(?![.eE])', Number.Integer),
+ # CharacterLiteral
+ (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-9]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""",
+ String.Char),
+ # StringLiteral
+ # -- WysiwygString
+ (r'@"(""|[^"])*"', String),
+ (r'@`(``|[^`])*`', String),
+ (r"@'(''|[^'])*'", String),
+ # -- DoubleQuotedString
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # Tokens
+ (r'(~=|\^=|%=|\*=|==|!=|>>>=|>>>|>>=|>>|>=|<=>|\?=|-\>'
+ r'|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.|/=)'
+ r'|[-/.&$@|\+<>!()\[\]{}?,;:=*%^~#\\]', Punctuation),
+ # Identifier
+ (r'[a-zA-Z_]\w*', Name),
+ ],
+ 'nestedcomment': [
+ (r'[^*/]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ }
+
+
+class MiniDLexer(CrocLexer):
+ """
+ For MiniD source. MiniD is now known as Croc.
+ """
+ name = 'MiniD'
+ filenames = [] # don't lex .md as MiniD, reserve for Markdown
+ aliases = ['minid']
+ mimetypes = ['text/x-minidsrc']
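To see how the string and token-string states of the D lexer behave, one can dump the token stream directly; a rough sketch, with a made-up D snippet:

from pygments.lexers import get_lexer_by_name

d_code = 'import std.stdio;\nvoid main() { writeln(q{tokens "inside" braces}); }\n'

# get_tokens() yields (token_type, value) pairs
for ttype, value in get_lexer_by_name('d').get_tokens(d_code):
    if value.strip():
        print(ttype, repr(value))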
diff --git a/pygments/lexers/dalvik.py b/pygments/lexers/dalvik.py
index de9b11fa..2f26fa04 100644
--- a/pygments/lexers/dalvik.py
+++ b/pygments/lexers/dalvik.py
@@ -5,13 +5,15 @@
Pygments lexers for Dalvik VM-related languages.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+import re
+
from pygments.lexer import RegexLexer, include, bygroups
from pygments.token import Keyword, Text, Comment, Name, String, Number, \
- Punctuation
+ Punctuation
__all__ = ['SmaliLexer']
@@ -21,7 +23,7 @@ class SmaliLexer(RegexLexer):
For `Smali <http://code.google.com/p/smali/>`_ (Android/Dalvik) assembly
code.
- *New in Pygments 1.6.*
+ .. versionadded:: 1.6
"""
name = 'Smali'
aliases = ['smali']
@@ -63,8 +65,8 @@ class SmaliLexer(RegexLexer):
(r'\s+', Text),
],
'instruction': [
- (r'\b[vp]\d+\b', Name.Builtin), # registers
- (r'\b[a-z][A-Za-z0-9/-]+\s+', Text), # instructions
+ (r'\b[vp]\d+\b', Name.Builtin), # registers
+ (r'\b[a-z][A-Za-z0-9/-]+\s+', Text), # instructions
],
'literal': [
(r'".*"', String),
@@ -73,27 +75,27 @@ class SmaliLexer(RegexLexer):
(r'[0-9]+L?', Number.Integer),
],
'field': [
- (r'(\$?\b)([A-Za-z0-9_$]*)(:)',
+ (r'(\$?\b)([\w$]*)(:)',
bygroups(Punctuation, Name.Variable, Punctuation)),
],
'method': [
- (r'<(?:cl)?init>', Name.Function), # constructor
- (r'(\$?\b)([A-Za-z0-9_$]*)(\()',
+ (r'<(?:cl)?init>', Name.Function), # constructor
+ (r'(\$?\b)([\w$]*)(\()',
bygroups(Punctuation, Name.Function, Punctuation)),
],
'label': [
- (r':[A-Za-z0-9_]+', Name.Label),
+ (r':\w+', Name.Label),
],
'class': [
# class names in the form Lcom/namespace/ClassName;
# I only want to color the ClassName part, so the namespace part is
# treated as 'Text'
- (r'(L)((?:[A-Za-z0-9_$]+/)*)([A-Za-z0-9_$]+)(;)',
+ (r'(L)((?:[\w$]+/)*)([\w$]+)(;)',
bygroups(Keyword.Type, Text, Name.Class, Text)),
],
'punctuation': [
(r'->', Punctuation),
- (r'[{},\(\):=\.-]', Punctuation),
+ (r'[{},():=.-]', Punctuation),
],
'type': [
(r'[ZBSCIJFDV\[]+', Keyword.Type),
@@ -102,3 +104,22 @@ class SmaliLexer(RegexLexer):
(r'#.*?\n', Comment),
],
}
+
+ def analyse_text(text):
+ score = 0
+ if re.search(r'^\s*\.class\s', text, re.MULTILINE):
+ score += 0.5
+ if re.search(r'\b((check-cast|instance-of|throw-verification-error'
+ r')\b|(-to|add|[ais]get|[ais]put|and|cmpl|const|div|'
+ r'if|invoke|move|mul|neg|not|or|rem|return|rsub|shl|'
+ r'shr|sub|ushr)[-/])|{|}', text, re.MULTILINE):
+ score += 0.3
+ if re.search(r'(\.(catchall|epilogue|restart local|prologue)|'
+ r'\b(array-data|class-change-error|declared-synchronized|'
+ r'(field|inline|vtable)@0x[0-9a-fA-F]|generic-error|'
+ r'illegal-class-access|illegal-field-access|'
+ r'illegal-method-access|instantiation-error|no-error|'
+ r'no-such-class|no-such-field|no-such-method|'
+ r'packed-switch|sparse-switch))\b', text, re.MULTILINE):
+ score += 0.6
+ return score
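The analyse_text hook added here feeds Pygments' lexer guessing; a rough way to check the scoring, with an illustrative Smali fragment:

from pygments.lexers import guess_lexer

smali = ('.class public Lcom/example/Foo;\n'
         '.method public run()V\n'
         '    invoke-super {p0}, Ljava/lang/Object;-><init>()V\n'
         '.end method\n')

# the leading '.class' line scores 0.5 and the 'invoke-' form another 0.3,
# so guess_lexer() should favour the Smali lexer for input like this
print(guess_lexer(smali).name)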
diff --git a/pygments/lexers/data.py b/pygments/lexers/data.py
new file mode 100644
index 00000000..c88375d5
--- /dev/null
+++ b/pygments/lexers/data.py
@@ -0,0 +1,530 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.data
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for data file formats.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, LexerContext, \
+ include, bygroups, inherit
+from pygments.token import Text, Comment, Keyword, Name, String, Number, \
+ Punctuation, Literal
+
+__all__ = ['YamlLexer', 'JsonLexer', 'JsonLdLexer']
+
+
+class YamlLexerContext(LexerContext):
+ """Indentation context for the YAML lexer."""
+
+ def __init__(self, *args, **kwds):
+ super(YamlLexerContext, self).__init__(*args, **kwds)
+ self.indent_stack = []
+ self.indent = -1
+ self.next_indent = 0
+ self.block_scalar_indent = None
+
+
+class YamlLexer(ExtendedRegexLexer):
+ """
+ Lexer for `YAML <http://yaml.org/>`_, a human-friendly data serialization
+ language.
+
+ .. versionadded:: 0.11
+ """
+
+ name = 'YAML'
+ aliases = ['yaml']
+ filenames = ['*.yaml', '*.yml']
+ mimetypes = ['text/x-yaml']
+
+ def something(token_class):
+ """Do not produce empty tokens."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if not text:
+ return
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def reset_indent(token_class):
+ """Reset the indentation levels."""
+ def callback(lexer, match, context):
+ text = match.group()
+ context.indent_stack = []
+ context.indent = -1
+ context.next_indent = 0
+ context.block_scalar_indent = None
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def save_indent(token_class, start=False):
+ """Save a possible indentation level."""
+ def callback(lexer, match, context):
+ text = match.group()
+ extra = ''
+ if start:
+ context.next_indent = len(text)
+ if context.next_indent < context.indent:
+ while context.next_indent < context.indent:
+ context.indent = context.indent_stack.pop()
+ if context.next_indent > context.indent:
+ extra = text[context.indent:]
+ text = text[:context.indent]
+ else:
+ context.next_indent += len(text)
+ if text:
+ yield match.start(), token_class, text
+ if extra:
+ yield match.start()+len(text), token_class.Error, extra
+ context.pos = match.end()
+ return callback
+
+ def set_indent(token_class, implicit=False):
+ """Set the previously saved indentation level."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if context.indent < context.next_indent:
+ context.indent_stack.append(context.indent)
+ context.indent = context.next_indent
+ if not implicit:
+ context.next_indent += len(text)
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def set_block_scalar_indent(token_class):
+ """Set an explicit indentation level for a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ context.block_scalar_indent = None
+ if not text:
+ return
+ increment = match.group(1)
+ if increment:
+ current_indent = max(context.indent, 0)
+ increment = int(increment)
+ context.block_scalar_indent = current_indent + increment
+ if text:
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def parse_block_scalar_empty_line(indent_token_class, content_token_class):
+ """Process an empty line in a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if (context.block_scalar_indent is None or
+ len(text) <= context.block_scalar_indent):
+ if text:
+ yield match.start(), indent_token_class, text
+ else:
+ indentation = text[:context.block_scalar_indent]
+ content = text[context.block_scalar_indent:]
+ yield match.start(), indent_token_class, indentation
+ yield (match.start()+context.block_scalar_indent,
+ content_token_class, content)
+ context.pos = match.end()
+ return callback
+
+ def parse_block_scalar_indent(token_class):
+ """Process indentation spaces in a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if context.block_scalar_indent is None:
+ if len(text) <= max(context.indent, 0):
+ context.stack.pop()
+ context.stack.pop()
+ return
+ context.block_scalar_indent = len(text)
+ else:
+ if len(text) < context.block_scalar_indent:
+ context.stack.pop()
+ context.stack.pop()
+ return
+ if text:
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def parse_plain_scalar_indent(token_class):
+ """Process indentation spaces in a plain scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if len(text) <= context.indent:
+ context.stack.pop()
+ context.stack.pop()
+ return
+ if text:
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ tokens = {
+ # the root rules
+ 'root': [
+ # ignored whitespaces
+ (r'[ ]+(?=#|$)', Text),
+ # line breaks
+ (r'\n+', Text),
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # the '%YAML' directive
+ (r'^%YAML(?=[ ]|$)', reset_indent(Name.Tag), 'yaml-directive'),
+ # the %TAG directive
+ (r'^%TAG(?=[ ]|$)', reset_indent(Name.Tag), 'tag-directive'),
+ # document start and document end indicators
+ (r'^(?:---|\.\.\.)(?=[ ]|$)', reset_indent(Name.Namespace),
+ 'block-line'),
+ # indentation spaces
+ (r'[ ]*(?!\s|$)', save_indent(Text, start=True),
+ ('block-line', 'indentation')),
+ ],
+
+ # trailing whitespaces after directives or a block scalar indicator
+ 'ignored-line': [
+ # ignored whitespaces
+ (r'[ ]+(?=#|$)', Text),
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # line break
+ (r'\n', Text, '#pop:2'),
+ ],
+
+ # the %YAML directive
+ 'yaml-directive': [
+ # the version number
+ (r'([ ]+)([0-9]+\.[0-9]+)',
+ bygroups(Text, Number), 'ignored-line'),
+ ],
+
+ # the %TAG directive
+ 'tag-directive': [
+ # a tag handle and the corresponding prefix
+ (r'([ ]+)(!|![\w-]*!)'
+ r'([ ]+)(!|!?[\w;/?:@&=+$,.!~*\'()\[\]%-]+)',
+ bygroups(Text, Keyword.Type, Text, Keyword.Type),
+ 'ignored-line'),
+ ],
+
+ # block scalar indicators and indentation spaces
+ 'indentation': [
+ # trailing whitespaces are ignored
+ (r'[ ]*$', something(Text), '#pop:2'),
+ # whitespaces preceding block collection indicators
+ (r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text)),
+ # block collection indicators
+ (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)),
+ # the beginning of a block line
+ (r'[ ]*', save_indent(Text), '#pop'),
+ ],
+
+ # an indented line in the block context
+ 'block-line': [
+ # the line end
+ (r'[ ]*(?=#|$)', something(Text), '#pop'),
+ # whitespaces separating tokens
+ (r'[ ]+', Text),
+ # tags, anchors and aliases,
+ include('descriptors'),
+ # block collections and scalars
+ include('block-nodes'),
+ # flow collections and quoted scalars
+ include('flow-nodes'),
+ # a plain scalar
+ (r'(?=[^\s?:,\[\]{}#&*!|>\'"%@`-]|[?:-]\S)',
+ something(Name.Variable),
+ 'plain-scalar-in-block-context'),
+ ],
+
+ # tags, anchors, aliases
+ 'descriptors': [
+ # a full-form tag
+ (r'!<[\w;/?:@&=+$,.!~*\'()\[\]%-]+>', Keyword.Type),
+ # a tag in the form '!', '!suffix' or '!handle!suffix'
+ (r'!(?:[\w-]+)?'
+ r'(?:![\w;/?:@&=+$,.!~*\'()\[\]%-]+)?', Keyword.Type),
+ # an anchor
+ (r'&[\w-]+', Name.Label),
+ # an alias
+ (r'\*[\w-]+', Name.Variable),
+ ],
+
+ # block collections and scalars
+ 'block-nodes': [
+ # implicit key
+ (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)),
+ # literal and folded scalars
+ (r'[|>]', Punctuation.Indicator,
+ ('block-scalar-content', 'block-scalar-header')),
+ ],
+
+ # flow collections and quoted scalars
+ 'flow-nodes': [
+ # a flow sequence
+ (r'\[', Punctuation.Indicator, 'flow-sequence'),
+ # a flow mapping
+ (r'\{', Punctuation.Indicator, 'flow-mapping'),
+ # a single-quoted scalar
+ (r'\'', String, 'single-quoted-scalar'),
+ # a double-quoted scalar
+ (r'\"', String, 'double-quoted-scalar'),
+ ],
+
+ # the content of a flow collection
+ 'flow-collection': [
+ # whitespaces
+ (r'[ ]+', Text),
+ # line breaks
+ (r'\n+', Text),
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # simple indicators
+ (r'[?:,]', Punctuation.Indicator),
+ # tags, anchors and aliases
+ include('descriptors'),
+ # nested collections and quoted scalars
+ include('flow-nodes'),
+ # a plain scalar
+ (r'(?=[^\s?:,\[\]{}#&*!|>\'"%@`])',
+ something(Name.Variable),
+ 'plain-scalar-in-flow-context'),
+ ],
+
+ # a flow sequence indicated by '[' and ']'
+ 'flow-sequence': [
+ # include flow collection rules
+ include('flow-collection'),
+ # the closing indicator
+ (r'\]', Punctuation.Indicator, '#pop'),
+ ],
+
+ # a flow mapping indicated by '{' and '}'
+ 'flow-mapping': [
+ # include flow collection rules
+ include('flow-collection'),
+ # the closing indicator
+ (r'\}', Punctuation.Indicator, '#pop'),
+ ],
+
+ # block scalar lines
+ 'block-scalar-content': [
+ # line break
+ (r'\n', Text),
+ # empty line
+ (r'^[ ]+$',
+ parse_block_scalar_empty_line(Text, Name.Constant)),
+ # indentation spaces (we may leave the state here)
+ (r'^[ ]*', parse_block_scalar_indent(Text)),
+ # line content
+ (r'[\S\t ]+', Name.Constant),
+ ],
+
+ # the header of a literal or folded scalar (indentation indicator and chomping flag)
+ 'block-scalar-header': [
+ # indentation indicator followed by chomping flag
+ (r'([1-9])?[+-]?(?=[ ]|$)',
+ set_block_scalar_indent(Punctuation.Indicator),
+ 'ignored-line'),
+ # chomping flag followed by indentation indicator
+ (r'[+-]?([1-9])?(?=[ ]|$)',
+ set_block_scalar_indent(Punctuation.Indicator),
+ 'ignored-line'),
+ ],
+
+ # ignored and regular whitespaces in quoted scalars
+ 'quoted-scalar-whitespaces': [
+ # leading and trailing whitespaces are ignored
+ (r'^[ ]+', Text),
+ (r'[ ]+$', Text),
+ # line breaks are ignored
+ (r'\n+', Text),
+ # other whitespaces are a part of the value
+ (r'[ ]+', Name.Variable),
+ ],
+
+ # single-quoted scalars
+ 'single-quoted-scalar': [
+ # include whitespace and line break rules
+ include('quoted-scalar-whitespaces'),
+ # escaping of the quote character
+ (r'\'\'', String.Escape),
+ # regular non-whitespace characters
+ (r'[^\s\']+', String),
+ # the closing quote
+ (r'\'', String, '#pop'),
+ ],
+
+ # double-quoted scalars
+ 'double-quoted-scalar': [
+ # include whitespace and line break rules
+ include('quoted-scalar-whitespaces'),
+ # escaping of special characters
+ (r'\\[0abt\tn\nvfre "\\N_LP]', String),
+ # escape codes
+ (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})',
+ String.Escape),
+ # regular non-whitespace characters
+ (r'[^\s"\\]+', String),
+ # the closing quote
+ (r'"', String, '#pop'),
+ ],
+
+ # the beginning of a new line while scanning a plain scalar
+ 'plain-scalar-in-block-context-new-line': [
+ # empty lines
+ (r'^[ ]+$', Text),
+ # line breaks
+ (r'\n+', Text),
+ # document start and document end indicators
+ (r'^(?=---|\.\.\.)', something(Name.Namespace), '#pop:3'),
+ # indentation spaces (we may leave the block line state here)
+ (r'^[ ]*', parse_plain_scalar_indent(Text), '#pop'),
+ ],
+
+ # a plain scalar in the block context
+ 'plain-scalar-in-block-context': [
+ # the scalar ends with the ':' indicator
+ (r'[ ]*(?=:[ ]|:$)', something(Text), '#pop'),
+ # the scalar ends with whitespaces followed by a comment
+ (r'[ ]+(?=#)', Text, '#pop'),
+ # trailing whitespaces are ignored
+ (r'[ ]+$', Text),
+ # line breaks are ignored
+ (r'\n+', Text, 'plain-scalar-in-block-context-new-line'),
+ # other whitespaces are a part of the value
+ (r'[ ]+', Literal.Scalar.Plain),
+ # regular non-whitespace characters
+ (r'(?::(?!\s)|[^\s:])+', Literal.Scalar.Plain),
+ ],
+
+ # a plain scalar in the flow context
+ 'plain-scalar-in-flow-context': [
+ # the scalar ends with an indicator character
+ (r'[ ]*(?=[,:?\[\]{}])', something(Text), '#pop'),
+ # the scalar ends with a comment
+ (r'[ ]+(?=#)', Text, '#pop'),
+ # leading and trailing whitespaces are ignored
+ (r'^[ ]+', Text),
+ (r'[ ]+$', Text),
+ # line breaks are ignored
+ (r'\n+', Text),
+ # other whitespaces are a part of the value
+ (r'[ ]+', Name.Variable),
+ # regular non-whitespace characters
+ (r'[^\s,:?\[\]{}]+', Name.Variable),
+ ],
+
+ }
+
+ def get_tokens_unprocessed(self, text=None, context=None):
+ if context is None:
+ context = YamlLexerContext(text, 0)
+ return super(YamlLexer, self).get_tokens_unprocessed(text, context)
+
+
+class JsonLexer(RegexLexer):
+ """
+ For JSON data structures.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'JSON'
+ aliases = ['json']
+ filenames = ['*.json']
+ mimetypes = ['application/json']
+
+ flags = re.DOTALL
+
+ # integer part of a number
+ int_part = r'-?(0|[1-9]\d*)'
+
+ # fractional part of a number
+ frac_part = r'\.\d+'
+
+ # exponential part of a number
+ exp_part = r'[eE](\+|-)?\d+'
+
+ tokens = {
+ 'whitespace': [
+ (r'\s+', Text),
+ ],
+
+ # represents a simple terminal value
+ 'simplevalue': [
+ (r'(true|false|null)\b', Keyword.Constant),
+ (('%(int_part)s(%(frac_part)s%(exp_part)s|'
+ '%(exp_part)s|%(frac_part)s)') % vars(),
+ Number.Float),
+ (int_part, Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ ],
+
+
+ # the right hand side of an object, after the attribute name
+ 'objectattribute': [
+ include('value'),
+ (r':', Punctuation),
+ # comma terminates the attribute but expects more
+ (r',', Punctuation, '#pop'),
+ # a closing brace terminates the entire object, so pop twice
+ (r'\}', Punctuation, ('#pop', '#pop')),
+ ],
+
+ # a json object - { attr, attr, ... }
+ 'objectvalue': [
+ include('whitespace'),
+ (r'"(\\\\|\\"|[^"])*"', Name.Tag, 'objectattribute'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+
+ # a json array - [ value, value, ... ]
+ 'arrayvalue': [
+ include('whitespace'),
+ include('value'),
+ (r',', Punctuation),
+ (r'\]', Punctuation, '#pop'),
+ ],
+
+ # a json value - either a simple value or a complex value (object or array)
+ 'value': [
+ include('whitespace'),
+ include('simplevalue'),
+ (r'\{', Punctuation, 'objectvalue'),
+ (r'\[', Punctuation, 'arrayvalue'),
+ ],
+
+ # the root of a json document should be a value
+ 'root': [
+ include('value'),
+ ],
+ }
+
+class JsonLdLexer(JsonLexer):
+ """
+ For `JSON-LD <http://json-ld.org/>`_ linked data.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'JSON-LD'
+ aliases = ['jsonld', 'json-ld']
+ filenames = ['*.jsonld']
+ mimetypes = ['application/ld+json']
+
+ tokens = {
+ 'objectvalue': [
+ (r'"@(context|id|value|language|type|container|list|set|'
+ r'reverse|index|base|vocab|graph)"', Name.Decorator,
+ 'objectattribute'),
+ inherit,
+ ],
+ }
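A short sketch exercising the JSON lexer defined above; the document below is arbitrary:

from pygments.lexers import get_lexer_by_name
from pygments.token import Name, Number, String

json_doc = '{"name": "pygments", "version": 2.0, "tags": ["lexer", "json"]}'

for ttype, value in get_lexer_by_name('json').get_tokens(json_doc):
    # object keys come out as Name.Tag, values as String.Double or Number
    if ttype in (Name.Tag, String.Double, Number.Integer, Number.Float):
        print(ttype, value)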
diff --git a/pygments/lexers/diff.py b/pygments/lexers/diff.py
new file mode 100644
index 00000000..d3b1589d
--- /dev/null
+++ b/pygments/lexers/diff.py
@@ -0,0 +1,106 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.diff
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for diff/patch formats.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, Generic, \
+ Literal
+
+__all__ = ['DiffLexer', 'DarcsPatchLexer']
+
+
+class DiffLexer(RegexLexer):
+ """
+ Lexer for unified or context-style diffs or patches.
+ """
+
+ name = 'Diff'
+ aliases = ['diff', 'udiff']
+ filenames = ['*.diff', '*.patch']
+ mimetypes = ['text/x-diff', 'text/x-patch']
+
+ tokens = {
+ 'root': [
+ (r' .*\n', Text),
+ (r'\+.*\n', Generic.Inserted),
+ (r'-.*\n', Generic.Deleted),
+ (r'!.*\n', Generic.Strong),
+ (r'@.*\n', Generic.Subheading),
+ (r'([Ii]ndex|diff).*\n', Generic.Heading),
+ (r'=.*\n', Generic.Heading),
+ (r'.*\n', Text),
+ ]
+ }
+
+ def analyse_text(text):
+ if text[:7] == 'Index: ':
+ return True
+ if text[:5] == 'diff ':
+ return True
+ if text[:4] == '--- ':
+ return 0.9
+
+
+class DarcsPatchLexer(RegexLexer):
+ """
+ DarcsPatchLexer is a lexer for the various versions of the darcs patch
+ format. Examples of this format are produced by commands such as
+ ``darcs annotate --patch`` and ``darcs send``.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'Darcs Patch'
+ aliases = ['dpatch']
+ filenames = ['*.dpatch', '*.darcspatch']
+
+ DPATCH_KEYWORDS = ('hunk', 'addfile', 'adddir', 'rmfile', 'rmdir', 'move',
+ 'replace')
+
+ tokens = {
+ 'root': [
+ (r'<', Operator),
+ (r'>', Operator),
+ (r'\{', Operator),
+ (r'\}', Operator),
+ (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)(\])',
+ bygroups(Operator, Keyword, Name, Text, Name, Operator,
+ Literal.Date, Text, Operator)),
+ (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)',
+ bygroups(Operator, Keyword, Name, Text, Name, Operator,
+ Literal.Date, Text), 'comment'),
+ (r'New patches:', Generic.Heading),
+ (r'Context:', Generic.Heading),
+ (r'Patch bundle hash:', Generic.Heading),
+ (r'(\s*)(%s)(.*\n)' % '|'.join(DPATCH_KEYWORDS),
+ bygroups(Text, Keyword, Text)),
+ (r'\+', Generic.Inserted, "insert"),
+ (r'-', Generic.Deleted, "delete"),
+ (r'.*\n', Text),
+ ],
+ 'comment': [
+ (r'[^\]].*\n', Comment),
+ (r'\]', Operator, "#pop"),
+ ],
+ 'specialText': [ # darcs add [_CODE_] special operators for clarity
+ (r'\n', Text, "#pop"), # line-based
+ (r'\[_[^_]*_]', Operator),
+ ],
+ 'insert': [
+ include('specialText'),
+ (r'\[', Generic.Inserted),
+ (r'[^\n\[]+', Generic.Inserted),
+ ],
+ 'delete': [
+ include('specialText'),
+ (r'\[', Generic.Deleted),
+ (r'[^\n\[]+', Generic.Deleted),
+ ],
+ }
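The Diff lexer's analyse_text keys off the first line of the input; for example, with a made-up patch:

from pygments.lexers import guess_lexer

patch = ('diff --git a/setup.py b/setup.py\n'
         '--- a/setup.py\n'
         '+++ b/setup.py\n'
         '@@ -1 +1 @@\n'
         '-old\n'
         '+new\n')

# a leading 'diff ' makes analyse_text return True (treated as 1.0),
# so guess_lexer() should settle on the Diff lexer here
print(guess_lexer(patch).name)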
diff --git a/pygments/lexers/dotnet.py b/pygments/lexers/dotnet.py
index a603086b..21f8d1e4 100644
--- a/pygments/lexers/dotnet.py
+++ b/pygments/lexers/dotnet.py
@@ -5,19 +5,19 @@
Lexers for .net languages.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, DelegatingLexer, bygroups, include, \
- using, this
+ using, this, default
from pygments.token import Punctuation, \
- Text, Comment, Operator, Keyword, Name, String, Number, Literal, Other
-from pygments.util import get_choice_opt
+ Text, Comment, Operator, Keyword, Name, String, Number, Literal, Other
+from pygments.util import get_choice_opt, iteritems
from pygments import unistring as uni
-from pygments.lexers.web import XmlLexer
+from pygments.lexers.html import XmlLexer
__all__ = ['CSharpLexer', 'NemerleLexer', 'BooLexer', 'VbNetLexer',
'CSharpAspxLexer', 'VbNetAspxLexer', 'FSharpLexer']
@@ -44,24 +44,24 @@ class CSharpLexer(RegexLexer):
The default value is ``basic``.
- *New in Pygments 0.8.*
+ .. versionadded:: 0.8
"""
name = 'C#'
aliases = ['csharp', 'c#']
filenames = ['*.cs']
- mimetypes = ['text/x-csharp'] # inferred
+ mimetypes = ['text/x-csharp'] # inferred
flags = re.MULTILINE | re.DOTALL | re.UNICODE
- # for the range of allowed unicode characters in identifiers,
- # see http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
+ # for the range of allowed unicode characters in identifiers, see
+ # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
levels = {
- 'none': '@?[_a-zA-Z][a-zA-Z0-9_]*',
- 'basic': ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
- '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
- uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
+ 'none': '@?[_a-zA-Z]\w*',
+ 'basic': ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
+ '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
+ 'Cf', 'Mn', 'Mc') + ']*'),
'full': ('@?(?:_|[^' +
uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
+ '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
@@ -71,17 +71,17 @@ class CSharpLexer(RegexLexer):
tokens = {}
token_variants = True
- for levelname, cs_ident in levels.items():
+ for levelname, cs_ident in iteritems(levels):
tokens[levelname] = {
'root': [
# method names
- (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
- r'(' + cs_ident + ')' # method name
+ (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
+ r'(' + cs_ident + ')' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Punctuation)),
(r'^\s*\[.*?\]', Name.Attribute),
(r'[^\S\n]+', Text),
- (r'\\\n', Text), # line continuation
+ (r'\\\n', Text), # line continuation
(r'//.*?\n', Comment.Single),
(r'/[*].*?[*]/', Comment.Multiline),
(r'\n', Text),
@@ -117,16 +117,17 @@ class CSharpLexer(RegexLexer):
(cs_ident, Name),
],
'class': [
- (cs_ident, Name.Class, '#pop')
+ (cs_ident, Name.Class, '#pop'),
+ default('#pop'),
],
'namespace': [
- (r'(?=\()', Text, '#pop'), # using (resource)
- ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
+ (r'(?=\()', Text, '#pop'), # using (resource)
+ ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop'),
]
}
def __init__(self, **options):
- level = get_choice_opt(options, 'unicodelevel', self.tokens.keys(), 'basic')
+ level = get_choice_opt(options, 'unicodelevel', list(self.tokens), 'basic')
if level not in self._all_tokens:
# compile the regexes now
self._tokens = self.__class__.process_tokendef(level)
@@ -156,44 +157,44 @@ class NemerleLexer(RegexLexer):
The default value is ``basic``.
- *New in Pygments 1.5.*
+ .. versionadded:: 1.5
"""
name = 'Nemerle'
aliases = ['nemerle']
filenames = ['*.n']
- mimetypes = ['text/x-nemerle'] # inferred
+ mimetypes = ['text/x-nemerle'] # inferred
flags = re.MULTILINE | re.DOTALL | re.UNICODE
# for the range of allowed unicode characters in identifiers, see
# http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
- levels = dict(
- none = '@?[_a-zA-Z][a-zA-Z0-9_]*',
- basic = ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
- '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
- uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
- full = ('@?(?:_|[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo',
- 'Nl') + '])'
- + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
- 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
- )
+ levels = {
+ 'none': '@?[_a-zA-Z]\w*',
+ 'basic': ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
+ '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
+ 'Cf', 'Mn', 'Mc') + ']*'),
+ 'full': ('@?(?:_|[^' +
+ uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
+ + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
+ 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
+ }
tokens = {}
token_variants = True
- for levelname, cs_ident in levels.items():
+ for levelname, cs_ident in iteritems(levels):
tokens[levelname] = {
'root': [
# method names
- (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
- r'(' + cs_ident + ')' # method name
+ (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
+ r'(' + cs_ident + ')' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Punctuation)),
(r'^\s*\[.*?\]', Name.Attribute),
(r'[^\S\n]+', Text),
- (r'\\\n', Text), # line continuation
+ (r'\\\n', Text), # line continuation
(r'//.*?\n', Comment.Single),
(r'/[*].*?[*]/', Comment.Multiline),
(r'\n', Text),
@@ -249,7 +250,7 @@ class NemerleLexer(RegexLexer):
(cs_ident, Name.Class, '#pop')
],
'namespace': [
- (r'(?=\()', Text, '#pop'), # using (resource)
+ (r'(?=\()', Text, '#pop'), # using (resource)
('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
],
'splice-string': [
@@ -284,7 +285,7 @@ class NemerleLexer(RegexLexer):
}
def __init__(self, **options):
- level = get_choice_opt(options, 'unicodelevel', self.tokens.keys(),
+ level = get_choice_opt(options, 'unicodelevel', list(self.tokens),
'basic')
if level not in self._all_tokens:
# compile the regexes now
@@ -336,9 +337,9 @@ class BooLexer(RegexLexer):
(r'"""(\\\\|\\"|.*?)"""', String.Double),
(r'"(\\\\|\\"|[^"]*?)"', String.Double),
(r"'(\\\\|\\'|[^']*?)'", String.Single),
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ (r'[a-zA-Z_]\w*', Name),
(r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
- (r'[0-9][0-9\.]*(ms?|d|h|s)', Number),
+ (r'[0-9][0-9.]*(ms?|d|h|s)', Number),
(r'0\d+', Number.Oct),
(r'0x[a-fA-F0-9]+', Number.Hex),
(r'\d+L', Number.Integer.Long),
@@ -351,13 +352,13 @@ class BooLexer(RegexLexer):
('[*/]', Comment.Multiline)
],
'funcname': [
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
+ ('[a-zA-Z_]\w*', Name.Function, '#pop')
],
'classname': [
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+ ('[a-zA-Z_]\w*', Name.Class, '#pop')
],
'namespace': [
- ('[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace, '#pop')
+ ('[a-zA-Z_][\w.]*', Name.Namespace, '#pop')
]
}
@@ -372,7 +373,11 @@ class VbNetLexer(RegexLexer):
name = 'VB.net'
aliases = ['vb.net', 'vbnet']
filenames = ['*.vb', '*.bas']
- mimetypes = ['text/x-vbnet', 'text/x-vba'] # (?)
+ mimetypes = ['text/x-vbnet', 'text/x-vba'] # (?)
+
+ uni_name = '[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' + \
+ '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
+ 'Cf', 'Mn', 'Mc') + ']*'
flags = re.MULTILINE | re.IGNORECASE
tokens = {
@@ -382,11 +387,11 @@ class VbNetLexer(RegexLexer):
(r'\n', Text),
(r'rem\b.*?\n', Comment),
(r"'.*?\n", Comment),
- (r'#If\s.*?\sThen|#ElseIf\s.*?\sThen|#End\s+If|#Const|'
+ (r'#If\s.*?\sThen|#ElseIf\s.*?\sThen|#Else|#End\s+If|#Const|'
r'#ExternalSource.*?\n|#End\s+ExternalSource|'
r'#Region.*?\n|#End\s+Region|#ExternalChecksum',
Comment.Preproc),
- (r'[\(\){}!#,.:]', Punctuation),
+ (r'[(){}!#,.:]', Punctuation),
(r'Option\s+(Strict|Explicit|Compare)\s+'
r'(On|Off|Binary|Text)', Keyword.Declaration),
(r'(?<!\.)(AddHandler|Alias|'
@@ -422,16 +427,16 @@ class VbNetLexer(RegexLexer):
(r'(?<!\.)(AddressOf|And|AndAlso|As|GetType|In|Is|IsNot|Like|Mod|'
r'Or|OrElse|TypeOf|Xor)\b', Operator.Word),
(r'&=|[*]=|/=|\\=|\^=|\+=|-=|<<=|>>=|<<|>>|:=|'
- r'<=|>=|<>|[-&*/\\^+=<>]',
+ r'<=|>=|<>|[-&*/\\^+=<>\[\]]',
Operator),
('"', String, 'string'),
- ('[a-zA-Z_][a-zA-Z0-9_]*[%&@!#$]?', Name),
+ (r'_\n', Text), # Line continuation (must be before Name)
+ (uni_name + '[%&@!#$]?', Name),
('#.*?#', Literal.Date),
- (r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
+ (r'(\d+\.\d*|\d*\.\d+)(F[+-]?[0-9]+)?', Number.Float),
(r'\d+([SILDFR]|US|UI|UL)?', Number.Integer),
(r'&H[0-9a-f]+([SILDFR]|US|UI|UL)?', Number.Integer),
(r'&O[0-7]+([SILDFR]|US|UI|UL)?', Number.Integer),
- (r'_\n', Text), # Line continuation
],
'string': [
(r'""', String),
@@ -439,26 +444,32 @@ class VbNetLexer(RegexLexer):
(r'[^"]+', String),
],
'dim': [
- (r'[a-z_][a-z0-9_]*', Name.Variable, '#pop'),
- (r'', Text, '#pop'), # any other syntax
+ (uni_name, Name.Variable, '#pop'),
+ default('#pop'), # any other syntax
],
'funcname': [
- (r'[a-z_][a-z0-9_]*', Name.Function, '#pop'),
+ (uni_name, Name.Function, '#pop'),
],
'classname': [
- (r'[a-z_][a-z0-9_]*', Name.Class, '#pop'),
+ (uni_name, Name.Class, '#pop'),
],
'namespace': [
- (r'[a-z_][a-z0-9_.]*', Name.Namespace, '#pop'),
+ (uni_name, Name.Namespace),
+ (r'\.', Name.Namespace),
+ default('#pop'),
],
'end': [
(r'\s+', Text),
(r'(Function|Sub|Property|Class|Structure|Enum|Module|Namespace)\b',
Keyword, '#pop'),
- (r'', Text, '#pop'),
+ default('#pop'),
]
}
+ def analyse_text(text):
+ if re.search(r'^\s*(#If|Module|Namespace)', text, re.MULTILINE):
+ return 0.5
+
class GenericAspxLexer(RegexLexer):
"""
@@ -483,10 +494,10 @@ class GenericAspxLexer(RegexLexer):
}
-#TODO support multiple languages within the same source file
+# TODO support multiple languages within the same source file
class CSharpAspxLexer(DelegatingLexer):
"""
- Lexer for highligting C# within ASP.NET pages.
+ Lexer for highlighting C# within ASP.NET pages.
"""
name = 'aspx-cs'
@@ -495,7 +506,7 @@ class CSharpAspxLexer(DelegatingLexer):
mimetypes = []
def __init__(self, **options):
- super(CSharpAspxLexer, self).__init__(CSharpLexer,GenericAspxLexer,
+ super(CSharpAspxLexer, self).__init__(CSharpLexer, GenericAspxLexer,
**options)
def analyse_text(text):
@@ -507,7 +518,7 @@ class CSharpAspxLexer(DelegatingLexer):
class VbNetAspxLexer(DelegatingLexer):
"""
- Lexer for highligting Visual Basic.net within ASP.NET pages.
+ Lexer for highlighting Visual Basic.net within ASP.NET pages.
"""
name = 'aspx-vb'
@@ -516,8 +527,8 @@ class VbNetAspxLexer(DelegatingLexer):
mimetypes = []
def __init__(self, **options):
- super(VbNetAspxLexer, self).__init__(VbNetLexer,GenericAspxLexer,
- **options)
+ super(VbNetAspxLexer, self).__init__(VbNetLexer, GenericAspxLexer,
+ **options)
def analyse_text(text):
if re.search(r'Page\s*Language="Vb"', text, re.I) is not None:
@@ -531,7 +542,10 @@ class FSharpLexer(RegexLexer):
"""
For the F# language (version 3.0).
- *New in Pygments 1.5.*
+ String handling follows the F# 3.0 specification:
+ http://research.microsoft.com/en-us/um/cambridge/projects/fsharp/manual/spec.html#_Toc335818775
+
+ .. versionadded:: 1.5
"""
name = 'FSharp'
@@ -540,15 +554,15 @@ class FSharpLexer(RegexLexer):
mimetypes = ['text/x-fsharp']
keywords = [
- 'abstract', 'as', 'assert', 'base', 'begin', 'class', 'default',
- 'delegate', 'do!', 'do', 'done', 'downcast', 'downto', 'elif', 'else',
- 'end', 'exception', 'extern', 'false', 'finally', 'for', 'function',
- 'fun', 'global', 'if', 'inherit', 'inline', 'interface', 'internal',
- 'in', 'lazy', 'let!', 'let', 'match', 'member', 'module', 'mutable',
- 'namespace', 'new', 'null', 'of', 'open', 'override', 'private', 'public',
- 'rec', 'return!', 'return', 'select', 'static', 'struct', 'then', 'to',
- 'true', 'try', 'type', 'upcast', 'use!', 'use', 'val', 'void', 'when',
- 'while', 'with', 'yield!', 'yield',
+ 'abstract', 'as', 'assert', 'base', 'begin', 'class', 'default',
+ 'delegate', 'do!', 'do', 'done', 'downcast', 'downto', 'elif', 'else',
+ 'end', 'exception', 'extern', 'false', 'finally', 'for', 'function',
+ 'fun', 'global', 'if', 'inherit', 'inline', 'interface', 'internal',
+ 'in', 'lazy', 'let!', 'let', 'match', 'member', 'module', 'mutable',
+ 'namespace', 'new', 'null', 'of', 'open', 'override', 'private', 'public',
+ 'rec', 'return!', 'return', 'select', 'static', 'struct', 'then', 'to',
+ 'true', 'try', 'type', 'upcast', 'use!', 'use', 'val', 'void', 'when',
+ 'while', 'with', 'yield!', 'yield',
]
# Reserved words; cannot hurt to color them as keywords too.
keywords += [
@@ -559,10 +573,10 @@ class FSharpLexer(RegexLexer):
'virtual', 'volatile',
]
keyopts = [
- '!=', '#', '&&', '&', '\(', '\)', '\*', '\+', ',', '-\.',
- '->', '-', '\.\.', '\.', '::', ':=', ':>', ':', ';;', ';', '<-',
- '<\]', '<', '>\]', '>', '\?\?', '\?', '\[<', '\[\|', '\[', '\]',
- '_', '`', '{', '\|\]', '\|', '}', '~', '<@@', '<@', '=', '@>', '@@>',
+ '!=', '#', '&&', '&', '\(', '\)', '\*', '\+', ',', '-\.',
+ '->', '-', '\.\.', '\.', '::', ':=', ':>', ':', ';;', ';', '<-',
+ '<\]', '<', '>\]', '>', '\?\?', '\?', '\[<', '\[\|', '\[', '\]',
+ '_', '`', '\{', '\|\]', '\|', '\}', '~', '<@@', '<@', '=', '@>', '@@>',
]
operators = r'[!$%&*+\./:<=>?@^|~-]'
@@ -581,7 +595,7 @@ class FSharpLexer(RegexLexer):
tokens = {
'escape-sequence': [
- (r'\\[\\\"\'ntbrafv]', String.Escape),
+ (r'\\[\\"\'ntbrafv]', String.Escape),
(r'\\[0-9]{3}', String.Escape),
(r'\\u[0-9a-fA-F]{4}', String.Escape),
(r'\\U[0-9a-fA-F]{8}', String.Escape),
@@ -589,9 +603,9 @@ class FSharpLexer(RegexLexer):
'root': [
(r'\s+', Text),
(r'\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\b(?<!\.)([A-Z][A-Za-z0-9_\']*)(?=\s*\.)',
+ (r'\b(?<!\.)([A-Z][\w\']*)(?=\s*\.)',
Name.Namespace, 'dotted'),
- (r'\b([A-Z][A-Za-z0-9_\']*)', Name),
+ (r'\b([A-Z][\w\']*)', Name),
(r'///.*?\n', String.Doc),
(r'//.*?\n', Comment.Single),
(r'\(\*(?!\))', Comment, 'comment'),
@@ -600,15 +614,16 @@ class FSharpLexer(RegexLexer):
(r'"""', String, 'tqs'),
(r'"', String, 'string'),
- (r'\b(open|module)(\s+)([a-zA-Z0-9_.]+)',
+ (r'\b(open|module)(\s+)([\w.]+)',
bygroups(Keyword, Text, Name.Namespace)),
- (r'\b(let!?)(\s+)([a-zA-Z0-9_]+)',
+ (r'\b(let!?)(\s+)(\w+)',
bygroups(Keyword, Text, Name.Variable)),
- (r'\b(type)(\s+)([a-zA-Z0-9_]+)',
+ (r'\b(type)(\s+)(\w+)',
bygroups(Keyword, Text, Name.Class)),
- (r'\b(member|override)(\s+)([a-zA-Z0-9_]+)(\.)([a-zA-Z0-9_]+)',
+ (r'\b(member|override)(\s+)(\w+)(\.)(\w+)',
bygroups(Keyword, Text, Name, Punctuation, Name.Function)),
(r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ (r'``([^`\n\r\t]|`[^`\n\r\t])+``', Name),
(r'(%s)' % '|'.join(keyopts), Operator),
(r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
(r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
@@ -621,23 +636,27 @@ class FSharpLexer(RegexLexer):
(r'\d[\d_]*[uU]?[yslLnQRZINGmM]?', Number.Integer),
(r'0[xX][\da-fA-F][\da-fA-F_]*[uU]?[yslLn]?[fF]?', Number.Hex),
(r'0[oO][0-7][0-7_]*[uU]?[yslLn]?', Number.Oct),
- (r'0[bB][01][01_]*[uU]?[yslLn]?', Number.Binary),
+ (r'0[bB][01][01_]*[uU]?[yslLn]?', Number.Bin),
(r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)[fFmM]?',
Number.Float),
(r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'B?",
String.Char),
(r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
+ (r"'", Keyword), # a stray quote is another syntax element
+
+ (r'@?"', String.Double, 'string'),
(r'[~?][a-z][\w\']*:', Name.Variable),
],
'dotted': [
(r'\s+', Text),
(r'\.', Punctuation),
- (r'[A-Z][A-Za-z0-9_\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][A-Za-z0-9_\']*', Name, '#pop'),
- (r'[a-z_][A-Za-z0-9_\']*', Name, '#pop'),
+ (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+ (r'[A-Z][\w\']*', Name, '#pop'),
+ (r'[a-z_][\w\']*', Name, '#pop'),
+ # e.g. dictionary index access
+ default('#pop'),
],
'comment': [
(r'[^(*)@"]+', Comment),
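The C# and Nemerle lexers in this file build one token table per 'unicodelevel'; the option is forwarded through the usual constructor machinery, so something like the following should work (a sketch, with an arbitrary snippet):

from pygments.lexers import get_lexer_by_name

cs_code = 'class Widget { int value; }\n'

# 'none', 'basic' (the default) and 'full' select the identifier regex variants
basic = get_lexer_by_name('csharp')
full = get_lexer_by_name('csharp', unicodelevel='full')
print(len(list(basic.get_tokens(cs_code))), len(list(full.get_tokens(cs_code))))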
diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py
new file mode 100644
index 00000000..433287d4
--- /dev/null
+++ b/pygments/lexers/dsls.py
@@ -0,0 +1,514 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.dsls
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various domain-specific languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words, include, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal
+
+__all__ = ['ProtoBufLexer', 'BroLexer', 'PuppetLexer', 'RslLexer',
+ 'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer']
+
+
+class ProtoBufLexer(RegexLexer):
+ """
+ Lexer for `Protocol Buffer <http://code.google.com/p/protobuf/>`_
+ definition files.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Protocol Buffer'
+ aliases = ['protobuf', 'proto']
+ filenames = ['*.proto']
+
+ tokens = {
+ 'root': [
+ (r'[ \t]+', Text),
+ (r'[,;{}\[\]()]', Punctuation),
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
+ (words((
+ 'import', 'option', 'optional', 'required', 'repeated', 'default',
+ 'packed', 'ctype', 'extensions', 'to', 'max', 'rpc', 'returns',
+ 'oneof'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (words((
+ 'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64',
+ 'fixed32', 'fixed64', 'sfixed32', 'sfixed64',
+ 'float', 'double', 'bool', 'string', 'bytes'), suffix=r'\b'),
+ Keyword.Type),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'package'),
+ (r'(message|extend)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'message'),
+ (r'(enum|group|service)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'type'),
+ (r'\".*?\"', String),
+ (r'\'.*?\'', String),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'(\-?(inf|nan))\b', Number.Float),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'0[0-7]+[LlUu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'[+-=]', Operator),
+ (r'([a-zA-Z_][\w.]*)([ \t]*)(=)',
+ bygroups(Name.Attribute, Text, Operator)),
+ ('[a-zA-Z_][\w.]*', Name),
+ ],
+ 'package': [
+ (r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
+ default('#pop'),
+ ],
+ 'message': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ default('#pop'),
+ ],
+ 'type': [
+ (r'[a-zA-Z_]\w*', Name, '#pop'),
+ default('#pop'),
+ ],
+ }
+
+
+class BroLexer(RegexLexer):
+ """
+ For `Bro <http://bro-ids.org/>`_ scripts.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Bro'
+ aliases = ['bro']
+ filenames = ['*.bro']
+
+ _hex = r'[0-9a-fA-F_]'
+ _float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
+ _h = r'[A-Za-z0-9][-A-Za-z0-9]*'
+
+ tokens = {
+ 'root': [
+ # Whitespace
+ (r'^@.*?\n', Comment.Preproc),
+ (r'#.*?\n', Comment.Single),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text),
+ # Keywords
+ (r'(add|alarm|break|case|const|continue|delete|do|else|enum|event'
+ r'|export|for|function|if|global|hook|local|module|next'
+ r'|of|print|redef|return|schedule|switch|type|when|while)\b', Keyword),
+ (r'(addr|any|bool|count|counter|double|file|int|interval|net'
+ r'|pattern|port|record|set|string|subnet|table|time|timer'
+ r'|vector)\b', Keyword.Type),
+ (r'(T|F)\b', Keyword.Constant),
+ (r'(&)((?:add|delete|expire)_func|attr|(?:create|read|write)_expire'
+ r'|default|disable_print_hook|raw_output|encrypt|group|log'
+ r'|mergeable|optional|persistent|priority|redef'
+ r'|rotate_(?:interval|size)|synchronized)\b',
+ bygroups(Punctuation, Keyword)),
+ (r'\s+module\b', Keyword.Namespace),
+ # Addresses, ports and networks
+ (r'\d+/(tcp|udp|icmp|unknown)\b', Number),
+ (r'(\d+\.){3}\d+', Number),
+ (r'(' + _hex + r'){7}' + _hex, Number),
+ (r'0x' + _hex + r'(' + _hex + r'|:)*::(' + _hex + r'|:)*', Number),
+ (r'((\d+|:)(' + _hex + r'|:)*)?::(' + _hex + r'|:)*', Number),
+ (r'(\d+\.\d+\.|(\d+\.){2}\d+)', Number),
+ # Hostnames
+ (_h + r'(\.' + _h + r')+', String),
+ # Numeric
+ (_float + r'\s+(day|hr|min|sec|msec|usec)s?\b', Literal.Date),
+ (r'0[xX]' + _hex, Number.Hex),
+ (_float, Number.Float),
+ (r'\d+', Number.Integer),
+ (r'/', String.Regex, 'regex'),
+ (r'"', String, 'string'),
+ # Operators
+ (r'[!%*/+:<=>?~|-]', Operator),
+ (r'([-+=&|]{2}|[+=!><-]=)', Operator),
+ (r'(in|match)\b', Operator.Word),
+ (r'[{}()\[\]$.,;]', Punctuation),
+ # Identifier
+ (r'([_a-zA-Z]\w*)(::)', bygroups(Name, Name.Namespace)),
+ (r'[a-zA-Z_]\w*', Name)
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String),
+ (r'\\\n', String),
+ (r'\\', String)
+ ],
+ 'regex': [
+ (r'/', String.Regex, '#pop'),
+ (r'\\[\\nt/]', String.Regex), # String.Escape is too intense here.
+ (r'[^\\/\n]+', String.Regex),
+ (r'\\\n', String.Regex),
+ (r'\\', String.Regex)
+ ]
+ }
+
+
+class PuppetLexer(RegexLexer):
+ """
+ For the `Puppet <http://puppetlabs.com/>`__ configuration DSL.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Puppet'
+ aliases = ['puppet']
+ filenames = ['*.pp']
+
+ tokens = {
+ 'root': [
+ include('comments'),
+ include('keywords'),
+ include('names'),
+ include('numbers'),
+ include('operators'),
+ include('strings'),
+
+ (r'[]{}:(),;[]', Punctuation),
+ (r'[^\S\n]+', Text),
+ ],
+
+ 'comments': [
+ (r'\s*#.*$', Comment),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ ],
+
+ 'operators': [
+ (r'(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)', Operator),
+ (r'(in|and|or|not)\b', Operator.Word),
+ ],
+
+ 'names': [
+ ('[a-zA-Z_]\w*', Name.Attribute),
+ (r'(\$\S+)(\[)(\S+)(\])', bygroups(Name.Variable, Punctuation,
+ String, Punctuation)),
+ (r'\$\S+', Name.Variable),
+ ],
+
+ 'numbers': [
+ # Copypasta from the Python lexer
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
+ (r'0[0-7]+j?', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+j?', Number.Integer)
+ ],
+
+ 'keywords': [
+ # Left out 'group' and 'require'
+ # Since they're often used as attributes
+ (words((
+ 'absent', 'alert', 'alias', 'audit', 'augeas', 'before', 'case',
+ 'check', 'class', 'computer', 'configured', 'contained',
+ 'create_resources', 'crit', 'cron', 'debug', 'default',
+ 'define', 'defined', 'directory', 'else', 'elsif', 'emerg',
+ 'err', 'exec', 'extlookup', 'fail', 'false', 'file',
+ 'filebucket', 'fqdn_rand', 'generate', 'host', 'if', 'import',
+ 'include', 'info', 'inherits', 'inline_template', 'installed',
+ 'interface', 'k5login', 'latest', 'link', 'loglevel',
+ 'macauthorization', 'mailalias', 'maillist', 'mcx', 'md5',
+ 'mount', 'mounted', 'nagios_command', 'nagios_contact',
+ 'nagios_contactgroup', 'nagios_host', 'nagios_hostdependency',
+ 'nagios_hostescalation', 'nagios_hostextinfo', 'nagios_hostgroup',
+ 'nagios_service', 'nagios_servicedependency', 'nagios_serviceescalation',
+ 'nagios_serviceextinfo', 'nagios_servicegroup', 'nagios_timeperiod',
+ 'node', 'noop', 'notice', 'notify', 'package', 'present', 'purged',
+ 'realize', 'regsubst', 'resources', 'role', 'router', 'running',
+ 'schedule', 'scheduled_task', 'search', 'selboolean', 'selmodule',
+ 'service', 'sha1', 'shellquote', 'split', 'sprintf',
+ 'ssh_authorized_key', 'sshkey', 'stage', 'stopped', 'subscribe',
+ 'tag', 'tagged', 'template', 'tidy', 'true', 'undef', 'unmounted',
+ 'user', 'versioncmp', 'vlan', 'warning', 'yumrepo', 'zfs', 'zone',
+ 'zpool'), prefix='(?i)', suffix=r'\b'),
+ Keyword),
+ ],
+
+ 'strings': [
+ (r'"([^"])*"', String),
+ (r"'(\\'|[^'])*'", String),
+ ],
+
+ }
+
+
+class RslLexer(RegexLexer):
+ """
+ `RSL <http://en.wikipedia.org/wiki/RAISE>`_ is the formal specification
+ language used in the RAISE (Rigorous Approach to Industrial Software
+ Engineering) method.
+
+ .. versionadded:: 2.0
+ """
+ name = 'RSL'
+ aliases = ['rsl']
+ filenames = ['*.rsl']
+ mimetypes = ['text/rsl']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ (words((
+ 'Bool', 'Char', 'Int', 'Nat', 'Real', 'Text', 'Unit', 'abs',
+ 'all', 'always', 'any', 'as', 'axiom', 'card', 'case', 'channel',
+ 'chaos', 'class', 'devt_relation', 'dom', 'elems', 'else', 'elif',
+ 'end', 'exists', 'extend', 'false', 'for', 'hd', 'hide', 'if',
+ 'in', 'is', 'inds', 'initialise', 'int', 'inter', 'isin', 'len',
+ 'let', 'local', 'ltl_assertion', 'object', 'of', 'out', 'post',
+ 'pre', 'read', 'real', 'rng', 'scheme', 'skip', 'stop', 'swap',
+ 'then', 'theory', 'test_case', 'tl', 'transition_system', 'true',
+ 'type', 'union', 'until', 'use', 'value', 'variable', 'while',
+ 'with', 'write', '~isin', '-inflist', '-infset', '-list',
+ '-set'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (r'(variable|value)\b', Keyword.Declaration),
+ (r'--.*?\n', Comment),
+ (r'<:.*?:>', Comment),
+ (r'\{!.*?!\}', Comment),
+ (r'/\*.*?\*/', Comment),
+ (r'^[ \t]*([\w]+)[ \t]*:[^:]', Name.Function),
+ (r'(^[ \t]*)([\w]+)([ \t]*\([\w\s,]*\)[ \t]*)(is|as)',
+ bygroups(Text, Name.Function, Text, Keyword)),
+ (r'\b[A-Z]\w*\b', Keyword.Type),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'".*"', String),
+ (r'\'.\'', String.Char),
+ (r'(><|->|-m->|/\\|<=|<<=|<\.|\|\||\|\^\||-~->|-~m->|\\/|>=|>>|'
+ r'\.>|\+\+|-\\|<->|=>|:-|~=|\*\*|<<|>>=|\+>|!!|\|=\||#)',
+ Operator),
+ (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'.', Text),
+ ],
+ }
+
+ def analyse_text(text):
+ """
+ Check for the most common text at the beginning of an RSL file.
+ """
+ if re.search(r'scheme\s*.*?=\s*class\s*type', text, re.I) is not None:
+ return 1.0
+
+
+class MscgenLexer(RegexLexer):
+ """
+ For `Mscgen <http://www.mcternan.me.uk/mscgen/>`_ files.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Mscgen'
+ aliases = ['mscgen', 'msc']
+ filenames = ['*.msc']
+
+ _var = r'(\w+|"(?:\\"|[^"])*")'
+
+ tokens = {
+ 'root': [
+ (r'msc\b', Keyword.Type),
+ # Options
+ (r'(hscale|HSCALE|width|WIDTH|wordwraparcs|WORDWRAPARCS'
+ r'|arcgradient|ARCGRADIENT)\b', Name.Property),
+ # Operators
+ (r'(abox|ABOX|rbox|RBOX|box|BOX|note|NOTE)\b', Operator.Word),
+ (r'(\.|-|\|){3}', Keyword),
+ (r'(?:-|=|\.|:){2}'
+ r'|<<=>>|<->|<=>|<<>>|<:>'
+ r'|->|=>>|>>|=>|:>|-x|-X'
+ r'|<-|<<=|<<|<=|<:|x-|X-|=', Operator),
+ # Names
+ (r'\*', Name.Builtin),
+ (_var, Name.Variable),
+ # Other
+ (r'\[', Punctuation, 'attrs'),
+ (r'\{|\}|,|;', Punctuation),
+ include('comments')
+ ],
+ 'attrs': [
+ (r'\]', Punctuation, '#pop'),
+ (_var + r'(\s*)(=)(\s*)' + _var,
+ bygroups(Name.Attribute, Text.Whitespace, Operator, Text.Whitespace,
+ String)),
+ (r',', Punctuation),
+ include('comments')
+ ],
+ 'comments': [
+ (r'(?://|#).*?\n', Comment.Single),
+ (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
+ (r'[ \t\r\n]+', Text.Whitespace)
+ ]
+ }
+
+
+class VGLLexer(RegexLexer):
+ """
+ For `SampleManager VGL <http://www.thermoscientific.com/samplemanager>`_
+ source code.
+
+ .. versionadded:: 1.6
+ """
+ name = 'VGL'
+ aliases = ['vgl']
+ filenames = ['*.rpf']
+
+ flags = re.MULTILINE | re.DOTALL | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\{[^}]*\}', Comment.Multiline),
+ (r'declare', Keyword.Constant),
+ (r'(if|then|else|endif|while|do|endwhile|and|or|prompt|object'
+ r'|create|on|line|with|global|routine|value|endroutine|constant'
+ r'|global|set|join|library|compile_option|file|exists|create|copy'
+ r'|delete|enable|windows|name|notprotected)(?! *[=<>.,()])',
+ Keyword),
+ (r'(true|false|null|empty|error|locked)', Keyword.Constant),
+ (r'[~^*#!%&\[\]()<>|+=:;,./?-]', Operator),
+ (r'"[^"]*"', String),
+ (r'(\.)([a-z_$][\w$]*)', bygroups(Operator, Name.Attribute)),
+ (r'[0-9][0-9]*(\.[0-9]+(e[+\-]?[0-9]+)?)?', Number),
+ (r'[a-z_$][\w$]*', Name),
+ (r'[\r\n]+', Text),
+ (r'\s+', Text)
+ ]
+ }
+
+
+class AlloyLexer(RegexLexer):
+ """
+ For `Alloy <http://alloy.mit.edu>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Alloy'
+ aliases = ['alloy']
+ filenames = ['*.als']
+ mimetypes = ['text/x-alloy']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ iden_rex = r'[a-zA-Z_][\w\']*'
+ text_tuple = (r'[^\S\n]+', Text)
+
+ tokens = {
+ 'sig': [
+ (r'(extends)\b', Keyword, '#pop'),
+ (iden_rex, Name),
+ text_tuple,
+ (r',', Punctuation),
+ (r'\{', Operator, '#pop'),
+ ],
+ 'module': [
+ text_tuple,
+ (iden_rex, Name, '#pop'),
+ ],
+ 'fun': [
+ text_tuple,
+ (r'\{', Operator, '#pop'),
+ (iden_rex, Name, '#pop'),
+ ],
+ 'root': [
+ (r'--.*?$', Comment.Single),
+ (r'//.*?$', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ text_tuple,
+ (r'(module|open)(\s+)', bygroups(Keyword.Namespace, Text),
+ 'module'),
+ (r'(sig|enum)(\s+)', bygroups(Keyword.Declaration, Text), 'sig'),
+ (r'(iden|univ|none)\b', Keyword.Constant),
+ (r'(int|Int)\b', Keyword.Type),
+ (r'(this|abstract|extends|set|seq|one|lone|let)\b', Keyword),
+ (r'(all|some|no|sum|disj|when|else)\b', Keyword),
+ (r'(run|check|for|but|exactly|expect|as)\b', Keyword),
+ (r'(and|or|implies|iff|in)\b', Operator.Word),
+ (r'(fun|pred|fact|assert)(\s+)', bygroups(Keyword, Text), 'fun'),
+ (r'!|#|&&|\+\+|<<|>>|>=|<=>|<=|\.|->', Operator),
+ (r'[-+/*%=<>&!^|~{}\[\]().]', Operator),
+ (iden_rex, Name),
+ (r'[:,]', Punctuation),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r'\n', Text),
+ ]
+ }
+
+
+class PanLexer(RegexLexer):
+ """
+ Lexer for `pan <http://github.com/quattor/pan/>`_ source files.
+
+ Based on the tcsh lexer.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Pan'
+ aliases = ['pan']
+ filenames = ['*.pan']
+
+ tokens = {
+ 'root': [
+ include('basic'),
+ (r'\(', Keyword, 'paren'),
+ (r'\{', Keyword, 'curly'),
+ include('data'),
+ ],
+ 'basic': [
+ (words((
+ 'if', 'for', 'with', 'else', 'type', 'bind', 'while', 'valid', 'final', 'prefix',
+ 'unique', 'object', 'foreach', 'include', 'template', 'function', 'variable',
+ 'structure', 'extensible', 'declaration'), prefix=r'\b', suffix=r'\s*\b'),
+ Keyword),
+ (words((
+ 'file_contents', 'format', 'index', 'length', 'match', 'matches', 'replace',
+ 'splice', 'split', 'substr', 'to_lowercase', 'to_uppercase', 'debug', 'error',
+ 'traceback', 'deprecated', 'base64_decode', 'base64_encode', 'digest', 'escape',
+ 'unescape', 'append', 'create', 'first', 'nlist', 'key', 'list', 'merge', 'next',
+ 'prepend', 'is_boolean', 'is_defined', 'is_double', 'is_list', 'is_long',
+ 'is_nlist', 'is_null', 'is_number', 'is_property', 'is_resource', 'is_string',
+ 'to_boolean', 'to_double', 'to_long', 'to_string', 'clone', 'delete', 'exists',
+ 'path_exists', 'if_exists', 'return', 'value'), prefix=r'\b', suffix=r'\s*\b'),
+ Name.Builtin),
+ (r'#.*', Comment),
+ (r'\\[\w\W]', String.Escape),
+ (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+ (r'[\[\]{}()=]+', Operator),
+ (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ (r';', Punctuation),
+ ],
+ 'data': [
+ (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r'\s+', Text),
+ (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
+ (r'\d+(?= |\Z)', Number),
+ ],
+ 'curly': [
+ (r'\}', Keyword, '#pop'),
+ (r':-', Keyword),
+ (r'\w+', Name.Variable),
+ (r'[^}:"\'`$]+', Punctuation),
+ (r':', Punctuation),
+ include('root'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('root'),
+ ],
+ }
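A minimal sketch of exercising the new Pan lexer through the public API (illustrative only, not part of the patch; the sample snippet is made up). The words() helper compiles each keyword/builtin tuple above into a single optimized alternation with the given prefix/suffix, so a bare 'variable' is matched as a keyword before the generic rules get a chance:

    from pygments.lexers import get_lexer_by_name

    pan = get_lexer_by_name('pan')  # alias registered by the lexer above
    for token, value in pan.get_tokens('variable GREETING = "hello";\n'):
        print(token, repr(value))   # 'variable' is matched by the Keyword rule above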
diff --git a/pygments/lexers/dylan.py b/pygments/lexers/dylan.py
new file mode 100644
index 00000000..600a78e5
--- /dev/null
+++ b/pygments/lexers/dylan.py
@@ -0,0 +1,289 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.dylan
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Dylan language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Literal
+
+__all__ = ['DylanLexer', 'DylanConsoleLexer', 'DylanLidLexer']
+
+
+class DylanLexer(RegexLexer):
+ """
+ For the `Dylan <http://www.opendylan.org/>`_ language.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'Dylan'
+ aliases = ['dylan']
+ filenames = ['*.dylan', '*.dyl', '*.intr']
+ mimetypes = ['text/x-dylan']
+
+ flags = re.IGNORECASE
+
+ builtins = set((
+ 'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
+ 'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
+ 'each-subclass', 'exception', 'exclude', 'function', 'generic',
+ 'handler', 'inherited', 'inline', 'inline-only', 'instance',
+ 'interface', 'import', 'keyword', 'library', 'macro', 'method',
+ 'module', 'open', 'primary', 'required', 'sealed', 'sideways',
+ 'singleton', 'slot', 'thread', 'variable', 'virtual'))
+
+ keywords = set((
+ 'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
+ 'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
+ 'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
+ 'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
+ 'while'))
+
+ operators = set((
+ '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
+ '>', '>=', '&', '|'))
+
+ functions = set((
+ 'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
+ 'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
+ 'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
+ 'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol',
+ 'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose',
+ 'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as',
+ 'condition-format-arguments', 'condition-format-string', 'conjoin',
+ 'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions',
+ 'direct-subclasses', 'direct-superclasses', 'disjoin', 'do',
+ 'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?',
+ 'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first',
+ 'first-setter', 'floor', 'floor/', 'forward-iteration-protocol',
+ 'function-arguments', 'function-return-values',
+ 'function-specializers', 'gcd', 'generic-function-mandatory-keywords',
+ 'generic-function-methods', 'head', 'head-setter', 'identity',
+ 'initialize', 'instance?', 'integral?', 'intersection',
+ 'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited',
+ 'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make',
+ 'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes',
+ 'min', 'modulo', 'negative', 'negative?', 'next-method',
+ 'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop',
+ 'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank',
+ 'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!',
+ 'remove-duplicates', 'remove-duplicates!', 'remove-key!',
+ 'remove-method', 'replace-elements!', 'replace-subsequence!',
+ 'restart-query', 'return-allowed?', 'return-description',
+ 'return-query', 'reverse', 'reverse!', 'round', 'round/',
+ 'row-major-index', 'second', 'second-setter', 'shallow-copy',
+ 'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?',
+ 'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position',
+ 'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
+ 'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
+ 'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
+ 'vector', 'zero?'))
+
+ valid_name = '\\\\?[\\w!&*<>|^$%@\\-+~?/=]+'
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ lowercase_value = value.lower()
+ if lowercase_value in self.builtins:
+ yield index, Name.Builtin, value
+ continue
+ if lowercase_value in self.keywords:
+ yield index, Keyword, value
+ continue
+ if lowercase_value in self.functions:
+ yield index, Name.Builtin, value
+ continue
+ if lowercase_value in self.operators:
+ yield index, Operator, value
+ continue
+ yield index, token, value
+
+ tokens = {
+ 'root': [
+ # Whitespace
+ (r'\s+', Text),
+
+ # single line comment
+ (r'//.*?\n', Comment.Single),
+
+ # lid header
+ (r'([a-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
+ bygroups(Name.Attribute, Operator, Text, String)),
+
+ default('code') # no header match, switch to code
+ ],
+ 'code': [
+ # Whitespace
+ (r'\s+', Text),
+
+ # single line comment
+ (r'//.*?\n', Comment.Single),
+
+ # multi-line comment
+ (r'/\*', Comment.Multiline, 'comment'),
+
+ # strings and characters
+ (r'"', String, 'string'),
+ (r"'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'", String.Char),
+
+ # binary integer
+ (r'#b[01]+', Number.Bin),
+
+ # octal integer
+ (r'#o[0-7]+', Number.Oct),
+
+ # floating point
+ (r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float),
+
+ # decimal integer
+ (r'[-+]?\d+', Number.Integer),
+
+ # hex integer
+ (r'#x[0-9a-f]+', Number.Hex),
+
+ # Macro parameters
+ (r'(\?' + valid_name + ')(:)'
+ r'(token|name|variable|expression|body|case-body|\*)',
+ bygroups(Name.Tag, Operator, Name.Builtin)),
+ (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)',
+ bygroups(Name.Tag, Operator, Name.Builtin)),
+ (r'\?' + valid_name, Name.Tag),
+
+ # Punctuation
+ (r'(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])', Punctuation),
+
+ # Most operators are picked up as names and then re-flagged.
+ # This one isn't valid in a name though, so we pick it up now.
+ (r':=', Operator),
+
+ # Pick up #t / #f before we match other stuff with #.
+ (r'#[tf]', Literal),
+
+ # #"foo" style keywords
+ (r'#"', String.Symbol, 'keyword'),
+
+ # #rest, #key, #all-keys, etc.
+ (r'#[a-z0-9-]+', Keyword),
+
+ # required-init-keyword: style keywords.
+ (valid_name + ':', Keyword),
+
+ # class names
+ (r'<' + valid_name + '>', Name.Class),
+
+ # define variable forms.
+ (r'\*' + valid_name + r'\*', Name.Variable.Global),
+
+ # define constant forms.
+ (r'\$' + valid_name, Name.Constant),
+
+ # everything else. We re-flag some of these in the method above.
+ (valid_name, Name),
+ ],
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'keyword': [
+ (r'"', String.Symbol, '#pop'),
+ (r'[^\\"]+', String.Symbol), # all other characters
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ]
+ }
+
+
+class DylanLidLexer(RegexLexer):
+ """
+ For Dylan LID (Library Interchange Definition) files.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'DylanLID'
+ aliases = ['dylan-lid', 'lid']
+ filenames = ['*.lid', '*.hdp']
+ mimetypes = ['text/x-dylan-lid']
+
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ # Whitespace
+ (r'\s+', Text),
+
+ # single line comment
+ (r'//.*?\n', Comment.Single),
+
+ # lid header
+ (r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
+ bygroups(Name.Attribute, Operator, Text, String)),
+ ]
+ }
+
+
+class DylanConsoleLexer(Lexer):
+ """
+ For Dylan interactive console output like:
+
+ .. sourcecode:: dylan-console
+
+ ? let a = 1;
+ => 1
+ ? a
+ => 1
+
+ This is based on a copy of the RubyConsoleLexer.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Dylan session'
+ aliases = ['dylan-console', 'dylan-repl']
+ filenames = ['*.dylan-console']
+ mimetypes = ['text/x-dylan-console']
+
+ _line_re = re.compile('.*?\n')
+ _prompt_re = re.compile(r'\?| ')
+
+ def get_tokens_unprocessed(self, text):
+ dylexer = DylanLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in self._line_re.finditer(text):
+ line = match.group()
+ m = self._prompt_re.match(line)
+ if m is not None:
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
+ for item in do_insertions(insertions,
+ dylexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ yield match.start(), Generic.Output, line
+ if curcode:
+ for item in do_insertions(insertions,
+ dylexer.get_tokens_unprocessed(curcode)):
+ yield item
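A quick way to try the two Dylan lexers above (a sketch, not part of the patch): DylanConsoleLexer records a Generic.Prompt insertion per prompt line and relies on do_insertions() to splice those into the token stream that the embedded DylanLexer produces for the accumulated code.

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    session = '? let a = 1;\n=> 1\n? a\n=> 1\n'
    print(highlight(session, get_lexer_by_name('dylan-console'), TerminalFormatter()))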
diff --git a/pygments/lexers/ecl.py b/pygments/lexers/ecl.py
new file mode 100644
index 00000000..95572ba7
--- /dev/null
+++ b/pygments/lexers/ecl.py
@@ -0,0 +1,125 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.ecl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the ECL language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['ECLLexer']
+
+
+class ECLLexer(RegexLexer):
+ """
+ Lexer for the declarative big-data `ECL
+ <http://hpccsystems.com/community/docs/ecl-language-reference/html>`_
+ language.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'ECL'
+ aliases = ['ecl']
+ filenames = ['*.ecl']
+ mimetypes = ['application/x-ecl']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ include('statements'),
+ ],
+ 'whitespace': [
+ (r'\s+', Text),
+ (r'\/\/.*', Comment.Single),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
+ ],
+ 'statements': [
+ include('types'),
+ include('keywords'),
+ include('functions'),
+ include('hash'),
+ (r'"', String, 'string'),
+ (r'\'', String, 'string'),
+ (r'(\d+\.\d*|\.\d+|\d+)e[+-]?\d+[lu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+f)f?', Number.Float),
+ (r'0x[0-9a-f]+[lu]*', Number.Hex),
+ (r'0[0-7]+[lu]*', Number.Oct),
+ (r'\d+[lu]*', Number.Integer),
+ (r'\*/', Error),
+ (r'[~!%^&*+=|?:<>/-]+', Operator),
+ (r'[{}()\[\],.;]', Punctuation),
+ (r'[a-z_]\w*', Name),
+ ],
+ 'hash': [
+ (r'^#.*$', Comment.Preproc),
+ ],
+ 'types': [
+ (r'(RECORD|END)\D', Keyword.Declaration),
+ (r'((?:ASCII|BIG_ENDIAN|BOOLEAN|DATA|DECIMAL|EBCDIC|INTEGER|PATTERN|'
+ r'QSTRING|REAL|RECORD|RULE|SET OF|STRING|TOKEN|UDECIMAL|UNICODE|'
+ r'UNSIGNED|VARSTRING|VARUNICODE)\d*)(\s+)',
+ bygroups(Keyword.Type, Text)),
+ ],
+ 'keywords': [
+ (words((
+ 'APPLY', 'ASSERT', 'BUILD', 'BUILDINDEX', 'EVALUATE', 'FAIL',
+ 'KEYDIFF', 'KEYPATCH', 'LOADXML', 'NOTHOR', 'NOTIFY', 'OUTPUT',
+ 'PARALLEL', 'SEQUENTIAL', 'SOAPCALL', 'CHECKPOINT', 'DEPRECATED',
+ 'FAILCODE', 'FAILMESSAGE', 'FAILURE', 'GLOBAL', 'INDEPENDENT',
+ 'ONWARNING', 'PERSIST', 'PRIORITY', 'RECOVERY', 'STORED', 'SUCCESS',
+ 'WAIT', 'WHEN'), suffix=r'\b'),
+ Keyword.Reserved),
+ # These are classed differently, check later
+ (words((
+ 'ALL', 'AND', 'ANY', 'AS', 'ATMOST', 'BEFORE', 'BEGINC++', 'BEST', 'BETWEEN', 'CASE',
+ 'CONST', 'COUNTER', 'CSV', 'DESCEND', 'ENCRYPT', 'ENDC++', 'ENDMACRO', 'EXCEPT',
+ 'EXCLUSIVE', 'EXPIRE', 'EXPORT', 'EXTEND', 'FALSE', 'FEW', 'FIRST', 'FLAT', 'FULL',
+ 'FUNCTION', 'GROUP', 'HEADER', 'HEADING', 'HOLE', 'IFBLOCK', 'IMPORT', 'IN', 'JOINED',
+ 'KEEP', 'KEYED', 'LAST', 'LEFT', 'LIMIT', 'LOAD', 'LOCAL', 'LOCALE', 'LOOKUP', 'MACRO',
+ 'MANY', 'MAXCOUNT', 'MAXLENGTH', 'MIN SKEW', 'MODULE', 'INTERFACE', 'NAMED', 'NOCASE',
+ 'NOROOT', 'NOSCAN', 'NOSORT', 'NOT', 'OF', 'ONLY', 'OPT', 'OR', 'OUTER', 'OVERWRITE',
+ 'PACKED', 'PARTITION', 'PENALTY', 'PHYSICALLENGTH', 'PIPE', 'QUOTE', 'RELATIONSHIP',
+ 'REPEAT', 'RETURN', 'RIGHT', 'SCAN', 'SELF', 'SEPARATOR', 'SERVICE', 'SHARED', 'SKEW',
+ 'SKIP', 'SQL', 'STORE', 'TERMINATOR', 'THOR', 'THRESHOLD', 'TOKEN', 'TRANSFORM', 'TRIM',
+ 'TRUE', 'TYPE', 'UNICODEORDER', 'UNSORTED', 'VALIDATE', 'VIRTUAL', 'WHOLE', 'WILD',
+ 'WITHIN', 'XML', 'XPATH', '__COMPRESSED__'), suffix=r'\b'),
+ Keyword.Reserved),
+ ],
+ 'functions': [
+ (words((
+ 'ABS', 'ACOS', 'ALLNODES', 'ASCII', 'ASIN', 'ASSTRING', 'ATAN', 'ATAN2', 'AVE', 'CASE',
+ 'CHOOSE', 'CHOOSEN', 'CHOOSESETS', 'CLUSTERSIZE', 'COMBINE', 'CORRELATION', 'COS',
+ 'COSH', 'COUNT', 'COVARIANCE', 'CRON', 'DATASET', 'DEDUP', 'DEFINE', 'DENORMALIZE',
+ 'DISTRIBUTE', 'DISTRIBUTED', 'DISTRIBUTION', 'EBCDIC', 'ENTH', 'ERROR', 'EVALUATE',
+ 'EVENT', 'EVENTEXTRA', 'EVENTNAME', 'EXISTS', 'EXP', 'FAILCODE', 'FAILMESSAGE',
+ 'FETCH', 'FROMUNICODE', 'GETISVALID', 'GLOBAL', 'GRAPH', 'GROUP', 'HASH', 'HASH32',
+ 'HASH64', 'HASHCRC', 'HASHMD5', 'HAVING', 'IF', 'INDEX', 'INTFORMAT', 'ISVALID',
+ 'ITERATE', 'JOIN', 'KEYUNICODE', 'LENGTH', 'LIBRARY', 'LIMIT', 'LN', 'LOCAL', 'LOG', 'LOOP',
+ 'MAP', 'MATCHED', 'MATCHLENGTH', 'MATCHPOSITION', 'MATCHTEXT', 'MATCHUNICODE',
+ 'MAX', 'MERGE', 'MERGEJOIN', 'MIN', 'NOLOCAL', 'NONEMPTY', 'NORMALIZE', 'PARSE', 'PIPE',
+ 'POWER', 'PRELOAD', 'PROCESS', 'PROJECT', 'PULL', 'RANDOM', 'RANGE', 'RANK', 'RANKED',
+ 'REALFORMAT', 'RECORDOF', 'REGEXFIND', 'REGEXREPLACE', 'REGROUP', 'REJECTED',
+ 'ROLLUP', 'ROUND', 'ROUNDUP', 'ROW', 'ROWDIFF', 'SAMPLE', 'SET', 'SIN', 'SINH', 'SIZEOF',
+ 'SOAPCALL', 'SORT', 'SORTED', 'SQRT', 'STEPPED', 'STORED', 'SUM', 'TABLE', 'TAN', 'TANH',
+ 'THISNODE', 'TOPN', 'TOUNICODE', 'TRANSFER', 'TRIM', 'TRUNCATE', 'TYPEOF', 'UNGROUP',
+ 'UNICODEORDER', 'VARIANCE', 'WHICH', 'WORKUNIT', 'XMLDECODE', 'XMLENCODE',
+ 'XMLTEXT', 'XMLUNICODE'), suffix=r'\b'),
+ Name.Function),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\'', String, '#pop'),
+ (r'[^"\']+', String),
+ ],
+ }
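Illustrative only (not part of the patch; the ECL snippet is made up): the trailing \d* in the 'types' rule above is what lets sized declarations such as STRING20 or UNSIGNED8 come out as a single Keyword.Type token.

    from pygments.lexers import get_lexer_by_name

    ecl = get_lexer_by_name('ecl')
    sample = 'namesRecord := RECORD\nSTRING20 surname;\nEND;\n'
    for token, value in ecl.get_tokens(sample):
        print(token, repr(value))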
diff --git a/pygments/lexers/eiffel.py b/pygments/lexers/eiffel.py
new file mode 100644
index 00000000..8a244613
--- /dev/null
+++ b/pygments/lexers/eiffel.py
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.eiffel
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Eiffel language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['EiffelLexer']
+
+
+class EiffelLexer(RegexLexer):
+ """
+ For `Eiffel <http://www.eiffel.com>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Eiffel'
+ aliases = ['eiffel']
+ filenames = ['*.e']
+ mimetypes = ['text/x-eiffel']
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+ (r'--.*?\n', Comment.Single),
+ (r'[^\S\n]+', Text),
+ # Note that keywords and operators are case-insensitive.
+ (r'(?i)(true|false|void|current|result|precursor)\b', Keyword.Constant),
+ (r'(?i)(and(\s+then)?|not|xor|implies|or(\s+else)?)\b', Operator.Word),
+ (words((
+ 'across', 'agent', 'alias', 'all', 'as', 'assign', 'attached',
+ 'attribute', 'check', 'class', 'convert', 'create', 'debug',
+ 'deferred', 'detachable', 'do', 'else', 'elseif', 'end', 'ensure',
+ 'expanded', 'export', 'external', 'feature', 'from', 'frozen', 'if',
+ 'inherit', 'inspect', 'invariant', 'like', 'local', 'loop', 'none',
+ 'note', 'obsolete', 'old', 'once', 'only', 'redefine', 'rename',
+ 'require', 'rescue', 'retry', 'select', 'separate', 'then',
+ 'undefine', 'until', 'variant', 'when'), prefix=r'(?i)\b', suffix=r'\b'),
+ Keyword.Reserved),
+ (r'"\[(([^\]%]|\n)|%(.|\n)|\][^"])*?\]"', String),
+ (r'"([^"%\n]|%.)*?"', String),
+ include('numbers'),
+ (r"'([^'%]|%'|%%)'", String.Char),
+ (r"(//|\\\\|>=|<=|:=|/=|~|/~|[\\?!#%&@|+/\-=>*$<^\[\]])", Operator),
+ (r"([{}():;,.])", Punctuation),
+ (r'([a-z]\w*)|([A-Z][A-Z0-9_]*[a-z]\w*)', Name),
+ (r'([A-Z][A-Z0-9_]*)', Name.Class),
+ (r'\n+', Text),
+ ],
+ 'numbers': [
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'0[bB][01]+', Number.Bin),
+ (r'0[cC][0-7]+', Number.Oct),
+ (r'([0-9]+\.[0-9]*)|([0-9]*\.[0-9]+)', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ ],
+ }
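A small probe of the Eiffel rules above (an assumption-laden sketch, not part of the patch): the lexer sets no global re.IGNORECASE flag, so the keyword rules opt in per pattern with an inline (?i) prefix, while the Name/Name.Class split for identifiers stays case-sensitive.

    from pygments.lexers import get_lexer_by_name
    from pygments.token import Keyword

    eif = get_lexer_by_name('eiffel')
    print([v for t, v in eif.get_tokens('REQUIRE require Require\n') if t in Keyword])
    # expected: all three spellings tokenize as keywords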
diff --git a/pygments/lexers/erlang.py b/pygments/lexers/erlang.py
new file mode 100644
index 00000000..c353a4dc
--- /dev/null
+++ b/pygments/lexers/erlang.py
@@ -0,0 +1,511 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.erlang
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Erlang.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions, \
+ include, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['ErlangLexer', 'ErlangShellLexer', 'ElixirConsoleLexer',
+ 'ElixirLexer']
+
+
+line_re = re.compile('.*?\n')
+
+
+class ErlangLexer(RegexLexer):
+ """
+ For the Erlang functional programming language.
+
+ Blame Jeremy Thurgood (http://jerith.za.net/).
+
+ .. versionadded:: 0.9
+ """
+
+ name = 'Erlang'
+ aliases = ['erlang']
+ filenames = ['*.erl', '*.hrl', '*.es', '*.escript']
+ mimetypes = ['text/x-erlang']
+
+ keywords = (
+ 'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if',
+ 'let', 'of', 'query', 'receive', 'try', 'when',
+ )
+
+ builtins = ( # See erlang(3) man page
+ 'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list',
+ 'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions',
+ 'byte_size', 'cancel_timer', 'check_process_code', 'delete_module',
+ 'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit',
+ 'float', 'float_to_list', 'fun_info', 'fun_to_list',
+ 'function_exported', 'garbage_collect', 'get', 'get_keys',
+ 'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary',
+ 'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean',
+ 'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list',
+ 'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record',
+ 'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom',
+ 'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom',
+ 'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple',
+ 'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5',
+ 'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor',
+ 'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2',
+ 'pid_to_list', 'port_close', 'port_command', 'port_connect',
+ 'port_control', 'port_call', 'port_info', 'port_to_list',
+ 'process_display', 'process_flag', 'process_info', 'purge_module',
+ 'put', 'read_timer', 'ref_to_list', 'register', 'resume_process',
+ 'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie',
+ 'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor',
+ 'spawn_opt', 'split_binary', 'start_timer', 'statistics',
+ 'suspend_process', 'system_flag', 'system_info', 'system_monitor',
+ 'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered',
+ 'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list',
+ 'universaltime_to_localtime', 'unlink', 'unregister', 'whereis'
+ )
+
+ operators = r'(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)'
+ word_operators = (
+ 'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor',
+ 'div', 'not', 'or', 'orelse', 'rem', 'xor'
+ )
+
+ atom_re = r"(?:[a-z]\w*|'[^\n']*[^\\]')"
+
+ variable_re = r'(?:[A-Z_]\w*)'
+
+ escape_re = r'(?:\\(?:[bdefnrstv\'"\\/]|[0-7][0-7]?[0-7]?|\^[a-zA-Z]))'
+
+ macro_re = r'(?:'+variable_re+r'|'+atom_re+r')'
+
+ base_re = r'(?:[2-9]|[12][0-9]|3[0-6])'
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'%.*\n', Comment),
+ (words(keywords, suffix=r'\b'), Keyword),
+ (words(builtins, suffix=r'\b'), Name.Builtin),
+ (words(word_operators, suffix=r'\b'), Operator.Word),
+ (r'^-', Punctuation, 'directive'),
+ (operators, Operator),
+ (r'"', String, 'string'),
+ (r'<<', Name.Label),
+ (r'>>', Name.Label),
+ ('(' + atom_re + ')(:)', bygroups(Name.Namespace, Punctuation)),
+ ('(?:^|(?<=:))(' + atom_re + r')(\s*)(\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'[+-]?' + base_re + r'#[0-9a-zA-Z]+', Number.Integer),
+ (r'[+-]?\d+\.\d+', Number.Float),
+ (r'[+-]?\d+', Number.Integer),
+ (r'[]\[:_@\".{}()|;,]', Punctuation),
+ (variable_re, Name.Variable),
+ (atom_re, Name),
+ (r'\?'+macro_re, Name.Constant),
+ (r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char),
+ (r'#'+atom_re+r'(?:\.'+atom_re+r')?', Name.Label),
+ ],
+ 'string': [
+ (escape_re, String.Escape),
+ (r'"', String, '#pop'),
+ (r'~[0-9.*]*[~#+bBcdefginpPswWxX]', String.Interpol),
+ (r'[^"\\~]+', String),
+ (r'~', String),
+ ],
+ 'directive': [
+ (r'(define)(\s*)(\()('+macro_re+r')',
+ bygroups(Name.Entity, Text, Punctuation, Name.Constant), '#pop'),
+ (r'(record)(\s*)(\()('+macro_re+r')',
+ bygroups(Name.Entity, Text, Punctuation, Name.Label), '#pop'),
+ (atom_re, Name.Entity, '#pop'),
+ ],
+ }
+
+
+class ErlangShellLexer(Lexer):
+ """
+ Shell sessions in erl (for Erlang code).
+
+ .. versionadded:: 1.1
+ """
+ name = 'Erlang erl session'
+ aliases = ['erl']
+ filenames = ['*.erl-sh']
+ mimetypes = ['text/x-erl-shellsession']
+
+ _prompt_re = re.compile(r'\d+>(?=\s|\Z)')
+
+ def get_tokens_unprocessed(self, text):
+ erlexer = ErlangLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in line_re.finditer(text):
+ line = match.group()
+ m = self._prompt_re.match(line)
+ if m is not None:
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
+ for item in do_insertions(insertions,
+ erlexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ if line.startswith('*'):
+ yield match.start(), Generic.Traceback, line
+ else:
+ yield match.start(), Generic.Output, line
+ if curcode:
+ for item in do_insertions(insertions,
+ erlexer.get_tokens_unprocessed(curcode)):
+ yield item
+
+
+def gen_elixir_string_rules(name, symbol, token):
+ states = {}
+ states['string_' + name] = [
+ (r'[^#%s\\]+' % (symbol,), token),
+ include('escapes'),
+ (r'\\.', token),
+ (r'(%s)' % (symbol,), bygroups(token), "#pop"),
+ include('interpol')
+ ]
+ return states
+
+
+def gen_elixir_sigstr_rules(term, token, interpol=True):
+ if interpol:
+ return [
+ (r'[^#%s\\]+' % (term,), token),
+ include('escapes'),
+ (r'\\.', token),
+ (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
+ include('interpol')
+ ]
+ else:
+ return [
+ (r'[^%s\\]+' % (term,), token),
+ (r'\\.', token),
+ (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
+ ]
+
+
+class ElixirLexer(RegexLexer):
+ """
+ For the `Elixir language <http://elixir-lang.org>`_.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Elixir'
+ aliases = ['elixir', 'ex', 'exs']
+ filenames = ['*.ex', '*.exs']
+ mimetypes = ['text/x-elixir']
+
+ KEYWORD = ('fn', 'do', 'end', 'after', 'else', 'rescue', 'catch')
+ KEYWORD_OPERATOR = ('not', 'and', 'or', 'when', 'in')
+ BUILTIN = (
+ 'case', 'cond', 'for', 'if', 'unless', 'try', 'receive', 'raise',
+ 'quote', 'unquote', 'unquote_splicing', 'throw', 'super'
+ )
+ BUILTIN_DECLARATION = (
+ 'def', 'defp', 'defmodule', 'defprotocol', 'defmacro', 'defmacrop',
+ 'defdelegate', 'defexception', 'defstruct', 'defimpl', 'defcallback'
+ )
+
+ BUILTIN_NAMESPACE = ('import', 'require', 'use', 'alias')
+ CONSTANT = ('nil', 'true', 'false')
+
+ PSEUDO_VAR = ('_', '__MODULE__', '__DIR__', '__ENV__', '__CALLER__')
+
+ OPERATORS3 = (
+ '<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!==',
+ '~>>', '<~>', '|~>', '<|>',
+ )
+ OPERATORS2 = (
+ '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~',
+ '->', '<-', '|', '.', '=', '~>', '<~',
+ )
+ OPERATORS1 = ('<', '>', '+', '-', '*', '/', '!', '^', '&')
+
+ PUNCTUATION = (
+ '\\\\', '<<', '>>', '=>', '(', ')', ':', ';', ',', '[', ']'
+ )
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if value in self.KEYWORD:
+ yield index, Keyword, value
+ elif value in self.KEYWORD_OPERATOR:
+ yield index, Operator.Word, value
+ elif value in self.BUILTIN:
+ yield index, Keyword, value
+ elif value in self.BUILTIN_DECLARATION:
+ yield index, Keyword.Declaration, value
+ elif value in self.BUILTIN_NAMESPACE:
+ yield index, Keyword.Namespace, value
+ elif value in self.CONSTANT:
+ yield index, Name.Constant, value
+ elif value in self.PSEUDO_VAR:
+ yield index, Name.Builtin.Pseudo, value
+ else:
+ yield index, token, value
+ else:
+ yield index, token, value
+
+ def gen_elixir_sigil_rules():
+ # all valid sigil terminators (excluding heredocs)
+ terminators = [
+ (r'\{', r'\}', 'cb'),
+ (r'\[', r'\]', 'sb'),
+ (r'\(', r'\)', 'pa'),
+ (r'<', r'>', 'ab'),
+ (r'/', r'/', 'slas'),
+ (r'\|', r'\|', 'pipe'),
+ ('"', '"', 'quot'),
+ ("'", "'", 'apos'),
+ ]
+
+ # heredocs have slightly different rules
+ triquotes = [(r'"""', 'triquot'), (r"'''", 'triapos')]
+
+ token = String.Other
+ states = {'sigils': []}
+
+ for term, name in triquotes:
+ states['sigils'] += [
+ (r'(~[a-z])(%s)' % (term,), bygroups(token, String.Heredoc),
+ (name + '-end', name + '-intp')),
+ (r'(~[A-Z])(%s)' % (term,), bygroups(token, String.Heredoc),
+ (name + '-end', name + '-no-intp')),
+ ]
+
+ states[name + '-end'] = [
+ (r'[a-zA-Z]+', token, '#pop'),
+ default('#pop'),
+ ]
+ states[name + '-intp'] = [
+ (r'^\s*' + term, String.Heredoc, '#pop'),
+ include('heredoc_interpol'),
+ ]
+ states[name + '-no-intp'] = [
+ (r'^\s*' + term, String.Heredoc, '#pop'),
+ include('heredoc_no_interpol'),
+ ]
+
+ for lterm, rterm, name in terminators:
+ states['sigils'] += [
+ (r'~[a-z]' + lterm, token, name + '-intp'),
+ (r'~[A-Z]' + lterm, token, name + '-no-intp'),
+ ]
+ states[name + '-intp'] = gen_elixir_sigstr_rules(rterm, token)
+ states[name + '-no-intp'] = \
+ gen_elixir_sigstr_rules(rterm, token, interpol=False)
+
+ return states
+
+ op3_re = "|".join(re.escape(s) for s in OPERATORS3)
+ op2_re = "|".join(re.escape(s) for s in OPERATORS2)
+ op1_re = "|".join(re.escape(s) for s in OPERATORS1)
+ ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re)
+ punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION)
+ alnum = r'\w'
+ name_re = r'(?:\.\.\.|[a-z_]%s*[!?]?)' % alnum
+ modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum}
+ complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re)
+ special_atom_re = r'(?:\.\.\.|<<>>|%\{\}|%|\{\})'
+
+ long_hex_char_re = r'(\\x\{)([\da-fA-F]+)(\})'
+ hex_char_re = r'(\\x[\da-fA-F]{1,2})'
+ escape_char_re = r'(\\[abdefnrstv])'
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'#.*$', Comment.Single),
+
+ # Various kinds of characters
+ (r'(\?)' + long_hex_char_re,
+ bygroups(String.Char,
+ String.Escape, Number.Hex, String.Escape)),
+ (r'(\?)' + hex_char_re,
+ bygroups(String.Char, String.Escape)),
+ (r'(\?)' + escape_char_re,
+ bygroups(String.Char, String.Escape)),
+ (r'\?\\?.', String.Char),
+
+ # '::' has to go before atoms
+ (r':::', String.Symbol),
+ (r'::', Operator),
+
+ # atoms
+ (r':' + special_atom_re, String.Symbol),
+ (r':' + complex_name_re, String.Symbol),
+ (r':"', String.Symbol, 'string_double_atom'),
+ (r":'", String.Symbol, 'string_single_atom'),
+
+ # [keywords: ...]
+ (r'(%s|%s)(:)(?=\s|\n)' % (special_atom_re, complex_name_re),
+ bygroups(String.Symbol, Punctuation)),
+
+ # @attributes
+ (r'@' + name_re, Name.Attribute),
+
+ # identifiers
+ (name_re, Name),
+ (r'(%%?)(%s)' % (modname_re,), bygroups(Punctuation, Name.Class)),
+
+ # operators and punctuation
+ (op3_re, Operator),
+ (op2_re, Operator),
+ (punctuation_re, Punctuation),
+ (r'&\d', Name.Entity), # anon func arguments
+ (op1_re, Operator),
+
+ # numbers
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[\da-fA-F]+', Number.Hex),
+ (r'\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?', Number.Float),
+ (r'\d(_?\d)*', Number.Integer),
+
+ # strings and heredocs
+ (r'"""\s*', String.Heredoc, 'heredoc_double'),
+ (r"'''\s*$", String.Heredoc, 'heredoc_single'),
+ (r'"', String.Double, 'string_double'),
+ (r"'", String.Single, 'string_single'),
+
+ include('sigils'),
+
+ (r'%\{', Punctuation, 'map_key'),
+ (r'\{', Punctuation, 'tuple'),
+ ],
+ 'heredoc_double': [
+ (r'^\s*"""', String.Heredoc, '#pop'),
+ include('heredoc_interpol'),
+ ],
+ 'heredoc_single': [
+ (r"^\s*'''", String.Heredoc, '#pop'),
+ include('heredoc_interpol'),
+ ],
+ 'heredoc_interpol': [
+ (r'[^#\\\n]+', String.Heredoc),
+ include('escapes'),
+ (r'\\.', String.Heredoc),
+ (r'\n+', String.Heredoc),
+ include('interpol'),
+ ],
+ 'heredoc_no_interpol': [
+ (r'[^\\\n]+', String.Heredoc),
+ (r'\\.', String.Heredoc),
+ (r'\n+', String.Heredoc),
+ ],
+ 'escapes': [
+ (long_hex_char_re,
+ bygroups(String.Escape, Number.Hex, String.Escape)),
+ (hex_char_re, String.Escape),
+ (escape_char_re, String.Escape),
+ ],
+ 'interpol': [
+ (r'#\{', String.Interpol, 'interpol_string'),
+ ],
+ 'interpol_string': [
+ (r'\}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'map_key': [
+ include('root'),
+ (r':', Punctuation, 'map_val'),
+ (r'=>', Punctuation, 'map_val'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'map_val': [
+ include('root'),
+ (r',', Punctuation, '#pop'),
+ (r'(?=\})', Punctuation, '#pop'),
+ ],
+ 'tuple': [
+ include('root'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ }
+ tokens.update(gen_elixir_string_rules('double', '"', String.Double))
+ tokens.update(gen_elixir_string_rules('single', "'", String.Single))
+ tokens.update(gen_elixir_string_rules('double_atom', '"', String.Symbol))
+ tokens.update(gen_elixir_string_rules('single_atom', "'", String.Symbol))
+ tokens.update(gen_elixir_sigil_rules())
+
+
+class ElixirConsoleLexer(Lexer):
+ """
+ For Elixir interactive console (iex) output like:
+
+ .. sourcecode:: iex
+
+ iex> [head | tail] = [1,2,3]
+ [1,2,3]
+ iex> head
+ 1
+ iex> tail
+ [2,3]
+ iex> [head | tail]
+ [1,2,3]
+ iex> length [head | tail]
+ 3
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Elixir iex session'
+ aliases = ['iex']
+ mimetypes = ['text/x-elixir-shellsession']
+
+ _prompt_re = re.compile(r'(iex|\.{3})(\(\d+\))?> ')
+
+ def get_tokens_unprocessed(self, text):
+ exlexer = ElixirLexer(**self.options)
+
+ curcode = ''
+ in_error = False
+ insertions = []
+ for match in line_re.finditer(text):
+ line = match.group()
+ if line.startswith(u'** '):
+ in_error = True
+ insertions.append((len(curcode),
+ [(0, Generic.Error, line[:-1])]))
+ curcode += line[-1:]
+ else:
+ m = self._prompt_re.match(line)
+ if m is not None:
+ in_error = False
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
+ for item in do_insertions(
+ insertions, exlexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ token = Generic.Error if in_error else Generic.Output
+ yield match.start(), token, line
+ if curcode:
+ for item in do_insertions(
+ insertions, exlexer.get_tokens_unprocessed(curcode)):
+ yield item
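Sketch (not part of the patch) of the iex error handling above: a line starting with '** ' flips the in_error flag, so it and any following output lines until the next prompt are emitted as Generic.Error rather than Generic.Output.

    from pygments.lexers import get_lexer_by_name
    from pygments.token import Generic

    iex = get_lexer_by_name('iex')
    session = ('iex> 1 + :a\n'
               '** (ArithmeticError) bad argument in arithmetic expression\n')
    print([t for t, v in iex.get_tokens(session) if t in Generic])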
diff --git a/pygments/lexers/esoteric.py b/pygments/lexers/esoteric.py
new file mode 100644
index 00000000..f61b292d
--- /dev/null
+++ b/pygments/lexers/esoteric.py
@@ -0,0 +1,114 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.esoteric
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for esoteric languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer']
+
+
+class BrainfuckLexer(RegexLexer):
+ """
+ Lexer for the esoteric `BrainFuck <http://www.muppetlabs.com/~breadbox/bf/>`_
+ language.
+ """
+
+ name = 'Brainfuck'
+ aliases = ['brainfuck', 'bf']
+ filenames = ['*.bf', '*.b']
+ mimetypes = ['application/x-brainfuck']
+
+ tokens = {
+ 'common': [
+ # use different colors for different instruction types
+ (r'[.,]+', Name.Tag),
+ (r'[+-]+', Name.Builtin),
+ (r'[<>]+', Name.Variable),
+ (r'[^.,+\-<>\[\]]+', Comment),
+ ],
+ 'root': [
+ (r'\[', Keyword, 'loop'),
+ (r'\]', Error),
+ include('common'),
+ ],
+ 'loop': [
+ (r'\[', Keyword, '#push'),
+ (r'\]', Keyword, '#pop'),
+ include('common'),
+ ]
+ }
+
+
+class BefungeLexer(RegexLexer):
+ """
+ Lexer for the esoteric `Befunge <http://en.wikipedia.org/wiki/Befunge>`_
+ language.
+
+ .. versionadded:: 0.7
+ """
+ name = 'Befunge'
+ aliases = ['befunge']
+ filenames = ['*.befunge']
+ mimetypes = ['application/x-befunge']
+
+ tokens = {
+ 'root': [
+ (r'[0-9a-f]', Number),
+ (r'[+*/%!`-]', Operator), # Traditional math
+ (r'[<>^v?\[\]rxjk]', Name.Variable), # Move, imperatives
+ (r'[:\\$.,n]', Name.Builtin), # Stack ops, imperatives
+ (r'[|_mw]', Keyword),
+ (r'[{}]', Name.Tag), # Befunge-98 stack ops
+ (r'".*?"', String.Double), # Strings don't appear to allow escapes
+ (r'\'.', String.Single), # Single character
+ (r'[#;]', Comment), # Trampoline... depends on direction hit
+ (r'[pg&~=@iotsy]', Keyword), # Misc
+ (r'[()A-Z]', Comment), # Fingerprints
+ (r'\s+', Text), # Whitespace doesn't matter
+ ],
+ }
+
+
+class RedcodeLexer(RegexLexer):
+ """
+ A simple Redcode lexer based on ICWS'94.
+ Contributed by Adam Blinkinsop <blinks@acm.org>.
+
+ .. versionadded:: 0.8
+ """
+ name = 'Redcode'
+ aliases = ['redcode']
+ filenames = ['*.cw']
+
+ opcodes = ('DAT', 'MOV', 'ADD', 'SUB', 'MUL', 'DIV', 'MOD',
+ 'JMP', 'JMZ', 'JMN', 'DJN', 'CMP', 'SLT', 'SPL',
+ 'ORG', 'EQU', 'END')
+ modifiers = ('A', 'B', 'AB', 'BA', 'F', 'X', 'I')
+
+ tokens = {
+ 'root': [
+ # Whitespace:
+ (r'\s+', Text),
+ (r';.*$', Comment.Single),
+ # Lexemes:
+ # Identifiers
+ (r'\b(%s)\b' % '|'.join(opcodes), Name.Function),
+ (r'\b(%s)\b' % '|'.join(modifiers), Name.Decorator),
+ (r'[A-Za-z_]\w+', Name),
+ # Operators
+ (r'[-+*/%]', Operator),
+ (r'[#$@<>]', Operator), # mode
+ (r'[.,]', Punctuation), # mode
+ # Numbers
+ (r'[-+]?\d+', Number.Integer),
+ ],
+ }
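Illustrative check of the bracket handling in BrainfuckLexer above (not part of the patch): '[' pushes the 'loop' state, so a ']' seen in the root state, i.e. without a matching '[', is flagged as Error.

    from pygments.lexers import get_lexer_by_name
    from pygments.token import Error

    bf = get_lexer_by_name('brainfuck')
    assert any(t is Error for t, v in bf.get_tokens(']+++.'))        # stray ']'
    assert not any(t is Error for t, v in bf.get_tokens('+++[->++<],.'))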
diff --git a/pygments/lexers/factor.py b/pygments/lexers/factor.py
new file mode 100644
index 00000000..6a39a1d4
--- /dev/null
+++ b/pygments/lexers/factor.py
@@ -0,0 +1,344 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.factor
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Factor language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, default, words
+from pygments.token import Text, Comment, Keyword, Name, String, Number
+
+__all__ = ['FactorLexer']
+
+
+class FactorLexer(RegexLexer):
+ """
+ Lexer for the `Factor <http://factorcode.org>`_ language.
+
+ .. versionadded:: 1.4
+ """
+ name = 'Factor'
+ aliases = ['factor']
+ filenames = ['*.factor']
+ mimetypes = ['text/x-factor']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ builtin_kernel = words((
+ '-rot', '2bi', '2bi@', '2bi*', '2curry', '2dip', '2drop', '2dup', '2keep', '2nip',
+ '2over', '2tri', '2tri@', '2tri*', '3bi', '3curry', '3dip', '3drop', '3dup', '3keep',
+ '3tri', '4dip', '4drop', '4dup', '4keep', '<wrapper>', '=', '>boolean', 'clone',
+ '?', '?execute', '?if', 'and', 'assert', 'assert=', 'assert?', 'bi', 'bi-curry',
+ 'bi-curry@', 'bi-curry*', 'bi@', 'bi*', 'boa', 'boolean', 'boolean?', 'both?',
+ 'build', 'call', 'callstack', 'callstack>array', 'callstack?', 'clear', '(clone)',
+ 'compose', 'compose?', 'curry', 'curry?', 'datastack', 'die', 'dip', 'do', 'drop',
+ 'dup', 'dupd', 'either?', 'eq?', 'equal?', 'execute', 'hashcode', 'hashcode*',
+ 'identity-hashcode', 'identity-tuple', 'identity-tuple?', 'if', 'if*',
+ 'keep', 'loop', 'most', 'new', 'nip', 'not', 'null', 'object', 'or', 'over',
+ 'pick', 'prepose', 'retainstack', 'rot', 'same?', 'swap', 'swapd', 'throw',
+ 'tri', 'tri-curry', 'tri-curry@', 'tri-curry*', 'tri@', 'tri*', 'tuple',
+ 'tuple?', 'unless', 'unless*', 'until', 'when', 'when*', 'while', 'with',
+ 'wrapper', 'wrapper?', 'xor'), suffix=r'\s')
+
+ builtin_assocs = words((
+ '2cache', '<enum>', '>alist', '?at', '?of', 'assoc', 'assoc-all?',
+ 'assoc-any?', 'assoc-clone-like', 'assoc-combine', 'assoc-diff',
+ 'assoc-diff!', 'assoc-differ', 'assoc-each', 'assoc-empty?',
+ 'assoc-filter', 'assoc-filter!', 'assoc-filter-as', 'assoc-find',
+ 'assoc-hashcode', 'assoc-intersect', 'assoc-like', 'assoc-map',
+ 'assoc-map-as', 'assoc-partition', 'assoc-refine', 'assoc-size',
+ 'assoc-stack', 'assoc-subset?', 'assoc-union', 'assoc-union!',
+ 'assoc=', 'assoc>map', 'assoc?', 'at', 'at+', 'at*', 'cache', 'change-at',
+ 'clear-assoc', 'delete-at', 'delete-at*', 'enum', 'enum?', 'extract-keys',
+ 'inc-at', 'key?', 'keys', 'map>assoc', 'maybe-set-at', 'new-assoc', 'of',
+ 'push-at', 'rename-at', 'set-at', 'sift-keys', 'sift-values', 'substitute',
+ 'unzip', 'value-at', 'value-at*', 'value?', 'values', 'zip'), suffix=r'\s')
+
+ builtin_combinators = words((
+ '2cleave', '2cleave>quot', '3cleave', '3cleave>quot', '4cleave',
+ '4cleave>quot', 'alist>quot', 'call-effect', 'case', 'case-find',
+ 'case>quot', 'cleave', 'cleave>quot', 'cond', 'cond>quot', 'deep-spread>quot',
+ 'execute-effect', 'linear-case-quot', 'no-case', 'no-case?', 'no-cond',
+ 'no-cond?', 'recursive-hashcode', 'shallow-spread>quot', 'spread',
+ 'to-fixed-point', 'wrong-values', 'wrong-values?'), suffix=r'\s')
+
+ builtin_math = words((
+ '-', '/', '/f', '/i', '/mod', '2/', '2^', '<', '<=', '<fp-nan>', '>',
+ '>=', '>bignum', '>fixnum', '>float', '>integer', '(all-integers?)',
+ '(each-integer)', '(find-integer)', '*', '+', '?1+',
+ 'abs', 'align', 'all-integers?', 'bignum', 'bignum?', 'bit?', 'bitand',
+ 'bitnot', 'bitor', 'bits>double', 'bits>float', 'bitxor', 'complex',
+ 'complex?', 'denominator', 'double>bits', 'each-integer', 'even?',
+ 'find-integer', 'find-last-integer', 'fixnum', 'fixnum?', 'float',
+ 'float>bits', 'float?', 'fp-bitwise=', 'fp-infinity?', 'fp-nan-payload',
+ 'fp-nan?', 'fp-qnan?', 'fp-sign', 'fp-snan?', 'fp-special?',
+ 'if-zero', 'imaginary-part', 'integer', 'integer>fixnum',
+ 'integer>fixnum-strict', 'integer?', 'log2', 'log2-expects-positive',
+ 'log2-expects-positive?', 'mod', 'neg', 'neg?', 'next-float',
+ 'next-power-of-2', 'number', 'number=', 'number?', 'numerator', 'odd?',
+ 'out-of-fixnum-range', 'out-of-fixnum-range?', 'power-of-2?',
+ 'prev-float', 'ratio', 'ratio?', 'rational', 'rational?', 'real',
+ 'real-part', 'real?', 'recip', 'rem', 'sgn', 'shift', 'sq', 'times',
+ 'u<', 'u<=', 'u>', 'u>=', 'unless-zero', 'unordered?', 'when-zero',
+ 'zero?'), suffix=r'\s')
+
+ builtin_sequences = words((
+ '1sequence', '2all?', '2each', '2map', '2map-as', '2map-reduce', '2reduce',
+ '2selector', '2sequence', '3append', '3append-as', '3each', '3map', '3map-as',
+ '3sequence', '4sequence', '<repetition>', '<reversed>', '<slice>', '?first',
+ '?last', '?nth', '?second', '?set-nth', 'accumulate', 'accumulate!',
+ 'accumulate-as', 'all?', 'any?', 'append', 'append!', 'append-as',
+ 'assert-sequence', 'assert-sequence=', 'assert-sequence?',
+ 'binary-reduce', 'bounds-check', 'bounds-check?', 'bounds-error',
+ 'bounds-error?', 'but-last', 'but-last-slice', 'cartesian-each',
+ 'cartesian-map', 'cartesian-product', 'change-nth', 'check-slice',
+ 'check-slice-error', 'clone-like', 'collapse-slice', 'collector',
+ 'collector-for', 'concat', 'concat-as', 'copy', 'count', 'cut', 'cut-slice',
+ 'cut*', 'delete-all', 'delete-slice', 'drop-prefix', 'each', 'each-from',
+ 'each-index', 'empty?', 'exchange', 'filter', 'filter!', 'filter-as', 'find',
+ 'find-from', 'find-index', 'find-index-from', 'find-last', 'find-last-from',
+ 'first', 'first2', 'first3', 'first4', 'flip', 'follow', 'fourth', 'glue', 'halves',
+ 'harvest', 'head', 'head-slice', 'head-slice*', 'head*', 'head?',
+ 'if-empty', 'immutable', 'immutable-sequence', 'immutable-sequence?',
+ 'immutable?', 'index', 'index-from', 'indices', 'infimum', 'infimum-by',
+ 'insert-nth', 'interleave', 'iota', 'iota-tuple', 'iota-tuple?', 'join',
+ 'join-as', 'last', 'last-index', 'last-index-from', 'length', 'lengthen',
+ 'like', 'longer', 'longer?', 'longest', 'map', 'map!', 'map-as', 'map-find',
+ 'map-find-last', 'map-index', 'map-integers', 'map-reduce', 'map-sum',
+ 'max-length', 'member-eq?', 'member?', 'midpoint@', 'min-length',
+ 'mismatch', 'move', 'new-like', 'new-resizable', 'new-sequence',
+ 'non-negative-integer-expected', 'non-negative-integer-expected?',
+ 'nth', 'nths', 'pad-head', 'pad-tail', 'padding', 'partition', 'pop', 'pop*',
+ 'prefix', 'prepend', 'prepend-as', 'produce', 'produce-as', 'product', 'push',
+ 'push-all', 'push-either', 'push-if', 'reduce', 'reduce-index', 'remove',
+ 'remove!', 'remove-eq', 'remove-eq!', 'remove-nth', 'remove-nth!', 'repetition',
+ 'repetition?', 'replace-slice', 'replicate', 'replicate-as', 'rest',
+ 'rest-slice', 'reverse', 'reverse!', 'reversed', 'reversed?', 'second',
+ 'selector', 'selector-for', 'sequence', 'sequence-hashcode', 'sequence=',
+ 'sequence?', 'set-first', 'set-fourth', 'set-last', 'set-length', 'set-nth',
+ 'set-second', 'set-third', 'short', 'shorten', 'shorter', 'shorter?',
+ 'shortest', 'sift', 'slice', 'slice-error', 'slice-error?', 'slice?',
+ 'snip', 'snip-slice', 'start', 'start*', 'subseq', 'subseq?', 'suffix',
+ 'suffix!', 'sum', 'sum-lengths', 'supremum', 'supremum-by', 'surround', 'tail',
+ 'tail-slice', 'tail-slice*', 'tail*', 'tail?', 'third', 'trim',
+ 'trim-head', 'trim-head-slice', 'trim-slice', 'trim-tail', 'trim-tail-slice',
+ 'unclip', 'unclip-last', 'unclip-last-slice', 'unclip-slice', 'unless-empty',
+ 'virtual-exemplar', 'virtual-sequence', 'virtual-sequence?', 'virtual@',
+ 'when-empty'), suffix=r'\s')
+
+ builtin_namespaces = words((
+ '+@', 'change', 'change-global', 'counter', 'dec', 'get', 'get-global',
+ 'global', 'inc', 'init-namespaces', 'initialize', 'is-global', 'make-assoc',
+ 'namespace', 'namestack', 'off', 'on', 'set', 'set-global', 'set-namestack',
+ 'toggle', 'with-global', 'with-scope', 'with-variable', 'with-variables'),
+ suffix=r'\s')
+
+ builtin_arrays = words((
+ '1array', '2array', '3array', '4array', '<array>', '>array', 'array',
+ 'array?', 'pair', 'pair?', 'resize-array'), suffix=r'\s')
+
+ builtin_io = words((
+ '(each-stream-block-slice)', '(each-stream-block)',
+ '(stream-contents-by-block)', '(stream-contents-by-element)',
+ '(stream-contents-by-length-or-block)',
+ '(stream-contents-by-length)', '+byte+', '+character+',
+ 'bad-seek-type', 'bad-seek-type?', 'bl', 'contents', 'each-block',
+ 'each-block-size', 'each-block-slice', 'each-line', 'each-morsel',
+ 'each-stream-block', 'each-stream-block-slice', 'each-stream-line',
+ 'error-stream', 'flush', 'input-stream', 'input-stream?',
+ 'invalid-read-buffer', 'invalid-read-buffer?', 'lines', 'nl',
+ 'output-stream', 'output-stream?', 'print', 'read', 'read-into',
+ 'read-partial', 'read-partial-into', 'read-until', 'read1', 'readln',
+ 'seek-absolute', 'seek-absolute?', 'seek-end', 'seek-end?',
+ 'seek-input', 'seek-output', 'seek-relative', 'seek-relative?',
+ 'stream-bl', 'stream-contents', 'stream-contents*', 'stream-copy',
+ 'stream-copy*', 'stream-element-type', 'stream-flush',
+ 'stream-length', 'stream-lines', 'stream-nl', 'stream-print',
+ 'stream-read', 'stream-read-into', 'stream-read-partial',
+ 'stream-read-partial-into', 'stream-read-partial-unsafe',
+ 'stream-read-unsafe', 'stream-read-until', 'stream-read1',
+ 'stream-readln', 'stream-seek', 'stream-seekable?', 'stream-tell',
+ 'stream-write', 'stream-write1', 'tell-input', 'tell-output',
+ 'with-error-stream', 'with-error-stream*', 'with-error>output',
+ 'with-input-output+error-streams',
+ 'with-input-output+error-streams*', 'with-input-stream',
+ 'with-input-stream*', 'with-output-stream', 'with-output-stream*',
+ 'with-output>error', 'with-output+error-stream',
+ 'with-output+error-stream*', 'with-streams', 'with-streams*',
+ 'write', 'write1'), suffix=r'\s')
+
+ builtin_strings = words((
+ '1string', '<string>', '>string', 'resize-string', 'string',
+ 'string?'), suffix=r'\s')
+
+ builtin_vectors = words((
+ '1vector', '<vector>', '>vector', '?push', 'vector', 'vector?'),
+ suffix=r'\s')
+
+ builtin_continuations = words((
+ '<condition>', '<continuation>', '<restart>', 'attempt-all',
+ 'attempt-all-error', 'attempt-all-error?', 'callback-error-hook',
+ 'callcc0', 'callcc1', 'cleanup', 'compute-restarts', 'condition',
+ 'condition?', 'continuation', 'continuation?', 'continue',
+ 'continue-restart', 'continue-with', 'current-continuation',
+ 'error', 'error-continuation', 'error-in-thread', 'error-thread',
+ 'ifcc', 'ignore-errors', 'in-callback?', 'original-error', 'recover',
+ 'restart', 'restart?', 'restarts', 'rethrow', 'rethrow-restarts',
+ 'return', 'return-continuation', 'thread-error-hook', 'throw-continue',
+ 'throw-restarts', 'with-datastack', 'with-return'), suffix=r'\s')
+
+ tokens = {
+ 'root': [
+ # factor allows a file to start with a shebang
+ (r'#!.*$', Comment.Preproc),
+ default('base'),
+ ],
+ 'base': [
+ (r'\s+', Text),
+
+ # defining words
+ (r'((?:MACRO|MEMO|TYPED)?:[:]?)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'(M:[:]?)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class, Text, Name.Function)),
+ (r'(C:)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function, Text, Name.Class)),
+ (r'(GENERIC:)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function, Text, Name.Function)),
+ (r'\(\s', Name.Function, 'stackeffect'),
+ (r';\s', Keyword),
+
+ # imports and namespaces
+ (r'(USING:)(\s+)',
+ bygroups(Keyword.Namespace, Text), 'vocabs'),
+ (r'(USE:|UNUSE:|IN:|QUALIFIED:)(\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace)),
+ (r'(QUALIFIED-WITH:)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace, Text, Name.Namespace)),
+ (r'(FROM:|EXCLUDE:)(\s+)(\S+)(\s+=>\s)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace, Text), 'words'),
+ (r'(RENAME:)(\s+)(\S+)(\s+)(\S+)(\s+=>\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Namespace, Text, Name.Function)),
+ (r'(ALIAS:|TYPEDEF:)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Function)),
+ (r'(DEFER:|FORGET:|POSTPONE:)(\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Function)),
+
+ # tuples and classes
+ (r'(TUPLE:|ERROR:)(\s+)(\S+)(\s+<\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class, Text, Name.Class), 'slots'),
+ (r'(TUPLE:|ERROR:|BUILTIN:)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class), 'slots'),
+ (r'(MIXIN:|UNION:|INTERSECTION:)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class)),
+ (r'(PREDICATE:)(\s+)(\S+)(\s+<\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class, Text, Name.Class)),
+ (r'(C:)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function, Text, Name.Class)),
+ (r'(INSTANCE:)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class, Text, Name.Class)),
+ (r'(SLOT:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Function)),
+ (r'(SINGLETON:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
+ (r'SINGLETONS:', Keyword, 'classes'),
+
+ # other syntax
+ (r'(CONSTANT:|SYMBOL:|MAIN:|HELP:)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'SYMBOLS:\s', Keyword, 'words'),
+ (r'SYNTAX:\s', Keyword),
+ (r'ALIEN:\s', Keyword),
+ (r'(STRUCT:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
+ (r'(FUNCTION:)(\s+\S+\s+)(\S+)(\s+\(\s+[^)]+\)\s)',
+ bygroups(Keyword.Namespace, Text, Name.Function, Text)),
+ (r'(FUNCTION-ALIAS:)(\s+)(\S+)(\s+\S+\s+)(\S+)(\s+\(\s+[^)]+\)\s)',
+ bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Function, Text)),
+
+ # vocab.private
+ (r'(?:<PRIVATE|PRIVATE>)\s', Keyword.Namespace),
+
+ # strings
+ (r'"""\s+(?:.|\n)*?\s+"""', String),
+ (r'"(?:\\\\|\\"|[^"])*"', String),
+ (r'\S+"\s+(?:\\\\|\\"|[^"])*"', String),
+ (r'CHAR:\s+(?:\\[\\abfnrstv]|[^\\]\S*)\s', String.Char),
+
+ # comments
+ (r'!\s+.*$', Comment),
+ (r'#!\s+.*$', Comment),
+ (r'/\*\s+(?:.|\n)*?\s\*/\s', Comment),
+
+ # boolean constants
+ (r'[tf]\s', Name.Constant),
+
+ # symbols and literals
+ (r'[\\$]\s+\S+', Name.Constant),
+ (r'M\\\s+\S+\s+\S+', Name.Constant),
+
+ # numbers
+ (r'[+-]?(?:[\d,]*\d)?\.(?:\d([\d,]*\d)?)?(?:[eE][+-]?\d+)?\s', Number),
+ (r'[+-]?\d(?:[\d,]*\d)?(?:[eE][+-]?\d+)?\s', Number),
+ (r'0x[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
+ (r'NAN:\s+[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
+ (r'0b[01]+\s', Number.Bin),
+ (r'0o[0-7]+\s', Number.Oct),
+ (r'(?:\d([\d,]*\d)?)?\+\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
+ (r'(?:\-\d([\d,]*\d)?)?\-\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
+
+ # keywords
+ (r'(?:deprecated|final|foldable|flushable|inline|recursive)\s',
+ Keyword),
+
+ # builtins
+ (builtin_kernel, Name.Builtin),
+ (builtin_assocs, Name.Builtin),
+ (builtin_combinators, Name.Builtin),
+ (builtin_math, Name.Builtin),
+ (builtin_sequences, Name.Builtin),
+ (builtin_namespaces, Name.Builtin),
+ (builtin_arrays, Name.Builtin),
+ (builtin_io, Name.Builtin),
+ (builtin_strings, Name.Builtin),
+ (builtin_vectors, Name.Builtin),
+ (builtin_continuations, Name.Builtin),
+
+ # everything else is text
+ (r'\S+', Text),
+ ],
+ 'stackeffect': [
+ (r'\s+', Text),
+ (r'\(\s+', Name.Function, 'stackeffect'),
+ (r'\)\s', Name.Function, '#pop'),
+ (r'--\s', Name.Function),
+ (r'\S+', Name.Variable),
+ ],
+ 'slots': [
+ (r'\s+', Text),
+ (r';\s', Keyword, '#pop'),
+ (r'(\{\s+)(\S+)(\s+[^}]+\s+\}\s)',
+ bygroups(Text, Name.Variable, Text)),
+ (r'\S+', Name.Variable),
+ ],
+ 'vocabs': [
+ (r'\s+', Text),
+ (r';\s', Keyword, '#pop'),
+ (r'\S+', Name.Namespace),
+ ],
+ 'classes': [
+ (r'\s+', Text),
+ (r';\s', Keyword, '#pop'),
+ (r'\S+', Name.Class),
+ ],
+ 'words': [
+ (r'\s+', Text),
+ (r';\s', Keyword, '#pop'),
+ (r'\S+', Name.Function),
+ ],
+ }
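A small probe of the Factor rules above (a sketch with a made-up word definition, not part of the patch): the ':' defining rule highlights the word name as Name.Function, and '( ... -- ... )' switches into the 'stackeffect' state so the inputs and outputs come out as Name.Variable.

    from pygments.lexers import get_lexer_by_name
    from pygments.token import Name

    factor = get_lexer_by_name('factor')
    print([v for t, v in factor.get_tokens(': double ( x -- y ) 2 * ;\n') if t in Name])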
diff --git a/pygments/lexers/fantom.py b/pygments/lexers/fantom.py
new file mode 100644
index 00000000..c20a3f38
--- /dev/null
+++ b/pygments/lexers/fantom.py
@@ -0,0 +1,250 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.fantom
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Fantom language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from string import Template
+
+from pygments.lexer import RegexLexer, include, bygroups, using, \
+ this, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal
+
+__all__ = ['FantomLexer']
+
+
+class FantomLexer(RegexLexer):
+ """
+ For Fantom source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Fantom'
+ aliases = ['fan']
+ filenames = ['*.fan']
+ mimetypes = ['application/x-fantom']
+
+ # often used regexes
+ def s(str):
+ return Template(str).substitute(
+ dict(
+ pod=r'[\"\w\.]+',
+ eos=r'\n|;',
+ id=r'[a-zA-Z_]\w*',
+ # all chars which can be part of a type definition; starts with
+ # either a letter, '[' (maps), or '|' (funcs)
+ type=r'(?:\[|[a-zA-Z_]|\|)[:\w\[\]|\->?]*?',
+ )
+ )
+
+ tokens = {
+ 'comments': [
+ (r'(?s)/\*.*?\*/', Comment.Multiline), # Multiline
+ (r'//.*?\n', Comment.Single), # Single line
+ # TODO: highlight references in fandocs
+ (r'\*\*.*?\n', Comment.Special), # Fandoc
+ (r'#.*\n', Comment.Single) # Shell-style
+ ],
+ 'literals': [
+ (r'\b-?[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration
+ (r'\b-?[\d_]*\.[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration with dot
+ (r'\b-?(\d+)?\.\d+(f|F|d|D)?', Number.Float), # Float/Decimal
+ (r'\b-?0x[0-9a-fA-F_]+', Number.Hex), # Hex
+ (r'\b-?[\d_]+', Number.Integer), # Int
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), # Char
+ (r'"', Punctuation, 'insideStr'), # Opening quote
+ (r'`', Punctuation, 'insideUri'), # Opening accent
+ (r'\b(true|false|null)\b', Keyword.Constant), # Bool & null
+ (r'(?:(\w+)(::))?(\w+)(<\|)(.*?)(\|>)', # DSL
+ bygroups(Name.Namespace, Punctuation, Name.Class,
+ Punctuation, String, Punctuation)),
+ (r'(?:(\w+)(::))?(\w+)?(#)(\w+)?', # Type/slot literal
+ bygroups(Name.Namespace, Punctuation, Name.Class,
+ Punctuation, Name.Function)),
+ (r'\[,\]', Literal), # Empty list
+ (s(r'($type)(\[,\])'), # Typed empty list
+ bygroups(using(this, state='inType'), Literal)),
+ (r'\[:\]', Literal), # Empty Map
+ (s(r'($type)(\[:\])'),
+ bygroups(using(this, state='inType'), Literal)),
+ ],
+ 'insideStr': [
+ (r'\\\\', String.Escape), # Escaped backslash
+ (r'\\"', String.Escape), # Escaped "
+ (r'\\`', String.Escape), # Escaped `
+ (r'\$\w+', String.Interpol), # Subst var
+ (r'\$\{.*?\}', String.Interpol), # Subst expr
+ (r'"', Punctuation, '#pop'), # Closing quot
+ (r'.', String) # String content
+ ],
+ 'insideUri': [ # TODO: remove copy/paste str/uri
+ (r'\\\\', String.Escape), # Escaped backslash
+ (r'\\"', String.Escape), # Escaped "
+ (r'\\`', String.Escape), # Escaped `
+ (r'\$\w+', String.Interpol), # Subst var
+ (r'\$\{.*?\}', String.Interpol), # Subst expr
+ (r'`', Punctuation, '#pop'), # Closing tick
+ (r'.', String.Backtick) # URI content
+ ],
+ 'protectionKeywords': [
+ (r'\b(public|protected|private|internal)\b', Keyword),
+ ],
+ 'typeKeywords': [
+ (r'\b(abstract|final|const|native|facet|enum)\b', Keyword),
+ ],
+ 'methodKeywords': [
+ (r'\b(abstract|native|once|override|static|virtual|final)\b',
+ Keyword),
+ ],
+ 'fieldKeywords': [
+ (r'\b(abstract|const|final|native|override|static|virtual|'
+ r'readonly)\b', Keyword)
+ ],
+ 'otherKeywords': [
+ (words((
+ 'try', 'catch', 'throw', 'finally', 'for', 'if', 'else', 'while',
+ 'as', 'is', 'isnot', 'switch', 'case', 'default', 'continue',
+ 'break', 'do', 'return', 'get', 'set'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (r'\b(it|this|super)\b', Name.Builtin.Pseudo),
+ ],
+ 'operators': [
+ (r'\+\+|\-\-|\+|\-|\*|/|\|\||&&|<=>|<=|<|>=|>|=|!|\[|\]', Operator)
+ ],
+ 'inType': [
+ (r'[\[\]|\->:?]', Punctuation),
+ (s(r'$id'), Name.Class),
+ default('#pop'),
+
+ ],
+ 'root': [
+ include('comments'),
+ include('protectionKeywords'),
+ include('typeKeywords'),
+ include('methodKeywords'),
+ include('fieldKeywords'),
+ include('literals'),
+ include('otherKeywords'),
+ include('operators'),
+ (r'using\b', Keyword.Namespace, 'using'), # Using stmt
+ (r'@\w+', Name.Decorator, 'facet'), # Symbol
+ (r'(class|mixin)(\s+)(\w+)', bygroups(Keyword, Text, Name.Class),
+ 'inheritance'), # Inheritance list
+
+ # Type var := val
+ (s(r'($type)([ \t]+)($id)(\s*)(:=)'),
+ bygroups(using(this, state='inType'), Text,
+ Name.Variable, Text, Operator)),
+
+ # var := val
+ (s(r'($id)(\s*)(:=)'),
+ bygroups(Name.Variable, Text, Operator)),
+
+ # .someId( or ->someId( ###
+ (s(r'(\.|(?:\->))($id)(\s*)(\()'),
+ bygroups(Operator, Name.Function, Text, Punctuation),
+ 'insideParen'),
+
+ # .someId or ->someId
+ (s(r'(\.|(?:\->))($id)'),
+ bygroups(Operator, Name.Function)),
+
+ # new makeXXX (
+ (r'(new)(\s+)(make\w*)(\s*)(\()',
+ bygroups(Keyword, Text, Name.Function, Text, Punctuation),
+ 'insideMethodDeclArgs'),
+
+ # Type name (
+ (s(r'($type)([ \t]+)' # Return type and whitespace
+ r'($id)(\s*)(\()'), # method name + open brace
+ bygroups(using(this, state='inType'), Text,
+ Name.Function, Text, Punctuation),
+ 'insideMethodDeclArgs'),
+
+ # ArgType argName,
+ (s(r'($type)(\s+)($id)(\s*)(,)'),
+ bygroups(using(this, state='inType'), Text, Name.Variable,
+ Text, Punctuation)),
+
+ # ArgType argName)
+ # Covered in 'insideParen' state
+
+ # ArgType argName -> ArgType|
+ (s(r'($type)(\s+)($id)(\s*)(\->)(\s*)($type)(\|)'),
+ bygroups(using(this, state='inType'), Text, Name.Variable,
+ Text, Punctuation, Text, using(this, state='inType'),
+ Punctuation)),
+
+ # ArgType argName|
+ (s(r'($type)(\s+)($id)(\s*)(\|)'),
+ bygroups(using(this, state='inType'), Text, Name.Variable,
+ Text, Punctuation)),
+
+ # Type var
+ (s(r'($type)([ \t]+)($id)'),
+ bygroups(using(this, state='inType'), Text,
+ Name.Variable)),
+
+ (r'\(', Punctuation, 'insideParen'),
+ (r'\{', Punctuation, 'insideBrace'),
+ (r'.', Text)
+ ],
+ 'insideParen': [
+ (r'\)', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'insideMethodDeclArgs': [
+ (r'\)', Punctuation, '#pop'),
+ (s(r'($type)(\s+)($id)(\s*)(\))'),
+ bygroups(using(this, state='inType'), Text, Name.Variable,
+ Text, Punctuation), '#pop'),
+ include('root'),
+ ],
+ 'insideBrace': [
+ (r'\}', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'inheritance': [
+ (r'\s+', Text), # Whitespace
+ (r':|,', Punctuation),
+ (r'(?:(\w+)(::))?(\w+)',
+ bygroups(Name.Namespace, Punctuation, Name.Class)),
+ (r'\{', Punctuation, '#pop')
+ ],
+ 'using': [
+ (r'[ \t]+', Text), # consume whitespace
+ (r'(\[)(\w+)(\])',
+ bygroups(Punctuation, Comment.Special, Punctuation)), # ffi
+ (r'(\")?([\w.]+)(\")?',
+ bygroups(Punctuation, Name.Namespace, Punctuation)), # podname
+ (r'::', Punctuation, 'usingClass'),
+ default('#pop')
+ ],
+ 'usingClass': [
+ (r'[ \t]+', Text), # consume whitespace
+ (r'(as)(\s+)(\w+)',
+ bygroups(Keyword.Declaration, Text, Name.Class), '#pop:2'),
+ (r'[\w$]+', Name.Class),
+ default('#pop:2') # jump out to root state
+ ],
+ 'facet': [
+ (r'\s+', Text),
+ (r'\{', Punctuation, 'facetFields'),
+ default('#pop')
+ ],
+ 'facetFields': [
+ include('comments'),
+ include('literals'),
+ include('operators'),
+ (r'\s+', Text),
+ (r'(\s*)(\w+)(\s*)(=)', bygroups(Text, Name, Text, Operator)),
+ (r'\}', Punctuation, '#pop'),
+ (r'.', Text)
+ ],
+ }
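
The token tables in these new lexer modules all rely on Pygments' RegexLexer state machine: each state is a list of (regex, token, new-state) rules, include() splices one state's rules into another, bygroups() splits a single match into several tokens, and '#pop' returns to the previous state. A minimal, self-contained sketch of that pattern follows; the ToyLexer below is hypothetical and not tied to any lexer added in this diff.

    # Minimal sketch of the RegexLexer state-machine pattern used throughout
    # this diff: states, include(), bygroups() and '#pop'.
    from pygments.lexer import RegexLexer, include, bygroups
    from pygments.token import Comment, Name, Punctuation, String, Text

    class ToyLexer(RegexLexer):
        """Hypothetical lexer: identifiers, "..." strings and // comments."""
        name = 'Toy'
        aliases = ['toy']

        tokens = {
            'comments': [
                (r'//.*$', Comment.Single),
            ],
            'root': [
                include('comments'),            # reuse the shared comment rules
                (r'"', Punctuation, 'string'),  # push into the 'string' state
                (r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
                (r'\w+', Name),
                (r'\s+', Text),
                (r'.', Text),
            ],
            'string': [
                (r'\\.', String.Escape),
                (r'"', Punctuation, '#pop'),    # pop back to 'root'
                (r'.', String),
            ],
        }

    # Dump the resulting token stream for a tiny sample input.
    for token, value in ToyLexer().get_tokens('say("hi") // demo'):
        print(token, repr(value))
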
diff --git a/pygments/lexers/felix.py b/pygments/lexers/felix.py
new file mode 100644
index 00000000..b7659769
--- /dev/null
+++ b/pygments/lexers/felix.py
@@ -0,0 +1,273 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.felix
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Felix language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, default, words, \
+ combined
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['FelixLexer']
+
+
+class FelixLexer(RegexLexer):
+ """
+ For `Felix <http://www.felix-lang.org>`_ source code.
+
+ .. versionadded:: 1.2
+ """
+
+ name = 'Felix'
+ aliases = ['felix', 'flx']
+ filenames = ['*.flx', '*.flxh']
+ mimetypes = ['text/x-felix']
+
+ preproc = (
+ 'elif', 'else', 'endif', 'if', 'ifdef', 'ifndef',
+ )
+
+ keywords = (
+ '_', '_deref', 'all', 'as',
+ 'assert', 'attempt', 'call', 'callback', 'case', 'caseno', 'cclass',
+ 'code', 'compound', 'ctypes', 'do', 'done', 'downto', 'elif', 'else',
+ 'endattempt', 'endcase', 'endif', 'endmatch', 'enum', 'except',
+ 'exceptions', 'expect', 'finally', 'for', 'forall', 'forget', 'fork',
+ 'functor', 'goto', 'ident', 'if', 'incomplete', 'inherit', 'instance',
+ 'interface', 'jump', 'lambda', 'loop', 'match', 'module', 'namespace',
+ 'new', 'noexpand', 'nonterm', 'obj', 'of', 'open', 'parse', 'raise',
+ 'regexp', 'reglex', 'regmatch', 'rename', 'return', 'the', 'then',
+ 'to', 'type', 'typecase', 'typedef', 'typematch', 'typeof', 'upto',
+ 'when', 'whilst', 'with', 'yield',
+ )
+
+ keyword_directives = (
+ '_gc_pointer', '_gc_type', 'body', 'comment', 'const', 'export',
+ 'header', 'inline', 'lval', 'macro', 'noinline', 'noreturn',
+ 'package', 'private', 'pod', 'property', 'public', 'publish',
+ 'requires', 'todo', 'virtual', 'use',
+ )
+
+ keyword_declarations = (
+ 'def', 'let', 'ref', 'val', 'var',
+ )
+
+ keyword_types = (
+ 'unit', 'void', 'any', 'bool',
+ 'byte', 'offset',
+ 'address', 'caddress', 'cvaddress', 'vaddress',
+ 'tiny', 'short', 'int', 'long', 'vlong',
+ 'utiny', 'ushort', 'vshort', 'uint', 'ulong', 'uvlong',
+ 'int8', 'int16', 'int32', 'int64',
+ 'uint8', 'uint16', 'uint32', 'uint64',
+ 'float', 'double', 'ldouble',
+ 'complex', 'dcomplex', 'lcomplex',
+ 'imaginary', 'dimaginary', 'limaginary',
+ 'char', 'wchar', 'uchar',
+ 'charp', 'charcp', 'ucharp', 'ucharcp',
+ 'string', 'wstring', 'ustring',
+ 'cont',
+ 'array', 'varray', 'list',
+ 'lvalue', 'opt', 'slice',
+ )
+
+ keyword_constants = (
+ 'false', 'true',
+ )
+
+ operator_words = (
+ 'and', 'not', 'in', 'is', 'isin', 'or', 'xor',
+ )
+
+ name_builtins = (
+ '_svc', 'while',
+ )
+
+ name_pseudo = (
+ 'root', 'self', 'this',
+ )
+
+ decimal_suffixes = '([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?'
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+
+ # Keywords
+ (words(('axiom', 'ctor', 'fun', 'gen', 'proc', 'reduce',
+ 'union'), suffix=r'\b'),
+ Keyword, 'funcname'),
+ (words(('class', 'cclass', 'cstruct', 'obj', 'struct'), suffix=r'\b'),
+ Keyword, 'classname'),
+ (r'(instance|module|typeclass)\b', Keyword, 'modulename'),
+
+ (words(keywords, suffix=r'\b'), Keyword),
+ (words(keyword_directives, suffix=r'\b'), Name.Decorator),
+ (words(keyword_declarations, suffix=r'\b'), Keyword.Declaration),
+ (words(keyword_types, suffix=r'\b'), Keyword.Type),
+ (words(keyword_constants, suffix=r'\b'), Keyword.Constant),
+
+ # Operators
+ include('operators'),
+
+ # Float Literal
+ # -- Hex Float
+ (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
+ r'[pP][+\-]?[0-9_]+[lLfFdD]?', Number.Float),
+ # -- DecimalFloat
+ (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+ r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[lLfFdD]?', Number.Float),
+ (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[lLfFdD]?',
+ Number.Float),
+
+ # IntegerLiteral
+ # -- Binary
+ (r'0[Bb][01_]+%s' % decimal_suffixes, Number.Bin),
+ # -- Octal
+ (r'0[0-7_]+%s' % decimal_suffixes, Number.Oct),
+ # -- Hexadecimal
+ (r'0[xX][0-9a-fA-F_]+%s' % decimal_suffixes, Number.Hex),
+ # -- Decimal
+ (r'(0|[1-9][0-9_]*)%s' % decimal_suffixes, Number.Integer),
+
+ # Strings
+ ('([rR][cC]?|[cC][rR])"""', String, 'tdqs'),
+ ("([rR][cC]?|[cC][rR])'''", String, 'tsqs'),
+ ('([rR][cC]?|[cC][rR])"', String, 'dqs'),
+ ("([rR][cC]?|[cC][rR])'", String, 'sqs'),
+ ('[cCfFqQwWuU]?"""', String, combined('stringescape', 'tdqs')),
+ ("[cCfFqQwWuU]?'''", String, combined('stringescape', 'tsqs')),
+ ('[cCfFqQwWuU]?"', String, combined('stringescape', 'dqs')),
+ ("[cCfFqQwWuU]?'", String, combined('stringescape', 'sqs')),
+
+ # Punctuation
+ (r'[\[\]{}:(),;?]', Punctuation),
+
+ # Labels
+ (r'[a-zA-Z_]\w*:>', Name.Label),
+
+ # Identifiers
+ (r'(%s)\b' % '|'.join(name_builtins), Name.Builtin),
+ (r'(%s)\b' % '|'.join(name_pseudo), Name.Builtin.Pseudo),
+ (r'[a-zA-Z_]\w*', Name),
+ ],
+ 'whitespace': [
+ (r'\n', Text),
+ (r'\s+', Text),
+
+ include('comment'),
+
+ # Preprocessor
+ (r'#\s*if\s+0', Comment.Preproc, 'if0'),
+ (r'#', Comment.Preproc, 'macro'),
+ ],
+ 'operators': [
+ (r'(%s)\b' % '|'.join(operator_words), Operator.Word),
+ (r'!=|==|<<|>>|\|\||&&|[-~+/*%=<>&^|.$]', Operator),
+ ],
+ 'comment': [
+ (r'//(.*?)\n', Comment.Single),
+ (r'/[*]', Comment.Multiline, 'comment2'),
+ ],
+ 'comment2': [
+ (r'[^/*]', Comment.Multiline),
+ (r'/[*]', Comment.Multiline, '#push'),
+ (r'[*]/', Comment.Multiline, '#pop'),
+ (r'[/*]', Comment.Multiline),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment, '#pop'),
+ (r'.*?\n', Comment),
+ ],
+ 'macro': [
+ include('comment'),
+ (r'(import|include)(\s+)(<[^>]*?>)',
+ bygroups(Comment.Preproc, Text, String), '#pop'),
+ (r'(import|include)(\s+)("[^"]*?")',
+ bygroups(Comment.Preproc, Text, String), '#pop'),
+ (r"(import|include)(\s+)('[^']*?')",
+ bygroups(Comment.Preproc, Text, String), '#pop'),
+ (r'[^/\n]+', Comment.Preproc),
+ # (r'/[*](.|\n)*?[*]/', Comment),
+ # (r'//.*?\n', Comment, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'funcname': [
+ include('whitespace'),
+ (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
+ # anonymous functions
+ (r'(?=\()', Text, '#pop'),
+ ],
+ 'classname': [
+ include('whitespace'),
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ # anonymous classes
+ (r'(?=\{)', Text, '#pop'),
+ ],
+ 'modulename': [
+ include('whitespace'),
+ (r'\[', Punctuation, ('modulename2', 'tvarlist')),
+ default('modulename2'),
+ ],
+ 'modulename2': [
+ include('whitespace'),
+ (r'([a-zA-Z_]\w*)', Name.Namespace, '#pop:2'),
+ ],
+ 'tvarlist': [
+ include('whitespace'),
+ include('operators'),
+ (r'\[', Punctuation, '#push'),
+ (r'\]', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'(with|where)\b', Keyword),
+ (r'[a-zA-Z_]\w*', Name),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'strings': [
+ (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
+ (r'[^\\\'"%\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'%', String)
+ # newlines are an error (use "nl" state)
+ ],
+ 'nl': [
+ (r'\n', String)
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ # included here again for raw strings
+ (r'\\\\|\\"|\\\n', String.Escape),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ # included here again for raw strings
+ (r"\\\\|\\'|\\\n", String.Escape),
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ }
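
For reference, the new Felix module can be exercised directly through the standard Pygments API. A short usage sketch; the Felix snippet below is illustrative only, not taken from the Felix distribution.

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.felix import FelixLexer

    code = 'fun add(x: int, y: int) => x + y;\nprintln$ add(1, 2);\n'

    # Inspect the raw token stream ...
    for token, value in FelixLexer().get_tokens(code):
        print(token, repr(value))

    # ... or render it with any formatter.
    print(highlight(code, FelixLexer(), TerminalFormatter()))
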
diff --git a/pygments/lexers/fortran.py b/pygments/lexers/fortran.py
new file mode 100644
index 00000000..df3fed4f
--- /dev/null
+++ b/pygments/lexers/fortran.py
@@ -0,0 +1,203 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.fortran
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Fortran languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, include, words, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['FortranLexer', 'FortranFixedLexer']
+
+
+class FortranLexer(RegexLexer):
+ """
+ Lexer for FORTRAN 90 code.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Fortran'
+ aliases = ['fortran']
+ filenames = ['*.f03', '*.f90', '*.F03', '*.F90']
+ mimetypes = ['text/x-fortran']
+ flags = re.IGNORECASE | re.MULTILINE
+
+ # Data Types: INTEGER, REAL, COMPLEX, LOGICAL, CHARACTER and DOUBLE PRECISION
+ # Operators: **, *, +, -, /, <, >, <=, >=, ==, /=
+ # Logical operators: NOT, AND, OR, EQV, NEQV
+
+ # Builtins:
+ # http://gcc.gnu.org/onlinedocs/gcc-3.4.6/g77/Table-of-Intrinsic-Functions.html
+
+ tokens = {
+ 'root': [
+ (r'^#.*\n', Comment.Preproc),
+ (r'!.*\n', Comment),
+ include('strings'),
+ include('core'),
+ (r'[a-z][\w$]*', Name),
+ include('nums'),
+ (r'[\s]+', Text),
+ ],
+ 'core': [
+ # Statements
+ (words((
+ 'ABSTRACT', 'ACCEPT', 'ALL', 'ALLSTOP', 'ALLOCATABLE', 'ALLOCATE',
+ 'ARRAY', 'ASSIGN', 'ASSOCIATE', 'ASYNCHRONOUS', 'BACKSPACE', 'BIND',
+ 'BLOCK', 'BLOCKDATA', 'BYTE', 'CALL', 'CASE', 'CLASS', 'CLOSE',
+ 'CODIMENSION', 'COMMON', 'CONCURRENT', 'CONTIGUOUS', 'CONTAINS',
+ 'CONTINUE', 'CRITICAL', 'CYCLE', 'DATA', 'DEALLOCATE', 'DECODE',
+ 'DEFERRED', 'DIMENSION', 'DO', 'ELEMENTAL', 'ELSE', 'ENCODE', 'END',
+ 'ENTRY', 'ENUM', 'ENUMERATOR', 'EQUIVALENCE', 'EXIT', 'EXTENDS',
+ 'EXTERNAL', 'EXTRINSIC', 'FILE', 'FINAL', 'FORALL', 'FORMAT',
+ 'FUNCTION', 'GENERIC', 'GOTO', 'IF', 'IMAGES', 'IMPLICIT',
+ 'IMPORT', 'IMPURE', 'INCLUDE', 'INQUIRE', 'INTENT', 'INTERFACE',
+ 'INTRINSIC', 'IS', 'LOCK', 'MEMORY', 'MODULE', 'NAMELIST', 'NULLIFY',
+ 'NONE', 'NON_INTRINSIC', 'NON_OVERRIDABLE', 'NOPASS', 'OPEN', 'OPTIONAL',
+ 'OPTIONS', 'PARAMETER', 'PASS', 'PAUSE', 'POINTER', 'PRINT', 'PRIVATE',
+ 'PROGRAM', 'PROCEDURE', 'PROTECTED', 'PUBLIC', 'PURE', 'READ',
+ 'RECURSIVE', 'RESULT', 'RETURN', 'REWIND', 'SAVE', 'SELECT', 'SEQUENCE',
+ 'STOP', 'SUBMODULE', 'SUBROUTINE', 'SYNC', 'SYNCALL', 'SYNCIMAGES',
+ 'SYNCMEMORY', 'TARGET', 'THEN', 'TYPE', 'UNLOCK', 'USE', 'VALUE',
+ 'VOLATILE', 'WHERE', 'WRITE', 'WHILE'), prefix=r'\b', suffix=r'\s*\b'),
+ Keyword),
+
+ # Data Types
+ (words((
+ 'CHARACTER', 'COMPLEX', 'DOUBLE PRECISION', 'DOUBLE COMPLEX', 'INTEGER',
+ 'LOGICAL', 'REAL', 'C_INT', 'C_SHORT', 'C_LONG', 'C_LONG_LONG', 'C_SIGNED_CHAR',
+ 'C_SIZE_T', 'C_INT8_T', 'C_INT16_T', 'C_INT32_T', 'C_INT64_T', 'C_INT_LEAST8_T',
+ 'C_INT_LEAST16_T', 'C_INT_LEAST32_T', 'C_INT_LEAST64_T', 'C_INT_FAST8_T',
+ 'C_INT_FAST16_T', 'C_INT_FAST32_T', 'C_INT_FAST64_T', 'C_INTMAX_T',
+ 'C_INTPTR_T', 'C_FLOAT', 'C_DOUBLE', 'C_LONG_DOUBLE', 'C_FLOAT_COMPLEX',
+ 'C_DOUBLE_COMPLEX', 'C_LONG_DOUBLE_COMPLEX', 'C_BOOL', 'C_CHAR', 'C_PTR',
+ 'C_FUNPTR'), prefix=r'\b', suffix=r'\s*\b'),
+ Keyword.Type),
+
+ # Operators
+ (r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)', Operator),
+
+ (r'(::)', Keyword.Declaration),
+
+ (r'[()\[\],:&%;.]', Punctuation),
+ # Intrinsics
+ (words((
+ 'Abort', 'Abs', 'Access', 'AChar', 'ACos', 'ACosH', 'AdjustL',
+ 'AdjustR', 'AImag', 'AInt', 'Alarm', 'All', 'Allocated', 'ALog',
+ 'AMax', 'AMin', 'AMod', 'And', 'ANInt', 'Any', 'ASin', 'ASinH',
+ 'Associated', 'ATan', 'ATanH', 'Atomic_Define', 'Atomic_Ref',
+ 'BesJ', 'BesJN', 'Bessel_J0', 'Bessel_J1', 'Bessel_JN', 'Bessel_Y0',
+ 'Bessel_Y1', 'Bessel_YN', 'BesY', 'BesYN', 'BGE', 'BGT', 'BLE',
+ 'BLT', 'Bit_Size', 'BTest', 'CAbs', 'CCos', 'Ceiling', 'CExp',
+ 'Char', 'ChDir', 'ChMod', 'CLog', 'Cmplx', 'Command_Argument_Count',
+ 'Complex', 'Conjg', 'Cos', 'CosH', 'Count', 'CPU_Time', 'CShift',
+ 'CSin', 'CSqRt', 'CTime', 'C_Loc', 'C_Associated',
+ 'C_Null_Ptr', 'C_Null_Funptr', 'C_F_Pointer', 'C_F_ProcPointer',
+ 'C_Null_Char', 'C_Alert', 'C_Backspace', 'C_Form_Feed', 'C_FunLoc',
+ 'C_Sizeof', 'C_New_Line', 'C_Carriage_Return',
+ 'C_Horizontal_Tab', 'C_Vertical_Tab', 'DAbs', 'DACos', 'DASin',
+ 'DATan', 'Date_and_Time', 'DbesJ', 'DbesJN', 'DbesY',
+ 'DbesYN', 'Dble', 'DCos', 'DCosH', 'DDiM', 'DErF',
+ 'DErFC', 'DExp', 'Digits', 'DiM', 'DInt', 'DLog', 'DMax',
+ 'DMin', 'DMod', 'DNInt', 'Dot_Product', 'DProd', 'DSign', 'DSinH',
+ 'DShiftL', 'DShiftR', 'DSin', 'DSqRt', 'DTanH', 'DTan', 'DTime',
+ 'EOShift', 'Epsilon', 'ErF', 'ErFC', 'ErFC_Scaled', 'ETime',
+ 'Execute_Command_Line', 'Exit', 'Exp', 'Exponent', 'Extends_Type_Of',
+ 'FDate', 'FGet', 'FGetC', 'FindLoc', 'Float', 'Floor', 'Flush',
+ 'FNum', 'FPutC', 'FPut', 'Fraction', 'FSeek', 'FStat', 'FTell',
+ 'Gamma', 'GError', 'GetArg', 'Get_Command', 'Get_Command_Argument',
+ 'Get_Environment_Variable', 'GetCWD', 'GetEnv', 'GetGId', 'GetLog',
+ 'GetPId', 'GetUId', 'GMTime', 'HostNm', 'Huge', 'Hypot', 'IAbs',
+ 'IAChar', 'IAll', 'IAnd', 'IAny', 'IArgC', 'IBClr', 'IBits',
+ 'IBSet', 'IChar', 'IDate', 'IDiM', 'IDInt', 'IDNInt', 'IEOr',
+ 'IErrNo', 'IFix', 'Imag', 'ImagPart', 'Image_Index', 'Index',
+ 'Int', 'IOr', 'IParity', 'IRand', 'IsaTty', 'IShft', 'IShftC',
+ 'ISign', 'Iso_C_Binding', 'Is_Contiguous', 'Is_Iostat_End',
+ 'Is_Iostat_Eor', 'ITime', 'Kill', 'Kind', 'LBound', 'LCoBound',
+ 'Len', 'Len_Trim', 'LGe', 'LGt', 'Link', 'LLe', 'LLt', 'LnBlnk',
+ 'Loc', 'Log', 'Log_Gamma', 'Logical', 'Long', 'LShift', 'LStat',
+ 'LTime', 'MaskL', 'MaskR', 'MatMul', 'Max', 'MaxExponent',
+ 'MaxLoc', 'MaxVal', 'MClock', 'Merge', 'Merge_Bits', 'Move_Alloc',
+ 'Min', 'MinExponent', 'MinLoc', 'MinVal', 'Mod', 'Modulo', 'MvBits',
+ 'Nearest', 'New_Line', 'NInt', 'Norm2', 'Not', 'Null', 'Num_Images',
+ 'Or', 'Pack', 'Parity', 'PError', 'Precision', 'Present', 'Product',
+ 'Radix', 'Rand', 'Random_Number', 'Random_Seed', 'Range', 'Real',
+ 'RealPart', 'Rename', 'Repeat', 'Reshape', 'RRSpacing', 'RShift',
+ 'Same_Type_As', 'Scale', 'Scan', 'Second', 'Selected_Char_Kind',
+ 'Selected_Int_Kind', 'Selected_Real_Kind', 'Set_Exponent', 'Shape',
+ 'ShiftA', 'ShiftL', 'ShiftR', 'Short', 'Sign', 'Signal', 'SinH',
+ 'Sin', 'Sleep', 'Sngl', 'Spacing', 'Spread', 'SqRt', 'SRand',
+ 'Stat', 'Storage_Size', 'Sum', 'SymLnk', 'System', 'System_Clock',
+ 'Tan', 'TanH', 'Time', 'This_Image', 'Tiny', 'TrailZ', 'Transfer',
+ 'Transpose', 'Trim', 'TtyNam', 'UBound', 'UCoBound', 'UMask',
+ 'Unlink', 'Unpack', 'Verify', 'XOr', 'ZAbs', 'ZCos', 'ZExp',
+ 'ZLog', 'ZSin', 'ZSqRt'), prefix=r'\b', suffix=r'\s*\b'),
+ Name.Builtin),
+
+ # Booleans
+ (r'\.(true|false)\.', Name.Builtin),
+ # Comparison operators
+ (r'\.(eq|ne|lt|le|gt|ge|not|and|or|eqv|neqv)\.', Operator.Word),
+ ],
+
+ 'strings': [
+ (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ ],
+
+ 'nums': [
+ (r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer),
+ (r'[+-]?\d*\.\d+(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
+ (r'[+-]?\d+\.\d*(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
+ ],
+ }
+
+
+class FortranFixedLexer(RegexLexer):
+ """
+ Lexer for fixed-format Fortran.
+ """
+ name = 'FortranFixed'
+ aliases = ['fortranfixed']
+ filenames = ['*.f', '*.F']
+
+ flags = re.IGNORECASE
+
+ def _lex_fortran(self, match, ctx=None):
+ """Lex a line just as free form fortran without line break."""
+ lexer = FortranLexer()
+ text = match.group(0) + "\n"
+ for index, token, value in lexer.get_tokens_unprocessed(text):
+ value = value.replace('\n', '')
+ if value != '':
+ yield index, token, value
+
+ tokens = {
+ 'root': [
+ (r'[C*].*\n', Comment),
+ (r'#.*\n', Comment.Preproc),
+ (r' {0,4}!.*\n', Comment),
+ (r'(.{5})', Name.Label, 'cont-char'),
+ (r'.*\n', using(FortranLexer)),
+ ],
+
+ 'cont-char': [
+ (' ', Text, 'code'),
+ ('0', Comment, 'code'),
+ ('.', Generic.Strong, 'code')
+ ],
+
+ 'code': [
+ (r'(.{66})(.*)(\n)',
+ bygroups(_lex_fortran, Comment, Text), 'root'),
+ (r'(.*)(\n)', bygroups(_lex_fortran, Text), 'root'),
+ (r'', Text, 'root')]
+ }
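
As the rules above show, FortranFixedLexer treats the first five columns as the statement label field and column six as the continuation marker, then re-lexes the statement field as free-form Fortran via _lex_fortran (text past column 72 becomes a comment). A small sketch illustrating that column handling; the exact token stream depends on FortranLexer's rules.

    from pygments.lexers.fortran import FortranFixedLexer

    line = '  100 PRINT *, "HELLO"\n'
    for token, value in FortranFixedLexer().get_tokens(line):
        print(token, repr(value))
    # Roughly: '  100' comes out as Name.Label, the blank continuation column
    # as Text, and the PRINT statement as tokens produced by FortranLexer.
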
diff --git a/pygments/lexers/foxpro.py b/pygments/lexers/foxpro.py
index 51cd499b..c7f368c7 100644
--- a/pygments/lexers/foxpro.py
+++ b/pygments/lexers/foxpro.py
@@ -5,7 +5,7 @@
Simple lexer for Microsoft Visual FoxPro source code.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -24,11 +24,11 @@ class FoxProLexer(RegexLexer):
FoxPro syntax allows all keywords and function names to be shortened
to 4 characters. Shortened forms are not recognized by this lexer.
- *New in Pygments 1.6.*
+ .. versionadded:: 1.6
"""
name = 'FoxPro'
- aliases = ['Clipper', 'XBase']
+ aliases = ['foxpro', 'vfp', 'clipper', 'xbase']
filenames = ['*.PRG', '*.prg']
mimetype = []
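
With the lowercased alias list, the FoxPro lexer can be looked up under any of the new names through the public helper. A minimal sketch, assuming the generated alias mapping in pygments/lexers/_mapping.py is regenerated as part of this merge:

    from pygments.lexers import get_lexer_by_name

    lexer = get_lexer_by_name('vfp')   # 'foxpro', 'clipper' and 'xbase' work too
    print(lexer.name)                  # -> 'FoxPro'
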
diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py
index 770e6bd9..180d3fd4 100644
--- a/pygments/lexers/functional.py
+++ b/pygments/lexers/functional.py
@@ -3,2729 +3,19 @@
pygments.lexers.functional
~~~~~~~~~~~~~~~~~~~~~~~~~~
- Lexers for functional languages.
+ Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
+from pygments.lexers.lisp import SchemeLexer, CommonLispLexer, RacketLexer, \
+ NewLispLexer
+from pygments.lexers.haskell import HaskellLexer, LiterateHaskellLexer, \
+ KokaLexer
+from pygments.lexers.theorem import CoqLexer
+from pygments.lexers.erlang import ErlangLexer, ErlangShellLexer, \
+ ElixirConsoleLexer, ElixirLexer
+from pygments.lexers.ml import SMLLexer, OcamlLexer, OpaLexer
-from pygments.lexer import Lexer, RegexLexer, bygroups, include, do_insertions
-from pygments.token import Text, Comment, Operator, Keyword, Name, \
- String, Number, Punctuation, Literal, Generic, Error
-
-__all__ = ['RacketLexer', 'SchemeLexer', 'CommonLispLexer', 'HaskellLexer',
- 'AgdaLexer', 'LiterateHaskellLexer', 'LiterateAgdaLexer',
- 'SMLLexer', 'OcamlLexer', 'ErlangLexer', 'ErlangShellLexer',
- 'OpaLexer', 'CoqLexer', 'NewLispLexer', 'ElixirLexer',
- 'ElixirConsoleLexer', 'KokaLexer']
-
-
-line_re = re.compile('.*?\n')
-
-
-class RacketLexer(RegexLexer):
- """
- Lexer for `Racket <http://racket-lang.org/>`_ source code (formerly known as
- PLT Scheme).
-
- *New in Pygments 1.6.*
- """
-
- name = 'Racket'
- aliases = ['racket', 'rkt']
- filenames = ['*.rkt', '*.rktl']
- mimetypes = ['text/x-racket', 'application/x-racket']
-
- # From namespace-mapped-symbols
- keywords = [
- '#%app', '#%datum', '#%expression', '#%module-begin',
- '#%plain-app', '#%plain-lambda', '#%plain-module-begin',
- '#%provide', '#%require', '#%stratified-body', '#%top',
- '#%top-interaction', '#%variable-reference', '...', 'and', 'begin',
- 'begin-for-syntax', 'begin0', 'case', 'case-lambda', 'cond',
- 'datum->syntax-object', 'define', 'define-for-syntax',
- 'define-struct', 'define-syntax', 'define-syntax-rule',
- 'define-syntaxes', 'define-values', 'define-values-for-syntax',
- 'delay', 'do', 'expand-path', 'fluid-let', 'hash-table-copy',
- 'hash-table-count', 'hash-table-for-each', 'hash-table-get',
- 'hash-table-iterate-first', 'hash-table-iterate-key',
- 'hash-table-iterate-next', 'hash-table-iterate-value',
- 'hash-table-map', 'hash-table-put!', 'hash-table-remove!',
- 'hash-table?', 'if', 'lambda', 'let', 'let*', 'let*-values',
- 'let-struct', 'let-syntax', 'let-syntaxes', 'let-values', 'let/cc',
- 'let/ec', 'letrec', 'letrec-syntax', 'letrec-syntaxes',
- 'letrec-syntaxes+values', 'letrec-values', 'list-immutable',
- 'make-hash-table', 'make-immutable-hash-table', 'make-namespace',
- 'module', 'module-identifier=?', 'module-label-identifier=?',
- 'module-template-identifier=?', 'module-transformer-identifier=?',
- 'namespace-transformer-require', 'or', 'parameterize',
- 'parameterize*', 'parameterize-break', 'provide',
- 'provide-for-label', 'provide-for-syntax', 'quasiquote',
- 'quasisyntax', 'quasisyntax/loc', 'quote', 'quote-syntax',
- 'quote-syntax/prune', 'require', 'require-for-label',
- 'require-for-syntax', 'require-for-template', 'set!',
- 'set!-values', 'syntax', 'syntax-case', 'syntax-case*',
- 'syntax-id-rules', 'syntax-object->datum', 'syntax-rules',
- 'syntax/loc', 'time', 'transcript-off', 'transcript-on', 'unless',
- 'unquote', 'unquote-splicing', 'unsyntax', 'unsyntax-splicing',
- 'when', 'with-continuation-mark', 'with-handlers',
- 'with-handlers*', 'with-syntax', 'λ'
- ]
-
- # From namespace-mapped-symbols
- builtins = [
- '*', '+', '-', '/', '<', '<=', '=', '>', '>=',
- 'abort-current-continuation', 'abs', 'absolute-path?', 'acos',
- 'add1', 'alarm-evt', 'always-evt', 'andmap', 'angle', 'append',
- 'apply', 'arithmetic-shift', 'arity-at-least',
- 'arity-at-least-value', 'arity-at-least?', 'asin', 'assoc', 'assq',
- 'assv', 'atan', 'banner', 'bitwise-and', 'bitwise-bit-field',
- 'bitwise-bit-set?', 'bitwise-ior', 'bitwise-not', 'bitwise-xor',
- 'boolean?', 'bound-identifier=?', 'box', 'box-immutable', 'box?',
- 'break-enabled', 'break-thread', 'build-path',
- 'build-path/convention-type', 'byte-pregexp', 'byte-pregexp?',
- 'byte-ready?', 'byte-regexp', 'byte-regexp?', 'byte?', 'bytes',
- 'bytes->immutable-bytes', 'bytes->list', 'bytes->path',
- 'bytes->path-element', 'bytes->string/latin-1',
- 'bytes->string/locale', 'bytes->string/utf-8', 'bytes-append',
- 'bytes-close-converter', 'bytes-convert', 'bytes-convert-end',
- 'bytes-converter?', 'bytes-copy', 'bytes-copy!', 'bytes-fill!',
- 'bytes-length', 'bytes-open-converter', 'bytes-ref', 'bytes-set!',
- 'bytes-utf-8-index', 'bytes-utf-8-length', 'bytes-utf-8-ref',
- 'bytes<?', 'bytes=?', 'bytes>?', 'bytes?', 'caaaar', 'caaadr',
- 'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar', 'cadadr',
- 'cadar', 'caddar', 'cadddr', 'caddr', 'cadr',
- 'call-in-nested-thread', 'call-with-break-parameterization',
- 'call-with-composable-continuation',
- 'call-with-continuation-barrier', 'call-with-continuation-prompt',
- 'call-with-current-continuation', 'call-with-escape-continuation',
- 'call-with-exception-handler',
- 'call-with-immediate-continuation-mark', 'call-with-input-file',
- 'call-with-output-file', 'call-with-parameterization',
- 'call-with-semaphore', 'call-with-semaphore/enable-break',
- 'call-with-values', 'call/cc', 'call/ec', 'car', 'cdaaar',
- 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar', 'cddaar',
- 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr',
- 'ceiling', 'channel-get', 'channel-put', 'channel-put-evt',
- 'channel-try-get', 'channel?', 'chaperone-box', 'chaperone-evt',
- 'chaperone-hash', 'chaperone-of?', 'chaperone-procedure',
- 'chaperone-struct', 'chaperone-struct-type', 'chaperone-vector',
- 'chaperone?', 'char->integer', 'char-alphabetic?', 'char-blank?',
- 'char-ci<=?', 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?',
- 'char-downcase', 'char-foldcase', 'char-general-category',
- 'char-graphic?', 'char-iso-control?', 'char-lower-case?',
- 'char-numeric?', 'char-punctuation?', 'char-ready?',
- 'char-symbolic?', 'char-title-case?', 'char-titlecase',
- 'char-upcase', 'char-upper-case?', 'char-utf-8-length',
- 'char-whitespace?', 'char<=?', 'char<?', 'char=?', 'char>=?',
- 'char>?', 'char?', 'check-duplicate-identifier',
- 'checked-procedure-check-and-extract', 'choice-evt',
- 'cleanse-path', 'close-input-port', 'close-output-port',
- 'collect-garbage', 'collection-file-path', 'collection-path',
- 'compile', 'compile-allow-set!-undefined',
- 'compile-context-preservation-enabled',
- 'compile-enforce-module-constants', 'compile-syntax',
- 'compiled-expression?', 'compiled-module-expression?',
- 'complete-path?', 'complex?', 'cons',
- 'continuation-mark-set->context', 'continuation-mark-set->list',
- 'continuation-mark-set->list*', 'continuation-mark-set-first',
- 'continuation-mark-set?', 'continuation-marks',
- 'continuation-prompt-available?', 'continuation-prompt-tag?',
- 'continuation?', 'copy-file', 'cos',
- 'current-break-parameterization', 'current-code-inspector',
- 'current-command-line-arguments', 'current-compile',
- 'current-continuation-marks', 'current-custodian',
- 'current-directory', 'current-drive', 'current-error-port',
- 'current-eval', 'current-evt-pseudo-random-generator',
- 'current-gc-milliseconds', 'current-get-interaction-input-port',
- 'current-inexact-milliseconds', 'current-input-port',
- 'current-inspector', 'current-library-collection-paths',
- 'current-load', 'current-load-extension',
- 'current-load-relative-directory', 'current-load/use-compiled',
- 'current-locale', 'current-memory-use', 'current-milliseconds',
- 'current-module-declare-name', 'current-module-declare-source',
- 'current-module-name-resolver', 'current-namespace',
- 'current-output-port', 'current-parameterization',
- 'current-preserved-thread-cell-values', 'current-print',
- 'current-process-milliseconds', 'current-prompt-read',
- 'current-pseudo-random-generator', 'current-read-interaction',
- 'current-reader-guard', 'current-readtable', 'current-seconds',
- 'current-security-guard', 'current-subprocess-custodian-mode',
- 'current-thread', 'current-thread-group',
- 'current-thread-initial-stack-size',
- 'current-write-relative-directory', 'custodian-box-value',
- 'custodian-box?', 'custodian-limit-memory',
- 'custodian-managed-list', 'custodian-memory-accounting-available?',
- 'custodian-require-memory', 'custodian-shutdown-all', 'custodian?',
- 'custom-print-quotable-accessor', 'custom-print-quotable?',
- 'custom-write-accessor', 'custom-write?', 'date', 'date*',
- 'date*-nanosecond', 'date*-time-zone-name', 'date*?', 'date-day',
- 'date-dst?', 'date-hour', 'date-minute', 'date-month',
- 'date-second', 'date-time-zone-offset', 'date-week-day',
- 'date-year', 'date-year-day', 'date?', 'datum-intern-literal',
- 'default-continuation-prompt-tag', 'delete-directory',
- 'delete-file', 'denominator', 'directory-exists?',
- 'directory-list', 'display', 'displayln', 'dump-memory-stats',
- 'dynamic-require', 'dynamic-require-for-syntax', 'dynamic-wind',
- 'eof', 'eof-object?', 'ephemeron-value', 'ephemeron?', 'eprintf',
- 'eq-hash-code', 'eq?', 'equal-hash-code',
- 'equal-secondary-hash-code', 'equal?', 'equal?/recur',
- 'eqv-hash-code', 'eqv?', 'error', 'error-display-handler',
- 'error-escape-handler', 'error-print-context-length',
- 'error-print-source-location', 'error-print-width',
- 'error-value->string-handler', 'eval', 'eval-jit-enabled',
- 'eval-syntax', 'even?', 'evt?', 'exact->inexact', 'exact-integer?',
- 'exact-nonnegative-integer?', 'exact-positive-integer?', 'exact?',
- 'executable-yield-handler', 'exit', 'exit-handler', 'exn',
- 'exn-continuation-marks', 'exn-message', 'exn:break',
- 'exn:break-continuation', 'exn:break?', 'exn:fail',
- 'exn:fail:contract', 'exn:fail:contract:arity',
- 'exn:fail:contract:arity?', 'exn:fail:contract:continuation',
- 'exn:fail:contract:continuation?',
- 'exn:fail:contract:divide-by-zero',
- 'exn:fail:contract:divide-by-zero?',
- 'exn:fail:contract:non-fixnum-result',
- 'exn:fail:contract:non-fixnum-result?',
- 'exn:fail:contract:variable', 'exn:fail:contract:variable-id',
- 'exn:fail:contract:variable?', 'exn:fail:contract?',
- 'exn:fail:filesystem', 'exn:fail:filesystem:exists',
- 'exn:fail:filesystem:exists?', 'exn:fail:filesystem:version',
- 'exn:fail:filesystem:version?', 'exn:fail:filesystem?',
- 'exn:fail:network', 'exn:fail:network?', 'exn:fail:out-of-memory',
- 'exn:fail:out-of-memory?', 'exn:fail:read',
- 'exn:fail:read-srclocs', 'exn:fail:read:eof', 'exn:fail:read:eof?',
- 'exn:fail:read:non-char', 'exn:fail:read:non-char?',
- 'exn:fail:read?', 'exn:fail:syntax', 'exn:fail:syntax-exprs',
- 'exn:fail:syntax:unbound', 'exn:fail:syntax:unbound?',
- 'exn:fail:syntax?', 'exn:fail:unsupported',
- 'exn:fail:unsupported?', 'exn:fail:user', 'exn:fail:user?',
- 'exn:fail?', 'exn:srclocs-accessor', 'exn:srclocs?', 'exn?', 'exp',
- 'expand', 'expand-once', 'expand-syntax', 'expand-syntax-once',
- 'expand-syntax-to-top-form', 'expand-to-top-form',
- 'expand-user-path', 'expt', 'file-exists?',
- 'file-or-directory-identity', 'file-or-directory-modify-seconds',
- 'file-or-directory-permissions', 'file-position', 'file-size',
- 'file-stream-buffer-mode', 'file-stream-port?',
- 'filesystem-root-list', 'find-executable-path',
- 'find-library-collection-paths', 'find-system-path', 'fixnum?',
- 'floating-point-bytes->real', 'flonum?', 'floor', 'flush-output',
- 'for-each', 'force', 'format', 'fprintf', 'free-identifier=?',
- 'gcd', 'generate-temporaries', 'gensym', 'get-output-bytes',
- 'get-output-string', 'getenv', 'global-port-print-handler',
- 'guard-evt', 'handle-evt', 'handle-evt?', 'hash', 'hash-equal?',
- 'hash-eqv?', 'hash-has-key?', 'hash-placeholder?', 'hash-ref!',
- 'hasheq', 'hasheqv', 'identifier-binding',
- 'identifier-label-binding', 'identifier-prune-lexical-context',
- 'identifier-prune-to-source-module',
- 'identifier-remove-from-definition-context',
- 'identifier-template-binding', 'identifier-transformer-binding',
- 'identifier?', 'imag-part', 'immutable?', 'impersonate-box',
- 'impersonate-hash', 'impersonate-procedure', 'impersonate-struct',
- 'impersonate-vector', 'impersonator-of?',
- 'impersonator-prop:application-mark',
- 'impersonator-property-accessor-procedure?',
- 'impersonator-property?', 'impersonator?', 'inexact->exact',
- 'inexact-real?', 'inexact?', 'input-port?', 'inspector?',
- 'integer->char', 'integer->integer-bytes',
- 'integer-bytes->integer', 'integer-length', 'integer-sqrt',
- 'integer-sqrt/remainder', 'integer?',
- 'internal-definition-context-seal', 'internal-definition-context?',
- 'keyword->string', 'keyword<?', 'keyword?', 'kill-thread', 'lcm',
- 'length', 'liberal-define-context?', 'link-exists?', 'list',
- 'list*', 'list->bytes', 'list->string', 'list->vector', 'list-ref',
- 'list-tail', 'list?', 'load', 'load-extension',
- 'load-on-demand-enabled', 'load-relative',
- 'load-relative-extension', 'load/cd', 'load/use-compiled',
- 'local-expand', 'local-expand/capture-lifts',
- 'local-transformer-expand',
- 'local-transformer-expand/capture-lifts', 'locale-string-encoding',
- 'log', 'magnitude', 'make-arity-at-least', 'make-bytes',
- 'make-channel', 'make-continuation-prompt-tag', 'make-custodian',
- 'make-custodian-box', 'make-date', 'make-date*',
- 'make-derived-parameter', 'make-directory', 'make-ephemeron',
- 'make-exn', 'make-exn:break', 'make-exn:fail',
- 'make-exn:fail:contract', 'make-exn:fail:contract:arity',
- 'make-exn:fail:contract:continuation',
- 'make-exn:fail:contract:divide-by-zero',
- 'make-exn:fail:contract:non-fixnum-result',
- 'make-exn:fail:contract:variable', 'make-exn:fail:filesystem',
- 'make-exn:fail:filesystem:exists',
- 'make-exn:fail:filesystem:version', 'make-exn:fail:network',
- 'make-exn:fail:out-of-memory', 'make-exn:fail:read',
- 'make-exn:fail:read:eof', 'make-exn:fail:read:non-char',
- 'make-exn:fail:syntax', 'make-exn:fail:syntax:unbound',
- 'make-exn:fail:unsupported', 'make-exn:fail:user',
- 'make-file-or-directory-link', 'make-hash-placeholder',
- 'make-hasheq-placeholder', 'make-hasheqv',
- 'make-hasheqv-placeholder', 'make-immutable-hasheqv',
- 'make-impersonator-property', 'make-input-port', 'make-inspector',
- 'make-known-char-range-list', 'make-output-port', 'make-parameter',
- 'make-pipe', 'make-placeholder', 'make-polar',
- 'make-prefab-struct', 'make-pseudo-random-generator',
- 'make-reader-graph', 'make-readtable', 'make-rectangular',
- 'make-rename-transformer', 'make-resolved-module-path',
- 'make-security-guard', 'make-semaphore', 'make-set!-transformer',
- 'make-shared-bytes', 'make-sibling-inspector',
- 'make-special-comment', 'make-srcloc', 'make-string',
- 'make-struct-field-accessor', 'make-struct-field-mutator',
- 'make-struct-type', 'make-struct-type-property',
- 'make-syntax-delta-introducer', 'make-syntax-introducer',
- 'make-thread-cell', 'make-thread-group', 'make-vector',
- 'make-weak-box', 'make-weak-hasheqv', 'make-will-executor', 'map',
- 'max', 'mcar', 'mcdr', 'mcons', 'member', 'memq', 'memv', 'min',
- 'module->exports', 'module->imports', 'module->language-info',
- 'module->namespace', 'module-compiled-exports',
- 'module-compiled-imports', 'module-compiled-language-info',
- 'module-compiled-name', 'module-path-index-join',
- 'module-path-index-resolve', 'module-path-index-split',
- 'module-path-index?', 'module-path?', 'module-predefined?',
- 'module-provide-protected?', 'modulo', 'mpair?', 'nack-guard-evt',
- 'namespace-attach-module', 'namespace-attach-module-declaration',
- 'namespace-base-phase', 'namespace-mapped-symbols',
- 'namespace-module-identifier', 'namespace-module-registry',
- 'namespace-require', 'namespace-require/constant',
- 'namespace-require/copy', 'namespace-require/expansion-time',
- 'namespace-set-variable-value!', 'namespace-symbol->identifier',
- 'namespace-syntax-introduce', 'namespace-undefine-variable!',
- 'namespace-unprotect-module', 'namespace-variable-value',
- 'namespace?', 'negative?', 'never-evt', 'newline',
- 'normal-case-path', 'not', 'null', 'null?', 'number->string',
- 'number?', 'numerator', 'object-name', 'odd?', 'open-input-bytes',
- 'open-input-file', 'open-input-output-file', 'open-input-string',
- 'open-output-bytes', 'open-output-file', 'open-output-string',
- 'ormap', 'output-port?', 'pair?', 'parameter-procedure=?',
- 'parameter?', 'parameterization?', 'path->bytes',
- 'path->complete-path', 'path->directory-path', 'path->string',
- 'path-add-suffix', 'path-convention-type', 'path-element->bytes',
- 'path-element->string', 'path-for-some-system?',
- 'path-list-string->path-list', 'path-replace-suffix',
- 'path-string?', 'path?', 'peek-byte', 'peek-byte-or-special',
- 'peek-bytes', 'peek-bytes!', 'peek-bytes-avail!',
- 'peek-bytes-avail!*', 'peek-bytes-avail!/enable-break',
- 'peek-char', 'peek-char-or-special', 'peek-string', 'peek-string!',
- 'pipe-content-length', 'placeholder-get', 'placeholder-set!',
- 'placeholder?', 'poll-guard-evt', 'port-closed-evt',
- 'port-closed?', 'port-commit-peeked', 'port-count-lines!',
- 'port-count-lines-enabled', 'port-display-handler',
- 'port-file-identity', 'port-file-unlock', 'port-next-location',
- 'port-print-handler', 'port-progress-evt',
- 'port-provides-progress-evts?', 'port-read-handler',
- 'port-try-file-lock?', 'port-write-handler', 'port-writes-atomic?',
- 'port-writes-special?', 'port?', 'positive?',
- 'prefab-key->struct-type', 'prefab-struct-key', 'pregexp',
- 'pregexp?', 'primitive-closure?', 'primitive-result-arity',
- 'primitive?', 'print', 'print-as-expression',
- 'print-boolean-long-form', 'print-box', 'print-graph',
- 'print-hash-table', 'print-mpair-curly-braces',
- 'print-pair-curly-braces', 'print-reader-abbreviations',
- 'print-struct', 'print-syntax-width', 'print-unreadable',
- 'print-vector-length', 'printf', 'procedure->method',
- 'procedure-arity', 'procedure-arity-includes?', 'procedure-arity?',
- 'procedure-closure-contents-eq?', 'procedure-extract-target',
- 'procedure-reduce-arity', 'procedure-rename',
- 'procedure-struct-type?', 'procedure?', 'promise?',
- 'prop:arity-string', 'prop:checked-procedure',
- 'prop:custom-print-quotable', 'prop:custom-write',
- 'prop:equal+hash', 'prop:evt', 'prop:exn:srclocs',
- 'prop:impersonator-of', 'prop:input-port',
- 'prop:liberal-define-context', 'prop:output-port',
- 'prop:procedure', 'prop:rename-transformer',
- 'prop:set!-transformer', 'pseudo-random-generator->vector',
- 'pseudo-random-generator-vector?', 'pseudo-random-generator?',
- 'putenv', 'quotient', 'quotient/remainder', 'raise',
- 'raise-arity-error', 'raise-mismatch-error', 'raise-syntax-error',
- 'raise-type-error', 'raise-user-error', 'random', 'random-seed',
- 'rational?', 'rationalize', 'read', 'read-accept-bar-quote',
- 'read-accept-box', 'read-accept-compiled', 'read-accept-dot',
- 'read-accept-graph', 'read-accept-infix-dot', 'read-accept-lang',
- 'read-accept-quasiquote', 'read-accept-reader', 'read-byte',
- 'read-byte-or-special', 'read-bytes', 'read-bytes!',
- 'read-bytes-avail!', 'read-bytes-avail!*',
- 'read-bytes-avail!/enable-break', 'read-bytes-line',
- 'read-case-sensitive', 'read-char', 'read-char-or-special',
- 'read-curly-brace-as-paren', 'read-decimal-as-inexact',
- 'read-eval-print-loop', 'read-language', 'read-line',
- 'read-on-demand-source', 'read-square-bracket-as-paren',
- 'read-string', 'read-string!', 'read-syntax',
- 'read-syntax/recursive', 'read/recursive', 'readtable-mapping',
- 'readtable?', 'real->double-flonum', 'real->floating-point-bytes',
- 'real->single-flonum', 'real-part', 'real?', 'regexp',
- 'regexp-match', 'regexp-match-peek', 'regexp-match-peek-immediate',
- 'regexp-match-peek-positions',
- 'regexp-match-peek-positions-immediate',
- 'regexp-match-peek-positions-immediate/end',
- 'regexp-match-peek-positions/end', 'regexp-match-positions',
- 'regexp-match-positions/end', 'regexp-match/end', 'regexp-match?',
- 'regexp-max-lookbehind', 'regexp-replace', 'regexp-replace*',
- 'regexp?', 'relative-path?', 'remainder',
- 'rename-file-or-directory', 'rename-transformer-target',
- 'rename-transformer?', 'resolve-path', 'resolved-module-path-name',
- 'resolved-module-path?', 'reverse', 'round', 'seconds->date',
- 'security-guard?', 'semaphore-peek-evt', 'semaphore-post',
- 'semaphore-try-wait?', 'semaphore-wait',
- 'semaphore-wait/enable-break', 'semaphore?',
- 'set!-transformer-procedure', 'set!-transformer?', 'set-box!',
- 'set-mcar!', 'set-mcdr!', 'set-port-next-location!',
- 'shared-bytes', 'shell-execute', 'simplify-path', 'sin',
- 'single-flonum?', 'sleep', 'special-comment-value',
- 'special-comment?', 'split-path', 'sqrt', 'srcloc',
- 'srcloc-column', 'srcloc-line', 'srcloc-position', 'srcloc-source',
- 'srcloc-span', 'srcloc?', 'string', 'string->bytes/latin-1',
- 'string->bytes/locale', 'string->bytes/utf-8',
- 'string->immutable-string', 'string->keyword', 'string->list',
- 'string->number', 'string->path', 'string->path-element',
- 'string->symbol', 'string->uninterned-symbol',
- 'string->unreadable-symbol', 'string-append', 'string-ci<=?',
- 'string-ci<?', 'string-ci=?', 'string-ci>=?', 'string-ci>?',
- 'string-copy', 'string-copy!', 'string-downcase', 'string-fill!',
- 'string-foldcase', 'string-length', 'string-locale-ci<?',
- 'string-locale-ci=?', 'string-locale-ci>?',
- 'string-locale-downcase', 'string-locale-upcase',
- 'string-locale<?', 'string-locale=?', 'string-locale>?',
- 'string-normalize-nfc', 'string-normalize-nfd',
- 'string-normalize-nfkc', 'string-normalize-nfkd', 'string-ref',
- 'string-set!', 'string-titlecase', 'string-upcase',
- 'string-utf-8-length', 'string<=?', 'string<?', 'string=?',
- 'string>=?', 'string>?', 'string?', 'struct->vector',
- 'struct-accessor-procedure?', 'struct-constructor-procedure?',
- 'struct-info', 'struct-mutator-procedure?',
- 'struct-predicate-procedure?', 'struct-type-info',
- 'struct-type-make-constructor', 'struct-type-make-predicate',
- 'struct-type-property-accessor-procedure?',
- 'struct-type-property?', 'struct-type?', 'struct:arity-at-least',
- 'struct:date', 'struct:date*', 'struct:exn', 'struct:exn:break',
- 'struct:exn:fail', 'struct:exn:fail:contract',
- 'struct:exn:fail:contract:arity',
- 'struct:exn:fail:contract:continuation',
- 'struct:exn:fail:contract:divide-by-zero',
- 'struct:exn:fail:contract:non-fixnum-result',
- 'struct:exn:fail:contract:variable', 'struct:exn:fail:filesystem',
- 'struct:exn:fail:filesystem:exists',
- 'struct:exn:fail:filesystem:version', 'struct:exn:fail:network',
- 'struct:exn:fail:out-of-memory', 'struct:exn:fail:read',
- 'struct:exn:fail:read:eof', 'struct:exn:fail:read:non-char',
- 'struct:exn:fail:syntax', 'struct:exn:fail:syntax:unbound',
- 'struct:exn:fail:unsupported', 'struct:exn:fail:user',
- 'struct:srcloc', 'struct?', 'sub1', 'subbytes', 'subprocess',
- 'subprocess-group-enabled', 'subprocess-kill', 'subprocess-pid',
- 'subprocess-status', 'subprocess-wait', 'subprocess?', 'substring',
- 'symbol->string', 'symbol-interned?', 'symbol-unreadable?',
- 'symbol?', 'sync', 'sync/enable-break', 'sync/timeout',
- 'sync/timeout/enable-break', 'syntax->list', 'syntax-arm',
- 'syntax-column', 'syntax-disarm', 'syntax-e', 'syntax-line',
- 'syntax-local-bind-syntaxes', 'syntax-local-certifier',
- 'syntax-local-context', 'syntax-local-expand-expression',
- 'syntax-local-get-shadower', 'syntax-local-introduce',
- 'syntax-local-lift-context', 'syntax-local-lift-expression',
- 'syntax-local-lift-module-end-declaration',
- 'syntax-local-lift-provide', 'syntax-local-lift-require',
- 'syntax-local-lift-values-expression',
- 'syntax-local-make-definition-context',
- 'syntax-local-make-delta-introducer',
- 'syntax-local-module-defined-identifiers',
- 'syntax-local-module-exports',
- 'syntax-local-module-required-identifiers', 'syntax-local-name',
- 'syntax-local-phase-level',
- 'syntax-local-transforming-module-provides?', 'syntax-local-value',
- 'syntax-local-value/immediate', 'syntax-original?',
- 'syntax-position', 'syntax-property',
- 'syntax-property-symbol-keys', 'syntax-protect', 'syntax-rearm',
- 'syntax-recertify', 'syntax-shift-phase-level', 'syntax-source',
- 'syntax-source-module', 'syntax-span', 'syntax-taint',
- 'syntax-tainted?', 'syntax-track-origin',
- 'syntax-transforming-module-expression?', 'syntax-transforming?',
- 'syntax?', 'system-big-endian?', 'system-idle-evt',
- 'system-language+country', 'system-library-subpath',
- 'system-path-convention-type', 'system-type', 'tan',
- 'tcp-abandon-port', 'tcp-accept', 'tcp-accept-evt',
- 'tcp-accept-ready?', 'tcp-accept/enable-break', 'tcp-addresses',
- 'tcp-close', 'tcp-connect', 'tcp-connect/enable-break',
- 'tcp-listen', 'tcp-listener?', 'tcp-port?', 'terminal-port?',
- 'thread', 'thread-cell-ref', 'thread-cell-set!', 'thread-cell?',
- 'thread-dead-evt', 'thread-dead?', 'thread-group?',
- 'thread-resume', 'thread-resume-evt', 'thread-rewind-receive',
- 'thread-running?', 'thread-suspend', 'thread-suspend-evt',
- 'thread-wait', 'thread/suspend-to-kill', 'thread?', 'time-apply',
- 'truncate', 'udp-addresses', 'udp-bind!', 'udp-bound?',
- 'udp-close', 'udp-connect!', 'udp-connected?', 'udp-open-socket',
- 'udp-receive!', 'udp-receive!*', 'udp-receive!-evt',
- 'udp-receive!/enable-break', 'udp-receive-ready-evt', 'udp-send',
- 'udp-send*', 'udp-send-evt', 'udp-send-ready-evt', 'udp-send-to',
- 'udp-send-to*', 'udp-send-to-evt', 'udp-send-to/enable-break',
- 'udp-send/enable-break', 'udp?', 'unbox',
- 'uncaught-exception-handler', 'use-collection-link-paths',
- 'use-compiled-file-paths', 'use-user-specific-search-paths',
- 'values', 'variable-reference->empty-namespace',
- 'variable-reference->module-base-phase',
- 'variable-reference->module-declaration-inspector',
- 'variable-reference->module-source',
- 'variable-reference->namespace', 'variable-reference->phase',
- 'variable-reference->resolved-module-path',
- 'variable-reference-constant?', 'variable-reference?', 'vector',
- 'vector->immutable-vector', 'vector->list',
- 'vector->pseudo-random-generator',
- 'vector->pseudo-random-generator!', 'vector->values',
- 'vector-fill!', 'vector-immutable', 'vector-length', 'vector-ref',
- 'vector-set!', 'vector-set-performance-stats!', 'vector?',
- 'version', 'void', 'void?', 'weak-box-value', 'weak-box?',
- 'will-execute', 'will-executor?', 'will-register',
- 'will-try-execute', 'with-input-from-file', 'with-output-to-file',
- 'wrap-evt', 'write', 'write-byte', 'write-bytes',
- 'write-bytes-avail', 'write-bytes-avail*', 'write-bytes-avail-evt',
- 'write-bytes-avail/enable-break', 'write-char', 'write-special',
- 'write-special-avail*', 'write-special-evt', 'write-string', 'zero?'
- ]
-
- # From SchemeLexer
- valid_name = r'[a-zA-Z0-9!$%&*+,/:<=>?@^_~|-]+'
-
- tokens = {
- 'root' : [
- (r';.*$', Comment.Single),
- (r'#\|[^|]+\|#', Comment.Multiline),
-
- # whitespaces - usually not relevant
- (r'\s+', Text),
-
- ## numbers: Keep in mind Racket reader hash prefixes,
- ## which can denote the base or the type. These don't map
- ## neatly onto pygments token types; some judgment calls
- ## here. Note that none of these regexps attempt to
- ## exclude identifiers that start with a number, such as a
- ## variable named "100-Continue".
-
- # #b
- (r'#b[-+]?[01]+\.[01]+', Number.Float),
- (r'#b[01]+e[-+]?[01]+', Number.Float),
- (r'#b[-+]?[01]/[01]+', Number),
- (r'#b[-+]?[01]+', Number.Integer),
- (r'#b\S*', Error),
-
- # #d OR no hash prefix
- (r'(#d)?[-+]?\d+\.\d+', Number.Float),
- (r'(#d)?\d+e[-+]?\d+', Number.Float),
- (r'(#d)?[-+]?\d+/\d+', Number),
- (r'(#d)?[-+]?\d+', Number.Integer),
- (r'#d\S*', Error),
-
- # #e
- (r'#e[-+]?\d+\.\d+', Number.Float),
- (r'#e\d+e[-+]?\d+', Number.Float),
- (r'#e[-+]?\d+/\d+', Number),
- (r'#e[-+]?\d+', Number),
- (r'#e\S*', Error),
-
- # #i is always inexact-real, i.e. float
- (r'#i[-+]?\d+\.\d+', Number.Float),
- (r'#i\d+e[-+]?\d+', Number.Float),
- (r'#i[-+]?\d+/\d+', Number.Float),
- (r'#i[-+]?\d+', Number.Float),
- (r'#i\S*', Error),
-
- # #o
- (r'#o[-+]?[0-7]+\.[0-7]+', Number.Oct),
- (r'#o[0-7]+e[-+]?[0-7]+', Number.Oct),
- (r'#o[-+]?[0-7]+/[0-7]+', Number.Oct),
- (r'#o[-+]?[0-7]+', Number.Oct),
- (r'#o\S*', Error),
-
- # #x
- (r'#x[-+]?[0-9a-fA-F]+\.[0-9a-fA-F]+', Number.Hex),
- # the exponent variation (e.g. #x1e1) is N/A
- (r'#x[-+]?[0-9a-fA-F]+/[0-9a-fA-F]+', Number.Hex),
- (r'#x[-+]?[0-9a-fA-F]+', Number.Hex),
- (r'#x\S*', Error),
-
-
- # strings, symbols and characters
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'" + valid_name, String.Symbol),
- (r"#\\([()/'\"._!§$%& ?=+-]{1}|[a-zA-Z0-9]+)", String.Char),
- (r'#rx".+"', String.Regex),
- (r'#px".+"', String.Regex),
-
- # constants
- (r'(#t|#f)', Name.Constant),
-
- # keyword argument names (e.g. #:keyword)
- (r'#:\S+', Keyword.Declaration),
-
- # #lang
- (r'#lang \S+', Keyword.Namespace),
-
- # special operators
- (r"('|#|`|,@|,|\.)", Operator),
-
- # highlight the keywords
- ('(%s)' % '|'.join([
- re.escape(entry) + ' ' for entry in keywords]),
- Keyword
- ),
-
- # first variable in a quoted string like
- # '(this is syntactic sugar)
- (r"(?<='\()" + valid_name, Name.Variable),
- (r"(?<=#\()" + valid_name, Name.Variable),
-
- # highlight the builtins
- ("(?<=\()(%s)" % '|'.join([
- re.escape(entry) + ' ' for entry in builtins]),
- Name.Builtin
- ),
-
- # the remaining functions; handle both ( and [
- (r'(?<=(\(|\[|\{))' + valid_name, Name.Function),
-
- # find the remaining variables
- (valid_name, Name.Variable),
-
- # the famous parentheses!
- (r'(\(|\)|\[|\]|\{|\})', Punctuation),
- ],
- }
-
-
-class SchemeLexer(RegexLexer):
- """
- A Scheme lexer, parsing a stream and outputting the tokens
- needed to highlight scheme code.
- This lexer could be most probably easily subclassed to parse
- other LISP-Dialects like Common Lisp, Emacs Lisp or AutoLisp.
-
- This parser is checked with pastes from the LISP pastebin
- at http://paste.lisp.org/ to cover as much syntax as possible.
-
- It supports the full Scheme syntax as defined in R5RS.
-
- *New in Pygments 0.6.*
- """
- name = 'Scheme'
- aliases = ['scheme', 'scm']
- filenames = ['*.scm', '*.ss']
- mimetypes = ['text/x-scheme', 'application/x-scheme']
-
- # list of known keywords and builtins taken form vim 6.4 scheme.vim
- # syntax file.
- keywords = [
- 'lambda', 'define', 'if', 'else', 'cond', 'and', 'or', 'case', 'let',
- 'let*', 'letrec', 'begin', 'do', 'delay', 'set!', '=>', 'quote',
- 'quasiquote', 'unquote', 'unquote-splicing', 'define-syntax',
- 'let-syntax', 'letrec-syntax', 'syntax-rules'
- ]
- builtins = [
- '*', '+', '-', '/', '<', '<=', '=', '>', '>=', 'abs', 'acos', 'angle',
- 'append', 'apply', 'asin', 'assoc', 'assq', 'assv', 'atan',
- 'boolean?', 'caaaar', 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr',
- 'caar', 'cadaar', 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr',
- 'cadr', 'call-with-current-continuation', 'call-with-input-file',
- 'call-with-output-file', 'call-with-values', 'call/cc', 'car',
- 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
- 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr',
- 'cdr', 'ceiling', 'char->integer', 'char-alphabetic?', 'char-ci<=?',
- 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?', 'char-downcase',
- 'char-lower-case?', 'char-numeric?', 'char-ready?', 'char-upcase',
- 'char-upper-case?', 'char-whitespace?', 'char<=?', 'char<?', 'char=?',
- 'char>=?', 'char>?', 'char?', 'close-input-port', 'close-output-port',
- 'complex?', 'cons', 'cos', 'current-input-port', 'current-output-port',
- 'denominator', 'display', 'dynamic-wind', 'eof-object?', 'eq?',
- 'equal?', 'eqv?', 'eval', 'even?', 'exact->inexact', 'exact?', 'exp',
- 'expt', 'floor', 'for-each', 'force', 'gcd', 'imag-part',
- 'inexact->exact', 'inexact?', 'input-port?', 'integer->char',
- 'integer?', 'interaction-environment', 'lcm', 'length', 'list',
- 'list->string', 'list->vector', 'list-ref', 'list-tail', 'list?',
- 'load', 'log', 'magnitude', 'make-polar', 'make-rectangular',
- 'make-string', 'make-vector', 'map', 'max', 'member', 'memq', 'memv',
- 'min', 'modulo', 'negative?', 'newline', 'not', 'null-environment',
- 'null?', 'number->string', 'number?', 'numerator', 'odd?',
- 'open-input-file', 'open-output-file', 'output-port?', 'pair?',
- 'peek-char', 'port?', 'positive?', 'procedure?', 'quotient',
- 'rational?', 'rationalize', 'read', 'read-char', 'real-part', 'real?',
- 'remainder', 'reverse', 'round', 'scheme-report-environment',
- 'set-car!', 'set-cdr!', 'sin', 'sqrt', 'string', 'string->list',
- 'string->number', 'string->symbol', 'string-append', 'string-ci<=?',
- 'string-ci<?', 'string-ci=?', 'string-ci>=?', 'string-ci>?',
- 'string-copy', 'string-fill!', 'string-length', 'string-ref',
- 'string-set!', 'string<=?', 'string<?', 'string=?', 'string>=?',
- 'string>?', 'string?', 'substring', 'symbol->string', 'symbol?',
- 'tan', 'transcript-off', 'transcript-on', 'truncate', 'values',
- 'vector', 'vector->list', 'vector-fill!', 'vector-length',
- 'vector-ref', 'vector-set!', 'vector?', 'with-input-from-file',
- 'with-output-to-file', 'write', 'write-char', 'zero?'
- ]
-
- # valid names for identifiers
- # well, names can only not consist fully of numbers
- # but this should be good enough for now
- valid_name = r'[a-zA-Z0-9!$%&*+,/:<=>?@^_~|-]+'
-
- tokens = {
- 'root' : [
- # the comments - always starting with semicolon
- # and going to the end of the line
- (r';.*$', Comment.Single),
-
- # whitespaces - usually not relevant
- (r'\s+', Text),
-
- # numbers
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
- # support for uncommon kinds of numbers -
- # have to figure out what the characters mean
- #(r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
-
- # strings, symbols and characters
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'" + valid_name, String.Symbol),
- (r"#\\([()/'\"._!§$%& ?=+-]{1}|[a-zA-Z0-9]+)", String.Char),
-
- # constants
- (r'(#t|#f)', Name.Constant),
-
- # special operators
- (r"('|#|`|,@|,|\.)", Operator),
-
- # highlight the keywords
- ('(%s)' % '|'.join([
- re.escape(entry) + ' ' for entry in keywords]),
- Keyword
- ),
-
- # first variable in a quoted string like
- # '(this is syntactic sugar)
- (r"(?<='\()" + valid_name, Name.Variable),
- (r"(?<=#\()" + valid_name, Name.Variable),
-
- # highlight the builtins
- ("(?<=\()(%s)" % '|'.join([
- re.escape(entry) + ' ' for entry in builtins]),
- Name.Builtin
- ),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Function),
- # find the remaining variables
- (valid_name, Name.Variable),
-
- # the famous parentheses!
- (r'(\(|\))', Punctuation),
- (r'(\[|\])', Punctuation),
- ],
- }
-
-
-class CommonLispLexer(RegexLexer):
- """
- A Common Lisp lexer.
-
- *New in Pygments 0.9.*
- """
- name = 'Common Lisp'
- aliases = ['common-lisp', 'cl', 'lisp']
- filenames = ['*.cl', '*.lisp', '*.el'] # use for Elisp too
- mimetypes = ['text/x-common-lisp']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- ### couple of useful regexes
-
- # characters that are not macro-characters and can be used to begin a symbol
- nonmacro = r'\\.|[a-zA-Z0-9!$%&*+-/<=>?@\[\]^_{}~]'
- constituent = nonmacro + '|[#.:]'
- terminated = r'(?=[ "()\'\n,;`])' # whitespace or terminating macro characters
-
- ### symbol token, reverse-engineered from hyperspec
- # Take a deep breath...
- symbol = r'(\|[^|]+\||(?:%s)(?:%s)*)' % (nonmacro, constituent)
-
- def __init__(self, **options):
- from pygments.lexers._clbuiltins import BUILTIN_FUNCTIONS, \
- SPECIAL_FORMS, MACROS, LAMBDA_LIST_KEYWORDS, DECLARATIONS, \
- BUILTIN_TYPES, BUILTIN_CLASSES
- self.builtin_function = BUILTIN_FUNCTIONS
- self.special_forms = SPECIAL_FORMS
- self.macros = MACROS
- self.lambda_list_keywords = LAMBDA_LIST_KEYWORDS
- self.declarations = DECLARATIONS
- self.builtin_types = BUILTIN_TYPES
- self.builtin_classes = BUILTIN_CLASSES
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name.Variable:
- if value in self.builtin_function:
- yield index, Name.Builtin, value
- continue
- if value in self.special_forms:
- yield index, Keyword, value
- continue
- if value in self.macros:
- yield index, Name.Builtin, value
- continue
- if value in self.lambda_list_keywords:
- yield index, Keyword, value
- continue
- if value in self.declarations:
- yield index, Keyword, value
- continue
- if value in self.builtin_types:
- yield index, Keyword.Type, value
- continue
- if value in self.builtin_classes:
- yield index, Name.Class, value
- continue
- yield index, token, value
-
- tokens = {
- 'root' : [
- ('', Text, 'body'),
- ],
- 'multiline-comment' : [
- (r'#\|', Comment.Multiline, '#push'), # (cf. Hyperspec 2.4.8.19)
- (r'\|#', Comment.Multiline, '#pop'),
- (r'[^|#]+', Comment.Multiline),
- (r'[|#]', Comment.Multiline),
- ],
- 'commented-form' : [
- (r'\(', Comment.Preproc, '#push'),
- (r'\)', Comment.Preproc, '#pop'),
- (r'[^()]+', Comment.Preproc),
- ],
- 'body' : [
- # whitespace
- (r'\s+', Text),
-
- # single-line comment
- (r';.*$', Comment.Single),
-
- # multi-line comment
- (r'#\|', Comment.Multiline, 'multiline-comment'),
-
- # encoding comment (?)
- (r'#\d*Y.*$', Comment.Special),
-
- # strings and characters
- (r'"(\\.|\\\n|[^"\\])*"', String),
- # quoting
- (r":" + symbol, String.Symbol),
- (r"::" + symbol, String.Symbol),
- (r":#" + symbol, String.Symbol),
- (r"'" + symbol, String.Symbol),
- (r"'", Operator),
- (r"`", Operator),
-
- # decimal numbers
- (r'[-+]?\d+\.?' + terminated, Number.Integer),
- (r'[-+]?\d+/\d+' + terminated, Number),
- (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' \
- + terminated, Number.Float),
-
- # sharpsign strings and characters
- (r"#\\." + terminated, String.Char),
- (r"#\\" + symbol, String.Char),
-
- # vector
- (r'#\(', Operator, 'body'),
-
- # bitstring
- (r'#\d*\*[01]*', Literal.Other),
-
- # uninterned symbol
- (r'#:' + symbol, String.Symbol),
-
- # read-time and load-time evaluation
- (r'#[.,]', Operator),
-
- # function shorthand
- (r'#\'', Name.Function),
-
- # binary rational
- (r'#[bB][+-]?[01]+(/[01]+)?', Number),
-
- # octal rational
- (r'#[oO][+-]?[0-7]+(/[0-7]+)?', Number.Oct),
-
- # hex rational
- (r'#[xX][+-]?[0-9a-fA-F]+(/[0-9a-fA-F]+)?', Number.Hex),
-
- # radix rational
- (r'#\d+[rR][+-]?[0-9a-zA-Z]+(/[0-9a-zA-Z]+)?', Number),
-
- # complex
- (r'(#[cC])(\()', bygroups(Number, Punctuation), 'body'),
-
- # array
- (r'(#\d+[aA])(\()', bygroups(Literal.Other, Punctuation), 'body'),
-
- # structure
- (r'(#[sS])(\()', bygroups(Literal.Other, Punctuation), 'body'),
-
- # path
- (r'#[pP]?"(\\.|[^"])*"', Literal.Other),
-
- # reference
- (r'#\d+=', Operator),
- (r'#\d+#', Operator),
-
- # read-time comment
- (r'#+nil' + terminated + '\s*\(', Comment.Preproc, 'commented-form'),
-
- # read-time conditional
- (r'#[+-]', Operator),
-
- # special operators that should have been parsed already
- (r'(,@|,|\.)', Operator),
-
- # special constants
- (r'(t|nil)' + terminated, Name.Constant),
-
- # functions and variables
- (r'\*' + symbol + '\*', Name.Variable.Global),
- (symbol, Name.Variable),
-
- # parentheses
- (r'\(', Punctuation, 'body'),
- (r'\)', Punctuation, '#pop'),
- ],
- }
-
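# The get_tokens_unprocessed() override above is a recurring Pygments pattern:
# let RegexLexer emit generic Name tokens, then remap them against builtin
# tables in a post-processing pass.  A minimal sketch of the same idea
# (TinyRemapLexer and its SPECIAL_FORMS tuple are invented for illustration;
# only RegexLexer and the token types are real Pygments API):
from pygments.lexer import RegexLexer
from pygments.token import Keyword, Name, Text

class TinyRemapLexer(RegexLexer):
    name = 'TinyRemap'
    tokens = {'root': [(r'[a-z]+', Name.Variable), (r'\s+', Text)]}
    SPECIAL_FORMS = ('let', 'setq')   # assumed lookup table

    def get_tokens_unprocessed(self, text):
        for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
            if token is Name.Variable and value in self.SPECIAL_FORMS:
                yield index, Keyword, value      # reclassify known names
            else:
                yield index, token, value

# e.g. list(TinyRemapLexer().get_tokens('let x')) tags 'let' as Keyword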
-
-class HaskellLexer(RegexLexer):
- """
- A Haskell lexer based on the lexemes defined in the Haskell 98 Report.
-
- *New in Pygments 0.8.*
- """
- name = 'Haskell'
- aliases = ['haskell', 'hs']
- filenames = ['*.hs']
- mimetypes = ['text/x-haskell']
-
- reserved = ['case','class','data','default','deriving','do','else',
- 'if','in','infix[lr]?','instance',
- 'let','newtype','of','then','type','where','_']
- ascii = ['NUL','SOH','[SE]TX','EOT','ENQ','ACK',
- 'BEL','BS','HT','LF','VT','FF','CR','S[OI]','DLE',
- 'DC[1-4]','NAK','SYN','ETB','CAN',
- 'EM','SUB','ESC','[FGRU]S','SP','DEL']
-
- tokens = {
- 'root': [
- # Whitespace:
- (r'\s+', Text),
- #(r'--\s*|.*$', Comment.Doc),
- (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single),
- (r'{-', Comment.Multiline, 'comment'),
- # Lexemes:
- # Identifiers
- (r'\bimport\b', Keyword.Reserved, 'import'),
- (r'\bmodule\b', Keyword.Reserved, 'module'),
- (r'\berror\b', Name.Exception),
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r'^[_a-z][\w\']*', Name.Function),
- (r"'?[_a-z][\w']*", Name),
- (r"('')?[A-Z][\w\']*", Keyword.Type),
- # Operators
- (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
- (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
- (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
- # Numbers
- (r'\d+[eE][+-]?\d+', Number.Float),
- (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- # Character/String Literals
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- # Special
- (r'\[\]', Keyword.Type),
- (r'\(\)', Name.Builtin),
- (r'[][(),;`{}]', Punctuation),
- ],
- 'import': [
- # Import statements
- (r'\s+', Text),
- (r'"', String, 'string'),
- # after "funclist" state
- (r'\)', Punctuation, '#pop'),
- (r'qualified\b', Keyword),
- # import X as Y
- (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(as)(\s+)([A-Z][a-zA-Z0-9_.]*)',
- bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'),
- # import X hiding (functions)
- (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(hiding)(\s+)(\()',
- bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'),
- # import X (functions)
- (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(\()',
- bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
- # import X
- (r'[a-zA-Z0-9_.]+', Name.Namespace, '#pop'),
- ],
- 'module': [
- (r'\s+', Text),
- (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(\()',
- bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
- (r'[A-Z][a-zA-Z0-9_.]*', Name.Namespace, '#pop'),
- ],
- 'funclist': [
- (r'\s+', Text),
- (r'[A-Z][a-zA-Z0-9_]*', Keyword.Type),
- (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
- (r'--.*$', Comment.Single),
- (r'{-', Comment.Multiline, 'comment'),
- (r',', Punctuation),
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
- # (HACK, but it makes sense to push two instances, believe me)
- (r'\(', Punctuation, ('funclist', 'funclist')),
- (r'\)', Punctuation, '#pop:2'),
- ],
- # NOTE: the next four states are shared in the AgdaLexer; make sure
- # any change is compatible with Agda as well or copy over and change
- 'comment': [
- # Multiline Comments
- (r'[^-{}]+', Comment.Multiline),
- (r'{-', Comment.Multiline, '#push'),
- (r'-}', Comment.Multiline, '#pop'),
- (r'[-{}]', Comment.Multiline),
- ],
- 'character': [
- # Allows multi-chars, incorrectly.
- (r"[^\\']", String.Char),
- (r"\\", String.Escape, 'escape'),
- ("'", String.Char, '#pop'),
- ],
- 'string': [
- (r'[^\\"]+', String),
- (r"\\", String.Escape, 'escape'),
- ('"', String, '#pop'),
- ],
- 'escape': [
- (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
- (r'\^[][A-Z@\^_]', String.Escape, '#pop'),
- ('|'.join(ascii), String.Escape, '#pop'),
- (r'o[0-7]+', String.Escape, '#pop'),
- (r'x[\da-fA-F]+', String.Escape, '#pop'),
- (r'\d+', String.Escape, '#pop'),
- (r'\s+\\', String.Escape, '#pop'),
- ],
- }
-
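# The 'funclist' rules above lean on two stack features worth noting: a rule
# may push a tuple of states at once, and '#pop:n' pops several levels in one
# step.  A reduced sketch of that mechanism (ParenDepthLexer is invented for
# illustration, not part of this module):
from pygments.lexer import RegexLexer
from pygments.token import Punctuation, Text

class ParenDepthLexer(RegexLexer):
    name = 'ParenDepth'
    tokens = {
        'root': [
            (r'\(', Punctuation, ('group', 'group')),  # push the state twice
            (r'[^()]+', Text),
        ],
        'group': [
            (r'\(', Punctuation, ('group', 'group')),  # nested group: two more
            (r'\)', Punctuation, '#pop:2'),            # unwind both at once
            (r'[^()]+', Text),
        ],
    }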
-
-class AgdaLexer(RegexLexer):
- """
- For the `Agda <http://wiki.portal.chalmers.se/agda/pmwiki.php>`_
- dependently typed functional programming language and proof assistant.
-
- *New in Pygments 1.7.*
- """
-
- name = 'Agda'
- aliases = ['agda']
- filenames = ['*.agda']
- mimetypes = ['text/x-agda']
-
- reserved = ['abstract', 'codata', 'coinductive', 'constructor', 'data',
- 'field', 'forall', 'hiding', 'in', 'inductive', 'infix',
- 'infixl', 'infixr', 'let', 'open', 'pattern', 'primitive',
- 'private', 'mutual', 'quote', 'quoteGoal', 'quoteTerm',
- 'record', 'syntax', 'rewrite', 'unquote', 'using', 'where',
- 'with']
-
- tokens = {
- 'root': [
- # Declaration
- (r'^(\s*)([^\s\(\)\{\}]+)(\s*)(:)(\s*)',
- bygroups(Text, Name.Function, Text, Operator.Word, Text)),
- # Comments
- (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single),
- (r'{-', Comment.Multiline, 'comment'),
- # Holes
- (r'{!', Comment.Directive, 'hole'),
- # Lexemes:
- # Identifiers
- (ur'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'),
- (r'\b(Set|Prop)\b', Keyword.Type),
- # Special Symbols
- (r'(\(|\)|\{|\})', Operator),
- (ur'(\.{1,3}|\||[\u039B]|[\u2200]|[\u2192]|:|=|->)', Operator.Word),
- # Numbers
- (r'\d+[eE][+-]?\d+', Number.Float),
- (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- # Strings
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- (r'[^\s\(\)\{\}]+', Text),
- (r'\s+?', Text), # Whitespace
- ],
- 'hole': [
- # Holes
- (r'[^!{}]+', Comment.Directive),
- (r'{!', Comment.Directive, '#push'),
- (r'!}', Comment.Directive, '#pop'),
- (r'[!{}]', Comment.Directive),
- ],
- 'module': [
- (r'{-', Comment.Multiline, 'comment'),
- (r'[a-zA-Z][a-zA-Z0-9_.]*', Name, '#pop'),
- (r'[^a-zA-Z]*', Text)
- ],
- 'comment': HaskellLexer.tokens['comment'],
- 'character': HaskellLexer.tokens['character'],
- 'string': HaskellLexer.tokens['string'],
- 'escape': HaskellLexer.tokens['escape']
- }
-
-
-class LiterateLexer(Lexer):
- """
- Base class for lexers of literate file formats based on LaTeX or Bird-style
- (prefixing each code line with ">").
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
- """
-
- bird_re = re.compile(r'(>[ \t]*)(.*\n)')
-
- def __init__(self, baselexer, **options):
- self.baselexer = baselexer
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- style = self.options.get('litstyle')
- if style is None:
- style = (text.lstrip()[0:1] in '%\\') and 'latex' or 'bird'
-
- code = ''
- insertions = []
- if style == 'bird':
- # bird-style
- for match in line_re.finditer(text):
- line = match.group()
- m = self.bird_re.match(line)
- if m:
- insertions.append((len(code),
- [(0, Comment.Special, m.group(1))]))
- code += m.group(2)
- else:
- insertions.append((len(code), [(0, Text, line)]))
- else:
- # latex-style
- from pygments.lexers.text import TexLexer
- lxlexer = TexLexer(**self.options)
- codelines = 0
- latex = ''
- for match in line_re.finditer(text):
- line = match.group()
- if codelines:
- if line.lstrip().startswith('\\end{code}'):
- codelines = 0
- latex += line
- else:
- code += line
- elif line.lstrip().startswith('\\begin{code}'):
- codelines = 1
- latex += line
- insertions.append((len(code),
- list(lxlexer.get_tokens_unprocessed(latex))))
- latex = ''
- else:
- latex += line
- insertions.append((len(code),
- list(lxlexer.get_tokens_unprocessed(latex))))
- for item in do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code)):
- yield item
-
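# LiterateLexer above records (offset, token-list) pairs and relies on
# pygments.lexer.do_insertions() to weave the prose tokens back into the code
# lexer's stream.  Stripped to its core, the helper can be driven like this
# (the two-character "> " marker handling is a simplified stand-in for the
# real bird_re logic):
from pygments.lexer import do_insertions
from pygments.lexers import PythonLexer
from pygments.token import Comment

literate = "> x = 1\n> y = x + 1\n"
code = ''
insertions = []
for line in literate.splitlines(True):
    insertions.append((len(code), [(0, Comment.Special, line[:2])]))
    code += line[2:]                      # keep only the code part

for index, token, value in do_insertions(
        insertions, PythonLexer().get_tokens_unprocessed(code)):
    print(index, token, value)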
-
-class LiterateHaskellLexer(LiterateLexer):
- """
- For Literate Haskell (Bird-style or LaTeX) source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- *New in Pygments 0.9.*
- """
- name = 'Literate Haskell'
- aliases = ['lhs', 'literate-haskell', 'lhaskell']
- filenames = ['*.lhs']
- mimetypes = ['text/x-literate-haskell']
-
- def __init__(self, **options):
- hslexer = HaskellLexer(**options)
- LiterateLexer.__init__(self, hslexer, **options)
-
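# The litstyle option documented above travels through the normal lexer
# options mechanism, so it can also be given when looking the lexer up by one
# of its aliases; a short usage sketch (formatter choice is arbitrary):
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name('lhs', litstyle='bird')
print(highlight('> main = putStrLn "hi"\n', lexer, TerminalFormatter()))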
-
-class LiterateAgdaLexer(LiterateLexer):
- """
- For Literate Agda source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- *New in Pygments 1.7.*
- """
- name = 'Literate Agda'
- aliases = ['lagda', 'literate-agda']
- filenames = ['*.lagda']
- mimetypes = ['text/x-literate-agda']
-
- def __init__(self, **options):
- agdalexer = AgdaLexer(**options)
- LiterateLexer.__init__(self, agdalexer, litstyle='latex', **options)
-
-
-class SMLLexer(RegexLexer):
- """
- For the Standard ML language.
-
- *New in Pygments 1.5.*
- """
-
- name = 'Standard ML'
- aliases = ['sml']
- filenames = ['*.sml', '*.sig', '*.fun',]
- mimetypes = ['text/x-standardml', 'application/x-standardml']
-
- alphanumid_reserved = [
- # Core
- 'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else',
- 'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix',
- 'infixr', 'let', 'local', 'nonfix', 'of', 'op', 'open', 'orelse',
- 'raise', 'rec', 'then', 'type', 'val', 'with', 'withtype', 'while',
- # Modules
- 'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature',
- 'struct', 'structure', 'where',
- ]
-
- symbolicid_reserved = [
- # Core
- ':', '\|', '=', '=>', '->', '#',
- # Modules
- ':>',
- ]
-
- nonid_reserved = [ '(', ')', '[', ']', '{', '}', ',', ';', '...', '_' ]
-
- alphanumid_re = r"[a-zA-Z][a-zA-Z0-9_']*"
- symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+"
-
- # A character constant is a sequence of the form #s, where s is a string
- # constant denoting a string of size one character. This setup just parses
- # the entire string as either a String.Double or a String.Char (depending
- # on the argument), even if the String.Char is an erroneous
- # multiple-character string.
- def stringy (whatkind):
- return [
- (r'[^"\\]', whatkind),
- (r'\\[\\\"abtnvfr]', String.Escape),
- # Control-character notation is used for codes < 32,
- # where \^@ == \000
- (r'\\\^[\x40-\x5e]', String.Escape),
- # Docs say 'decimal digits'
- (r'\\[0-9]{3}', String.Escape),
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\\s+\\', String.Interpol),
- (r'"', whatkind, '#pop'),
- ]
-
- # Callbacks for distinguishing tokens and reserved words
- def long_id_callback(self, match):
- if match.group(1) in self.alphanumid_reserved: token = Error
- else: token = Name.Namespace
- yield match.start(1), token, match.group(1)
- yield match.start(2), Punctuation, match.group(2)
-
- def end_id_callback(self, match):
- if match.group(1) in self.alphanumid_reserved: token = Error
- elif match.group(1) in self.symbolicid_reserved: token = Error
- else: token = Name
- yield match.start(1), token, match.group(1)
-
- def id_callback(self, match):
- str = match.group(1)
- if str in self.alphanumid_reserved: token = Keyword.Reserved
- elif str in self.symbolicid_reserved: token = Punctuation
- else: token = Name
- yield match.start(1), token, str
-
- tokens = {
- # Whitespace and comments are (almost) everywhere
- 'whitespace': [
- (r'\s+', Text),
- (r'\(\*', Comment.Multiline, 'comment'),
- ],
-
- 'delimiters': [
- # This lexer treats these delimiters specially:
- # Delimiters define scopes, and the scope is how the meaning of
- # the `|' is resolved - is it a case/handle expression, or function
- # definition by cases? (This is not how the Definition works, but
- # it's how MLton behaves, see http://mlton.org/SMLNJDeviations)
- (r'\(|\[|{', Punctuation, 'main'),
- (r'\)|\]|}', Punctuation, '#pop'),
- (r'\b(let|if|local)\b(?!\')', Keyword.Reserved, ('main', 'main')),
- (r'\b(struct|sig|while)\b(?!\')', Keyword.Reserved, 'main'),
- (r'\b(do|else|end|in|then)\b(?!\')', Keyword.Reserved, '#pop'),
- ],
-
- 'core': [
- # Punctuation that doesn't overlap symbolic identifiers
- (r'(%s)' % '|'.join([re.escape(z) for z in nonid_reserved]),
- Punctuation),
-
- # Special constants: strings, floats, numbers in decimal and hex
- (r'#"', String.Char, 'char'),
- (r'"', String.Double, 'string'),
- (r'~?0x[0-9a-fA-F]+', Number.Hex),
- (r'0wx[0-9a-fA-F]+', Number.Hex),
- (r'0w\d+', Number.Integer),
- (r'~?\d+\.\d+[eE]~?\d+', Number.Float),
- (r'~?\d+\.\d+', Number.Float),
- (r'~?\d+[eE]~?\d+', Number.Float),
- (r'~?\d+', Number.Integer),
-
- # Labels
- (r'#\s*[1-9][0-9]*', Name.Label),
- (r'#\s*(%s)' % alphanumid_re, Name.Label),
- (r'#\s+(%s)' % symbolicid_re, Name.Label),
- # Some reserved words trigger a special, local lexer state change
- (r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'),
- (r'(?=\b(exception)\b(?!\'))', Text, ('ename')),
- (r'\b(functor|include|open|signature|structure)\b(?!\')',
- Keyword.Reserved, 'sname'),
- (r'\b(type|eqtype)\b(?!\')', Keyword.Reserved, 'tname'),
-
- # Regular identifiers, long and otherwise
- (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
- (r'(%s)(\.)' % alphanumid_re, long_id_callback, "dotted"),
- (r'(%s)' % alphanumid_re, id_callback),
- (r'(%s)' % symbolicid_re, id_callback),
- ],
- 'dotted': [
- (r'(%s)(\.)' % alphanumid_re, long_id_callback),
- (r'(%s)' % alphanumid_re, end_id_callback, "#pop"),
- (r'(%s)' % symbolicid_re, end_id_callback, "#pop"),
- (r'\s+', Error),
- (r'\S+', Error),
- ],
-
-
- # Main parser (prevents errors in files that have scoping errors)
- 'root': [ (r'', Text, 'main') ],
-
- # In this scope, I expect '|' to not be followed by a function name,
- # and I expect 'and' to be followed by a binding site
- 'main': [
- include('whitespace'),
-
- # Special behavior of val/and/fun
- (r'\b(val|and)\b(?!\')', Keyword.Reserved, 'vname'),
- (r'\b(fun)\b(?!\')', Keyword.Reserved,
- ('#pop', 'main-fun', 'fname')),
-
- include('delimiters'),
- include('core'),
- (r'\S+', Error),
- ],
-
- # In this scope, I expect '|' and 'and' to be followed by a function
- 'main-fun': [
- include('whitespace'),
-
- (r'\s', Text),
- (r'\(\*', Comment.Multiline, 'comment'),
-
- # Special behavior of val/and/fun
- (r'\b(fun|and)\b(?!\')', Keyword.Reserved, 'fname'),
- (r'\b(val)\b(?!\')', Keyword.Reserved,
- ('#pop', 'main', 'vname')),
-
- # Special behavior of '|' and '|'-manipulating keywords
- (r'\|', Punctuation, 'fname'),
- (r'\b(case|handle)\b(?!\')', Keyword.Reserved,
- ('#pop', 'main')),
-
- include('delimiters'),
- include('core'),
- (r'\S+', Error),
- ],
-
- # Character and string parsers
- 'char': stringy(String.Char),
- 'string': stringy(String.Double),
-
- 'breakout': [
- (r'(?=\b(%s)\b(?!\'))' % '|'.join(alphanumid_reserved), Text, '#pop'),
- ],
-
- # Dealing with what comes after module system keywords
- 'sname': [
- include('whitespace'),
- include('breakout'),
-
- (r'(%s)' % alphanumid_re, Name.Namespace),
- (r'', Text, '#pop'),
- ],
-
- # Dealing with what comes after the 'fun' (or 'and' or '|') keyword
- 'fname': [
- include('whitespace'),
- (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
-
- (r'(%s)' % alphanumid_re, Name.Function, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Function, '#pop'),
-
- # Ignore interesting function declarations like "fun (x + y) = ..."
- (r'', Text, '#pop'),
- ],
-
- # Dealing with what comes after the 'val' (or 'and') keyword
- 'vname': [
- include('whitespace'),
- (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
-
- (r'(%s)(\s*)(=(?!%s))' % (alphanumid_re, symbolicid_re),
- bygroups(Name.Variable, Text, Punctuation), '#pop'),
- (r'(%s)(\s*)(=(?!%s))' % (symbolicid_re, symbolicid_re),
- bygroups(Name.Variable, Text, Punctuation), '#pop'),
- (r'(%s)' % alphanumid_re, Name.Variable, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Variable, '#pop'),
-
- # Ignore interesting patterns like 'val (x, y)'
- (r'', Text, '#pop'),
- ],
-
- # Dealing with what comes after the 'type' (or 'and') keyword
- 'tname': [
- include('whitespace'),
- include('breakout'),
-
- (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
- (r'=(?!%s)' % symbolicid_re, Punctuation, ('#pop', 'typbind')),
-
- (r'(%s)' % alphanumid_re, Keyword.Type),
- (r'(%s)' % symbolicid_re, Keyword.Type),
- (r'\S+', Error, '#pop'),
- ],
-
- # A type binding includes most identifiers
- 'typbind': [
- include('whitespace'),
-
- (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
-
- include('breakout'),
- include('core'),
- (r'\S+', Error, '#pop'),
- ],
-
- # Dealing with what comes after the 'datatype' (or 'and') keyword
- 'dname': [
- include('whitespace'),
- include('breakout'),
-
- (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
- (r'(=)(\s*)(datatype)',
- bygroups(Punctuation, Text, Keyword.Reserved), '#pop'),
- (r'=(?!%s)' % symbolicid_re, Punctuation,
- ('#pop', 'datbind', 'datcon')),
-
- (r'(%s)' % alphanumid_re, Keyword.Type),
- (r'(%s)' % symbolicid_re, Keyword.Type),
- (r'\S+', Error, '#pop'),
- ],
-
- # common case - A | B | C of int
- 'datbind': [
- include('whitespace'),
-
- (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'dname')),
- (r'\b(withtype)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
- (r'\b(of)\b(?!\')', Keyword.Reserved),
-
- (r'(\|)(\s*)(%s)' % alphanumid_re,
- bygroups(Punctuation, Text, Name.Class)),
- (r'(\|)(\s+)(%s)' % symbolicid_re,
- bygroups(Punctuation, Text, Name.Class)),
-
- include('breakout'),
- include('core'),
- (r'\S+', Error),
- ],
-
- # Dealing with what comes after an exception
- 'ename': [
- include('whitespace'),
-
- (r'(exception|and)\b(\s+)(%s)' % alphanumid_re,
- bygroups(Keyword.Reserved, Text, Name.Class)),
- (r'(exception|and)\b(\s*)(%s)' % symbolicid_re,
- bygroups(Keyword.Reserved, Text, Name.Class)),
- (r'\b(of)\b(?!\')', Keyword.Reserved),
-
- include('breakout'),
- include('core'),
- (r'\S+', Error),
- ],
-
- 'datcon': [
- include('whitespace'),
- (r'(%s)' % alphanumid_re, Name.Class, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Class, '#pop'),
- (r'\S+', Error, '#pop'),
- ],
-
- # Series of type variables
- 'tyvarseq': [
- (r'\s', Text),
- (r'\(\*', Comment.Multiline, 'comment'),
-
- (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
- (alphanumid_re, Name),
- (r',', Punctuation),
- (r'\)', Punctuation, '#pop'),
- (symbolicid_re, Name),
- ],
-
- 'comment': [
- (r'[^(*)]', Comment.Multiline),
- (r'\(\*', Comment.Multiline, '#push'),
- (r'\*\)', Comment.Multiline, '#pop'),
- (r'[(*)]', Comment.Multiline),
- ],
- }
-
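# The SML lexer above uses callbacks (long_id_callback and friends) so that a
# single regex can decide the token type against the reserved-word tables at
# match time.  In general any rule may name a callable taking (lexer, match)
# and yielding (index, token, value) tuples; a reduced sketch (CallbackLexer
# and RESERVED are invented for illustration):
from pygments.lexer import RegexLexer
from pygments.token import Error, Name, Text

RESERVED = ('val', 'fun')                  # assumed reserved-word table

def classify(lexer, match):
    word = match.group(0)
    yield match.start(), (Error if word in RESERVED else Name), word

class CallbackLexer(RegexLexer):
    name = 'CallbackDemo'
    tokens = {
        'root': [
            (r'[a-z]+', classify),         # the callback picks the token type
            (r'\s+', Text),
        ],
    }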
-
-class OcamlLexer(RegexLexer):
- """
- For the OCaml language.
-
- *New in Pygments 0.7.*
- """
-
- name = 'OCaml'
- aliases = ['ocaml']
- filenames = ['*.ml', '*.mli', '*.mll', '*.mly']
- mimetypes = ['text/x-ocaml']
-
- keywords = [
- 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
- 'downto', 'else', 'end', 'exception', 'external', 'false',
- 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
- 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
- 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
- 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
- 'type', 'value', 'val', 'virtual', 'when', 'while', 'with',
- ]
- keyopts = [
- '!=','#','&','&&','\(','\)','\*','\+',',','-',
- '-\.','->','\.','\.\.',':','::',':=',':>',';',';;','<',
- '<-','=','>','>]','>}','\?','\?\?','\[','\[<','\[>','\[\|',
- ']','_','`','{','{<','\|','\|]','}','~'
- ]
-
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- word_operators = ['and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or']
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
- primitives = ['unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array']
-
- tokens = {
- 'escape-sequence': [
- (r'\\[\\\"\'ntbr]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- ],
- 'root': [
- (r'\s+', Text),
- (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\b([A-Z][A-Za-z0-9_\']*)(?=\s*\.)',
- Name.Namespace, 'dotted'),
- (r'\b([A-Z][A-Za-z0-9_\']*)', Name.Class),
- (r'\(\*(?![)])', Comment, 'comment'),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Binary),
- (r'\d[\d_]*', Number.Integer),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
- String.Char),
- (r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
-
- (r'"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name.Variable),
- ],
- 'comment': [
- (r'[^(*)]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- (r'[(*)]', Comment),
- ],
- 'string': [
- (r'[^\\"]+', String.Double),
- include('escape-sequence'),
- (r'\\\n', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'dotted': [
- (r'\s+', Text),
- (r'\.', Punctuation),
- (r'[A-Z][A-Za-z0-9_\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][A-Za-z0-9_\']*', Name.Class, '#pop'),
- (r'[a-z_][A-Za-z0-9_\']*', Name, '#pop'),
- ],
- }
-
-
-class ErlangLexer(RegexLexer):
- """
- For the Erlang functional programming language.
-
- Blame Jeremy Thurgood (http://jerith.za.net/).
-
- *New in Pygments 0.9.*
- """
-
- name = 'Erlang'
- aliases = ['erlang']
- filenames = ['*.erl', '*.hrl', '*.es', '*.escript']
- mimetypes = ['text/x-erlang']
-
- keywords = [
- 'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if',
- 'let', 'of', 'query', 'receive', 'try', 'when',
- ]
-
- builtins = [ # See erlang(3) man page
- 'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list',
- 'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions',
- 'byte_size', 'cancel_timer', 'check_process_code', 'delete_module',
- 'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit',
- 'float', 'float_to_list', 'fun_info', 'fun_to_list',
- 'function_exported', 'garbage_collect', 'get', 'get_keys',
- 'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary',
- 'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean',
- 'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list',
- 'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record',
- 'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom',
- 'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom',
- 'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple',
- 'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5',
- 'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor',
- 'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2',
- 'pid_to_list', 'port_close', 'port_command', 'port_connect',
- 'port_control', 'port_call', 'port_info', 'port_to_list',
- 'process_display', 'process_flag', 'process_info', 'purge_module',
- 'put', 'read_timer', 'ref_to_list', 'register', 'resume_process',
- 'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie',
- 'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor',
- 'spawn_opt', 'split_binary', 'start_timer', 'statistics',
- 'suspend_process', 'system_flag', 'system_info', 'system_monitor',
- 'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered',
- 'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list',
- 'universaltime_to_localtime', 'unlink', 'unregister', 'whereis'
- ]
-
- operators = r'(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)'
- word_operators = [
- 'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor',
- 'div', 'not', 'or', 'orelse', 'rem', 'xor'
- ]
-
- atom_re = r"(?:[a-z][a-zA-Z0-9_]*|'[^\n']*[^\\]')"
-
- variable_re = r'(?:[A-Z_][a-zA-Z0-9_]*)'
-
- escape_re = r'(?:\\(?:[bdefnrstv\'"\\/]|[0-7][0-7]?[0-7]?|\^[a-zA-Z]))'
-
- macro_re = r'(?:'+variable_re+r'|'+atom_re+r')'
-
- base_re = r'(?:[2-9]|[12][0-9]|3[0-6])'
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'%.*\n', Comment),
- ('(' + '|'.join(keywords) + r')\b', Keyword),
- ('(' + '|'.join(builtins) + r')\b', Name.Builtin),
- ('(' + '|'.join(word_operators) + r')\b', Operator.Word),
- (r'^-', Punctuation, 'directive'),
- (operators, Operator),
- (r'"', String, 'string'),
- (r'<<', Name.Label),
- (r'>>', Name.Label),
- ('(' + atom_re + ')(:)', bygroups(Name.Namespace, Punctuation)),
- ('(?:^|(?<=:))(' + atom_re + r')(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
- (r'[+-]?'+base_re+r'#[0-9a-zA-Z]+', Number.Integer),
- (r'[+-]?\d+', Number.Integer),
- (r'[+-]?\d+.\d+', Number.Float),
- (r'[]\[:_@\".{}()|;,]', Punctuation),
- (variable_re, Name.Variable),
- (atom_re, Name),
- (r'\?'+macro_re, Name.Constant),
- (r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char),
- (r'#'+atom_re+r'(:?\.'+atom_re+r')?', Name.Label),
- ],
- 'string': [
- (escape_re, String.Escape),
- (r'"', String, '#pop'),
- (r'~[0-9.*]*[~#+bBcdefginpPswWxX]', String.Interpol),
- (r'[^"\\~]+', String),
- (r'~', String),
- ],
- 'directive': [
- (r'(define)(\s*)(\()('+macro_re+r')',
- bygroups(Name.Entity, Text, Punctuation, Name.Constant), '#pop'),
- (r'(record)(\s*)(\()('+macro_re+r')',
- bygroups(Name.Entity, Text, Punctuation, Name.Label), '#pop'),
- (atom_re, Name.Entity, '#pop'),
- ],
- }
-
-
-class ErlangShellLexer(Lexer):
- """
- Shell sessions in erl (for Erlang code).
-
- *New in Pygments 1.1.*
- """
- name = 'Erlang erl session'
- aliases = ['erl']
- filenames = ['*.erl-sh']
- mimetypes = ['text/x-erl-shellsession']
-
- _prompt_re = re.compile(r'\d+>(?=\s|\Z)')
-
- def get_tokens_unprocessed(self, text):
- erlexer = ErlangLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- m = self._prompt_re.match(line)
- if m is not None:
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
- for item in do_insertions(insertions,
- erlexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- if line.startswith('*'):
- yield match.start(), Generic.Traceback, line
- else:
- yield match.start(), Generic.Output, line
- if curcode:
- for item in do_insertions(insertions,
- erlexer.get_tokens_unprocessed(curcode)):
- yield item
-
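# A quick way to exercise the session lexer above is to look it up by its
# 'erl' alias and highlight a small transcript (formatter choice is arbitrary
# here, and the sample session is made up):
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

session = "1> lists:seq(1, 3).\n[1,2,3]\n"
print(highlight(session, get_lexer_by_name('erl'), TerminalFormatter()))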
-
-class OpaLexer(RegexLexer):
- """
- Lexer for the Opa language (http://opalang.org).
-
- *New in Pygments 1.5.*
- """
-
- name = 'Opa'
- aliases = ['opa']
- filenames = ['*.opa']
- mimetypes = ['text/x-opa']
-
- # most of these aren't strictly keywords
- # but if you color only real keywords, you might just
- # as well not color anything
- keywords = [
- 'and', 'as', 'begin', 'case', 'client', 'css', 'database', 'db', 'do',
- 'else', 'end', 'external', 'forall', 'function', 'if', 'import',
- 'match', 'module', 'or', 'package', 'parser', 'rec', 'server', 'then',
- 'type', 'val', 'with', 'xml_parser',
- ]
-
- # matches both stuff and `stuff`
- ident_re = r'(([a-zA-Z_]\w*)|(`[^`]*`))'
-
- op_re = r'[.=\-<>,@~%/+?*&^!]'
- punc_re = r'[()\[\],;|]' # '{' and '}' are treated elsewhere
- # because they are also used for inserts
-
- tokens = {
- # copied from the caml lexer, should be adapted
- 'escape-sequence': [
- (r'\\[\\\"\'ntr}]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- ],
-
- # factoring these rules out, because they are included in many states
- 'comments': [
- (r'/\*', Comment, 'nested-comment'),
- (r'//.*?$', Comment),
- ],
- 'comments-and-spaces': [
- include('comments'),
- (r'\s+', Text),
- ],
-
- 'root': [
- include('comments-and-spaces'),
- # keywords
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- # directives
- # we could parse the actual set of directives instead of anything
- # starting with @, but this is troublesome
- # because it needs to be adjusted all the time
- # and assuming we parse only sources that compile, it is useless
- (r'@'+ident_re+r'\b', Name.Builtin.Pseudo),
-
- # number literals
- (r'-?.[\d]+([eE][+\-]?\d+)', Number.Float),
- (r'-?\d+.\d*([eE][+\-]?\d+)', Number.Float),
- (r'-?\d+[eE][+\-]?\d+', Number.Float),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[bB][01]+', Number.Binary),
- (r'\d+', Number.Integer),
- # color literals
- (r'#[\da-fA-F]{3,6}', Number.Integer),
-
- # string literals
- (r'"', String.Double, 'string'),
- # char literal, should be checked because this is the regexp from
- # the caml lexer
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2})|.)'",
- String.Char),
-
- # this is meant to deal with embedded exprs in strings
- # every time we find a '}' we pop a state so that if we were
- # inside a string, we are back in the string state
- # as a consequence, we must also push a state every time we find a
- # '{' or else we will have errors when parsing {} for instance
- (r'{', Operator, '#push'),
- (r'}', Operator, '#pop'),
-
- # html literals
- # this is much stricter than the actual parser,
- # since a<b would not be parsed as html
- # but then again, the parser is way too lax, and we can't hope
- # to have something as tolerant
- (r'<(?=[a-zA-Z>])', String.Single, 'html-open-tag'),
-
- # db path
- # matching the '[_]' in '/a[_]' because it is part
- # of the db path definition syntax
- # unfortunately, I don't know how to match the ']' in
- # /a[1], so this is somewhat inconsistent
- (r'[@?!]?(/\w+)+(\[_\])?', Name.Variable),
- # putting the same color on <- as on db path, since
- # it can be used only to mean Db.write
- (r'<-(?!'+op_re+r')', Name.Variable),
-
- # 'modules'
- # although modules are not distinguished by their names as in caml,
- # the standard library seems to follow the convention that only
- # modules are capitalized
- (r'\b([A-Z]\w*)(?=\.)', Name.Namespace),
-
- # operators
- # = has a special role because this is the only
- # way to syntactically distinguish binding constructions
- # unfortunately, this colors the equal in {x=2} too
- (r'=(?!'+op_re+r')', Keyword),
- (r'(%s)+' % op_re, Operator),
- (r'(%s)+' % punc_re, Operator),
-
- # coercions
- (r':', Operator, 'type'),
- # type variables
- # we need this rule because we don't specially parse type
- # definitions, so in "type t('a) = ...", "'a" is parsed by 'root'
- ("'"+ident_re, Keyword.Type),
-
- # id literal, #something, or #{expr}
- (r'#'+ident_re, String.Single),
- (r'#(?={)', String.Single),
-
- # identifiers
- # this avoids coloring the '2' in 'a2' as an integer
- (ident_re, Text),
-
- # default, not sure if that is needed or not
- # (r'.', Text),
- ],
-
- # it is quite painful to have to parse types to know where they end
- # this is the general rule for a type
- # a type is either:
- # * -> ty
- # * type-with-slash
- # * type-with-slash -> ty
- # * type-with-slash (, type-with-slash)+ -> ty
- #
- # the code in here is pretty funky, but it would roughly
- # translate to the following caml:
- # let rec type stream =
- # match stream with
- # | [< "->"; stream >] -> type stream
- # | [< ""; stream >] ->
- # type_with_slash stream
- # type_lhs_1 stream;
- # and type_1 stream = ...
- 'type': [
- include('comments-and-spaces'),
- (r'->', Keyword.Type),
- (r'', Keyword.Type, ('#pop', 'type-lhs-1', 'type-with-slash')),
- ],
-
- # parses all the atomic or closed constructions in the syntax of type
- # expressions: record types, tuple types, type constructors, basic type
- # and type variables
- 'type-1': [
- include('comments-and-spaces'),
- (r'\(', Keyword.Type, ('#pop', 'type-tuple')),
- (r'~?{', Keyword.Type, ('#pop', 'type-record')),
- (ident_re+r'\(', Keyword.Type, ('#pop', 'type-tuple')),
- (ident_re, Keyword.Type, '#pop'),
- ("'"+ident_re, Keyword.Type),
- # this case is not in the syntax but sometimes
- # we think we are parsing types when in fact we are parsing
- # some css, so we just pop the states until we get back into
- # the root state
- (r'', Keyword.Type, '#pop'),
- ],
-
- # type-with-slash is either:
- # * type-1
- # * type-1 (/ type-1)+
- 'type-with-slash': [
- include('comments-and-spaces'),
- (r'', Keyword.Type, ('#pop', 'slash-type-1', 'type-1')),
- ],
- 'slash-type-1': [
- include('comments-and-spaces'),
- ('/', Keyword.Type, ('#pop', 'type-1')),
- # same remark as above
- (r'', Keyword.Type, '#pop'),
- ],
-
- # we go in this state after having parsed a type-with-slash
- # while trying to parse a type
- # and at this point we must determine if we are parsing an arrow
- # type (in which case we must continue parsing) or not (in which
- # case we stop)
- 'type-lhs-1': [
- include('comments-and-spaces'),
- (r'->', Keyword.Type, ('#pop', 'type')),
- (r'(?=,)', Keyword.Type, ('#pop', 'type-arrow')),
- (r'', Keyword.Type, '#pop'),
- ],
- 'type-arrow': [
- include('comments-and-spaces'),
- # the lookahead here lets us parse f(x : int, y : float -> truc)
- # correctly
- (r',(?=[^:]*?->)', Keyword.Type, 'type-with-slash'),
- (r'->', Keyword.Type, ('#pop', 'type')),
- # same remark as above
- (r'', Keyword.Type, '#pop'),
- ],
-
- # no need to do precise parsing for tuples and records
- # because they are closed constructions, so we can simply
- # find the closing delimiter
- # note that this would not work if the source
- # contained identifiers like `{)` (although it could be patched
- # to support it)
- 'type-tuple': [
- include('comments-and-spaces'),
- (r'[^\(\)/*]+', Keyword.Type),
- (r'[/*]', Keyword.Type),
- (r'\(', Keyword.Type, '#push'),
- (r'\)', Keyword.Type, '#pop'),
- ],
- 'type-record': [
- include('comments-and-spaces'),
- (r'[^{}/*]+', Keyword.Type),
- (r'[/*]', Keyword.Type),
- (r'{', Keyword.Type, '#push'),
- (r'}', Keyword.Type, '#pop'),
- ],
-
-# 'type-tuple': [
-# include('comments-and-spaces'),
-# (r'\)', Keyword.Type, '#pop'),
-# (r'', Keyword.Type, ('#pop', 'type-tuple-1', 'type-1')),
-# ],
-# 'type-tuple-1': [
-# include('comments-and-spaces'),
-# (r',?\s*\)', Keyword.Type, '#pop'), # ,) is a valid end of tuple, in (1,)
-# (r',', Keyword.Type, 'type-1'),
-# ],
-# 'type-record':[
-# include('comments-and-spaces'),
-# (r'}', Keyword.Type, '#pop'),
-# (r'~?(?:\w+|`[^`]*`)', Keyword.Type, 'type-record-field-expr'),
-# ],
-# 'type-record-field-expr': [
-#
-# ],
-
- 'nested-comment': [
- (r'[^/*]+', Comment),
- (r'/\*', Comment, '#push'),
- (r'\*/', Comment, '#pop'),
- (r'[/*]', Comment),
- ],
-
- # the copy pasting between string and single-string
- # is kinda sad. Is there a way to avoid that??
- 'string': [
- (r'[^\\"{]+', String.Double),
- (r'"', String.Double, '#pop'),
- (r'{', Operator, 'root'),
- include('escape-sequence'),
- ],
- 'single-string': [
- (r'[^\\\'{]+', String.Double),
- (r'\'', String.Double, '#pop'),
- (r'{', Operator, 'root'),
- include('escape-sequence'),
- ],
-
- # all the html stuff
- # can't really reuse some existing html parser
- # because we must be able to parse embedded expressions
-
- # we are in this state after someone parsed the '<' that
- # started the html literal
- 'html-open-tag': [
- (r'[\w\-:]+', String.Single, ('#pop', 'html-attr')),
- (r'>', String.Single, ('#pop', 'html-content')),
- ],
-
- # we are in this state after someone parsed the '</' that
- # started the end of the closing tag
- 'html-end-tag': [
- # this is a star, because </> is allowed
- (r'[\w\-:]*>', String.Single, '#pop'),
- ],
-
- # we are in this state after having parsed '<ident(:ident)?'
- # we thus parse a possibly empty list of attributes
- 'html-attr': [
- (r'\s+', Text),
- (r'[\w\-:]+=', String.Single, 'html-attr-value'),
- (r'/>', String.Single, '#pop'),
- (r'>', String.Single, ('#pop', 'html-content')),
- ],
-
- 'html-attr-value': [
- (r"'", String.Single, ('#pop', 'single-string')),
- (r'"', String.Single, ('#pop', 'string')),
- (r'#'+ident_re, String.Single, '#pop'),
- (r'#(?={)', String.Single, ('#pop', 'root')),
- (r'[^"\'{`=<>]+', String.Single, '#pop'),
- (r'{', Operator, ('#pop', 'root')), # this is a tail call!
- ],
-
- # we should probably deal with '\' escapes here
- 'html-content': [
- (r'<!--', Comment, 'html-comment'),
- (r'</', String.Single, ('#pop', 'html-end-tag')),
- (r'<', String.Single, 'html-open-tag'),
- (r'{', Operator, 'root'),
- (r'[^<{]+', String.Single),
- ],
-
- 'html-comment': [
- (r'-->', Comment, '#pop'),
- (r'[^\-]+|-', Comment),
- ],
- }
-
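# The '{' / '}' handling above is a compact way to lex string interpolation:
# the string states push 'root' on '{', and 'root' itself pops on '}', so the
# lexer drops back into the string once the embedded expression ends.  The
# pattern in isolation (InterpDemoLexer and its token choices are invented,
# not Opa's):
from pygments.lexer import RegexLexer
from pygments.token import Name, Operator, String, Text

class InterpDemoLexer(RegexLexer):
    name = 'InterpDemo'
    tokens = {
        'root': [
            (r'"', String.Double, 'string'),
            (r'\{', Operator, '#push'),     # keep {} pushes and pops balanced
            (r'\}', Operator, '#pop'),
            (r'[a-z]+', Name),
            (r'\s+', Text),
        ],
        'string': [
            (r'[^"{]+', String.Double),
            (r'\{', Operator, 'root'),      # embedded expression starts here
            (r'"', String.Double, '#pop'),
        ],
    }

# InterpDemoLexer().get_tokens('"a {x} b"') lexes x as a Name, not String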
-
-class CoqLexer(RegexLexer):
- """
- For the `Coq <http://coq.inria.fr/>`_ theorem prover.
-
- *New in Pygments 1.5.*
- """
-
- name = 'Coq'
- aliases = ['coq']
- filenames = ['*.v']
- mimetypes = ['text/x-coq']
-
- keywords1 = [
- # Vernacular commands
- 'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
- 'Variables', 'Parameter', 'Parameters', 'Axiom', 'Hypothesis',
- 'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
- 'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Let', 'Ltac',
- 'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
- 'Arguments', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex',
- 'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
- 'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary',
- 'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
- 'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
- 'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
- 'outside',
- ]
- keywords2 = [
- # Gallina
- 'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
- 'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else',
- 'for', 'of', 'nosimpl', 'with', 'as',
- ]
- keywords3 = [
- # Sorts
- 'Type', 'Prop',
- ]
- keywords4 = [
- # Tactics
- 'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
- 'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
- 'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
- 'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
- 'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
- 'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
- 'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
- 'split', 'left', 'right', 'autorewrite',
- ]
- keywords5 = [
- # Terminators
- 'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega',
- 'assumption', 'solve', 'contradiction', 'discriminate',
- ]
- keywords6 = [
- # Control
- 'do', 'last', 'first', 'try', 'idtac', 'repeat',
- ]
- # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
- # 'downto', 'else', 'end', 'exception', 'external', 'false',
- # 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
- # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
- # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
- # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
- # 'type', 'val', 'virtual', 'when', 'while', 'with'
- keyopts = [
- '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
- r'-\.', '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
- '<-', '=', '>', '>]', '>}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
- r'\[\|', ']', '_', '`', '{', '{<', r'\|', r'\|]', '}', '~', '=>',
- r'/\\', r'\\/',
- u'Π', u'λ',
- ]
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- word_operators = ['and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or']
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
- primitives = ['unit', 'int', 'float', 'bool', 'string', 'char', 'list',
- 'array']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\(\*', Comment, 'comment'),
- (r'\b(%s)\b' % '|'.join(keywords1), Keyword.Namespace),
- (r'\b(%s)\b' % '|'.join(keywords2), Keyword),
- (r'\b(%s)\b' % '|'.join(keywords3), Keyword.Type),
- (r'\b(%s)\b' % '|'.join(keywords4), Keyword),
- (r'\b(%s)\b' % '|'.join(keywords5), Keyword.Pseudo),
- (r'\b(%s)\b' % '|'.join(keywords6), Keyword.Reserved),
- (r'\b([A-Z][A-Za-z0-9_\']*)(?=\s*\.)',
- Name.Namespace, 'dotted'),
- (r'\b([A-Z][A-Za-z0-9_\']*)', Name.Class),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'\d[\d_]*', Number.Integer),
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Binary),
- (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
- String.Char),
- (r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
-
- (r'"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name.Variable),
- ],
- 'comment': [
- (r'[^(*)]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- (r'[(*)]', Comment),
- ],
- 'string': [
- (r'[^"]+', String.Double),
- (r'""', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'dotted': [
- (r'\s+', Text),
- (r'\.', Punctuation),
- (r'[A-Z][A-Za-z0-9_\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][A-Za-z0-9_\']*', Name.Class, '#pop'),
- (r'[a-z][a-z0-9_\']*', Name, '#pop'),
- (r'', Text, '#pop')
- ],
- }
-
- def analyse_text(text):
- if text.startswith('(*'):
- return True
-
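# analyse_text() above feeds pygments.lexers.guess_lexer(): each lexer scores
# the sample and the best score wins, with True treated as 1.0.  A short
# check (the snippet is made up; other languages using "(*" comments also
# compete in the guess):
from pygments.lexers import guess_lexer

print(guess_lexer('(* a comment *) Lemma one : 1 = 1.').name)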
-
-class NewLispLexer(RegexLexer):
- """
- For `newLISP <http://www.newlisp.org/>`_ source code (version 10.3.0).
-
- *New in Pygments 1.5.*
- """
-
- name = 'NewLisp'
- aliases = ['newlisp']
- filenames = ['*.lsp', '*.nl']
- mimetypes = ['text/x-newlisp', 'application/x-newlisp']
-
- flags = re.IGNORECASE | re.MULTILINE | re.UNICODE
-
- # list of built-in functions for newLISP version 10.3
- builtins = [
- '^', '--', '-', ':', '!', '!=', '?', '@', '*', '/', '&', '%', '+', '++',
- '<', '<<', '<=', '=', '>', '>=', '>>', '|', '~', '$', '$0', '$1', '$10',
- '$11', '$12', '$13', '$14', '$15', '$2', '$3', '$4', '$5', '$6', '$7',
- '$8', '$9', '$args', '$idx', '$it', '$main-args', 'abort', 'abs',
- 'acos', 'acosh', 'add', 'address', 'amb', 'and', 'and', 'append-file',
- 'append', 'apply', 'args', 'array-list', 'array?', 'array', 'asin',
- 'asinh', 'assoc', 'atan', 'atan2', 'atanh', 'atom?', 'base64-dec',
- 'base64-enc', 'bayes-query', 'bayes-train', 'begin', 'begin', 'begin',
- 'beta', 'betai', 'bind', 'binomial', 'bits', 'callback', 'case', 'case',
- 'case', 'catch', 'ceil', 'change-dir', 'char', 'chop', 'Class', 'clean',
- 'close', 'command-event', 'cond', 'cond', 'cond', 'cons', 'constant',
- 'context?', 'context', 'copy-file', 'copy', 'cos', 'cosh', 'count',
- 'cpymem', 'crc32', 'crit-chi2', 'crit-z', 'current-line', 'curry',
- 'date-list', 'date-parse', 'date-value', 'date', 'debug', 'dec',
- 'def-new', 'default', 'define-macro', 'define-macro', 'define',
- 'delete-file', 'delete-url', 'delete', 'destroy', 'det', 'device',
- 'difference', 'directory?', 'directory', 'div', 'do-until', 'do-while',
- 'doargs', 'dolist', 'dostring', 'dotimes', 'dotree', 'dump', 'dup',
- 'empty?', 'encrypt', 'ends-with', 'env', 'erf', 'error-event',
- 'eval-string', 'eval', 'exec', 'exists', 'exit', 'exp', 'expand',
- 'explode', 'extend', 'factor', 'fft', 'file-info', 'file?', 'filter',
- 'find-all', 'find', 'first', 'flat', 'float?', 'float', 'floor', 'flt',
- 'fn', 'for-all', 'for', 'fork', 'format', 'fv', 'gammai', 'gammaln',
- 'gcd', 'get-char', 'get-float', 'get-int', 'get-long', 'get-string',
- 'get-url', 'global?', 'global', 'if-not', 'if', 'ifft', 'import', 'inc',
- 'index', 'inf?', 'int', 'integer?', 'integer', 'intersect', 'invert',
- 'irr', 'join', 'lambda-macro', 'lambda?', 'lambda', 'last-error',
- 'last', 'legal?', 'length', 'let', 'let', 'let', 'letex', 'letn',
- 'letn', 'letn', 'list?', 'list', 'load', 'local', 'log', 'lookup',
- 'lower-case', 'macro?', 'main-args', 'MAIN', 'make-dir', 'map', 'mat',
- 'match', 'max', 'member', 'min', 'mod', 'module', 'mul', 'multiply',
- 'NaN?', 'net-accept', 'net-close', 'net-connect', 'net-error',
- 'net-eval', 'net-interface', 'net-ipv', 'net-listen', 'net-local',
- 'net-lookup', 'net-packet', 'net-peek', 'net-peer', 'net-ping',
- 'net-receive-from', 'net-receive-udp', 'net-receive', 'net-select',
- 'net-send-to', 'net-send-udp', 'net-send', 'net-service',
- 'net-sessions', 'new', 'nil?', 'nil', 'normal', 'not', 'now', 'nper',
- 'npv', 'nth', 'null?', 'number?', 'open', 'or', 'ostype', 'pack',
- 'parse-date', 'parse', 'peek', 'pipe', 'pmt', 'pop-assoc', 'pop',
- 'post-url', 'pow', 'prefix', 'pretty-print', 'primitive?', 'print',
- 'println', 'prob-chi2', 'prob-z', 'process', 'prompt-event',
- 'protected?', 'push', 'put-url', 'pv', 'quote?', 'quote', 'rand',
- 'random', 'randomize', 'read', 'read-char', 'read-expr', 'read-file',
- 'read-key', 'read-line', 'read-utf8', 'read', 'reader-event',
- 'real-path', 'receive', 'ref-all', 'ref', 'regex-comp', 'regex',
- 'remove-dir', 'rename-file', 'replace', 'reset', 'rest', 'reverse',
- 'rotate', 'round', 'save', 'search', 'seed', 'seek', 'select', 'self',
- 'semaphore', 'send', 'sequence', 'series', 'set-locale', 'set-ref-all',
- 'set-ref', 'set', 'setf', 'setq', 'sgn', 'share', 'signal', 'silent',
- 'sin', 'sinh', 'sleep', 'slice', 'sort', 'source', 'spawn', 'sqrt',
- 'starts-with', 'string?', 'string', 'sub', 'swap', 'sym', 'symbol?',
- 'symbols', 'sync', 'sys-error', 'sys-info', 'tan', 'tanh', 'term',
- 'throw-error', 'throw', 'time-of-day', 'time', 'timer', 'title-case',
- 'trace-highlight', 'trace', 'transpose', 'Tree', 'trim', 'true?',
- 'true', 'unicode', 'unify', 'unique', 'unless', 'unpack', 'until',
- 'upper-case', 'utf8', 'utf8len', 'uuid', 'wait-pid', 'when', 'while',
- 'write', 'write-char', 'write-file', 'write-line', 'write',
- 'xfer-event', 'xml-error', 'xml-parse', 'xml-type-tags', 'zero?',
- ]
-
- # valid names
- valid_name = r'([a-zA-Z0-9!$%&*+.,/<=>?@^_~|-])+|(\[.*?\])+'
-
- tokens = {
- 'root': [
- # shebang
- (r'#!(.*?)$', Comment.Preproc),
- # comments starting with semicolon
- (r';.*$', Comment.Single),
- # comments starting with #
- (r'#.*$', Comment.Single),
-
- # whitespace
- (r'\s+', Text),
-
- # strings, symbols and characters
- (r'"(\\\\|\\"|[^"])*"', String),
-
- # braces
- (r"{", String, "bracestring"),
-
- # [text] ... [/text] delimited strings
- (r'\[text\]*', String, "tagstring"),
-
- # 'special' operators...
- (r"('|:)", Operator),
-
- # highlight the builtins
- ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
- Keyword),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Variable),
-
- # the remaining variables
- (valid_name, String.Symbol),
-
- # parentheses
- (r'(\(|\))', Punctuation),
- ],
-
- # braced strings...
- 'bracestring': [
- ("{", String, "#push"),
- ("}", String, "#pop"),
- ("[^{}]+", String),
- ],
-
- # tagged [text]...[/text] delimited strings...
- 'tagstring': [
- (r'(?s)(.*?)(\[/text\])', String, '#pop'),
- ],
- }
-
-
-class ElixirLexer(RegexLexer):
- """
- For the `Elixir language <http://elixir-lang.org>`_.
-
- *New in Pygments 1.5.*
- """
-
- name = 'Elixir'
- aliases = ['elixir', 'ex', 'exs']
- filenames = ['*.ex', '*.exs']
- mimetypes = ['text/x-elixir']
-
- def gen_elixir_sigil_rules():
- states = {}
-
- states['strings'] = [
- (r'(%[A-Ba-z])?"""(?:.|\n)*?"""', String.Doc),
- (r"'''(?:.|\n)*?'''", String.Doc),
- (r'"', String.Double, 'dqs'),
- (r"'.*'", String.Single),
- (r'(?<!\w)\?(\\(x\d{1,2}|\h{1,2}(?!\h)\b|0[0-7]{0,2}(?![0-7])\b|'
- r'[^x0MC])|(\\[MC]-)+\w|[^\s\\])', String.Other)
- ]
-
- for lbrace, rbrace, name, in ('\\{', '\\}', 'cb'), \
- ('\\[', '\\]', 'sb'), \
- ('\\(', '\\)', 'pa'), \
- ('\\<', '\\>', 'lt'):
-
- states['strings'] += [
- (r'%[a-z]' + lbrace, String.Double, name + 'intp'),
- (r'%[A-Z]' + lbrace, String.Double, name + 'no-intp')
- ]
-
- states[name +'intp'] = [
- (r'' + rbrace + '[a-z]*', String.Double, "#pop"),
- include('enddoublestr')
- ]
-
- states[name +'no-intp'] = [
- (r'.*' + rbrace + '[a-z]*', String.Double , "#pop")
- ]
-
- return states
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#.*$', Comment.Single),
- (r'\b(case|cond|end|bc|lc|if|unless|try|loop|receive|fn|defmodule|'
- r'defp?|defprotocol|defimpl|defrecord|defmacrop?|defdelegate|'
- r'defexception|exit|raise|throw|unless|after|rescue|catch|else)\b(?![?!])|'
- r'(?<!\.)\b(do|\-\>)\b\s*', Keyword),
- (r'\b(import|require|use|recur|quote|unquote|super|refer)\b(?![?!])',
- Keyword.Namespace),
- (r'(?<!\.)\b(and|not|or|when|xor|in)\b', Operator.Word),
- (r'%=|\*=|\*\*=|\+=|\-=|\^=|\|\|=|'
- r'<=>|<(?!<|=)|>(?!<|=|>)|<=|>=|===|==|=~|!=|!~|(?=[ \t])\?|'
- r'(?<=[ \t])!+|&&|\|\||\^|\*|\+|\-|/|'
- r'\||\+\+|\-\-|\*\*|\/\/|\<\-|\<\>|<<|>>|=|\.', Operator),
- (r'(?<!:)(:)([a-zA-Z_]\w*([?!]|=(?![>=]))?|\<\>|===?|>=?|<=?|'
- r'<=>|&&?|%\(\)|%\[\]|%\{\}|\+\+?|\-\-?|\|\|?|\!|//|[%&`/\|]|'
- r'\*\*?|=?~|<\-)|([a-zA-Z_]\w*([?!])?)(:)(?!:)', String.Symbol),
- (r':"', String.Symbol, 'interpoling_symbol'),
- (r'\b(nil|true|false)\b(?![?!])|\b[A-Z]\w*\b', Name.Constant),
- (r'\b(__(FILE|LINE|MODULE|MAIN|FUNCTION)__)\b(?![?!])', Name.Builtin.Pseudo),
- (r'[a-zA-Z_!][\w_]*[!\?]?', Name),
- (r'[(){};,/\|:\\\[\]]', Punctuation),
- (r'@[a-zA-Z_]\w*|&\d', Name.Variable),
- (r'\b(0[xX][0-9A-Fa-f]+|\d(_?\d)*(\.(?![^\d\s])'
- r'(_?\d)*)?([eE][-+]?\d(_?\d)*)?|0[bB][01]+)\b', Number),
- (r'%r\/.*\/', String.Regex),
- include('strings'),
- ],
- 'dqs': [
- (r'"', String.Double, "#pop"),
- include('enddoublestr')
- ],
- 'interpoling': [
- (r'#{', String.Interpol, 'interpoling_string'),
- ],
- 'interpoling_string' : [
- (r'}', String.Interpol, "#pop"),
- include('root')
- ],
- 'interpoling_symbol': [
- (r'"', String.Symbol, "#pop"),
- include('interpoling'),
- (r'[^#"]+', String.Symbol),
- ],
- 'enddoublestr' : [
- include('interpoling'),
- (r'[^#"]+', String.Double),
- ]
- }
- tokens.update(gen_elixir_sigil_rules())
-
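# gen_elixir_sigil_rules() above builds one pair of states per delimiter and
# merges them into the class with tokens.update(); the state table is plain
# Python evaluated at class-definition time.  A stripped-down sketch of the
# same idea (SigilDemoLexer and its '~{...}' / '~[...]' forms are invented):
from pygments.lexer import RegexLexer, include
from pygments.token import String, Text

def make_sigil_states():
    states = {'sigils': []}
    for lbrace, rbrace, name in ((r'\{', r'\}', 'curly'),
                                 (r'\[', r'\]', 'square')):
        states['sigils'].append((r'~' + lbrace, String.Other, name))
        states[name] = [
            (rbrace, String.Other, '#pop'),             # closing delimiter
            (r'[^' + rbrace + r']+', String.Other),     # sigil body
        ]
    return states

class SigilDemoLexer(RegexLexer):
    name = 'SigilDemo'
    tokens = {
        'root': [
            (r'\s+', Text),
            include('sigils'),
            (r'[^~\s]+', Text),
            (r'~', Text),
        ],
    }
    tokens.update(make_sigil_states())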
-
-class ElixirConsoleLexer(Lexer):
- """
- For Elixir interactive console (iex) output like:
-
- .. sourcecode:: iex
-
- iex> [head | tail] = [1,2,3]
- [1,2,3]
- iex> head
- 1
- iex> tail
- [2,3]
- iex> [head | tail]
- [1,2,3]
- iex> length [head | tail]
- 3
-
- *New in Pygments 1.5.*
- """
-
- name = 'Elixir iex session'
- aliases = ['iex']
- mimetypes = ['text/x-elixir-shellsession']
-
- _prompt_re = re.compile('(iex|\.{3})> ')
-
- def get_tokens_unprocessed(self, text):
- exlexer = ElixirLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- if line.startswith(u'** '):
- insertions.append((len(curcode),
- [(0, Generic.Error, line[:-1])]))
- curcode += line[-1:]
- else:
- m = self._prompt_re.match(line)
- if m is not None:
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
- for item in do_insertions(insertions,
- exlexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- yield match.start(), Generic.Output, line
- if curcode:
- for item in do_insertions(insertions,
- exlexer.get_tokens_unprocessed(curcode)):
- yield item
-
-
-class KokaLexer(RegexLexer):
- """
- Lexer for the `Koka <http://koka.codeplex.com>`_
- language.
-
- *New in Pygments 1.6.*
- """
-
- name = 'Koka'
- aliases = ['koka']
- filenames = ['*.kk', '*.kki']
- mimetypes = ['text/x-koka']
-
- keywords = [
- 'infix', 'infixr', 'infixl',
- 'type', 'cotype', 'rectype', 'alias',
- 'struct', 'con',
- 'fun', 'function', 'val', 'var',
- 'external',
- 'if', 'then', 'else', 'elif', 'return', 'match',
- 'private', 'public', 'private',
- 'module', 'import', 'as',
- 'include', 'inline',
- 'rec',
- 'try', 'yield', 'enum',
- 'interface', 'instance',
- ]
-
- # keywords that are followed by a type
- typeStartKeywords = [
- 'type', 'cotype', 'rectype', 'alias', 'struct', 'enum',
- ]
-
- # keywords valid in a type
- typekeywords = [
- 'forall', 'exists', 'some', 'with',
- ]
-
- # builtin names and special names
- builtin = [
- 'for', 'while', 'repeat',
- 'foreach', 'foreach-indexed',
- 'error', 'catch', 'finally',
- 'cs', 'js', 'file', 'ref', 'assigned',
- ]
-
- # symbols that can be in an operator
- symbols = '[\$%&\*\+@!/\\\^~=\.:\-\?\|<>]+'
-
- # symbol boundary: an operator keyword should not be followed by any of these
- sboundary = '(?!'+symbols+')'
-
- # name boundary: a keyword should not be followed by any of these
- boundary = '(?![\w/])'
-
- # koka token abstractions
- tokenType = Name.Attribute
- tokenTypeDef = Name.Class
- tokenConstructor = Generic.Emph
-
- # main lexer
- tokens = {
- 'root': [
- include('whitespace'),
-
- # go into type mode
- (r'::?' + sboundary, tokenType, 'type'),
- (r'(alias)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
- 'alias-type'),
- (r'(struct)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
- 'struct-type'),
- ((r'(%s)' % '|'.join(typeStartKeywords)) +
- r'(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
- 'type'),
-
- # special sequences of tokens (we use ?: for non-capturing group as
- # required by 'bygroups')
- (r'(module)(\s+)(interface\s+)?((?:[a-z]\w*/)*[a-z]\w*)',
- bygroups(Keyword, Text, Keyword, Name.Namespace)),
- (r'(import)(\s+)((?:[a-z]\w*/)*[a-z]\w*)'
- r'(?:(\s*)(=)(\s*)((?:qualified\s*)?)'
- r'((?:[a-z]\w*/)*[a-z]\w*))?',
- bygroups(Keyword, Text, Name.Namespace, Text, Keyword, Text,
- Keyword, Name.Namespace)),
-
- (r'(^(?:(?:public|private)\s*)?(?:function|fun|val))'
- r'(\s+)([a-z]\w*|\((?:' + symbols + r'|/)\))',
- bygroups(Keyword, Text, Name.Function)),
- (r'(^(?:(?:public|private)\s*)?external)(\s+)(inline\s+)?'
- r'([a-z]\w*|\((?:' + symbols + r'|/)\))',
- bygroups(Keyword, Text, Keyword, Name.Function)),
-
- # keywords
- (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type),
- (r'(%s)' % '|'.join(keywords) + boundary, Keyword),
- (r'(%s)' % '|'.join(builtin) + boundary, Keyword.Pseudo),
- (r'::?|:=|\->|[=\.]' + sboundary, Keyword),
-
- # names
- (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
- bygroups(Name.Namespace, tokenConstructor)),
- (r'((?:[a-z]\w*/)*)([a-z]\w*)', bygroups(Name.Namespace, Name)),
- (r'((?:[a-z]\w*/)*)(\((?:' + symbols + r'|/)\))',
- bygroups(Name.Namespace, Name)),
- (r'_\w*', Name.Variable),
-
- # literal string
- (r'@"', String.Double, 'litstring'),
-
- # operators
- (symbols + "|/(?![\*/])", Operator),
- (r'`', Operator),
- (r'[\{\}\(\)\[\];,]', Punctuation),
-
- # literals. No check for literal characters with len > 1
- (r'[0-9]+\.[0-9]+([eE][\-\+]?[0-9]+)?', Number.Float),
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
-
- (r"'", String.Char, 'char'),
- (r'"', String.Double, 'string'),
- ],
-
- # type started by alias
- 'alias-type': [
- (r'=',Keyword),
- include('type')
- ],
-
- # type started by struct
- 'struct-type': [
- (r'(?=\((?!,*\)))',Punctuation, '#pop'),
- include('type')
- ],
-
- # type started by colon
- 'type': [
- (r'[\(\[<]', tokenType, 'type-nested'),
- include('type-content')
- ],
-
- # type nested in brackets: can contain parameters, comma etc.
- 'type-nested': [
- (r'[\)\]>]', tokenType, '#pop'),
- (r'[\(\[<]', tokenType, 'type-nested'),
- (r',', tokenType),
- (r'([a-z]\w*)(\s*)(:)(?!:)',
- bygroups(Name, Text, tokenType)), # parameter name
- include('type-content')
- ],
-
- # shared contents of a type
- 'type-content': [
- include('whitespace'),
-
- # keywords
- (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword),
- (r'(?=((%s)' % '|'.join(keywords) + boundary + '))',
- Keyword, '#pop'), # need to match because names overlap...
-
- # kinds
- (r'[EPHVX]' + boundary, tokenType),
-
- # type names
- (r'[a-z][0-9]*(?![\w/])', tokenType ),
- (r'_\w*', tokenType.Variable), # Generic.Emph
- (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
- bygroups(Name.Namespace, tokenType)),
- (r'((?:[a-z]\w*/)*)([a-z]\w+)',
- bygroups(Name.Namespace, tokenType)),
-
- # type keyword operators
- (r'::|\->|[\.:|]', tokenType),
-
- #catchall
- (r'', Text, '#pop')
- ],
-
- # comments and literals
- 'whitespace': [
- (r'\n\s*#.*$', Comment.Preproc),
- (r'\s+', Text),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'//.*$', Comment.Single)
- ],
- 'comment': [
- (r'[^/\*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[\*/]', Comment.Multiline),
- ],
- 'litstring': [
- (r'[^"]+', String.Double),
- (r'""', String.Escape),
- (r'"', String.Double, '#pop'),
- ],
- 'string': [
- (r'[^\\"\n]+', String.Double),
- include('escape-sequence'),
- (r'["\n]', String.Double, '#pop'),
- ],
- 'char': [
- (r'[^\\\'\n]+', String.Char),
- include('escape-sequence'),
- (r'[\'\n]', String.Char, '#pop'),
- ],
- 'escape-sequence': [
- (r'\\[nrt\\\"\']', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- # Yes, \U literals are 6 hex digits.
- (r'\\U[0-9a-fA-F]{6}', String.Escape)
- ]
- }
+__all__ = []
diff --git a/pygments/lexers/go.py b/pygments/lexers/go.py
new file mode 100644
index 00000000..8bd6c7fb
--- /dev/null
+++ b/pygments/lexers/go.py
@@ -0,0 +1,101 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.go
+ ~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Google Go language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['GoLexer']
+
+
+class GoLexer(RegexLexer):
+ """
+ For `Go <http://golang.org>`_ source.
+
+ .. versionadded:: 1.2
+ """
+ name = 'Go'
+ filenames = ['*.go']
+ aliases = ['go']
+ mimetypes = ['text/x-gosrc']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuations
+ (r'//(.*?)\n', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'(import|package)\b', Keyword.Namespace),
+ (r'(var|func|struct|map|chan|type|interface|const)\b',
+ Keyword.Declaration),
+ (words((
+ 'break', 'default', 'select', 'case', 'defer', 'go',
+ 'else', 'goto', 'switch', 'fallthrough', 'if', 'range',
+ 'continue', 'for', 'return'), suffix=r'\b'),
+ Keyword),
+ (r'(true|false|iota|nil)\b', Keyword.Constant),
+ # It seems the builtin types aren't actually keywords, but
+ # can be used as functions. So we need two declarations.
+ (words((
+ 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
+ 'int', 'int8', 'int16', 'int32', 'int64',
+ 'float', 'float32', 'float64',
+ 'complex64', 'complex128', 'byte', 'rune',
+ 'string', 'bool', 'error', 'uintptr',
+ 'print', 'println', 'panic', 'recover', 'close', 'complex',
+ 'real', 'imag', 'len', 'cap', 'append', 'copy', 'delete',
+ 'new', 'make'), suffix=r'\b(\()'),
+ bygroups(Name.Builtin, Punctuation)),
+ (words((
+ 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
+ 'int', 'int8', 'int16', 'int32', 'int64',
+ 'float', 'float32', 'float64',
+ 'complex64', 'complex128', 'byte', 'rune',
+ 'string', 'bool', 'error', 'uintptr'), suffix=r'\b'),
+ Keyword.Type),
+ # imaginary_lit
+ (r'\d+i', Number),
+ (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
+ (r'\.\d+([Ee][-+]\d+)?i', Number),
+ (r'\d+[Ee][-+]\d+i', Number),
+ # float_lit
+ (r'\d+(\.\d+[eE][+\-]?\d+|'
+ r'\.\d*|[eE][+\-]?\d+)', Number.Float),
+ (r'\.\d+([eE][+\-]?\d+)?', Number.Float),
+ # int_lit
+ # -- octal_lit
+ (r'0[0-7]+', Number.Oct),
+ # -- hex_lit
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ # -- decimal_lit
+ (r'(0|[1-9][0-9]*)', Number.Integer),
+ # char_lit
+ (r"""'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'""",
+ String.Char),
+ # StringLiteral
+ # -- raw_string_lit
+ (r'`[^`]*`', String),
+ # -- interpreted_string_lit
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # Tokens
+ (r'(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\|'
+ r'|<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])', Operator),
+ (r'[|^<>=!()\[\]{}.,;:]', Punctuation),
+ # identifier
+ (r'[^\W\d]\w*', Name.Other),
+ ]
+ }
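
A quick way to exercise the lexer in the new pygments/lexers/go.py module is to feed it a toy program through the standard highlight() API. A minimal sketch (the Go snippet is invented for illustration, and it assumes this diff is applied so that pygments.lexers.go exists):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.go import GoLexer

# a tiny, made-up Go program just to drive the token rules above
code = 'package main\n\nfunc main() {\n\tprintln("hello")\n}\n'
print(highlight(code, GoLexer(), TerminalFormatter()))
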
diff --git a/pygments/lexers/graph.py b/pygments/lexers/graph.py
new file mode 100644
index 00000000..d90f0278
--- /dev/null
+++ b/pygments/lexers/graph.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.graph
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for graph query languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this
+from pygments.token import Keyword, Punctuation, Comment, Operator, Name,\
+ String, Number, Whitespace
+
+
+__all__ = ['CypherLexer']
+
+
+class CypherLexer(RegexLexer):
+ """
+    For the `Cypher Query Language
+    <http://docs.neo4j.org/chunked/milestone/cypher-query-lang.html>`_,
+    as implemented in Neo4j 2.0.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Cypher'
+ aliases = ['cypher']
+ filenames = ['*.cyp', '*.cypher']
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ include('comment'),
+ include('keywords'),
+ include('clauses'),
+ include('relations'),
+ include('strings'),
+ include('whitespace'),
+ include('barewords'),
+ ],
+ 'comment': [
+ (r'^.*//.*\n', Comment.Single),
+ ],
+ 'keywords': [
+ (r'(create|order|match|limit|set|skip|start|return|with|where|'
+ r'delete|foreach|not|by)\b', Keyword),
+ ],
+ 'clauses': [
+ # TODO: many missing ones, see http://docs.neo4j.org/refcard/2.0/
+ (r'(all|any|as|asc|create|create\s+unique|delete|'
+ r'desc|distinct|foreach|in|is\s+null|limit|match|none|'
+ r'order\s+by|return|set|skip|single|start|union|where|with)\b',
+ Keyword),
+ ],
+ 'relations': [
+ (r'(-\[)(.*?)(\]->)', bygroups(Operator, using(this), Operator)),
+ (r'(<-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)),
+ (r'-->|<--|\[|\]', Operator),
+ (r'<|>|<>|=|<=|=>|\(|\)|\||:|,|;', Punctuation),
+ (r'[.*{}]', Punctuation),
+ ],
+ 'strings': [
+ (r'"(?:\\[tbnrf\'"\\]|[^\\"])*"', String),
+ (r'`(?:``|[^`])+`', Name.Variable),
+ ],
+ 'whitespace': [
+ (r'\s+', Whitespace),
+ ],
+ 'barewords': [
+ (r'[a-z]\w*', Name),
+ (r'\d+', Number),
+ ],
+ }
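
For a rough check of how the Cypher states above split a query into keywords, relations and barewords, the token stream can be iterated directly. A small sketch (the query text is made up; only the generic RegexLexer.get_tokens API is used):

from pygments.lexers.graph import CypherLexer

query = 'MATCH (a)-[r:KNOWS]->(b) RETURN a, b'
for tokentype, value in CypherLexer().get_tokens(query):
    if value.strip():  # skip pure whitespace tokens
        print(tokentype, repr(value))
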
diff --git a/pygments/lexers/graphics.py b/pygments/lexers/graphics.py
new file mode 100644
index 00000000..b40e0286
--- /dev/null
+++ b/pygments/lexers/graphics.py
@@ -0,0 +1,553 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.graphics
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for computer graphics and plotting related languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, include, bygroups, using, \
+ this, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, \
+ Number, Punctuation, String
+
+__all__ = ['GLShaderLexer', 'PostScriptLexer', 'AsymptoteLexer', 'GnuplotLexer',
+ 'PovrayLexer']
+
+
+class GLShaderLexer(RegexLexer):
+ """
+ GLSL (OpenGL Shader) lexer.
+
+ .. versionadded:: 1.1
+ """
+ name = 'GLSL'
+ aliases = ['glsl']
+ filenames = ['*.vert', '*.frag', '*.geo']
+ mimetypes = ['text/x-glslsrc']
+
+ tokens = {
+ 'root': [
+ (r'^#.*', Comment.Preproc),
+ (r'//.*', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?',
+ Operator),
+ (r'[?:]', Operator), # quick hack for ternary
+ (r'\bdefined\b', Operator),
+ (r'[;{}(),\[\]]', Punctuation),
+ # FIXME when e is present, no decimal point needed
+ (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
+ (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
+ (r'0[xX][0-9a-fA-F]*', Number.Hex),
+ (r'0[0-7]*', Number.Oct),
+ (r'[1-9][0-9]*', Number.Integer),
+ (words((
+ 'attribute', 'const', 'uniform', 'varying', 'centroid', 'break',
+ 'continue', 'do', 'for', 'while', 'if', 'else', 'in', 'out',
+ 'inout', 'float', 'int', 'void', 'bool', 'true', 'false',
+                'invariant', 'discard', 'return', 'mat2', 'mat3', 'mat4',
+ 'mat2x2', 'mat3x2', 'mat4x2', 'mat2x3', 'mat3x3', 'mat4x3',
+ 'mat2x4', 'mat3x4', 'mat4x4', 'vec2', 'vec3', 'vec4',
+ 'ivec2', 'ivec3', 'ivec4', 'bvec2', 'bvec3', 'bvec4',
+                'sampler1D', 'sampler2D', 'sampler3D', 'samplerCube',
+ 'sampler1DShadow', 'sampler2DShadow', 'struct'),
+ prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (words((
+ 'asm', 'class', 'union', 'enum', 'typedef', 'template', 'this',
+ 'packed', 'goto', 'switch', 'default', 'inline', 'noinline',
+ 'volatile', 'public', 'static', 'extern', 'external', 'interface',
+ 'long', 'short', 'double', 'half', 'fixed', 'unsigned', 'lowp',
+ 'mediump', 'highp', 'precision', 'input', 'output',
+ 'hvec2', 'hvec3', 'hvec4', 'dvec2', 'dvec3', 'dvec4',
+ 'fvec2', 'fvec3', 'fvec4', 'sampler2DRect', 'sampler3DRect',
+ 'sampler2DRectShadow', 'sizeof', 'cast', 'namespace', 'using'),
+ prefix=r'\b', suffix=r'\b'),
+ Keyword), # future use
+ (r'[a-zA-Z_]\w*', Name),
+ (r'\.', Punctuation),
+ (r'\s+', Text),
+ ],
+ }
+
+
+class PostScriptLexer(RegexLexer):
+ """
+ Lexer for PostScript files.
+
+ The PostScript Language Reference published by Adobe at
+ <http://partners.adobe.com/public/developer/en/ps/PLRM.pdf>
+ is the authority for this.
+
+ .. versionadded:: 1.4
+ """
+ name = 'PostScript'
+ aliases = ['postscript', 'postscr']
+ filenames = ['*.ps', '*.eps']
+ mimetypes = ['application/postscript']
+
+ delimiter = r'()<>\[\]{}/%\s'
+ delimiter_end = r'(?=[%s])' % delimiter
+
+ valid_name_chars = r'[^%s]' % delimiter
+ valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
+
+ tokens = {
+ 'root': [
+ # All comment types
+ (r'^%!.+\n', Comment.Preproc),
+ (r'%%.*\n', Comment.Special),
+ (r'(^%.*\n){2,}', Comment.Multiline),
+ (r'%.*\n', Comment.Single),
+
+ # String literals are awkward; enter separate state.
+ (r'\(', String, 'stringliteral'),
+
+ (r'[{}<>\[\]]', Punctuation),
+
+ # Numbers
+ (r'<[0-9A-Fa-f]+>' + delimiter_end, Number.Hex),
+ # Slight abuse: use Oct to signify any explicit base system
+ (r'[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)'
+ r'((e|E)[0-9]+)?' + delimiter_end, Number.Oct),
+ (r'(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?'
+ + delimiter_end, Number.Float),
+ (r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer),
+
+ # References
+ (r'\/%s' % valid_name, Name.Variable),
+
+ # Names
+ (valid_name, Name.Function), # Anything else is executed
+
+ # These keywords taken from
+ # <http://www.math.ubc.ca/~cass/graphics/manual/pdf/a1.pdf>
+ # Is there an authoritative list anywhere that doesn't involve
+ # trawling documentation?
+
+ (r'(false|true)' + delimiter_end, Keyword.Constant),
+
+ # Conditionals / flow control
+ (r'(eq|ne|g[et]|l[et]|and|or|not|if(?:else)?|for(?:all)?)'
+ + delimiter_end, Keyword.Reserved),
+
+ (words((
+ 'abs', 'add', 'aload', 'arc', 'arcn', 'array', 'atan', 'begin',
+ 'bind', 'ceiling', 'charpath', 'clip', 'closepath', 'concat',
+ 'concatmatrix', 'copy', 'cos', 'currentlinewidth', 'currentmatrix',
+ 'currentpoint', 'curveto', 'cvi', 'cvs', 'def', 'defaultmatrix',
+ 'dict', 'dictstackoverflow', 'div', 'dtransform', 'dup', 'end',
+ 'exch', 'exec', 'exit', 'exp', 'fill', 'findfont', 'floor', 'get',
+ 'getinterval', 'grestore', 'gsave', 'gt', 'identmatrix', 'idiv',
+ 'idtransform', 'index', 'invertmatrix', 'itransform', 'length',
+ 'lineto', 'ln', 'load', 'log', 'loop', 'matrix', 'mod', 'moveto',
+ 'mul', 'neg', 'newpath', 'pathforall', 'pathbbox', 'pop', 'print',
+ 'pstack', 'put', 'quit', 'rand', 'rangecheck', 'rcurveto', 'repeat',
+ 'restore', 'rlineto', 'rmoveto', 'roll', 'rotate', 'round', 'run',
+ 'save', 'scale', 'scalefont', 'setdash', 'setfont', 'setgray',
+ 'setlinecap', 'setlinejoin', 'setlinewidth', 'setmatrix',
+ 'setrgbcolor', 'shfill', 'show', 'showpage', 'sin', 'sqrt',
+ 'stack', 'stringwidth', 'stroke', 'strokepath', 'sub', 'syntaxerror',
+ 'transform', 'translate', 'truncate', 'typecheck', 'undefined',
+ 'undefinedfilename', 'undefinedresult'), suffix=delimiter_end),
+ Name.Builtin),
+
+ (r'\s+', Text),
+ ],
+
+ 'stringliteral': [
+ (r'[^()\\]+', String),
+ (r'\\', String.Escape, 'escape'),
+ (r'\(', String, '#push'),
+ (r'\)', String, '#pop'),
+ ],
+
+ 'escape': [
+ (r'[0-8]{3}|n|r|t|b|f|\\|\(|\)', String.Escape, '#pop'),
+ default('#pop'),
+ ],
+ }
+
+
+class AsymptoteLexer(RegexLexer):
+ """
+ For `Asymptote <http://asymptote.sf.net/>`_ source code.
+
+ .. versionadded:: 1.2
+ """
+ name = 'Asymptote'
+ aliases = ['asy', 'asymptote']
+ filenames = ['*.asy']
+ mimetypes = ['text/x-asymptote']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
+
+ tokens = {
+ 'whitespace': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//(\n|(.|\n)*?[^\\]\n)', Comment),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment),
+ ],
+ 'statements': [
+ # simple string (TeX friendly)
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # C style string (with character escapes)
+ (r"'", String, 'string'),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'0[0-7]+[Ll]?', Number.Oct),
+ (r'\d+[Ll]?', Number.Integer),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.]', Punctuation),
+ (r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)),
+ (r'(and|controls|tension|atleast|curl|if|else|while|for|do|'
+ r'return|break|continue|struct|typedef|new|access|import|'
+ r'unravel|from|include|quote|static|public|private|restricted|'
+ r'this|explicit|true|false|null|cycle|newframe|operator)\b', Keyword),
+            # Since an asy-type-name can also be an asy-function-name,
+            # we test below whether the string " [a-zA-Z]" follows
+            # the Keyword.Type.
+            # Of course this is not perfect!
+ (r'(Braid|FitResult|Label|Legend|TreeNode|abscissa|arc|arrowhead|'
+ r'binarytree|binarytreeNode|block|bool|bool3|bounds|bqe|circle|'
+ r'conic|coord|coordsys|cputime|ellipse|file|filltype|frame|grid3|'
+ r'guide|horner|hsv|hyperbola|indexedTransform|int|inversion|key|'
+ r'light|line|linefit|marginT|marker|mass|object|pair|parabola|path|'
+ r'path3|pen|picture|point|position|projection|real|revolution|'
+ r'scaleT|scientific|segment|side|slice|splitface|string|surface|'
+ r'tensionSpecifier|ticklocate|ticksgridT|tickvalues|transform|'
+ r'transformation|tree|triangle|trilinear|triple|vector|'
+ r'vertex|void)(?=\s+[a-zA-Z])', Keyword.Type),
+            # Now the asy-type-names which are not also asy-function-names
+            # (except user-defined ones).
+            # Perhaps useless
+ (r'(Braid|FitResult|TreeNode|abscissa|arrowhead|block|bool|bool3|'
+ r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|'
+ r'picture|position|real|revolution|slice|splitface|ticksgridT|'
+ r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type),
+ ('[a-zA-Z_]\w*:(?!:)', Name.Label),
+ ('[a-zA-Z_]\w*', Name),
+ ],
+ 'root': [
+ include('whitespace'),
+ # functions
+ (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'(' + _ws + r')(\{)',
+ bygroups(using(this), Name.Function, using(this), using(this),
+ Punctuation),
+ 'function'),
+ # function declarations
+ (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'(' + _ws + r')(;)',
+ bygroups(using(this), Name.Function, using(this), using(this),
+ Punctuation)),
+ default('statement'),
+ ],
+ 'statement': [
+ include('whitespace'),
+ include('statements'),
+ ('[{}]', Punctuation),
+ (';', Punctuation, '#pop'),
+ ],
+ 'function': [
+ include('whitespace'),
+ include('statements'),
+ (';', Punctuation),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'string': [
+ (r"'", String, '#pop'),
+ (r'\\([\\abfnrtv"\'?]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'\n', String),
+ (r"[^\\'\n]+", String), # all other characters
+            (r'\\\n', String), # line continuation
+            (r'\\n', String),
+ (r'\\', String), # stray backslash
+ ],
+ }
+
+ def get_tokens_unprocessed(self, text):
+ from pygments.lexers._asy_builtins import ASYFUNCNAME, ASYVARNAME
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name and value in ASYFUNCNAME:
+ token = Name.Function
+ elif token is Name and value in ASYVARNAME:
+ token = Name.Variable
+ yield index, token, value
+
+
+def _shortened(word):
+ dpos = word.find('$')
+ return '|'.join(word[:dpos] + word[dpos+1:i] + r'\b'
+ for i in range(len(word), dpos, -1))
+
+
+def _shortened_many(*words):
+ return '|'.join(map(_shortened, words))
+
+
+class GnuplotLexer(RegexLexer):
+ """
+ For `Gnuplot <http://gnuplot.info/>`_ plotting scripts.
+
+ .. versionadded:: 0.11
+ """
+
+ name = 'Gnuplot'
+ aliases = ['gnuplot']
+ filenames = ['*.plot', '*.plt']
+ mimetypes = ['text/x-gnuplot']
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ (_shortened('bi$nd'), Keyword, 'bind'),
+ (_shortened_many('ex$it', 'q$uit'), Keyword, 'quit'),
+ (_shortened('f$it'), Keyword, 'fit'),
+ (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation), 'if'),
+ (r'else\b', Keyword),
+ (_shortened('pa$use'), Keyword, 'pause'),
+ (_shortened_many('p$lot', 'rep$lot', 'sp$lot'), Keyword, 'plot'),
+ (_shortened('sa$ve'), Keyword, 'save'),
+ (_shortened('se$t'), Keyword, ('genericargs', 'optionarg')),
+ (_shortened_many('sh$ow', 'uns$et'),
+ Keyword, ('noargs', 'optionarg')),
+ (_shortened_many('low$er', 'ra$ise', 'ca$ll', 'cd$', 'cl$ear',
+ 'h$elp', '\\?$', 'hi$story', 'l$oad', 'pr$int',
+ 'pwd$', 're$read', 'res$et', 'scr$eendump',
+ 'she$ll', 'sy$stem', 'up$date'),
+ Keyword, 'genericargs'),
+ (_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump',
+ 'she$ll', 'test$'),
+ Keyword, 'noargs'),
+ ('([a-zA-Z_]\w*)(\s*)(=)',
+ bygroups(Name.Variable, Text, Operator), 'genericargs'),
+ ('([a-zA-Z_]\w*)(\s*\(.*?\)\s*)(=)',
+ bygroups(Name.Function, Text, Operator), 'genericargs'),
+ (r'@[a-zA-Z_]\w*', Name.Constant), # macros
+ (r';', Keyword),
+ ],
+ 'comment': [
+ (r'[^\\\n]', Comment),
+ (r'\\\n', Comment),
+ (r'\\', Comment),
+ # don't add the newline to the Comment token
+ default('#pop'),
+ ],
+ 'whitespace': [
+ ('#', Comment, 'comment'),
+ (r'[ \t\v\f]+', Text),
+ ],
+ 'noargs': [
+ include('whitespace'),
+ # semicolon and newline end the argument list
+ (r';', Punctuation, '#pop'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'dqstring': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ (r'\n', String, '#pop'), # newline ends the string too
+ ],
+ 'sqstring': [
+ (r"''", String), # escaped single quote
+ (r"'", String, '#pop'),
+ (r"[^\\'\n]+", String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # normal backslash
+ (r'\n', String, '#pop'), # newline ends the string too
+ ],
+ 'genericargs': [
+ include('noargs'),
+ (r'"', String, 'dqstring'),
+ (r"'", String, 'sqstring'),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
+ (r'(\d+\.\d*|\.\d+)', Number.Float),
+ (r'-?\d+', Number.Integer),
+ ('[,.~!%^&*+=|?:<>/-]', Operator),
+ ('[{}()\[\]]', Punctuation),
+ (r'(eq|ne)\b', Operator.Word),
+ (r'([a-zA-Z_]\w*)(\s*)(\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'[a-zA-Z_]\w*', Name),
+ (r'@[a-zA-Z_]\w*', Name.Constant), # macros
+ (r'\\\n', Text),
+ ],
+ 'optionarg': [
+ include('whitespace'),
+ (_shortened_many(
+ "a$ll", "an$gles", "ar$row", "au$toscale", "b$ars", "bor$der",
+ "box$width", "cl$abel", "c$lip", "cn$trparam", "co$ntour", "da$ta",
+ "data$file", "dg$rid3d", "du$mmy", "enc$oding", "dec$imalsign",
+ "fit$", "font$path", "fo$rmat", "fu$nction", "fu$nctions", "g$rid",
+ "hid$den3d", "his$torysize", "is$osamples", "k$ey", "keyt$itle",
+ "la$bel", "li$nestyle", "ls$", "loa$dpath", "loc$ale", "log$scale",
+ "mac$ros", "map$ping", "map$ping3d", "mar$gin", "lmar$gin",
+ "rmar$gin", "tmar$gin", "bmar$gin", "mo$use", "multi$plot",
+ "mxt$ics", "nomxt$ics", "mx2t$ics", "nomx2t$ics", "myt$ics",
+ "nomyt$ics", "my2t$ics", "nomy2t$ics", "mzt$ics", "nomzt$ics",
+ "mcbt$ics", "nomcbt$ics", "of$fsets", "or$igin", "o$utput",
+ "pa$rametric", "pm$3d", "pal$ette", "colorb$ox", "p$lot",
+ "poi$ntsize", "pol$ar", "pr$int", "obj$ect", "sa$mples", "si$ze",
+ "st$yle", "su$rface", "table$", "t$erminal", "termo$ptions", "ti$cs",
+ "ticsc$ale", "ticsl$evel", "timef$mt", "tim$estamp", "tit$le",
+ "v$ariables", "ve$rsion", "vi$ew", "xyp$lane", "xda$ta", "x2da$ta",
+ "yda$ta", "y2da$ta", "zda$ta", "cbda$ta", "xl$abel", "x2l$abel",
+ "yl$abel", "y2l$abel", "zl$abel", "cbl$abel", "xti$cs", "noxti$cs",
+ "x2ti$cs", "nox2ti$cs", "yti$cs", "noyti$cs", "y2ti$cs", "noy2ti$cs",
+ "zti$cs", "nozti$cs", "cbti$cs", "nocbti$cs", "xdti$cs", "noxdti$cs",
+ "x2dti$cs", "nox2dti$cs", "ydti$cs", "noydti$cs", "y2dti$cs",
+ "noy2dti$cs", "zdti$cs", "nozdti$cs", "cbdti$cs", "nocbdti$cs",
+ "xmti$cs", "noxmti$cs", "x2mti$cs", "nox2mti$cs", "ymti$cs",
+ "noymti$cs", "y2mti$cs", "noy2mti$cs", "zmti$cs", "nozmti$cs",
+ "cbmti$cs", "nocbmti$cs", "xr$ange", "x2r$ange", "yr$ange",
+ "y2r$ange", "zr$ange", "cbr$ange", "rr$ange", "tr$ange", "ur$ange",
+ "vr$ange", "xzeroa$xis", "x2zeroa$xis", "yzeroa$xis", "y2zeroa$xis",
+ "zzeroa$xis", "zeroa$xis", "z$ero"), Name.Builtin, '#pop'),
+ ],
+ 'bind': [
+ ('!', Keyword, '#pop'),
+ (_shortened('all$windows'), Name.Builtin),
+ include('genericargs'),
+ ],
+ 'quit': [
+ (r'gnuplot\b', Keyword),
+ include('noargs'),
+ ],
+ 'fit': [
+ (r'via\b', Name.Builtin),
+ include('plot'),
+ ],
+ 'if': [
+ (r'\)', Punctuation, '#pop'),
+ include('genericargs'),
+ ],
+ 'pause': [
+ (r'(mouse|any|button1|button2|button3)\b', Name.Builtin),
+ (_shortened('key$press'), Name.Builtin),
+ include('genericargs'),
+ ],
+ 'plot': [
+ (_shortened_many('ax$es', 'axi$s', 'bin$ary', 'ev$ery', 'i$ndex',
+ 'mat$rix', 's$mooth', 'thru$', 't$itle',
+ 'not$itle', 'u$sing', 'w$ith'),
+ Name.Builtin),
+ include('genericargs'),
+ ],
+ 'save': [
+ (_shortened_many('f$unctions', 's$et', 't$erminal', 'v$ariables'),
+ Name.Builtin),
+ include('genericargs'),
+ ],
+ }
+
+
+class PovrayLexer(RegexLexer):
+ """
+ For `Persistence of Vision Raytracer <http://www.povray.org/>`_ files.
+
+ .. versionadded:: 0.11
+ """
+ name = 'POVRay'
+ aliases = ['pov']
+ filenames = ['*.pov', '*.inc']
+ mimetypes = ['text/x-povray']
+
+ tokens = {
+ 'root': [
+ (r'/\*[\w\W]*?\*/', Comment.Multiline),
+ (r'//.*\n', Comment.Single),
+ (r'(?s)"(?:\\.|[^"\\])+"', String.Double),
+ (words((
+ 'break', 'case', 'debug', 'declare', 'default', 'define', 'else',
+ 'elseif', 'end', 'error', 'fclose', 'fopen', 'for', 'if', 'ifdef',
+ 'ifndef', 'include', 'local', 'macro', 'range', 'read', 'render',
+ 'statistics', 'switch', 'undef', 'version', 'warning', 'while',
+ 'write'), prefix=r'#', suffix=r'\b'),
+ Comment.Preproc),
+ (words((
+ 'aa_level', 'aa_threshold', 'abs', 'acos', 'acosh', 'adaptive', 'adc_bailout',
+ 'agate', 'agate_turb', 'all', 'alpha', 'ambient', 'ambient_light', 'angle',
+ 'aperture', 'arc_angle', 'area_light', 'asc', 'asin', 'asinh', 'assumed_gamma',
+ 'atan', 'atan2', 'atanh', 'atmosphere', 'atmospheric_attenuation',
+ 'attenuating', 'average', 'background', 'black_hole', 'blue', 'blur_samples',
+ 'bounded_by', 'box_mapping', 'bozo', 'break', 'brick', 'brick_size',
+ 'brightness', 'brilliance', 'bumps', 'bumpy1', 'bumpy2', 'bumpy3', 'bump_map',
+ 'bump_size', 'case', 'caustics', 'ceil', 'checker', 'chr', 'clipped_by', 'clock',
+ 'color', 'color_map', 'colour', 'colour_map', 'component', 'composite', 'concat',
+ 'confidence', 'conic_sweep', 'constant', 'control0', 'control1', 'cos', 'cosh',
+ 'count', 'crackle', 'crand', 'cube', 'cubic_spline', 'cylindrical_mapping',
+ 'debug', 'declare', 'default', 'degrees', 'dents', 'diffuse', 'direction',
+ 'distance', 'distance_maximum', 'div', 'dust', 'dust_type', 'eccentricity',
+ 'else', 'emitting', 'end', 'error', 'error_bound', 'exp', 'exponent',
+ 'fade_distance', 'fade_power', 'falloff', 'falloff_angle', 'false',
+ 'file_exists', 'filter', 'finish', 'fisheye', 'flatness', 'flip', 'floor',
+ 'focal_point', 'fog', 'fog_alt', 'fog_offset', 'fog_type', 'frequency', 'gif',
+ 'global_settings', 'glowing', 'gradient', 'granite', 'gray_threshold',
+ 'green', 'halo', 'hexagon', 'hf_gray_16', 'hierarchy', 'hollow', 'hypercomplex',
+ 'if', 'ifdef', 'iff', 'image_map', 'incidence', 'include', 'int', 'interpolate',
+ 'inverse', 'ior', 'irid', 'irid_wavelength', 'jitter', 'lambda', 'leopard',
+ 'linear', 'linear_spline', 'linear_sweep', 'location', 'log', 'looks_like',
+ 'look_at', 'low_error_factor', 'mandel', 'map_type', 'marble', 'material_map',
+ 'matrix', 'max', 'max_intersections', 'max_iteration', 'max_trace_level',
+ 'max_value', 'metallic', 'min', 'minimum_reuse', 'mod', 'mortar',
+ 'nearest_count', 'no', 'normal', 'normal_map', 'no_shadow', 'number_of_waves',
+ 'octaves', 'off', 'offset', 'omega', 'omnimax', 'on', 'once', 'onion', 'open',
+ 'orthographic', 'panoramic', 'pattern1', 'pattern2', 'pattern3',
+ 'perspective', 'pgm', 'phase', 'phong', 'phong_size', 'pi', 'pigment',
+ 'pigment_map', 'planar_mapping', 'png', 'point_at', 'pot', 'pow', 'ppm',
+ 'precision', 'pwr', 'quadratic_spline', 'quaternion', 'quick_color',
+ 'quick_colour', 'quilted', 'radial', 'radians', 'radiosity', 'radius', 'rainbow',
+ 'ramp_wave', 'rand', 'range', 'reciprocal', 'recursion_limit', 'red',
+ 'reflection', 'refraction', 'render', 'repeat', 'rgb', 'rgbf', 'rgbft', 'rgbt',
+ 'right', 'ripples', 'rotate', 'roughness', 'samples', 'scale', 'scallop_wave',
+ 'scattering', 'seed', 'shadowless', 'sin', 'sine_wave', 'sinh', 'sky', 'sky_sphere',
+ 'slice', 'slope_map', 'smooth', 'specular', 'spherical_mapping', 'spiral',
+ 'spiral1', 'spiral2', 'spotlight', 'spotted', 'sqr', 'sqrt', 'statistics', 'str',
+ 'strcmp', 'strength', 'strlen', 'strlwr', 'strupr', 'sturm', 'substr', 'switch', 'sys',
+ 't', 'tan', 'tanh', 'test_camera_1', 'test_camera_2', 'test_camera_3',
+ 'test_camera_4', 'texture', 'texture_map', 'tga', 'thickness', 'threshold',
+ 'tightness', 'tile2', 'tiles', 'track', 'transform', 'translate', 'transmit',
+ 'triangle_wave', 'true', 'ttf', 'turbulence', 'turb_depth', 'type',
+ 'ultra_wide_angle', 'up', 'use_color', 'use_colour', 'use_index', 'u_steps',
+ 'val', 'variance', 'vaxis_rotate', 'vcross', 'vdot', 'version', 'vlength',
+ 'vnormalize', 'volume_object', 'volume_rendered', 'vol_with_light',
+ 'vrotate', 'v_steps', 'warning', 'warp', 'water_level', 'waves', 'while', 'width',
+ 'wood', 'wrinkles', 'yes'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (words((
+ 'bicubic_patch', 'blob', 'box', 'camera', 'cone', 'cubic', 'cylinder', 'difference',
+ 'disc', 'height_field', 'intersection', 'julia_fractal', 'lathe',
+ 'light_source', 'merge', 'mesh', 'object', 'plane', 'poly', 'polygon', 'prism',
+ 'quadric', 'quartic', 'smooth_triangle', 'sor', 'sphere', 'superellipsoid',
+ 'text', 'torus', 'triangle', 'union'), suffix=r'\b'),
+ Name.Builtin),
+ # TODO: <=, etc
+ (r'[\[\](){}<>;,]', Punctuation),
+ (r'[-+*/=]', Operator),
+ (r'\b(x|y|z|u|v)\b', Name.Builtin.Pseudo),
+ (r'[a-zA-Z_]\w*', Name),
+ (r'[0-9]+\.[0-9]*', Number.Float),
+ (r'\.[0-9]+', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r'\s+', Text),
+ ]
+ }
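
The _shortened/_shortened_many helpers above expand Gnuplot's abbreviated commands into a regex alternation: every prefix of the word at least as long as the part before the '$' marker becomes one alternative, longest first. A small sketch of the expansion (assumes this diff is applied so the module-private helpers in pygments.lexers.graphics are importable; shown for illustration only):

from pygments.lexers.graphics import _shortened, _shortened_many

print(_shortened('bi$nd'))
# -> bind\b|bin\b|bi\b
print(_shortened_many('ex$it', 'q$uit'))
# -> exit\b|exi\b|ex\b|quit\b|qui\b|qu\b|q\b
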
diff --git a/pygments/lexers/haskell.py b/pygments/lexers/haskell.py
new file mode 100644
index 00000000..95e68a33
--- /dev/null
+++ b/pygments/lexers/haskell.py
@@ -0,0 +1,840 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.haskell
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Haskell and related languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
+ default, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+from pygments import unistring as uni
+
+__all__ = ['HaskellLexer', 'IdrisLexer', 'AgdaLexer', 'CryptolLexer',
+ 'LiterateHaskellLexer', 'LiterateIdrisLexer', 'LiterateAgdaLexer',
+ 'LiterateCryptolLexer', 'KokaLexer']
+
+
+line_re = re.compile('.*?\n')
+
+
+class HaskellLexer(RegexLexer):
+ """
+ A Haskell lexer based on the lexemes defined in the Haskell 98 Report.
+
+ .. versionadded:: 0.8
+ """
+ name = 'Haskell'
+ aliases = ['haskell', 'hs']
+ filenames = ['*.hs']
+ mimetypes = ['text/x-haskell']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ reserved = ('case', 'class', 'data', 'default', 'deriving', 'do', 'else',
+ 'if', 'in', 'infix[lr]?', 'instance',
+ 'let', 'newtype', 'of', 'then', 'type', 'where', '_')
+ ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
+ 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
+ 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
+ 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
+
+ tokens = {
+ 'root': [
+ # Whitespace:
+ (r'\s+', Text),
+ # (r'--\s*|.*$', Comment.Doc),
+ (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
+ (r'\{-', Comment.Multiline, 'comment'),
+ # Lexemes:
+ # Identifiers
+ (r'\bimport\b', Keyword.Reserved, 'import'),
+ (r'\bmodule\b', Keyword.Reserved, 'module'),
+ (r'\berror\b', Name.Exception),
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r"'[^\\]'", String.Char), # this has to come before the TH quote
+ (r'^[_' + uni.Ll + r'][\w\']*', Name.Function),
+ (r"'?[_" + uni.Ll + r"][\w']*", Name),
+ (r"('')?[" + uni.Lu + r"][\w\']*", Keyword.Type),
+ # Operators
+ (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
+ (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
+ (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
+ # Numbers
+ (r'\d+[eE][+-]?\d+', Number.Float),
+ (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
+ (r'0[oO][0-7]+', Number.Oct),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ # Character/String Literals
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ # Special
+ (r'\[\]', Keyword.Type),
+ (r'\(\)', Name.Builtin),
+ (r'[][(),;`{}]', Punctuation),
+ ],
+ 'import': [
+ # Import statements
+ (r'\s+', Text),
+ (r'"', String, 'string'),
+ # after "funclist" state
+ (r'\)', Punctuation, '#pop'),
+ (r'qualified\b', Keyword),
+ # import X as Y
+ (r'([' + uni.Lu + r'][\w.]*)(\s+)(as)(\s+)([' + uni.Lu + r'][\w.]*)',
+ bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'),
+ # import X hiding (functions)
+ (r'([' + uni.Lu + r'][\w.]*)(\s+)(hiding)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'),
+ # import X (functions)
+ (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
+ # import X
+ (r'[\w.]+', Name.Namespace, '#pop'),
+ ],
+ 'module': [
+ (r'\s+', Text),
+ (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
+ (r'[' + uni.Lu + r'][\w.]*', Name.Namespace, '#pop'),
+ ],
+ 'funclist': [
+ (r'\s+', Text),
+ (r'[' + uni.Lu + r']\w*', Keyword.Type),
+ (r'(_[\w\']+|[' + uni.Ll + r'][\w\']*)', Name.Function),
+ (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
+ (r'\{-', Comment.Multiline, 'comment'),
+ (r',', Punctuation),
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
+ # (HACK, but it makes sense to push two instances, believe me)
+ (r'\(', Punctuation, ('funclist', 'funclist')),
+ (r'\)', Punctuation, '#pop:2'),
+ ],
+ # NOTE: the next four states are shared in the AgdaLexer; make sure
+ # any change is compatible with Agda as well or copy over and change
+ 'comment': [
+ # Multiline Comments
+ (r'[^-{}]+', Comment.Multiline),
+ (r'\{-', Comment.Multiline, '#push'),
+ (r'-\}', Comment.Multiline, '#pop'),
+ (r'[-{}]', Comment.Multiline),
+ ],
+ 'character': [
+ # Allows multi-chars, incorrectly.
+ (r"[^\\']'", String.Char, '#pop'),
+ (r"\\", String.Escape, 'escape'),
+ ("'", String.Char, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"]+', String),
+ (r"\\", String.Escape, 'escape'),
+ ('"', String, '#pop'),
+ ],
+ 'escape': [
+ (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
+ (r'\^[][' + uni.Lu + r'@^_]', String.Escape, '#pop'),
+ ('|'.join(ascii), String.Escape, '#pop'),
+ (r'o[0-7]+', String.Escape, '#pop'),
+ (r'x[\da-fA-F]+', String.Escape, '#pop'),
+ (r'\d+', String.Escape, '#pop'),
+ (r'\s+\\', String.Escape, '#pop'),
+ ],
+ }
+
+
+class IdrisLexer(RegexLexer):
+ """
+ A lexer for the dependently typed programming language Idris.
+
+ Based on the Haskell and Agda Lexer.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Idris'
+ aliases = ['idris', 'idr']
+ filenames = ['*.idr']
+ mimetypes = ['text/x-idris']
+
+ reserved = ('case', 'class', 'data', 'default', 'using', 'do', 'else',
+ 'if', 'in', 'infix[lr]?', 'instance', 'rewrite', 'auto',
+ 'namespace', 'codata', 'mutual', 'private', 'public', 'abstract',
+ 'total', 'partial',
+ 'let', 'proof', 'of', 'then', 'static', 'where', '_', 'with',
+ 'pattern', 'term', 'syntax', 'prefix',
+ 'postulate', 'parameters', 'record', 'dsl', 'impossible', 'implicit',
+ 'tactics', 'intros', 'intro', 'compute', 'refine', 'exact', 'trivial')
+
+ ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
+ 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
+ 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
+ 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
+
+ directives = ('lib', 'link', 'flag', 'include', 'hide', 'freeze', 'access',
+ 'default', 'logging', 'dynamic', 'name', 'error_handlers', 'language')
+
+ tokens = {
+ 'root': [
+ # Comments
+ (r'^(\s*)(%%%s)' % '|'.join(directives),
+ bygroups(Text, Keyword.Reserved)),
+ (r'(\s*)(--(?![!#$%&*+./<=>?@^|_~:\\]).*?)$', bygroups(Text, Comment.Single)),
+ (r'(\s*)(\|{3}.*?)$', bygroups(Text, Comment.Single)),
+ (r'(\s*)(\{-)', bygroups(Text, Comment.Multiline), 'comment'),
+ # Declaration
+ (r'^(\s*)([^\s(){}]+)(\s*)(:)(\s*)',
+ bygroups(Text, Name.Function, Text, Operator.Word, Text)),
+ # Identifiers
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'),
+ (r"('')?[A-Z][\w\']*", Keyword.Type),
+ (r'[a-z][\w\']*', Text),
+ # Special Symbols
+ (r'(<-|::|->|=>|=)', Operator.Word), # specials
+ (r'([(){}\[\]:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
+ # Numbers
+ (r'\d+[eE][+-]?\d+', Number.Float),
+ (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ # Strings
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ (r'[^\s(){}]+', Text),
+ (r'\s+?', Text), # Whitespace
+ ],
+ 'module': [
+ (r'\s+', Text),
+ (r'([A-Z][\w.]*)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
+ (r'[A-Z][\w.]*', Name.Namespace, '#pop'),
+ ],
+ 'funclist': [
+ (r'\s+', Text),
+ (r'[A-Z]\w*', Keyword.Type),
+ (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
+ (r'--.*$', Comment.Single),
+ (r'\{-', Comment.Multiline, 'comment'),
+ (r',', Punctuation),
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
+ # (HACK, but it makes sense to push two instances, believe me)
+ (r'\(', Punctuation, ('funclist', 'funclist')),
+ (r'\)', Punctuation, '#pop:2'),
+ ],
+ # NOTE: the next four states are shared in the AgdaLexer; make sure
+ # any change is compatible with Agda as well or copy over and change
+ 'comment': [
+ # Multiline Comments
+ (r'[^-{}]+', Comment.Multiline),
+ (r'\{-', Comment.Multiline, '#push'),
+ (r'-\}', Comment.Multiline, '#pop'),
+ (r'[-{}]', Comment.Multiline),
+ ],
+ 'character': [
+ # Allows multi-chars, incorrectly.
+ (r"[^\\']", String.Char),
+ (r"\\", String.Escape, 'escape'),
+ ("'", String.Char, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"]+', String),
+ (r"\\", String.Escape, 'escape'),
+ ('"', String, '#pop'),
+ ],
+ 'escape': [
+ (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
+ (r'\^[][A-Z@^_]', String.Escape, '#pop'),
+ ('|'.join(ascii), String.Escape, '#pop'),
+ (r'o[0-7]+', String.Escape, '#pop'),
+ (r'x[\da-fA-F]+', String.Escape, '#pop'),
+ (r'\d+', String.Escape, '#pop'),
+ (r'\s+\\', String.Escape, '#pop')
+ ],
+ }
+
+
+class AgdaLexer(RegexLexer):
+ """
+ For the `Agda <http://wiki.portal.chalmers.se/agda/pmwiki.php>`_
+ dependently typed functional programming language and proof assistant.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Agda'
+ aliases = ['agda']
+ filenames = ['*.agda']
+ mimetypes = ['text/x-agda']
+
+ reserved = ['abstract', 'codata', 'coinductive', 'constructor', 'data',
+ 'field', 'forall', 'hiding', 'in', 'inductive', 'infix',
+ 'infixl', 'infixr', 'instance', 'let', 'mutual', 'open',
+ 'pattern', 'postulate', 'primitive', 'private',
+ 'quote', 'quoteGoal', 'quoteTerm',
+ 'record', 'renaming', 'rewrite', 'syntax', 'tactic',
+ 'unquote', 'unquoteDecl', 'using', 'where', 'with']
+
+ tokens = {
+ 'root': [
+ # Declaration
+ (r'^(\s*)([^\s(){}]+)(\s*)(:)(\s*)',
+ bygroups(Text, Name.Function, Text, Operator.Word, Text)),
+ # Comments
+ (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
+ (r'\{-', Comment.Multiline, 'comment'),
+ # Holes
+ (r'\{!', Comment.Directive, 'hole'),
+ # Lexemes:
+ # Identifiers
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'),
+ (r'\b(Set|Prop)\b', Keyword.Type),
+ # Special Symbols
+ (r'(\(|\)|\{|\})', Operator),
+ (u'(\\.{1,3}|\\||\u039B|\u2200|\u2192|:|=|->)', Operator.Word),
+ # Numbers
+ (r'\d+[eE][+-]?\d+', Number.Float),
+ (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ # Strings
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ (r'[^\s(){}]+', Text),
+ (r'\s+?', Text), # Whitespace
+ ],
+ 'hole': [
+ # Holes
+ (r'[^!{}]+', Comment.Directive),
+ (r'\{!', Comment.Directive, '#push'),
+ (r'!\}', Comment.Directive, '#pop'),
+ (r'[!{}]', Comment.Directive),
+ ],
+ 'module': [
+ (r'\{-', Comment.Multiline, 'comment'),
+ (r'[a-zA-Z][\w.]*', Name, '#pop'),
+ (r'[^a-zA-Z]+', Text)
+ ],
+ 'comment': HaskellLexer.tokens['comment'],
+ 'character': HaskellLexer.tokens['character'],
+ 'string': HaskellLexer.tokens['string'],
+ 'escape': HaskellLexer.tokens['escape']
+ }
+
+
+class CryptolLexer(RegexLexer):
+ """
+ FIXME: A Cryptol2 lexer based on the lexemes defined in the Haskell 98 Report.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Cryptol'
+ aliases = ['cryptol', 'cry']
+ filenames = ['*.cry']
+ mimetypes = ['text/x-cryptol']
+
+ reserved = ('Arith', 'Bit', 'Cmp', 'False', 'Inf', 'True', 'else',
+ 'export', 'extern', 'fin', 'if', 'import', 'inf', 'lg2',
+ 'max', 'min', 'module', 'newtype', 'pragma', 'property',
+ 'then', 'type', 'where', 'width')
+ ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
+ 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
+ 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
+ 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
+
+ tokens = {
+ 'root': [
+ # Whitespace:
+ (r'\s+', Text),
+ # (r'--\s*|.*$', Comment.Doc),
+ (r'//.*$', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ # Lexemes:
+ # Identifiers
+ (r'\bimport\b', Keyword.Reserved, 'import'),
+ (r'\bmodule\b', Keyword.Reserved, 'module'),
+ (r'\berror\b', Name.Exception),
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r'^[_a-z][\w\']*', Name.Function),
+ (r"'?[_a-z][\w']*", Name),
+ (r"('')?[A-Z][\w\']*", Keyword.Type),
+ # Operators
+ (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
+ (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
+ (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
+ # Numbers
+ (r'\d+[eE][+-]?\d+', Number.Float),
+ (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
+ (r'0[oO][0-7]+', Number.Oct),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ # Character/String Literals
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ # Special
+ (r'\[\]', Keyword.Type),
+ (r'\(\)', Name.Builtin),
+ (r'[][(),;`{}]', Punctuation),
+ ],
+ 'import': [
+ # Import statements
+ (r'\s+', Text),
+ (r'"', String, 'string'),
+ # after "funclist" state
+ (r'\)', Punctuation, '#pop'),
+ (r'qualified\b', Keyword),
+ # import X as Y
+ (r'([A-Z][\w.]*)(\s+)(as)(\s+)([A-Z][\w.]*)',
+ bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'),
+ # import X hiding (functions)
+ (r'([A-Z][\w.]*)(\s+)(hiding)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'),
+ # import X (functions)
+ (r'([A-Z][\w.]*)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
+ # import X
+ (r'[\w.]+', Name.Namespace, '#pop'),
+ ],
+ 'module': [
+ (r'\s+', Text),
+ (r'([A-Z][\w.]*)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
+ (r'[A-Z][\w.]*', Name.Namespace, '#pop'),
+ ],
+ 'funclist': [
+ (r'\s+', Text),
+ (r'[A-Z]\w*', Keyword.Type),
+ (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
+ # TODO: these don't match the comments in docs, remove.
+ #(r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
+ #(r'{-', Comment.Multiline, 'comment'),
+ (r',', Punctuation),
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
+ # (HACK, but it makes sense to push two instances, believe me)
+ (r'\(', Punctuation, ('funclist', 'funclist')),
+ (r'\)', Punctuation, '#pop:2'),
+ ],
+ 'comment': [
+ # Multiline Comments
+ (r'[^/*]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ 'character': [
+ # Allows multi-chars, incorrectly.
+ (r"[^\\']'", String.Char, '#pop'),
+ (r"\\", String.Escape, 'escape'),
+ ("'", String.Char, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"]+', String),
+ (r"\\", String.Escape, 'escape'),
+ ('"', String, '#pop'),
+ ],
+ 'escape': [
+ (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
+ (r'\^[][A-Z@^_]', String.Escape, '#pop'),
+ ('|'.join(ascii), String.Escape, '#pop'),
+ (r'o[0-7]+', String.Escape, '#pop'),
+ (r'x[\da-fA-F]+', String.Escape, '#pop'),
+ (r'\d+', String.Escape, '#pop'),
+ (r'\s+\\', String.Escape, '#pop'),
+ ],
+ }
+
+ EXTRA_KEYWORDS = set(('join', 'split', 'reverse', 'transpose', 'width',
+ 'length', 'tail', '<<', '>>', '<<<', '>>>', 'const',
+ 'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error',
+ 'trace'))
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if token is Name and value in self.EXTRA_KEYWORDS:
+ yield index, Name.Builtin, value
+ else:
+ yield index, token, value
+
+
+class LiterateLexer(Lexer):
+ """
+ Base class for lexers of literate file formats based on LaTeX or Bird-style
+ (prefixing each code line with ">").
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+ """
+
+ bird_re = re.compile(r'(>[ \t]*)(.*\n)')
+
+ def __init__(self, baselexer, **options):
+ self.baselexer = baselexer
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ style = self.options.get('litstyle')
+ if style is None:
+ style = (text.lstrip()[0:1] in '%\\') and 'latex' or 'bird'
+
+ code = ''
+ insertions = []
+ if style == 'bird':
+ # bird-style
+ for match in line_re.finditer(text):
+ line = match.group()
+ m = self.bird_re.match(line)
+ if m:
+ insertions.append((len(code),
+ [(0, Comment.Special, m.group(1))]))
+ code += m.group(2)
+ else:
+ insertions.append((len(code), [(0, Text, line)]))
+ else:
+ # latex-style
+ from pygments.lexers.markup import TexLexer
+ lxlexer = TexLexer(**self.options)
+ codelines = 0
+ latex = ''
+ for match in line_re.finditer(text):
+ line = match.group()
+ if codelines:
+ if line.lstrip().startswith('\\end{code}'):
+ codelines = 0
+ latex += line
+ else:
+ code += line
+ elif line.lstrip().startswith('\\begin{code}'):
+ codelines = 1
+ latex += line
+ insertions.append((len(code),
+ list(lxlexer.get_tokens_unprocessed(latex))))
+ latex = ''
+ else:
+ latex += line
+ insertions.append((len(code),
+ list(lxlexer.get_tokens_unprocessed(latex))))
+ for item in do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code)):
+ yield item
+
+
+class LiterateHaskellLexer(LiterateLexer):
+ """
+ For Literate Haskell (Bird-style or LaTeX) source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ .. versionadded:: 0.9
+ """
+ name = 'Literate Haskell'
+ aliases = ['lhs', 'literate-haskell', 'lhaskell']
+ filenames = ['*.lhs']
+ mimetypes = ['text/x-literate-haskell']
+
+ def __init__(self, **options):
+ hslexer = HaskellLexer(**options)
+ LiterateLexer.__init__(self, hslexer, **options)
+
+
+class LiterateIdrisLexer(LiterateLexer):
+ """
+ For Literate Idris (Bird-style or LaTeX) source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Literate Idris'
+ aliases = ['lidr', 'literate-idris', 'lidris']
+ filenames = ['*.lidr']
+ mimetypes = ['text/x-literate-idris']
+
+ def __init__(self, **options):
+ hslexer = IdrisLexer(**options)
+ LiterateLexer.__init__(self, hslexer, **options)
+
+
+class LiterateAgdaLexer(LiterateLexer):
+ """
+ For Literate Agda source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Literate Agda'
+ aliases = ['lagda', 'literate-agda']
+ filenames = ['*.lagda']
+ mimetypes = ['text/x-literate-agda']
+
+ def __init__(self, **options):
+ agdalexer = AgdaLexer(**options)
+ LiterateLexer.__init__(self, agdalexer, litstyle='latex', **options)
+
+
+class LiterateCryptolLexer(LiterateLexer):
+ """
+ For Literate Cryptol (Bird-style or LaTeX) source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Literate Cryptol'
+ aliases = ['lcry', 'literate-cryptol', 'lcryptol']
+ filenames = ['*.lcry']
+ mimetypes = ['text/x-literate-cryptol']
+
+ def __init__(self, **options):
+ crylexer = CryptolLexer(**options)
+ LiterateLexer.__init__(self, crylexer, **options)
+
+
+class KokaLexer(RegexLexer):
+ """
+ Lexer for the `Koka <http://koka.codeplex.com>`_
+ language.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Koka'
+ aliases = ['koka']
+ filenames = ['*.kk', '*.kki']
+ mimetypes = ['text/x-koka']
+
+ keywords = [
+ 'infix', 'infixr', 'infixl',
+ 'type', 'cotype', 'rectype', 'alias',
+ 'struct', 'con',
+ 'fun', 'function', 'val', 'var',
+ 'external',
+ 'if', 'then', 'else', 'elif', 'return', 'match',
+        'private', 'public',
+ 'module', 'import', 'as',
+ 'include', 'inline',
+ 'rec',
+ 'try', 'yield', 'enum',
+ 'interface', 'instance',
+ ]
+
+ # keywords that are followed by a type
+ typeStartKeywords = [
+ 'type', 'cotype', 'rectype', 'alias', 'struct', 'enum',
+ ]
+
+ # keywords valid in a type
+ typekeywords = [
+ 'forall', 'exists', 'some', 'with',
+ ]
+
+ # builtin names and special names
+ builtin = [
+ 'for', 'while', 'repeat',
+ 'foreach', 'foreach-indexed',
+ 'error', 'catch', 'finally',
+ 'cs', 'js', 'file', 'ref', 'assigned',
+ ]
+
+ # symbols that can be in an operator
+ symbols = r'[$%&*+@!/\\^~=.:\-?|<>]+'
+
+ # symbol boundary: an operator keyword should not be followed by any of these
+ sboundary = '(?!'+symbols+')'
+
+ # name boundary: a keyword should not be followed by any of these
+ boundary = '(?![\w/])'
+
+ # koka token abstractions
+ tokenType = Name.Attribute
+ tokenTypeDef = Name.Class
+ tokenConstructor = Generic.Emph
+
+ # main lexer
+ tokens = {
+ 'root': [
+ include('whitespace'),
+
+ # go into type mode
+ (r'::?' + sboundary, tokenType, 'type'),
+ (r'(alias)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
+ 'alias-type'),
+ (r'(struct)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
+ 'struct-type'),
+ ((r'(%s)' % '|'.join(typeStartKeywords)) +
+ r'(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
+ 'type'),
+
+ # special sequences of tokens (we use ?: for non-capturing group as
+ # required by 'bygroups')
+ (r'(module)(\s+)(interface\s+)?((?:[a-z]\w*/)*[a-z]\w*)',
+ bygroups(Keyword, Text, Keyword, Name.Namespace)),
+ (r'(import)(\s+)((?:[a-z]\w*/)*[a-z]\w*)'
+ r'(?:(\s*)(=)(\s*)((?:qualified\s*)?)'
+ r'((?:[a-z]\w*/)*[a-z]\w*))?',
+ bygroups(Keyword, Text, Name.Namespace, Text, Keyword, Text,
+ Keyword, Name.Namespace)),
+
+ (r'(^(?:(?:public|private)\s*)?(?:function|fun|val))'
+ r'(\s+)([a-z]\w*|\((?:' + symbols + r'|/)\))',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'(^(?:(?:public|private)\s*)?external)(\s+)(inline\s+)?'
+ r'([a-z]\w*|\((?:' + symbols + r'|/)\))',
+ bygroups(Keyword, Text, Keyword, Name.Function)),
+
+ # keywords
+ (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type),
+ (r'(%s)' % '|'.join(keywords) + boundary, Keyword),
+ (r'(%s)' % '|'.join(builtin) + boundary, Keyword.Pseudo),
+ (r'::?|:=|\->|[=.]' + sboundary, Keyword),
+
+ # names
+ (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
+ bygroups(Name.Namespace, tokenConstructor)),
+ (r'((?:[a-z]\w*/)*)([a-z]\w*)', bygroups(Name.Namespace, Name)),
+ (r'((?:[a-z]\w*/)*)(\((?:' + symbols + r'|/)\))',
+ bygroups(Name.Namespace, Name)),
+ (r'_\w*', Name.Variable),
+
+ # literal string
+ (r'@"', String.Double, 'litstring'),
+
+ # operators
+ (symbols + "|/(?![*/])", Operator),
+ (r'`', Operator),
+ (r'[{}()\[\];,]', Punctuation),
+
+ # literals. No check for literal characters with len > 1
+ (r'[0-9]+\.[0-9]+([eE][\-+]?[0-9]+)?', Number.Float),
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+
+ (r"'", String.Char, 'char'),
+ (r'"', String.Double, 'string'),
+ ],
+
+ # type started by alias
+ 'alias-type': [
+ (r'=', Keyword),
+ include('type')
+ ],
+
+ # type started by struct
+ 'struct-type': [
+ (r'(?=\((?!,*\)))', Punctuation, '#pop'),
+ include('type')
+ ],
+
+ # type started by colon
+ 'type': [
+ (r'[(\[<]', tokenType, 'type-nested'),
+ include('type-content')
+ ],
+
+ # type nested in brackets: can contain parameters, comma etc.
+ 'type-nested': [
+ (r'[)\]>]', tokenType, '#pop'),
+ (r'[(\[<]', tokenType, 'type-nested'),
+ (r',', tokenType),
+ (r'([a-z]\w*)(\s*)(:)(?!:)',
+ bygroups(Name, Text, tokenType)), # parameter name
+ include('type-content')
+ ],
+
+ # shared contents of a type
+ 'type-content': [
+ include('whitespace'),
+
+ # keywords
+ (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword),
+ (r'(?=((%s)' % '|'.join(keywords) + boundary + '))',
+ Keyword, '#pop'), # need to match because names overlap...
+
+ # kinds
+ (r'[EPHVX]' + boundary, tokenType),
+
+ # type names
+ (r'[a-z][0-9]*(?![\w/])', tokenType),
+ (r'_\w*', tokenType.Variable), # Generic.Emph
+ (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
+ bygroups(Name.Namespace, tokenType)),
+ (r'((?:[a-z]\w*/)*)([a-z]\w+)',
+ bygroups(Name.Namespace, tokenType)),
+
+ # type keyword operators
+ (r'::|->|[.:|]', tokenType),
+
+ # catchall
+ default('#pop')
+ ],
+
+ # comments and literals
+ 'whitespace': [
+ (r'\n\s*#.*$', Comment.Preproc),
+ (r'\s+', Text),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'//.*$', Comment.Single)
+ ],
+ 'comment': [
+ (r'[^/*]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ 'litstring': [
+ (r'[^"]+', String.Double),
+ (r'""', String.Escape),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"\n]+', String.Double),
+ include('escape-sequence'),
+ (r'["\n]', String.Double, '#pop'),
+ ],
+ 'char': [
+ (r'[^\\\'\n]+', String.Char),
+ include('escape-sequence'),
+ (r'[\'\n]', String.Char, '#pop'),
+ ],
+ 'escape-sequence': [
+ (r'\\[nrt\\"\']', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ (r'\\u[0-9a-fA-F]{4}', String.Escape),
+ # Yes, \U literals are 6 hex digits.
+ (r'\\U[0-9a-fA-F]{6}', String.Escape)
+ ]
+ }
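
To see the Bird-style / LaTeX autodetection described in the LiterateLexer docstring above in action, a short sketch (the literate snippet is invented; with no litstyle option the first non-whitespace character, here 'S', selects Bird style, so the '>' markers are emitted as Comment.Special around the Haskell tokens):

from pygments.lexers.haskell import LiterateHaskellLexer

bird = (
    'Some prose describing the function.\n'
    '\n'
    '> add :: Int -> Int -> Int\n'
    '> add x y = x + y\n'
)
for tokentype, value in LiterateHaskellLexer().get_tokens(bird):
    print(tokentype, repr(value))
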
diff --git a/pygments/lexers/haxe.py b/pygments/lexers/haxe.py
new file mode 100644
index 00000000..e0e15c11
--- /dev/null
+++ b/pygments/lexers/haxe.py
@@ -0,0 +1,936 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.haxe
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Haxe and related stuff.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \
+ default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Whitespace
+
+__all__ = ['HaxeLexer', 'HxmlLexer']
+
+
+class HaxeLexer(ExtendedRegexLexer):
+ """
+ For Haxe source code (http://haxe.org/).
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Haxe'
+ aliases = ['hx', 'haxe', 'hxsl']
+ filenames = ['*.hx', '*.hxsl']
+ mimetypes = ['text/haxe', 'text/x-haxe', 'text/x-hx']
+
+ # keywords extracted from lexer.mll in the haxe compiler source
+ keyword = (r'(?:function|class|static|var|if|else|while|do|for|'
+ r'break|return|continue|extends|implements|import|'
+ r'switch|case|default|public|private|try|untyped|'
+ r'catch|new|this|throw|extern|enum|in|interface|'
+ r'cast|override|dynamic|typedef|package|'
+ r'inline|using|null|true|false|abstract)\b')
+
+ # idtype in lexer.mll
+ typeid = r'_*[A-Z]\w*'
+
+ # combined ident and dollar and idtype
+ ident = r'(?:_*[a-z]\w*|_+[0-9]\w*|' + typeid + '|_+|\$\w+)'
+
+ binop = (r'(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|'
+ r'!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|'
+ r'/|\-|=>|=)')
+
+ # ident except keywords
+ ident_no_keyword = r'(?!' + keyword + ')' + ident
+
+ flags = re.DOTALL | re.MULTILINE
+
+ preproc_stack = []
+
+ def preproc_callback(self, match, ctx):
+ proc = match.group(2)
+
+ if proc == 'if':
+ # store the current stack
+ self.preproc_stack.append(ctx.stack[:])
+ elif proc in ['else', 'elseif']:
+ # restore the stack back to right before #if
+ if self.preproc_stack:
+ ctx.stack = self.preproc_stack[-1][:]
+ elif proc == 'end':
+ # remove the saved stack of previous #if
+ if self.preproc_stack:
+ self.preproc_stack.pop()
+
+        # #if and #elseif should be followed by an expr
+ if proc in ['if', 'elseif']:
+ ctx.stack.append('preproc-expr')
+
+        # #error can optionally be followed by the error msg
+ if proc in ['error']:
+ ctx.stack.append('preproc-error')
+
+ yield match.start(), Comment.Preproc, '#' + proc
+ ctx.pos = match.end()
+
+ tokens = {
+ 'root': [
+ include('spaces'),
+ include('meta'),
+ (r'(?:package)\b', Keyword.Namespace, ('semicolon', 'package')),
+ (r'(?:import)\b', Keyword.Namespace, ('semicolon', 'import')),
+ (r'(?:using)\b', Keyword.Namespace, ('semicolon', 'using')),
+ (r'(?:extern|private)\b', Keyword.Declaration),
+ (r'(?:abstract)\b', Keyword.Declaration, 'abstract'),
+ (r'(?:class|interface)\b', Keyword.Declaration, 'class'),
+ (r'(?:enum)\b', Keyword.Declaration, 'enum'),
+ (r'(?:typedef)\b', Keyword.Declaration, 'typedef'),
+
+ # top-level expression
+            # although it is not supported in Haxe, it is common to write
+            # expressions in web pages; the positive lookahead here is to
+            # prevent an infinite loop at EOF
+ (r'(?=.)', Text, 'expr-statement'),
+ ],
+
+ # space/tab/comment/preproc
+ 'spaces': [
+ (r'\s+', Text),
+ (r'//[^\n\r]*', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'(#)(if|elseif|else|end|error)\b', preproc_callback),
+ ],
+
+ 'string-single-interpol': [
+ (r'\$\{', String.Interpol, ('string-interpol-close', 'expr')),
+ (r'\$\$', String.Escape),
+ (r'\$(?=' + ident + ')', String.Interpol, 'ident'),
+ include('string-single'),
+ ],
+
+ 'string-single': [
+ (r"'", String.Single, '#pop'),
+ (r'\\.', String.Escape),
+ (r'.', String.Single),
+ ],
+
+ 'string-double': [
+ (r'"', String.Double, '#pop'),
+ (r'\\.', String.Escape),
+ (r'.', String.Double),
+ ],
+
+ 'string-interpol-close': [
+ (r'\$'+ident, String.Interpol),
+ (r'\}', String.Interpol, '#pop'),
+ ],
+
+ 'package': [
+ include('spaces'),
+ (ident, Name.Namespace),
+ (r'\.', Punctuation, 'import-ident'),
+ default('#pop'),
+ ],
+
+ 'import': [
+ include('spaces'),
+ (ident, Name.Namespace),
+ (r'\*', Keyword), # wildcard import
+ (r'\.', Punctuation, 'import-ident'),
+ (r'in', Keyword.Namespace, 'ident'),
+ default('#pop'),
+ ],
+
+ 'import-ident': [
+ include('spaces'),
+ (r'\*', Keyword, '#pop'), # wildcard import
+ (ident, Name.Namespace, '#pop'),
+ ],
+
+ 'using': [
+ include('spaces'),
+ (ident, Name.Namespace),
+ (r'\.', Punctuation, 'import-ident'),
+ default('#pop'),
+ ],
+
+ 'preproc-error': [
+ (r'\s+', Comment.Preproc),
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+ default('#pop'),
+ ],
+
+ 'preproc-expr': [
+ (r'\s+', Comment.Preproc),
+ (r'\!', Comment.Preproc),
+ (r'\(', Comment.Preproc, ('#pop', 'preproc-parenthesis')),
+
+ (ident, Comment.Preproc, '#pop'),
+
+ # Float
+ (r'\.[0-9]+', Number.Float),
+ (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float),
+ (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float),
+ (r'[0-9]+\.[0-9]+', Number.Float),
+ (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float),
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+
+ # String
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+ ],
+
+ 'preproc-parenthesis': [
+ (r'\s+', Comment.Preproc),
+ (r'\)', Comment.Preproc, '#pop'),
+ default('preproc-expr-in-parenthesis'),
+ ],
+
+ 'preproc-expr-chain': [
+ (r'\s+', Comment.Preproc),
+ (binop, Comment.Preproc, ('#pop', 'preproc-expr-in-parenthesis')),
+ default('#pop'),
+ ],
+
+ # same as 'preproc-expr' but able to chain 'preproc-expr-chain'
+ 'preproc-expr-in-parenthesis': [
+ (r'\s+', Comment.Preproc),
+ (r'\!', Comment.Preproc),
+ (r'\(', Comment.Preproc,
+ ('#pop', 'preproc-expr-chain', 'preproc-parenthesis')),
+
+ (ident, Comment.Preproc, ('#pop', 'preproc-expr-chain')),
+
+ # Float
+ (r'\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, ('#pop', 'preproc-expr-chain')),
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+', Number.Integer, ('#pop', 'preproc-expr-chain')),
+
+ # String
+ (r"'", String.Single,
+ ('#pop', 'preproc-expr-chain', 'string-single')),
+ (r'"', String.Double,
+ ('#pop', 'preproc-expr-chain', 'string-double')),
+ ],
+
+ 'abstract': [
+ include('spaces'),
+ default(('#pop', 'abstract-body', 'abstract-relation',
+ 'abstract-opaque', 'type-param-constraint', 'type-name')),
+ ],
+
+ 'abstract-body': [
+ include('spaces'),
+ (r'\{', Punctuation, ('#pop', 'class-body')),
+ ],
+
+ 'abstract-opaque': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'parenthesis-close', 'type')),
+ default('#pop'),
+ ],
+
+ 'abstract-relation': [
+ include('spaces'),
+ (r'(?:to|from)', Keyword.Declaration, 'type'),
+ (r',', Punctuation),
+ default('#pop'),
+ ],
+
+ 'meta': [
+ include('spaces'),
+ (r'@', Name.Decorator, ('meta-body', 'meta-ident', 'meta-colon')),
+ ],
+
+ # optional colon
+ 'meta-colon': [
+ include('spaces'),
+ (r':', Name.Decorator, '#pop'),
+ default('#pop'),
+ ],
+
+ # same as 'ident' but set token as Name.Decorator instead of Name
+ 'meta-ident': [
+ include('spaces'),
+ (ident, Name.Decorator, '#pop'),
+ ],
+
+ 'meta-body': [
+ include('spaces'),
+ (r'\(', Name.Decorator, ('#pop', 'meta-call')),
+ default('#pop'),
+ ],
+
+ 'meta-call': [
+ include('spaces'),
+ (r'\)', Name.Decorator, '#pop'),
+ default(('#pop', 'meta-call-sep', 'expr')),
+ ],
+
+ 'meta-call-sep': [
+ include('spaces'),
+ (r'\)', Name.Decorator, '#pop'),
+ (r',', Punctuation, ('#pop', 'meta-call')),
+ ],
+
+ 'typedef': [
+ include('spaces'),
+ default(('#pop', 'typedef-body', 'type-param-constraint',
+ 'type-name')),
+ ],
+
+ 'typedef-body': [
+ include('spaces'),
+ (r'=', Operator, ('#pop', 'optional-semicolon', 'type')),
+ ],
+
+ 'enum': [
+ include('spaces'),
+ default(('#pop', 'enum-body', 'bracket-open',
+ 'type-param-constraint', 'type-name')),
+ ],
+
+ 'enum-body': [
+ include('spaces'),
+ include('meta'),
+ (r'\}', Punctuation, '#pop'),
+ (ident_no_keyword, Name, ('enum-member', 'type-param-constraint')),
+ ],
+
+ 'enum-member': [
+ include('spaces'),
+ (r'\(', Punctuation,
+ ('#pop', 'semicolon', 'flag', 'function-param')),
+ default(('#pop', 'semicolon', 'flag')),
+ ],
+
+ 'class': [
+ include('spaces'),
+ default(('#pop', 'class-body', 'bracket-open', 'extends',
+ 'type-param-constraint', 'type-name')),
+ ],
+
+ 'extends': [
+ include('spaces'),
+ (r'(?:extends|implements)\b', Keyword.Declaration, 'type'),
+ (r',', Punctuation), # the comma is made optional here, since haxe2
+ # requires the comma but haxe3 does not allow it
+ default('#pop'),
+ ],
+
+ 'bracket-open': [
+ include('spaces'),
+ (r'\{', Punctuation, '#pop'),
+ ],
+
+ 'bracket-close': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+
+ 'class-body': [
+ include('spaces'),
+ include('meta'),
+ (r'\}', Punctuation, '#pop'),
+ (r'(?:static|public|private|override|dynamic|inline|macro)\b',
+ Keyword.Declaration),
+ default('class-member'),
+ ],
+
+ 'class-member': [
+ include('spaces'),
+ (r'(var)\b', Keyword.Declaration,
+ ('#pop', 'optional-semicolon', 'var')),
+ (r'(function)\b', Keyword.Declaration,
+ ('#pop', 'optional-semicolon', 'class-method')),
+ ],
+
+ # local function, anonymous or not
+ 'function-local': [
+ include('spaces'),
+ (ident_no_keyword, Name.Function,
+ ('#pop', 'optional-expr', 'flag', 'function-param',
+ 'parenthesis-open', 'type-param-constraint')),
+ default(('#pop', 'optional-expr', 'flag', 'function-param',
+ 'parenthesis-open', 'type-param-constraint')),
+ ],
+
+ 'optional-expr': [
+ include('spaces'),
+ include('expr'),
+ default('#pop'),
+ ],
+
+ 'class-method': [
+ include('spaces'),
+ (ident, Name.Function, ('#pop', 'optional-expr', 'flag',
+ 'function-param', 'parenthesis-open',
+ 'type-param-constraint')),
+ ],
+
+ # function arguments
+ 'function-param': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r'\?', Punctuation),
+ (ident_no_keyword, Name,
+ ('#pop', 'function-param-sep', 'assign', 'flag')),
+ ],
+
+ 'function-param-sep': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'function-param')),
+ ],
+
+ 'prop-get-set': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'parenthesis-close',
+ 'prop-get-set-opt', 'comma', 'prop-get-set-opt')),
+ default('#pop'),
+ ],
+
+ 'prop-get-set-opt': [
+ include('spaces'),
+ (r'(?:default|null|never|dynamic|get|set)\b', Keyword, '#pop'),
+ (ident_no_keyword, Text, '#pop'), # custom getter/setter
+ ],
+
+ 'expr-statement': [
+ include('spaces'),
+ # the semicolon is made optional here, just to avoid checking whether
+ # the last expr ended with a bracket or not
+ default(('#pop', 'optional-semicolon', 'expr')),
+ ],
+
+ 'expr': [
+ include('spaces'),
+ (r'@', Name.Decorator, ('#pop', 'optional-expr', 'meta-body',
+ 'meta-ident', 'meta-colon')),
+ (r'(?:\+\+|\-\-|~(?!/)|!|\-)', Operator),
+ (r'\(', Punctuation, ('#pop', 'expr-chain', 'parenthesis')),
+ (r'(?:static|public|private|override|dynamic|inline)\b',
+ Keyword.Declaration),
+ (r'(?:function)\b', Keyword.Declaration, ('#pop', 'expr-chain',
+ 'function-local')),
+ (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket')),
+ (r'(?:true|false|null)\b', Keyword.Constant, ('#pop', 'expr-chain')),
+ (r'(?:this)\b', Keyword, ('#pop', 'expr-chain')),
+ (r'(?:cast)\b', Keyword, ('#pop', 'expr-chain', 'cast')),
+ (r'(?:try)\b', Keyword, ('#pop', 'catch', 'expr')),
+ (r'(?:var)\b', Keyword.Declaration, ('#pop', 'var')),
+ (r'(?:new)\b', Keyword, ('#pop', 'expr-chain', 'new')),
+ (r'(?:switch)\b', Keyword, ('#pop', 'switch')),
+ (r'(?:if)\b', Keyword, ('#pop', 'if')),
+ (r'(?:do)\b', Keyword, ('#pop', 'do')),
+ (r'(?:while)\b', Keyword, ('#pop', 'while')),
+ (r'(?:for)\b', Keyword, ('#pop', 'for')),
+ (r'(?:untyped|throw)\b', Keyword),
+ (r'(?:return)\b', Keyword, ('#pop', 'optional-expr')),
+ (r'(?:macro)\b', Keyword, ('#pop', 'macro')),
+ (r'(?:continue|break)\b', Keyword, '#pop'),
+ (r'(?:\$\s*[a-z]\b|\$(?!'+ident+'))', Name, ('#pop', 'dollar')),
+ (ident_no_keyword, Name, ('#pop', 'expr-chain')),
+
+ # Float
+ (r'\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, ('#pop', 'expr-chain')),
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'expr-chain')),
+ (r'[0-9]+', Number.Integer, ('#pop', 'expr-chain')),
+
+ # String
+ (r"'", String.Single, ('#pop', 'expr-chain', 'string-single-interpol')),
+ (r'"', String.Double, ('#pop', 'expr-chain', 'string-double')),
+
+ # EReg
+ (r'~/(\\\\|\\/|[^/\n])*/[gimsu]*', String.Regex, ('#pop', 'expr-chain')),
+
+ # Array
+ (r'\[', Punctuation, ('#pop', 'expr-chain', 'array-decl')),
+ ],
+
+ 'expr-chain': [
+ include('spaces'),
+ (r'(?:\+\+|\-\-)', Operator),
+ (binop, Operator, ('#pop', 'expr')),
+ (r'(?:in)\b', Keyword, ('#pop', 'expr')),
+ (r'\?', Operator, ('#pop', 'expr', 'ternary', 'expr')),
+ (r'(\.)(' + ident_no_keyword + ')', bygroups(Punctuation, Name)),
+ (r'\[', Punctuation, 'array-access'),
+ (r'\(', Punctuation, 'call'),
+ default('#pop'),
+ ],
+
+ # macro reification
+ 'macro': [
+ include('spaces'),
+ include('meta'),
+ (r':', Punctuation, ('#pop', 'type')),
+
+ (r'(?:extern|private)\b', Keyword.Declaration),
+ (r'(?:abstract)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'abstract')),
+ (r'(?:class|interface)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'macro-class')),
+ (r'(?:enum)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'enum')),
+ (r'(?:typedef)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'typedef')),
+
+ default(('#pop', 'expr')),
+ ],
+
+ 'macro-class': [
+ (r'\{', Punctuation, ('#pop', 'class-body')),
+ include('class')
+ ],
+
+ # cast can be written as "cast expr" or "cast(expr, type)"
+ 'cast': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'parenthesis-close',
+ 'cast-type', 'expr')),
+ default(('#pop', 'expr')),
+ ],
+
+ # optionally give a type as the 2nd argument of cast()
+ 'cast-type': [
+ include('spaces'),
+ (r',', Punctuation, ('#pop', 'type')),
+ default('#pop'),
+ ],
+
+ 'catch': [
+ include('spaces'),
+ (r'(?:catch)\b', Keyword, ('expr', 'function-param',
+ 'parenthesis-open')),
+ default('#pop'),
+ ],
+
+ # do-while loop
+ 'do': [
+ include('spaces'),
+ default(('#pop', 'do-while', 'expr')),
+ ],
+
+ # the while after do
+ 'do-while': [
+ include('spaces'),
+ (r'(?:while)\b', Keyword, ('#pop', 'parenthesis',
+ 'parenthesis-open')),
+ ],
+
+ 'while': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
+ ],
+
+ 'for': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
+ ],
+
+ 'if': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'else', 'optional-semicolon', 'expr',
+ 'parenthesis')),
+ ],
+
+ 'else': [
+ include('spaces'),
+ (r'(?:else)\b', Keyword, ('#pop', 'expr')),
+ default('#pop'),
+ ],
+
+ 'switch': [
+ include('spaces'),
+ default(('#pop', 'switch-body', 'bracket-open', 'expr')),
+ ],
+
+ 'switch-body': [
+ include('spaces'),
+ (r'(?:case|default)\b', Keyword, ('case-block', 'case')),
+ (r'\}', Punctuation, '#pop'),
+ ],
+
+ 'case': [
+ include('spaces'),
+ (r':', Punctuation, '#pop'),
+ default(('#pop', 'case-sep', 'case-guard', 'expr')),
+ ],
+
+ 'case-sep': [
+ include('spaces'),
+ (r':', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'case')),
+ ],
+
+ 'case-guard': [
+ include('spaces'),
+ (r'(?:if)\b', Keyword, ('#pop', 'parenthesis', 'parenthesis-open')),
+ default('#pop'),
+ ],
+
+ # optional multiple expr under a case
+ 'case-block': [
+ include('spaces'),
+ (r'(?!(?:case|default)\b|\})', Keyword, 'expr-statement'),
+ default('#pop'),
+ ],
+
+ 'new': [
+ include('spaces'),
+ default(('#pop', 'call', 'parenthesis-open', 'type')),
+ ],
+
+ 'array-decl': [
+ include('spaces'),
+ (r'\]', Punctuation, '#pop'),
+ default(('#pop', 'array-decl-sep', 'expr')),
+ ],
+
+ 'array-decl-sep': [
+ include('spaces'),
+ (r'\]', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'array-decl')),
+ ],
+
+ 'array-access': [
+ include('spaces'),
+ default(('#pop', 'array-access-close', 'expr')),
+ ],
+
+ 'array-access-close': [
+ include('spaces'),
+ (r'\]', Punctuation, '#pop'),
+ ],
+
+ 'comma': [
+ include('spaces'),
+ (r',', Punctuation, '#pop'),
+ ],
+
+ 'colon': [
+ include('spaces'),
+ (r':', Punctuation, '#pop'),
+ ],
+
+ 'semicolon': [
+ include('spaces'),
+ (r';', Punctuation, '#pop'),
+ ],
+
+ 'optional-semicolon': [
+ include('spaces'),
+ (r';', Punctuation, '#pop'),
+ default('#pop'),
+ ],
+
+ # an ident that CAN be a Haxe keyword
+ 'ident': [
+ include('spaces'),
+ (ident, Name, '#pop'),
+ ],
+
+ 'dollar': [
+ include('spaces'),
+ (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket-close', 'expr')),
+ default(('#pop', 'expr-chain')),
+ ],
+
+ 'type-name': [
+ include('spaces'),
+ (typeid, Name, '#pop'),
+ ],
+
+ 'type-full-name': [
+ include('spaces'),
+ (r'\.', Punctuation, 'ident'),
+ default('#pop'),
+ ],
+
+ 'type': [
+ include('spaces'),
+ (r'\?', Punctuation),
+ (ident, Name, ('#pop', 'type-check', 'type-full-name')),
+ (r'\{', Punctuation, ('#pop', 'type-check', 'type-struct')),
+ (r'\(', Punctuation, ('#pop', 'type-check', 'type-parenthesis')),
+ ],
+
+ 'type-parenthesis': [
+ include('spaces'),
+ default(('#pop', 'parenthesis-close', 'type')),
+ ],
+
+ 'type-check': [
+ include('spaces'),
+ (r'->', Punctuation, ('#pop', 'type')),
+ (r'<(?!=)', Punctuation, 'type-param'),
+ default('#pop'),
+ ],
+
+ 'type-struct': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r'\?', Punctuation),
+ (r'>', Punctuation, ('comma', 'type')),
+ (ident_no_keyword, Name, ('#pop', 'type-struct-sep', 'type', 'colon')),
+ include('class-body'),
+ ],
+
+ 'type-struct-sep': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'type-struct')),
+ ],
+
+ # type-param can be a normal type or a constant literal...
+ 'type-param-type': [
+ # Float
+ (r'\.[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+\.[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, '#pop'),
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex, '#pop'),
+ (r'[0-9]+', Number.Integer, '#pop'),
+
+ # String
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+
+ # EReg
+ (r'~/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex, '#pop'),
+
+ # Array
+ (r'\[', Operator, ('#pop', 'array-decl')),
+
+ include('type'),
+ ],
+
+ # the type-param part of a type,
+ # i.e. the <A,B> part in Map<A,B>
+ 'type-param': [
+ include('spaces'),
+ default(('#pop', 'type-param-sep', 'type-param-type')),
+ ],
+
+ 'type-param-sep': [
+ include('spaces'),
+ (r'>', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'type-param')),
+ ],
+
+ # optional type-param that may include a constraint,
+ # i.e. <T:Constraint, T2:(ConstraintA,ConstraintB)>
+ 'type-param-constraint': [
+ include('spaces'),
+ (r'<(?!=)', Punctuation, ('#pop', 'type-param-constraint-sep',
+ 'type-param-constraint-flag', 'type-name')),
+ default('#pop'),
+ ],
+
+ 'type-param-constraint-sep': [
+ include('spaces'),
+ (r'>', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'type-param-constraint-sep',
+ 'type-param-constraint-flag', 'type-name')),
+ ],
+
+ # the optional constraint inside type-param
+ 'type-param-constraint-flag': [
+ include('spaces'),
+ (r':', Punctuation, ('#pop', 'type-param-constraint-flag-type')),
+ default('#pop'),
+ ],
+
+ 'type-param-constraint-flag-type': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'type-param-constraint-flag-type-sep',
+ 'type')),
+ default(('#pop', 'type')),
+ ],
+
+ 'type-param-constraint-flag-type-sep': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation, 'type'),
+ ],
+
+ # a parenthesis expr that contains exactly one expr
+ 'parenthesis': [
+ include('spaces'),
+ default(('#pop', 'parenthesis-close', 'flag', 'expr')),
+ ],
+
+ 'parenthesis-open': [
+ include('spaces'),
+ (r'\(', Punctuation, '#pop'),
+ ],
+
+ 'parenthesis-close': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ ],
+
+ 'var': [
+ include('spaces'),
+ (ident_no_keyword, Text, ('#pop', 'var-sep', 'assign', 'flag', 'prop-get-set')),
+ ],
+
+ # optionally more var decls.
+ 'var-sep': [
+ include('spaces'),
+ (r',', Punctuation, ('#pop', 'var')),
+ default('#pop'),
+ ],
+
+ # optional assignment
+ 'assign': [
+ include('spaces'),
+ (r'=', Operator, ('#pop', 'expr')),
+ default('#pop'),
+ ],
+
+ # optional type flag
+ 'flag': [
+ include('spaces'),
+ (r':', Punctuation, ('#pop', 'type')),
+ default('#pop'),
+ ],
+
+ # colon as part of a ternary operator (?:)
+ 'ternary': [
+ include('spaces'),
+ (r':', Operator, '#pop'),
+ ],
+
+ # function call
+ 'call': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ default(('#pop', 'call-sep', 'expr')),
+ ],
+
+ # after a call param
+ 'call-sep': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'call')),
+ ],
+
+ # bracket can be block or object
+ 'bracket': [
+ include('spaces'),
+ (r'(?!(?:\$\s*[a-z]\b|\$(?!'+ident+')))' + ident_no_keyword, Name,
+ ('#pop', 'bracket-check')),
+ (r"'", String.Single, ('#pop', 'bracket-check', 'string-single')),
+ (r'"', String.Double, ('#pop', 'bracket-check', 'string-double')),
+ default(('#pop', 'block')),
+ ],
+
+ 'bracket-check': [
+ include('spaces'),
+ (r':', Punctuation, ('#pop', 'object-sep', 'expr')), # is object
+ default(('#pop', 'block', 'optional-semicolon', 'expr-chain')), # is block
+ ],
+
+ # code block
+ 'block': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ default('expr-statement'),
+ ],
+
+ # an object made of key-value pairs
+ 'object': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ default(('#pop', 'object-sep', 'expr', 'colon', 'ident-or-string'))
+ ],
+
+ # a key of an object
+ 'ident-or-string': [
+ include('spaces'),
+ (ident_no_keyword, Name, '#pop'),
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+ ],
+
+ # after a key-value pair in object
+ 'object-sep': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'object')),
+ ],
+
+ }
+
+ def analyse_text(text):
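+ # weak guess: text starting with an 'ident : something' pair may be Haxe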
+ if re.match(r'\w+\s*:\s*\w', text):
+ return 0.3
+
+
+class HxmlLexer(RegexLexer):
+ """
+ Lexer for `haXe build <http://haxe.org/doc/compiler>`_ files.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Hxml'
+ aliases = ['haxeml', 'hxml']
+ filenames = ['*.hxml']
+
+ tokens = {
+ 'root': [
+ # Separator
+ (r'(--)(next)', bygroups(Punctuation, Generic.Heading)),
+ # Compiler switches with one dash
+ (r'(-)(prompt|debug|v)', bygroups(Punctuation, Keyword)),
+ # Compiler switches with two dashes
+ (r'(--)(neko-source|flash-strict|flash-use-stage|no-opt|no-traces|'
+ r'no-inline|times|no-output)', bygroups(Punctuation, Keyword)),
+ # Targets and other options that take an argument
+ (r'(-)(cpp|js|neko|x|as3|swf9?|swf-lib|php|xml|main|lib|D|resource|'
+ r'cp|cmd)( +)(.+)',
+ bygroups(Punctuation, Keyword, Whitespace, String)),
+ # Options that take only numerical arguments
+ (r'(-)(swf-version)( +)(\d+)',
+ bygroups(Punctuation, Keyword, Whitespace, Number.Integer)),
+ # An option that defines the size, the fps and the background
+ # color of a flash movie
+ (r'(-)(swf-header)( +)(\d+)(:)(\d+)(:)(\d+)(:)([A-Fa-f0-9]{6})',
+ bygroups(Punctuation, Keyword, Whitespace, Number.Integer,
+ Punctuation, Number.Integer, Punctuation, Number.Integer,
+ Punctuation, Number.Hex)),
+ # options with two dashes that take arguments
+ (r'(--)(js-namespace|php-front|php-lib|remap|gen-hx-classes)( +)'
+ r'(.+)', bygroups(Punctuation, Keyword, Whitespace, String)),
+ # Single line comment, multiline ones are not allowed.
+ (r'#.*', Comment.Single)
+ ]
+ }
diff --git a/pygments/lexers/hdl.py b/pygments/lexers/hdl.py
index 57ffc349..fc5ff719 100644
--- a/pygments/lexers/hdl.py
+++ b/pygments/lexers/hdl.py
@@ -5,15 +5,15 @@
Lexers for hardware descriptor languages.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import RegexLexer, bygroups, include, using, this
-from pygments.token import \
- Text, Comment, Operator, Keyword, Name, String, Number, Punctuation, \
- Error
+
+from pygments.lexer import RegexLexer, bygroups, include, using, this, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
__all__ = ['VerilogLexer', 'SystemVerilogLexer', 'VhdlLexer']
@@ -22,7 +22,7 @@ class VerilogLexer(RegexLexer):
"""
For verilog source code with preprocessor directives.
- *New in Pygments 1.4.*
+ .. versionadded:: 1.4
"""
name = 'verilog'
aliases = ['verilog', 'v']
@@ -37,7 +37,7 @@ class VerilogLexer(RegexLexer):
(r'^\s*`define', Comment.Preproc, 'macro'),
(r'\n', Text),
(r'\s+', Text),
- (r'\\\n', Text), # line continuation
+ (r'\\\n', Text), # line continuation
(r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
(r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
(r'[{}#@]', Punctuation),
@@ -46,7 +46,7 @@ class VerilogLexer(RegexLexer):
(r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
(r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
(r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
- (r'([0-9]+)|(\'b)[0-1]+', Number.Hex), # should be binary
+ (r'([0-9]+)|(\'b)[01]+', Number.Bin),
(r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
(r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
(r'\'[01xz]', Number),
@@ -54,57 +54,69 @@ class VerilogLexer(RegexLexer):
(r'\*/', Error),
(r'[~!%^&*+=|?:<>/-]', Operator),
(r'[()\[\],.;\']', Punctuation),
- (r'`[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant),
+ (r'`[a-zA-Z_]\w*', Name.Constant),
(r'^(\s*)(package)(\s+)', bygroups(Text, Keyword.Namespace, Text)),
(r'^(\s*)(import)(\s+)', bygroups(Text, Keyword.Namespace, Text),
'import'),
- (r'(always|always_comb|always_ff|always_latch|and|assign|automatic|'
- r'begin|break|buf|bufif0|bufif1|case|casex|casez|cmos|const|'
- r'continue|deassign|default|defparam|disable|do|edge|else|end|endcase|'
- r'endfunction|endgenerate|endmodule|endpackage|endprimitive|endspecify|'
- r'endtable|endtask|enum|event|final|for|force|forever|fork|function|'
- r'generate|genvar|highz0|highz1|if|initial|inout|input|'
- r'integer|join|large|localparam|macromodule|medium|module|'
- r'nand|negedge|nmos|nor|not|notif0|notif1|or|output|packed|'
- r'parameter|pmos|posedge|primitive|pull0|pull1|pulldown|pullup|rcmos|'
- r'ref|release|repeat|return|rnmos|rpmos|rtran|rtranif0|'
- r'rtranif1|scalared|signed|small|specify|specparam|strength|'
- r'string|strong0|strong1|struct|table|task|'
- r'tran|tranif0|tranif1|type|typedef|'
- r'unsigned|var|vectored|void|wait|weak0|weak1|while|'
- r'xnor|xor)\b', Keyword),
-
- (r'`(accelerate|autoexpand_vectornets|celldefine|default_nettype|'
- r'else|elsif|endcelldefine|endif|endprotect|endprotected|'
- r'expand_vectornets|ifdef|ifndef|include|noaccelerate|noexpand_vectornets|'
- r'noremove_gatenames|noremove_netnames|nounconnected_drive|'
- r'protect|protected|remove_gatenames|remove_netnames|resetall|'
- r'timescale|unconnected_drive|undef)\b', Comment.Preproc),
-
- (r'\$(bits|bitstoreal|bitstoshortreal|countdrivers|display|fclose|'
- r'fdisplay|finish|floor|fmonitor|fopen|fstrobe|fwrite|'
- r'getpattern|history|incsave|input|itor|key|list|log|'
- r'monitor|monitoroff|monitoron|nokey|nolog|printtimescale|'
- r'random|readmemb|readmemh|realtime|realtobits|reset|reset_count|'
- r'reset_value|restart|rtoi|save|scale|scope|shortrealtobits|'
- r'showscopes|showvariables|showvars|sreadmemb|sreadmemh|'
- r'stime|stop|strobe|time|timeformat|write)\b', Name.Builtin),
-
- (r'(byte|shortint|int|longint|integer|time|'
- r'bit|logic|reg|'
- r'supply0|supply1|tri|triand|trior|tri0|tri1|trireg|uwire|wire|wand|wor'
- r'shortreal|real|realtime)\b', Keyword.Type),
- ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ (words((
+ 'always', 'always_comb', 'always_ff', 'always_latch', 'and',
+ 'assign', 'automatic', 'begin', 'break', 'buf', 'bufif0', 'bufif1',
+ 'case', 'casex', 'casez', 'cmos', 'const', 'continue', 'deassign',
+ 'default', 'defparam', 'disable', 'do', 'edge', 'else', 'end', 'endcase',
+ 'endfunction', 'endgenerate', 'endmodule', 'endpackage', 'endprimitive',
+ 'endspecify', 'endtable', 'endtask', 'enum', 'event', 'final', 'for',
+ 'force', 'forever', 'fork', 'function', 'generate', 'genvar', 'highz0',
+ 'highz1', 'if', 'initial', 'inout', 'input', 'integer', 'join', 'large',
+ 'localparam', 'macromodule', 'medium', 'module', 'nand', 'negedge',
+ 'nmos', 'nor', 'not', 'notif0', 'notif1', 'or', 'output', 'packed',
+ 'parameter', 'pmos', 'posedge', 'primitive', 'pull0', 'pull1',
+ 'pulldown', 'pullup', 'rcmos', 'ref', 'release', 'repeat', 'return',
+ 'rnmos', 'rpmos', 'rtran', 'rtranif0', 'rtranif1', 'scalared', 'signed',
+ 'small', 'specify', 'specparam', 'strength', 'string', 'strong0',
+ 'strong1', 'struct', 'table', 'task', 'tran', 'tranif0', 'tranif1',
+ 'type', 'typedef', 'unsigned', 'var', 'vectored', 'void', 'wait',
+ 'weak0', 'weak1', 'while', 'xnor', 'xor'), suffix=r'\b'),
+ Keyword),
+
+ (words((
+ 'accelerate', 'autoexpand_vectornets', 'celldefine', 'default_nettype',
+ 'else', 'elsif', 'endcelldefine', 'endif', 'endprotect', 'endprotected',
+ 'expand_vectornets', 'ifdef', 'ifndef', 'include', 'noaccelerate',
+ 'noexpand_vectornets', 'noremove_gatenames', 'noremove_netnames',
+ 'nounconnected_drive', 'protect', 'protected', 'remove_gatenames',
+ 'remove_netnames', 'resetall', 'timescale', 'unconnected_drive',
+ 'undef'), prefix=r'`', suffix=r'\b'),
+ Comment.Preproc),
+
+ (words((
+ 'bits', 'bitstoreal', 'bitstoshortreal', 'countdrivers', 'display', 'fclose',
+ 'fdisplay', 'finish', 'floor', 'fmonitor', 'fopen', 'fstrobe', 'fwrite',
+ 'getpattern', 'history', 'incsave', 'input', 'itor', 'key', 'list', 'log',
+ 'monitor', 'monitoroff', 'monitoron', 'nokey', 'nolog', 'printtimescale',
+ 'random', 'readmemb', 'readmemh', 'realtime', 'realtobits', 'reset',
+ 'reset_count', 'reset_value', 'restart', 'rtoi', 'save', 'scale', 'scope',
+ 'shortrealtobits', 'showscopes', 'showvariables', 'showvars', 'sreadmemb',
+ 'sreadmemh', 'stime', 'stop', 'strobe', 'time', 'timeformat', 'write'),
+ prefix=r'\$', suffix=r'\b'),
+ Name.Builtin),
+
+ (words((
+ 'byte', 'shortint', 'int', 'longint', 'integer', 'time',
+ 'bit', 'logic', 'reg', 'supply0', 'supply1', 'tri', 'triand',
+ 'trior', 'tri0', 'tri1', 'trireg', 'uwire', 'wire', 'wand', 'wor',
+ 'shortreal', 'real', 'realtime'), suffix=r'\b'),
+ Keyword.Type),
+ ('[a-zA-Z_]\w*:(?!:)', Name.Label),
+ ('[a-zA-Z_]\w*', Name),
],
'string': [
(r'"', String, '#pop'),
(r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
],
'macro': [
(r'[^/\n]+', Comment.Preproc),
@@ -115,13 +127,13 @@ class VerilogLexer(RegexLexer):
(r'\n', Comment.Preproc, '#pop'),
],
'import': [
- (r'[a-zA-Z0-9_:]+\*?', Name.Namespace, '#pop')
+ (r'[\w:]+\*?', Name.Namespace, '#pop')
]
}
def get_tokens_unprocessed(self, text):
for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
+ RegexLexer.get_tokens_unprocessed(self, text):
# Convention: mark all upper case names as constants
if token is Name:
if value.isupper():
@@ -134,7 +146,7 @@ class SystemVerilogLexer(RegexLexer):
Extends verilog lexer to recognise all SystemVerilog keywords from IEEE
1800-2009 standard.
- *New in Pygments 1.5.*
+ .. versionadded:: 1.5
"""
name = 'systemverilog'
aliases = ['systemverilog', 'sv']
@@ -152,7 +164,7 @@ class SystemVerilogLexer(RegexLexer):
(r'\n', Text),
(r'\s+', Text),
- (r'\\\n', Text), # line continuation
+ (r'\\\n', Text), # line continuation
(r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
(r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
(r'[{}#@]', Punctuation),
@@ -161,7 +173,7 @@ class SystemVerilogLexer(RegexLexer):
(r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
(r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
(r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
- (r'([0-9]+)|(\'b)[0-1]+', Number.Hex), # should be binary
+ (r'([0-9]+)|(\'b)[01]+', Number.Bin),
(r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
(r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
(r'\'[01xz]', Number),
@@ -169,79 +181,87 @@ class SystemVerilogLexer(RegexLexer):
(r'\*/', Error),
(r'[~!%^&*+=|?:<>/-]', Operator),
(r'[()\[\],.;\']', Punctuation),
- (r'`[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant),
-
- (r'(accept_on|alias|always|always_comb|always_ff|always_latch|'
- r'and|assert|assign|assume|automatic|before|begin|bind|bins|'
- r'binsof|bit|break|buf|bufif0|bufif1|byte|case|casex|casez|'
- r'cell|chandle|checker|class|clocking|cmos|config|const|constraint|'
- r'context|continue|cover|covergroup|coverpoint|cross|deassign|'
- r'default|defparam|design|disable|dist|do|edge|else|end|endcase|'
- r'endchecker|endclass|endclocking|endconfig|endfunction|endgenerate|'
- r'endgroup|endinterface|endmodule|endpackage|endprimitive|'
- r'endprogram|endproperty|endsequence|endspecify|endtable|'
- r'endtask|enum|event|eventually|expect|export|extends|extern|'
- r'final|first_match|for|force|foreach|forever|fork|forkjoin|'
- r'function|generate|genvar|global|highz0|highz1|if|iff|ifnone|'
- r'ignore_bins|illegal_bins|implies|import|incdir|include|'
- r'initial|inout|input|inside|instance|int|integer|interface|'
- r'intersect|join|join_any|join_none|large|let|liblist|library|'
- r'local|localparam|logic|longint|macromodule|matches|medium|'
- r'modport|module|nand|negedge|new|nexttime|nmos|nor|noshowcancelled|'
- r'not|notif0|notif1|null|or|output|package|packed|parameter|'
- r'pmos|posedge|primitive|priority|program|property|protected|'
- r'pull0|pull1|pulldown|pullup|pulsestyle_ondetect|pulsestyle_onevent|'
- r'pure|rand|randc|randcase|randsequence|rcmos|real|realtime|'
- r'ref|reg|reject_on|release|repeat|restrict|return|rnmos|'
- r'rpmos|rtran|rtranif0|rtranif1|s_always|s_eventually|s_nexttime|'
- r's_until|s_until_with|scalared|sequence|shortint|shortreal|'
- r'showcancelled|signed|small|solve|specify|specparam|static|'
- r'string|strong|strong0|strong1|struct|super|supply0|supply1|'
- r'sync_accept_on|sync_reject_on|table|tagged|task|this|throughout|'
- r'time|timeprecision|timeunit|tran|tranif0|tranif1|tri|tri0|'
- r'tri1|triand|trior|trireg|type|typedef|union|unique|unique0|'
- r'unsigned|until|until_with|untyped|use|uwire|var|vectored|'
- r'virtual|void|wait|wait_order|wand|weak|weak0|weak1|while|'
- r'wildcard|wire|with|within|wor|xnor|xor)\b', Keyword ),
-
- (r'(`__FILE__|`__LINE__|`begin_keywords|`celldefine|`default_nettype|'
- r'`define|`else|`elsif|`end_keywords|`endcelldefine|`endif|'
- r'`ifdef|`ifndef|`include|`line|`nounconnected_drive|`pragma|'
- r'`resetall|`timescale|`unconnected_drive|`undef|`undefineall)\b',
- Comment.Preproc ),
-
- (r'(\$display|\$displayb|\$displayh|\$displayo|\$dumpall|\$dumpfile|'
- r'\$dumpflush|\$dumplimit|\$dumpoff|\$dumpon|\$dumpports|'
- r'\$dumpportsall|\$dumpportsflush|\$dumpportslimit|\$dumpportsoff|'
- r'\$dumpportson|\$dumpvars|\$fclose|\$fdisplay|\$fdisplayb|'
- r'\$fdisplayh|\$fdisplayo|\$feof|\$ferror|\$fflush|\$fgetc|'
- r'\$fgets|\$fmonitor|\$fmonitorb|\$fmonitorh|\$fmonitoro|'
- r'\$fopen|\$fread|\$fscanf|\$fseek|\$fstrobe|\$fstrobeb|\$fstrobeh|'
- r'\$fstrobeo|\$ftell|\$fwrite|\$fwriteb|\$fwriteh|\$fwriteo|'
- r'\$monitor|\$monitorb|\$monitorh|\$monitoro|\$monitoroff|'
- r'\$monitoron|\$plusargs|\$readmemb|\$readmemh|\$rewind|\$sformat|'
- r'\$sformatf|\$sscanf|\$strobe|\$strobeb|\$strobeh|\$strobeo|'
- r'\$swrite|\$swriteb|\$swriteh|\$swriteo|\$test|\$ungetc|'
- r'\$value\$plusargs|\$write|\$writeb|\$writeh|\$writememb|'
- r'\$writememh|\$writeo)\b' , Name.Builtin ),
+ (r'`[a-zA-Z_]\w*', Name.Constant),
+
+ (words((
+ 'accept_on', 'alias', 'always', 'always_comb', 'always_ff', 'always_latch',
+ 'and', 'assert', 'assign', 'assume', 'automatic', 'before', 'begin', 'bind', 'bins',
+ 'binsof', 'bit', 'break', 'buf', 'bufif0', 'bufif1', 'byte', 'case', 'casex', 'casez',
+ 'cell', 'chandle', 'checker', 'class', 'clocking', 'cmos', 'config', 'const', 'constraint',
+ 'context', 'continue', 'cover', 'covergroup', 'coverpoint', 'cross', 'deassign',
+ 'default', 'defparam', 'design', 'disable', 'dist', 'do', 'edge', 'else', 'end', 'endcase',
+ 'endchecker', 'endclass', 'endclocking', 'endconfig', 'endfunction', 'endgenerate',
+ 'endgroup', 'endinterface', 'endmodule', 'endpackage', 'endprimitive',
+ 'endprogram', 'endproperty', 'endsequence', 'endspecify', 'endtable',
+ 'endtask', 'enum', 'event', 'eventually', 'expect', 'export', 'extends', 'extern',
+ 'final', 'first_match', 'for', 'force', 'foreach', 'forever', 'fork', 'forkjoin',
+ 'function', 'generate', 'genvar', 'global', 'highz0', 'highz1', 'if', 'iff', 'ifnone',
+ 'ignore_bins', 'illegal_bins', 'implies', 'import', 'incdir', 'include',
+ 'initial', 'inout', 'input', 'inside', 'instance', 'int', 'integer', 'interface',
+ 'intersect', 'join', 'join_any', 'join_none', 'large', 'let', 'liblist', 'library',
+ 'local', 'localparam', 'logic', 'longint', 'macromodule', 'matches', 'medium',
+ 'modport', 'module', 'nand', 'negedge', 'new', 'nexttime', 'nmos', 'nor', 'noshowcancelled',
+ 'not', 'notif0', 'notif1', 'null', 'or', 'output', 'package', 'packed', 'parameter',
+ 'pmos', 'posedge', 'primitive', 'priority', 'program', 'property', 'protected',
+ 'pull0', 'pull1', 'pulldown', 'pullup', 'pulsestyle_ondetect', 'pulsestyle_onevent',
+ 'pure', 'rand', 'randc', 'randcase', 'randsequence', 'rcmos', 'real', 'realtime',
+ 'ref', 'reg', 'reject_on', 'release', 'repeat', 'restrict', 'return', 'rnmos',
+ 'rpmos', 'rtran', 'rtranif0', 'rtranif1', 's_always', 's_eventually', 's_nexttime',
+ 's_until', 's_until_with', 'scalared', 'sequence', 'shortint', 'shortreal',
+ 'showcancelled', 'signed', 'small', 'solve', 'specify', 'specparam', 'static',
+ 'string', 'strong', 'strong0', 'strong1', 'struct', 'super', 'supply0', 'supply1',
+ 'sync_accept_on', 'sync_reject_on', 'table', 'tagged', 'task', 'this', 'throughout',
+ 'time', 'timeprecision', 'timeunit', 'tran', 'tranif0', 'tranif1', 'tri', 'tri0',
+ 'tri1', 'triand', 'trior', 'trireg', 'type', 'typedef', 'union', 'unique', 'unique0',
+ 'unsigned', 'until', 'until_with', 'untyped', 'use', 'uwire', 'var', 'vectored',
+ 'virtual', 'void', 'wait', 'wait_order', 'wand', 'weak', 'weak0', 'weak1', 'while',
+ 'wildcard', 'wire', 'with', 'within', 'wor', 'xnor', 'xor'), suffix=r'\b'),
+ Keyword),
+
+ (words((
+ '`__FILE__', '`__LINE__', '`begin_keywords', '`celldefine', '`default_nettype',
+ '`define', '`else', '`elsif', '`end_keywords', '`endcelldefine', '`endif',
+ '`ifdef', '`ifndef', '`include', '`line', '`nounconnected_drive', '`pragma',
+ '`resetall', '`timescale', '`unconnected_drive', '`undef', '`undefineall'),
+ suffix=r'\b'),
+ Comment.Preproc),
+
+ (words((
+ '$display', '$displayb', '$displayh', '$displayo', '$dumpall', '$dumpfile',
+ '$dumpflush', '$dumplimit', '$dumpoff', '$dumpon', '$dumpports',
+ '$dumpportsall', '$dumpportsflush', '$dumpportslimit', '$dumpportsoff',
+ '$dumpportson', '$dumpvars', '$fclose', '$fdisplay', '$fdisplayb',
+ '$fdisplayh', '$fdisplayo', '$feof', '$ferror', '$fflush', '$fgetc',
+ '$fgets', '$finish', '$fmonitor', '$fmonitorb', '$fmonitorh', '$fmonitoro',
+ '$fopen', '$fread', '$fscanf', '$fseek', '$fstrobe', '$fstrobeb', '$fstrobeh',
+ '$fstrobeo', '$ftell', '$fwrite', '$fwriteb', '$fwriteh', '$fwriteo',
+ '$monitor', '$monitorb', '$monitorh', '$monitoro', '$monitoroff',
+ '$monitoron', '$plusargs', '$random', '$readmemb', '$readmemh', '$rewind',
+ '$sformat', '$sformatf', '$sscanf', '$strobe', '$strobeb', '$strobeh', '$strobeo',
+ '$swrite', '$swriteb', '$swriteh', '$swriteo', '$test', '$ungetc',
+ '$value$plusargs', '$write', '$writeb', '$writeh', '$writememb',
+ '$writememh', '$writeo'), suffix=r'\b'),
+ Name.Builtin),
(r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(byte|shortint|int|longint|integer|time|'
- r'bit|logic|reg|'
- r'supply0|supply1|tri|triand|trior|tri0|tri1|trireg|uwire|wire|wand|wor'
- r'shortreal|real|realtime)\b', Keyword.Type),
- ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ (words((
+ 'byte', 'shortint', 'int', 'longint', 'integer', 'time',
+ 'bit', 'logic', 'reg', 'supply0', 'supply1', 'tri', 'triand',
+ 'trior', 'tri0', 'tri1', 'trireg', 'uwire', 'wire', 'wand', 'wor',
+ 'shortreal', 'real', 'realtime'), suffix=r'\b'),
+ Keyword.Type),
+ ('[a-zA-Z_]\w*:(?!:)', Name.Label),
+ ('[a-zA-Z_]\w*', Name),
],
'classname': [
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop'),
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
],
'string': [
(r'"', String, '#pop'),
(r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
],
'macro': [
(r'[^/\n]+', Comment.Preproc),
@@ -252,29 +272,25 @@ class SystemVerilogLexer(RegexLexer):
(r'\n', Comment.Preproc, '#pop'),
],
'import': [
- (r'[a-zA-Z0-9_:]+\*?', Name.Namespace, '#pop')
+ (r'[\w:]+\*?', Name.Namespace, '#pop')
]
}
def get_tokens_unprocessed(self, text):
for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
+ RegexLexer.get_tokens_unprocessed(self, text):
# Convention: mark all upper case names as constants
if token is Name:
if value.isupper():
token = Name.Constant
yield index, token, value
- def analyse_text(text):
- if text.startswith('//') or text.startswith('/*'):
- return 0.5
-
class VhdlLexer(RegexLexer):
"""
For VHDL source code.
- *New in Pygments 1.5.*
+ .. versionadded:: 1.5
"""
name = 'vhdl'
aliases = ['vhdl']
@@ -286,23 +302,23 @@ class VhdlLexer(RegexLexer):
'root': [
(r'\n', Text),
(r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'--(?![!#$%&*+./<=>?@\^|_~]).*?$', Comment.Single),
+ (r'\\\n', Text), # line continuation
+ (r'--.*?$', Comment.Single),
(r"'(U|X|0|1|Z|W|L|H|-)'", String.Char),
(r'[~!%^&*+=|?:<>/-]', Operator),
- (r"'[a-zA-Z_][a-zA-Z0-9_]*", Name.Attribute),
+ (r"'[a-z_]\w*", Name.Attribute),
(r'[()\[\],.;\']', Punctuation),
(r'"[^\n\\]*"', String),
- (r'(library)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
+ (r'(library)(\s+)([a-z_]\w*)',
bygroups(Keyword, Text, Name.Namespace)),
(r'(use)(\s+)(entity)', bygroups(Keyword, Text, Keyword)),
- (r'(use)(\s+)([a-zA-Z_][\.a-zA-Z0-9_]*)',
+ (r'(use)(\s+)([a-z_][\w.]*)',
bygroups(Keyword, Text, Name.Namespace)),
- (r'(entity|component)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
+ (r'(entity|component)(\s+)([a-z_]\w*)',
bygroups(Keyword, Text, Name.Class)),
- (r'(architecture|configuration)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)(\s+)'
- r'(of)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)(\s+)(is)',
+ (r'(architecture|configuration)(\s+)([a-z_]\w*)(\s+)'
+ r'(of)(\s+)([a-z_]\w*)(\s+)(is)',
bygroups(Keyword, Text, Name.Class, Text, Keyword, Text,
Name.Class, Text, Keyword)),
@@ -312,45 +328,48 @@ class VhdlLexer(RegexLexer):
include('keywords'),
include('numbers'),
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ (r'[a-z_]\w*', Name),
],
'endblock': [
include('keywords'),
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class),
+ (r'[a-z_]\w*', Name.Class),
(r'(\s+)', Text),
(r';', Punctuation, '#pop'),
],
'types': [
- (r'(boolean|bit|character|severity_level|integer|time|delay_length|'
- r'natural|positive|string|bit_vector|file_open_kind|'
- r'file_open_status|std_ulogic|std_ulogic_vector|std_logic|'
- r'std_logic_vector)\b', Keyword.Type),
+ (words((
+ 'boolean', 'bit', 'character', 'severity_level', 'integer', 'time',
+ 'delay_length', 'natural', 'positive', 'string', 'bit_vector',
+ 'file_open_kind', 'file_open_status', 'std_ulogic', 'std_ulogic_vector',
+ 'std_logic', 'std_logic_vector'), suffix=r'\b'),
+ Keyword.Type),
],
'keywords': [
- (r'(abs|access|after|alias|all|and|'
- r'architecture|array|assert|attribute|begin|block|'
- r'body|buffer|bus|case|component|configuration|'
- r'constant|disconnect|downto|else|elsif|end|'
- r'entity|exit|file|for|function|generate|'
- r'generic|group|guarded|if|impure|in|'
- r'inertial|inout|is|label|library|linkage|'
- r'literal|loop|map|mod|nand|new|'
- r'next|nor|not|null|of|on|'
- r'open|or|others|out|package|port|'
- r'postponed|procedure|process|pure|range|record|'
- r'register|reject|return|rol|ror|select|'
- r'severity|signal|shared|sla|sli|sra|'
- r'srl|subtype|then|to|transport|type|'
- r'units|until|use|variable|wait|when|'
- r'while|with|xnor|xor)\b', Keyword),
+ (words((
+ 'abs', 'access', 'after', 'alias', 'all', 'and',
+ 'architecture', 'array', 'assert', 'attribute', 'begin', 'block',
+ 'body', 'buffer', 'bus', 'case', 'component', 'configuration',
+ 'constant', 'disconnect', 'downto', 'else', 'elsif', 'end',
+ 'entity', 'exit', 'file', 'for', 'function', 'generate',
+ 'generic', 'group', 'guarded', 'if', 'impure', 'in',
+ 'inertial', 'inout', 'is', 'label', 'library', 'linkage',
+ 'literal', 'loop', 'map', 'mod', 'nand', 'new',
+ 'next', 'nor', 'not', 'null', 'of', 'on',
+ 'open', 'or', 'others', 'out', 'package', 'port',
+ 'postponed', 'procedure', 'process', 'pure', 'range', 'record',
+ 'register', 'reject', 'return', 'rol', 'ror', 'select',
+ 'severity', 'signal', 'shared', 'sla', 'sli', 'sra',
+ 'srl', 'subtype', 'then', 'to', 'transport', 'type',
+ 'units', 'until', 'use', 'variable', 'wait', 'when',
+ 'while', 'with', 'xnor', 'xor'), suffix=r'\b'),
+ Keyword),
],
'numbers': [
- (r'\d{1,2}#[0-9a-fA-F_]+#?', Number.Integer),
- (r'[0-1_]+(\.[0-1_])', Number.Integer),
+ (r'\d{1,2}#[0-9a-f_]+#?', Number.Integer),
(r'\d+', Number.Integer),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
- (r'H"[0-9a-fA-F_]+"', Number.Oct),
+ (r'(\d+\.\d*|\.\d+|\d+)E[+-]?\d+', Number.Float),
+ (r'X"[0-9a-f_]+"', Number.Hex),
(r'O"[0-7_]+"', Number.Oct),
- (r'B"[0-1_]+"', Number.Oct),
+ (r'B"[01_]+"', Number.Bin),
],
}
diff --git a/pygments/lexers/html.py b/pygments/lexers/html.py
new file mode 100644
index 00000000..7893952f
--- /dev/null
+++ b/pygments/lexers/html.py
@@ -0,0 +1,601 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.html
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for HTML, XML and related markup.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
+ default, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Punctuation
+from pygments.util import looks_like_xml, html_doctype_matches
+
+from pygments.lexers.javascript import JavascriptLexer
+from pygments.lexers.jvm import ScalaLexer
+from pygments.lexers.css import CssLexer, _indentation, _starts_block
+from pygments.lexers.ruby import RubyLexer
+
+__all__ = ['HtmlLexer', 'DtdLexer', 'XmlLexer', 'XsltLexer', 'HamlLexer',
+ 'ScamlLexer', 'JadeLexer']
+
+
+class HtmlLexer(RegexLexer):
+ """
+ For HTML 4 and XHTML 1 markup. Nested JavaScript and CSS are highlighted
+ by the appropriate lexer.
+ """
+
+ name = 'HTML'
+ aliases = ['html']
+ filenames = ['*.html', '*.htm', '*.xhtml', '*.xslt']
+ mimetypes = ['text/html', 'application/xhtml+xml']
+
+ flags = re.IGNORECASE | re.DOTALL
+ tokens = {
+ 'root': [
+ ('[^<&]+', Text),
+ (r'&\S*?;', Name.Entity),
+ (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
+ ('<!--', Comment, 'comment'),
+ (r'<\?.*?\?>', Comment.Preproc),
+ ('<![^>]*>', Comment.Preproc),
+ (r'(<)(\s*)(script)(\s*)',
+ bygroups(Punctuation, Text, Name.Tag, Text),
+ ('script-content', 'tag')),
+ (r'(<)(\s*)(style)(\s*)',
+ bygroups(Punctuation, Text, Name.Tag, Text),
+ ('style-content', 'tag')),
+ # note: this allows tag names not used in HTML, like <x:with-dash>,
+ # in order to support yet-unknown template engines and the like
+ (r'(<)(\s*)([\w:.-]+)',
+ bygroups(Punctuation, Text, Name.Tag), 'tag'),
+ (r'(<)(\s*)(/)(\s*)([\w:.-]+)(\s*)(>)',
+ bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
+ Punctuation)),
+ ],
+ 'comment': [
+ ('[^-]+', Comment),
+ ('-->', Comment, '#pop'),
+ ('-', Comment),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'([\w:-]+\s*)(=)(\s*)', bygroups(Name.Attribute, Operator, Text),
+ 'attr'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'(/?)(\s*)(>)', bygroups(Punctuation, Text, Punctuation), '#pop'),
+ ],
+ 'script-content': [
+ (r'(<)(\s*)(/)(\s*)(script)(\s*)(>)',
+ bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
+ Punctuation), '#pop'),
+ (r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)),
+ ],
+ 'style-content': [
+ (r'(<)(\s*)(/)(\s*)(style)(\s*)(>)',
+ bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
+ Punctuation), '#pop'),
+ (r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)),
+ ],
+ 'attr': [
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
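+ # a matching HTML doctype declaration is a fairly strong signal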
+ if html_doctype_matches(text):
+ return 0.5
+
+
+class DtdLexer(RegexLexer):
+ """
+ A lexer for DTDs (Document Type Definitions).
+
+ .. versionadded:: 1.5
+ """
+
+ flags = re.MULTILINE | re.DOTALL
+
+ name = 'DTD'
+ aliases = ['dtd']
+ filenames = ['*.dtd']
+ mimetypes = ['application/xml-dtd']
+
+ tokens = {
+ 'root': [
+ include('common'),
+
+ (r'(<!ELEMENT)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Tag), 'element'),
+ (r'(<!ATTLIST)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Tag), 'attlist'),
+ (r'(<!ENTITY)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Entity), 'entity'),
+ (r'(<!NOTATION)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Tag), 'notation'),
+ (r'(<!\[)([^\[\s]+)(\s*)(\[)', # conditional sections
+ bygroups(Keyword, Name.Entity, Text, Keyword)),
+
+ (r'(<!DOCTYPE)(\s+)([^>\s]+)',
+ bygroups(Keyword, Text, Name.Tag)),
+ (r'PUBLIC|SYSTEM', Keyword.Constant),
+ (r'[\[\]>]', Keyword),
+ ],
+
+ 'common': [
+ (r'\s+', Text),
+ (r'(%|&)[^;]*;', Name.Entity),
+ ('<!--', Comment, 'comment'),
+ (r'[(|)*,?+]', Operator),
+ (r'"[^"]*"', String.Double),
+ (r'\'[^\']*\'', String.Single),
+ ],
+
+ 'comment': [
+ ('[^-]+', Comment),
+ ('-->', Comment, '#pop'),
+ ('-', Comment),
+ ],
+
+ 'element': [
+ include('common'),
+ (r'EMPTY|ANY|#PCDATA', Keyword.Constant),
+ (r'[^>\s|()?+*,]+', Name.Tag),
+ (r'>', Keyword, '#pop'),
+ ],
+
+ 'attlist': [
+ include('common'),
+ (r'CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION',
+ Keyword.Constant),
+ (r'#REQUIRED|#IMPLIED|#FIXED', Keyword.Constant),
+ (r'xml:space|xml:lang', Keyword.Reserved),
+ (r'[^>\s|()?+*,]+', Name.Attribute),
+ (r'>', Keyword, '#pop'),
+ ],
+
+ 'entity': [
+ include('common'),
+ (r'SYSTEM|PUBLIC|NDATA', Keyword.Constant),
+ (r'[^>\s|()?+*,]+', Name.Entity),
+ (r'>', Keyword, '#pop'),
+ ],
+
+ 'notation': [
+ include('common'),
+ (r'SYSTEM|PUBLIC', Keyword.Constant),
+ (r'[^>\s|()?+*,]+', Name.Attribute),
+ (r'>', Keyword, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
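+ # DTD declarations in text that does not itself look like XML
+ # point to a standalone DTD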
+ if not looks_like_xml(text) and \
+ ('<!ELEMENT' in text or '<!ATTLIST' in text or '<!ENTITY' in text):
+ return 0.8
+
+
+class XmlLexer(RegexLexer):
+ """
+ Generic lexer for XML (eXtensible Markup Language).
+ """
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ name = 'XML'
+ aliases = ['xml']
+ filenames = ['*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd',
+ '*.wsdl', '*.wsf']
+ mimetypes = ['text/xml', 'application/xml', 'image/svg+xml',
+ 'application/rss+xml', 'application/atom+xml']
+
+ tokens = {
+ 'root': [
+ ('[^<&]+', Text),
+ (r'&\S*?;', Name.Entity),
+ (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
+ ('<!--', Comment, 'comment'),
+ (r'<\?.*?\?>', Comment.Preproc),
+ ('<![^>]*>', Comment.Preproc),
+ (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
+ (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
+ ],
+ 'comment': [
+ ('[^-]+', Comment),
+ ('-->', Comment, '#pop'),
+ ('-', Comment),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
+ (r'/?\s*>', Name.Tag, '#pop'),
+ ],
+ 'attr': [
+ ('\s+', Text),
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if looks_like_xml(text):
+ return 0.45 # less than HTML
+
+
+class XsltLexer(XmlLexer):
+ """
+ A lexer for XSLT.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'XSLT'
+ aliases = ['xslt']
+ filenames = ['*.xsl', '*.xslt', '*.xpl'] # xpl is XProc
+ mimetypes = ['application/xsl+xml', 'application/xslt+xml']
+
+ EXTRA_KEYWORDS = set((
+ 'apply-imports', 'apply-templates', 'attribute',
+ 'attribute-set', 'call-template', 'choose', 'comment',
+ 'copy', 'copy-of', 'decimal-format', 'element', 'fallback',
+ 'for-each', 'if', 'import', 'include', 'key', 'message',
+ 'namespace-alias', 'number', 'otherwise', 'output', 'param',
+ 'preserve-space', 'processing-instruction', 'sort',
+ 'strip-space', 'stylesheet', 'template', 'text', 'transform',
+ 'value-of', 'variable', 'when', 'with-param'
+ ))
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in XmlLexer.get_tokens_unprocessed(self, text):
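+ # re-emit xsl:* tag names that match known XSLT instructions as Keyword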
+ m = re.match('</?xsl:([^>]*)/?>?', value)
+
+ if token is Name.Tag and m and m.group(1) in self.EXTRA_KEYWORDS:
+ yield index, Keyword, value
+ else:
+ yield index, token, value
+
+ def analyse_text(text):
+ if looks_like_xml(text) and '<xsl' in text:
+ return 0.8
+
+
+class HamlLexer(ExtendedRegexLexer):
+ """
+ For Haml markup.
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Haml'
+ aliases = ['haml']
+ filenames = ['*.haml']
+ mimetypes = ['text/x-haml']
+
+ flags = re.IGNORECASE
+ # Haml can include " |\n" anywhere,
+ # which is ignored and used to wrap long lines.
+ # To accommodate this, use this custom faux dot instead.
+ _dot = r'(?: \|\n(?=.* \|)|.)'
+
+ # In certain places, a comma at the end of the line
+ # allows line wrapping as well.
+ _comma_dot = r'(?:,\s*\n|' + _dot + ')'
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'css': [
+ (r'\.[\w:-]+', Name.Class, 'tag'),
+ (r'\#[\w:-]+', Name.Function, 'tag'),
+ ],
+
+ 'eval-or-plain': [
+ (r'[&!]?==', Punctuation, 'plain'),
+ (r'([&!]?[=~])(' + _comma_dot + r'*\n)',
+ bygroups(Punctuation, using(RubyLexer)),
+ 'root'),
+ default('plain'),
+ ],
+
+ 'content': [
+ include('css'),
+ (r'%[\w:-]+', Name.Tag, 'tag'),
+ (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
+ (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
+ bygroups(Comment, Comment.Special, Comment),
+ '#pop'),
+ (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
+ '#pop'),
+ (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
+ 'haml-comment-block'), '#pop'),
+ (r'(-)(' + _comma_dot + r'*\n)',
+ bygroups(Punctuation, using(RubyLexer)),
+ '#pop'),
+ (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
+ '#pop'),
+ include('eval-or-plain'),
+ ],
+
+ 'tag': [
+ include('css'),
+ (r'\{(,\n|' + _dot + ')*?\}', using(RubyLexer)),
+ (r'\[' + _dot + '*?\]', using(RubyLexer)),
+ (r'\(', Text, 'html-attributes'),
+ (r'/[ \t]*\n', Punctuation, '#pop:2'),
+ (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+ include('eval-or-plain'),
+ ],
+
+ 'plain': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+ (r'(#\{)(' + _dot + '*?)(\})',
+ bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'html-attributes': [
+ (r'\s+', Text),
+ (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'\)', Text, '#pop'),
+ ],
+
+ 'html-attribute-value': [
+ (r'[ \t]+', Text),
+ (r'\w+', Name.Variable, '#pop'),
+ (r'@\w+', Name.Variable.Instance, '#pop'),
+ (r'\$\w+', Name.Variable.Global, '#pop'),
+ (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
+ (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
+ ],
+
+ 'html-comment-block': [
+ (_dot + '+', Comment),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'haml-comment-block': [
+ (_dot + '+', Comment.Preproc),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'filter-block': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
+ (r'(#\{)(' + _dot + '*?)(\})',
+ bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+ }
+
+
+class ScamlLexer(ExtendedRegexLexer):
+ """
+ For `Scaml markup <http://scalate.fusesource.org/>`_. Scaml is Haml for Scala.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Scaml'
+ aliases = ['scaml']
+ filenames = ['*.scaml']
+ mimetypes = ['text/x-scaml']
+
+ flags = re.IGNORECASE
+ # Scaml does not yet support the " |\n" notation to
+ # wrap long lines. Once it does, use the custom faux
+ # dot instead.
+ # _dot = r'(?: \|\n(?=.* \|)|.)'
+ _dot = r'.'
+
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'css': [
+ (r'\.[\w:-]+', Name.Class, 'tag'),
+ (r'\#[\w:-]+', Name.Function, 'tag'),
+ ],
+
+ 'eval-or-plain': [
+ (r'[&!]?==', Punctuation, 'plain'),
+ (r'([&!]?[=~])(' + _dot + r'*\n)',
+ bygroups(Punctuation, using(ScalaLexer)),
+ 'root'),
+ default('plain'),
+ ],
+
+ 'content': [
+ include('css'),
+ (r'%[\w:-]+', Name.Tag, 'tag'),
+ (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
+ (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
+ bygroups(Comment, Comment.Special, Comment),
+ '#pop'),
+ (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
+ '#pop'),
+ (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
+ 'scaml-comment-block'), '#pop'),
+ (r'(-@\s*)(import)?(' + _dot + r'*\n)',
+ bygroups(Punctuation, Keyword, using(ScalaLexer)),
+ '#pop'),
+ (r'(-)(' + _dot + r'*\n)',
+ bygroups(Punctuation, using(ScalaLexer)),
+ '#pop'),
+ (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
+ '#pop'),
+ include('eval-or-plain'),
+ ],
+
+ 'tag': [
+ include('css'),
+ (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
+ (r'\[' + _dot + '*?\]', using(ScalaLexer)),
+ (r'\(', Text, 'html-attributes'),
+ (r'/[ \t]*\n', Punctuation, '#pop:2'),
+ (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+ include('eval-or-plain'),
+ ],
+
+ 'plain': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+ (r'(#\{)(' + _dot + '*?)(\})',
+ bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'html-attributes': [
+ (r'\s+', Text),
+ (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'\)', Text, '#pop'),
+ ],
+
+ 'html-attribute-value': [
+ (r'[ \t]+', Text),
+ (r'\w+', Name.Variable, '#pop'),
+ (r'@\w+', Name.Variable.Instance, '#pop'),
+ (r'\$\w+', Name.Variable.Global, '#pop'),
+ (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
+ (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
+ ],
+
+ 'html-comment-block': [
+ (_dot + '+', Comment),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'scaml-comment-block': [
+ (_dot + '+', Comment.Preproc),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'filter-block': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
+ (r'(#\{)(' + _dot + '*?)(\})',
+ bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+ }
+
+
+class JadeLexer(ExtendedRegexLexer):
+ """
+ For Jade markup.
+ Jade is a variant of Scaml, see:
+ http://scalate.fusesource.org/documentation/scaml-reference.html
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Jade'
+ aliases = ['jade']
+ filenames = ['*.jade']
+ mimetypes = ['text/x-jade']
+
+ flags = re.IGNORECASE
+ _dot = r'.'
+
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'css': [
+ (r'\.[\w:-]+', Name.Class, 'tag'),
+ (r'\#[\w:-]+', Name.Function, 'tag'),
+ ],
+
+ 'eval-or-plain': [
+ (r'[&!]?==', Punctuation, 'plain'),
+ (r'([&!]?[=~])(' + _dot + r'*\n)',
+ bygroups(Punctuation, using(ScalaLexer)), 'root'),
+ default('plain'),
+ ],
+
+ 'content': [
+ include('css'),
+ (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
+ (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
+ bygroups(Comment, Comment.Special, Comment),
+ '#pop'),
+ (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
+ '#pop'),
+ (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
+ 'scaml-comment-block'), '#pop'),
+ (r'(-@\s*)(import)?(' + _dot + r'*\n)',
+ bygroups(Punctuation, Keyword, using(ScalaLexer)),
+ '#pop'),
+ (r'(-)(' + _dot + r'*\n)',
+ bygroups(Punctuation, using(ScalaLexer)),
+ '#pop'),
+ (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
+ '#pop'),
+ (r'[\w:-]+', Name.Tag, 'tag'),
+ (r'\|', Text, 'eval-or-plain'),
+ ],
+
+ 'tag': [
+ include('css'),
+ (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
+ (r'\[' + _dot + '*?\]', using(ScalaLexer)),
+ (r'\(', Text, 'html-attributes'),
+ (r'/[ \t]*\n', Punctuation, '#pop:2'),
+ (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+ include('eval-or-plain'),
+ ],
+
+ 'plain': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+ (r'(#\{)(' + _dot + '*?)(\})',
+ bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'html-attributes': [
+ (r'\s+', Text),
+ (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'\)', Text, '#pop'),
+ ],
+
+ 'html-attribute-value': [
+ (r'[ \t]+', Text),
+ (r'\w+', Name.Variable, '#pop'),
+ (r'@\w+', Name.Variable.Instance, '#pop'),
+ (r'\$\w+', Name.Variable.Global, '#pop'),
+ (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
+ (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
+ ],
+
+ 'html-comment-block': [
+ (_dot + '+', Comment),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'scaml-comment-block': [
+ (_dot + '+', Comment.Preproc),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'filter-block': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
+ (r'(#\{)(' + _dot + '*?)(\})',
+ bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+ }
diff --git a/pygments/lexers/idl.py b/pygments/lexers/idl.py
new file mode 100644
index 00000000..a1ab1ad0
--- /dev/null
+++ b/pygments/lexers/idl.py
@@ -0,0 +1,262 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.idl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for IDL.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, Number
+
+__all__ = ['IDLLexer']
+
+
+class IDLLexer(RegexLexer):
+ """
+ Pygments Lexer for IDL (Interactive Data Language).
+
+ .. versionadded:: 1.6
+ """
+ name = 'IDL'
+ aliases = ['idl']
+ filenames = ['*.pro']
+ mimetypes = ['text/idl']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ _RESERVED = (
+ 'and', 'begin', 'break', 'case', 'common', 'compile_opt',
+ 'continue', 'do', 'else', 'end', 'endcase', 'endelse',
+ 'endfor', 'endforeach', 'endif', 'endrep', 'endswitch',
+ 'endwhile', 'eq', 'for', 'foreach', 'forward_function',
+ 'function', 'ge', 'goto', 'gt', 'if', 'inherits', 'le',
+ 'lt', 'mod', 'ne', 'not', 'of', 'on_ioerror', 'or', 'pro',
+ 'repeat', 'switch', 'then', 'until', 'while', 'xor')
+ """Reserved words from: http://www.exelisvis.com/docs/reswords.html"""
+
+ _BUILTIN_LIB = (
+ 'abs', 'acos', 'adapt_hist_equal', 'alog', 'alog10',
+ 'amoeba', 'annotate', 'app_user_dir', 'app_user_dir_query',
+ 'arg_present', 'array_equal', 'array_indices', 'arrow',
+ 'ascii_template', 'asin', 'assoc', 'atan', 'axis',
+ 'a_correlate', 'bandpass_filter', 'bandreject_filter',
+ 'barplot', 'bar_plot', 'beseli', 'beselj', 'beselk',
+ 'besely', 'beta', 'bilinear', 'binary_template', 'bindgen',
+ 'binomial', 'bin_date', 'bit_ffs', 'bit_population',
+ 'blas_axpy', 'blk_con', 'box_cursor', 'breakpoint',
+ 'broyden', 'butterworth', 'bytarr', 'byte', 'byteorder',
+ 'bytscl', 'caldat', 'calendar', 'call_external',
+ 'call_function', 'call_method', 'call_procedure', 'canny',
+ 'catch', 'cd', 'cdf_\w*', 'ceil', 'chebyshev',
+ 'check_math',
+ 'chisqr_cvf', 'chisqr_pdf', 'choldc', 'cholsol', 'cindgen',
+ 'cir_3pnt', 'close', 'cluster', 'cluster_tree', 'clust_wts',
+ 'cmyk_convert', 'colorbar', 'colorize_sample',
+ 'colormap_applicable', 'colormap_gradient',
+ 'colormap_rotation', 'colortable', 'color_convert',
+ 'color_exchange', 'color_quan', 'color_range_map', 'comfit',
+ 'command_line_args', 'complex', 'complexarr', 'complexround',
+ 'compute_mesh_normals', 'cond', 'congrid', 'conj',
+ 'constrained_min', 'contour', 'convert_coord', 'convol',
+ 'convol_fft', 'coord2to3', 'copy_lun', 'correlate', 'cos',
+ 'cosh', 'cpu', 'cramer', 'create_cursor', 'create_struct',
+ 'create_view', 'crossp', 'crvlength', 'cti_test',
+ 'ct_luminance', 'cursor', 'curvefit', 'cvttobm', 'cv_coord',
+ 'cw_animate', 'cw_animate_getp', 'cw_animate_load',
+ 'cw_animate_run', 'cw_arcball', 'cw_bgroup', 'cw_clr_index',
+ 'cw_colorsel', 'cw_defroi', 'cw_field', 'cw_filesel',
+ 'cw_form', 'cw_fslider', 'cw_light_editor',
+ 'cw_light_editor_get', 'cw_light_editor_set', 'cw_orient',
+ 'cw_palette_editor', 'cw_palette_editor_get',
+ 'cw_palette_editor_set', 'cw_pdmenu', 'cw_rgbslider',
+ 'cw_tmpl', 'cw_zoom', 'c_correlate', 'dblarr', 'db_exists',
+ 'dcindgen', 'dcomplex', 'dcomplexarr', 'define_key',
+ 'define_msgblk', 'define_msgblk_from_file', 'defroi',
+ 'defsysv', 'delvar', 'dendrogram', 'dendro_plot', 'deriv',
+ 'derivsig', 'determ', 'device', 'dfpmin', 'diag_matrix',
+ 'dialog_dbconnect', 'dialog_message', 'dialog_pickfile',
+ 'dialog_printersetup', 'dialog_printjob',
+ 'dialog_read_image', 'dialog_write_image', 'digital_filter',
+ 'dilate', 'dindgen', 'dissolve', 'dist', 'distance_measure',
+ 'dlm_load', 'dlm_register', 'doc_library', 'double',
+ 'draw_roi', 'edge_dog', 'efont', 'eigenql', 'eigenvec',
+ 'ellipse', 'elmhes', 'emboss', 'empty', 'enable_sysrtn',
+ 'eof', 'eos_\w*', 'erase', 'erf', 'erfc', 'erfcx',
+ 'erode', 'errorplot', 'errplot', 'estimator_filter',
+ 'execute', 'exit', 'exp', 'expand', 'expand_path', 'expint',
+ 'extrac', 'extract_slice', 'factorial', 'fft', 'filepath',
+ 'file_basename', 'file_chmod', 'file_copy', 'file_delete',
+ 'file_dirname', 'file_expand_path', 'file_info',
+ 'file_lines', 'file_link', 'file_mkdir', 'file_move',
+ 'file_poll_input', 'file_readlink', 'file_same',
+ 'file_search', 'file_test', 'file_which', 'findgen',
+ 'finite', 'fix', 'flick', 'float', 'floor', 'flow3',
+ 'fltarr', 'flush', 'format_axis_values', 'free_lun',
+ 'fstat', 'fulstr', 'funct', 'fv_test', 'fx_root',
+ 'fz_roots', 'f_cvf', 'f_pdf', 'gamma', 'gamma_ct',
+ 'gauss2dfit', 'gaussfit', 'gaussian_function', 'gaussint',
+ 'gauss_cvf', 'gauss_pdf', 'gauss_smooth', 'getenv',
+ 'getwindows', 'get_drive_list', 'get_dxf_objects',
+ 'get_kbrd', 'get_login_info', 'get_lun', 'get_screen_size',
+ 'greg2jul', 'grib_\w*', 'grid3', 'griddata',
+ 'grid_input', 'grid_tps', 'gs_iter',
+ 'h5[adfgirst]_\w*', 'h5_browser', 'h5_close',
+ 'h5_create', 'h5_get_libversion', 'h5_open', 'h5_parse',
+ 'hanning', 'hash', 'hdf_\w*', 'heap_free',
+ 'heap_gc', 'heap_nosave', 'heap_refcount', 'heap_save',
+ 'help', 'hilbert', 'histogram', 'hist_2d', 'hist_equal',
+ 'hls', 'hough', 'hqr', 'hsv', 'h_eq_ct', 'h_eq_int',
+ 'i18n_multibytetoutf8', 'i18n_multibytetowidechar',
+ 'i18n_utf8tomultibyte', 'i18n_widechartomultibyte',
+ 'ibeta', 'icontour', 'iconvertcoord', 'idelete', 'identity',
+ 'idlexbr_assistant', 'idlitsys_createtool', 'idl_base64',
+ 'idl_validname', 'iellipse', 'igamma', 'igetcurrent',
+ 'igetdata', 'igetid', 'igetproperty', 'iimage', 'image',
+ 'image_cont', 'image_statistics', 'imaginary', 'imap',
+ 'indgen', 'intarr', 'interpol', 'interpolate',
+ 'interval_volume', 'int_2d', 'int_3d', 'int_tabulated',
+ 'invert', 'ioctl', 'iopen', 'iplot', 'ipolygon',
+ 'ipolyline', 'iputdata', 'iregister', 'ireset', 'iresolve',
+ 'irotate', 'ir_filter', 'isa', 'isave', 'iscale',
+ 'isetcurrent', 'isetproperty', 'ishft', 'isocontour',
+ 'isosurface', 'isurface', 'itext', 'itranslate', 'ivector',
+ 'ivolume', 'izoom', 'i_beta', 'journal', 'json_parse',
+ 'json_serialize', 'jul2greg', 'julday', 'keyword_set',
+ 'krig2d', 'kurtosis', 'kw_test', 'l64indgen', 'label_date',
+ 'label_region', 'ladfit', 'laguerre', 'laplacian',
+ 'la_choldc', 'la_cholmprove', 'la_cholsol', 'la_determ',
+ 'la_eigenproblem', 'la_eigenql', 'la_eigenvec', 'la_elmhes',
+ 'la_gm_linear_model', 'la_hqr', 'la_invert',
+ 'la_least_squares', 'la_least_square_equality',
+ 'la_linear_equation', 'la_ludc', 'la_lumprove', 'la_lusol',
+ 'la_svd', 'la_tridc', 'la_trimprove', 'la_triql',
+ 'la_trired', 'la_trisol', 'least_squares_filter', 'leefilt',
+ 'legend', 'legendre', 'linbcg', 'lindgen', 'linfit',
+ 'linkimage', 'list', 'll_arc_distance', 'lmfit', 'lmgr',
+ 'lngamma', 'lnp_test', 'loadct', 'locale_get',
+ 'logical_and', 'logical_or', 'logical_true', 'lon64arr',
+ 'lonarr', 'long', 'long64', 'lsode', 'ludc', 'lumprove',
+ 'lusol', 'lu_complex', 'machar', 'make_array', 'make_dll',
+ 'make_rt', 'map', 'mapcontinents', 'mapgrid', 'map_2points',
+ 'map_continents', 'map_grid', 'map_image', 'map_patch',
+ 'map_proj_forward', 'map_proj_image', 'map_proj_info',
+ 'map_proj_init', 'map_proj_inverse', 'map_set',
+ 'matrix_multiply', 'matrix_power', 'max', 'md_test',
+ 'mean', 'meanabsdev', 'mean_filter', 'median', 'memory',
+ 'mesh_clip', 'mesh_decimate', 'mesh_issolid', 'mesh_merge',
+ 'mesh_numtriangles', 'mesh_obj', 'mesh_smooth',
+ 'mesh_surfacearea', 'mesh_validate', 'mesh_volume',
+ 'message', 'min', 'min_curve_surf', 'mk_html_help',
+ 'modifyct', 'moment', 'morph_close', 'morph_distance',
+ 'morph_gradient', 'morph_hitormiss', 'morph_open',
+ 'morph_thin', 'morph_tophat', 'multi', 'm_correlate',
+ 'ncdf_\w*', 'newton', 'noise_hurl', 'noise_pick',
+ 'noise_scatter', 'noise_slur', 'norm', 'n_elements',
+ 'n_params', 'n_tags', 'objarr', 'obj_class', 'obj_destroy',
+ 'obj_hasmethod', 'obj_isa', 'obj_new', 'obj_valid',
+ 'online_help', 'on_error', 'open', 'oplot', 'oploterr',
+ 'parse_url', 'particle_trace', 'path_cache', 'path_sep',
+ 'pcomp', 'plot', 'plot3d', 'ploterr', 'plots', 'plot_3dbox',
+ 'plot_field', 'pnt_line', 'point_lun', 'polarplot',
+ 'polar_contour', 'polar_surface', 'poly', 'polyfill',
+ 'polyfillv', 'polygon', 'polyline', 'polyshade', 'polywarp',
+ 'poly_2d', 'poly_area', 'poly_fit', 'popd', 'powell',
+ 'pref_commit', 'pref_get', 'pref_set', 'prewitt', 'primes',
+ 'print', 'printd', 'product', 'profile', 'profiler',
+ 'profiles', 'project_vol', 'psafm', 'pseudo',
+ 'ps_show_fonts', 'ptrarr', 'ptr_free', 'ptr_new',
+ 'ptr_valid', 'pushd', 'p_correlate', 'qgrid3', 'qhull',
+ 'qromb', 'qromo', 'qsimp', 'query_ascii', 'query_bmp',
+ 'query_csv', 'query_dicom', 'query_gif', 'query_image',
+ 'query_jpeg', 'query_jpeg2000', 'query_mrsid', 'query_pict',
+ 'query_png', 'query_ppm', 'query_srf', 'query_tiff',
+ 'query_wav', 'radon', 'randomn', 'randomu', 'ranks',
+ 'rdpix', 'read', 'reads', 'readu', 'read_ascii',
+ 'read_binary', 'read_bmp', 'read_csv', 'read_dicom',
+ 'read_gif', 'read_image', 'read_interfile', 'read_jpeg',
+ 'read_jpeg2000', 'read_mrsid', 'read_pict', 'read_png',
+ 'read_ppm', 'read_spr', 'read_srf', 'read_sylk',
+ 'read_tiff', 'read_wav', 'read_wave', 'read_x11_bitmap',
+ 'read_xwd', 'real_part', 'rebin', 'recall_commands',
+ 'recon3', 'reduce_colors', 'reform', 'region_grow',
+ 'register_cursor', 'regress', 'replicate',
+ 'replicate_inplace', 'resolve_all', 'resolve_routine',
+ 'restore', 'retall', 'return', 'reverse', 'rk4', 'roberts',
+ 'rot', 'rotate', 'round', 'routine_filepath',
+ 'routine_info', 'rs_test', 'r_correlate', 'r_test',
+ 'save', 'savgol', 'scale3', 'scale3d', 'scope_level',
+ 'scope_traceback', 'scope_varfetch', 'scope_varname',
+ 'search2d', 'search3d', 'sem_create', 'sem_delete',
+ 'sem_lock', 'sem_release', 'setenv', 'set_plot',
+ 'set_shading', 'sfit', 'shade_surf', 'shade_surf_irr',
+ 'shade_volume', 'shift', 'shift_diff', 'shmdebug', 'shmmap',
+ 'shmunmap', 'shmvar', 'show3', 'showfont', 'simplex', 'sin',
+ 'sindgen', 'sinh', 'size', 'skewness', 'skip_lun',
+ 'slicer3', 'slide_image', 'smooth', 'sobel', 'socket',
+ 'sort', 'spawn', 'spher_harm', 'sph_4pnt', 'sph_scat',
+ 'spline', 'spline_p', 'spl_init', 'spl_interp', 'sprsab',
+ 'sprsax', 'sprsin', 'sprstp', 'sqrt', 'standardize',
+ 'stddev', 'stop', 'strarr', 'strcmp', 'strcompress',
+ 'streamline', 'stregex', 'stretch', 'string', 'strjoin',
+ 'strlen', 'strlowcase', 'strmatch', 'strmessage', 'strmid',
+ 'strpos', 'strput', 'strsplit', 'strtrim', 'struct_assign',
+ 'struct_hide', 'strupcase', 'surface', 'surfr', 'svdc',
+ 'svdfit', 'svsol', 'swap_endian', 'swap_endian_inplace',
+ 'symbol', 'systime', 's_test', 't3d', 'tag_names', 'tan',
+ 'tanh', 'tek_color', 'temporary', 'tetra_clip',
+ 'tetra_surface', 'tetra_volume', 'text', 'thin', 'threed',
+ 'timegen', 'time_test2', 'tm_test', 'total', 'trace',
+ 'transpose', 'triangulate', 'trigrid', 'triql', 'trired',
+ 'trisol', 'tri_surf', 'truncate_lun', 'ts_coef', 'ts_diff',
+ 'ts_fcast', 'ts_smooth', 'tv', 'tvcrs', 'tvlct', 'tvrd',
+ 'tvscl', 'typename', 't_cvt', 't_pdf', 'uindgen', 'uint',
+ 'uintarr', 'ul64indgen', 'ulindgen', 'ulon64arr', 'ulonarr',
+ 'ulong', 'ulong64', 'uniq', 'unsharp_mask', 'usersym',
+ 'value_locate', 'variance', 'vector', 'vector_field', 'vel',
+ 'velovect', 'vert_t3d', 'voigt', 'voronoi', 'voxel_proj',
+ 'wait', 'warp_tri', 'watershed', 'wdelete', 'wf_draw',
+ 'where', 'widget_base', 'widget_button', 'widget_combobox',
+ 'widget_control', 'widget_displaycontextmen', 'widget_draw',
+ 'widget_droplist', 'widget_event', 'widget_info',
+ 'widget_label', 'widget_list', 'widget_propertysheet',
+ 'widget_slider', 'widget_tab', 'widget_table',
+ 'widget_text', 'widget_tree', 'widget_tree_move',
+ 'widget_window', 'wiener_filter', 'window', 'writeu',
+ 'write_bmp', 'write_csv', 'write_gif', 'write_image',
+ 'write_jpeg', 'write_jpeg2000', 'write_nrif', 'write_pict',
+ 'write_png', 'write_ppm', 'write_spr', 'write_srf',
+ 'write_sylk', 'write_tiff', 'write_wav', 'write_wave',
+ 'wset', 'wshow', 'wtn', 'wv_applet', 'wv_cwt',
+ 'wv_cw_wavelet', 'wv_denoise', 'wv_dwt', 'wv_fn_coiflet',
+ 'wv_fn_daubechies', 'wv_fn_gaussian', 'wv_fn_haar',
+ 'wv_fn_morlet', 'wv_fn_paul', 'wv_fn_symlet',
+ 'wv_import_data', 'wv_import_wavelet', 'wv_plot3d_wps',
+ 'wv_plot_multires', 'wv_pwt', 'wv_tool_denoise',
+ 'xbm_edit', 'xdisplayfile', 'xdxf', 'xfont',
+ 'xinteranimate', 'xloadct', 'xmanager', 'xmng_tmpl',
+ 'xmtool', 'xobjview', 'xobjview_rotate',
+ 'xobjview_write_image', 'xpalette', 'xpcolor', 'xplot3d',
+ 'xregistered', 'xroi', 'xsq_test', 'xsurface', 'xvaredit',
+ 'xvolume', 'xvolume_rotate', 'xvolume_write_image',
+ 'xyouts', 'zoom', 'zoom_24')
+ """Functions from: http://www.exelisvis.com/docs/routines-1.html"""
+
+ tokens = {
+ 'root': [
+ (r'^\s*;.*?\n', Comment.Single),
+ (words(_RESERVED, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(_BUILTIN_LIB, prefix=r'\b', suffix=r'\b'), Name.Builtin),
+ (r'\+=|-=|\^=|\*=|/=|#=|##=|<=|>=|=', Operator),
+ (r'\+\+|--|->|\+|-|##|#|\*|/|<|>|&&|\^|~|\|\||\?|:', Operator),
+ (r'\b(mod=|lt=|le=|eq=|ne=|ge=|gt=|not=|and=|or=|xor=)', Operator),
+ (r'\b(mod|lt|le|eq|ne|ge|gt|not|and|or|xor)\b', Operator),
+ (r'\b[0-9]+(L|B|S|UL|ULL|LL)?\b', Number),
+ (r'.', Text),
+ ]
+ }
diff --git a/pygments/lexers/igor.py b/pygments/lexers/igor.py
new file mode 100644
index 00000000..b0eaf6aa
--- /dev/null
+++ b/pygments/lexers/igor.py
@@ -0,0 +1,280 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.igor
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Igor Pro.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Text, Comment, Keyword, Name, String
+
+__all__ = ['IgorLexer']
+
+
+class IgorLexer(RegexLexer):
+ """
+ Pygments Lexer for Igor Pro procedure files (.ipf).
+ See http://www.wavemetrics.com/ and http://www.igorexchange.com/.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Igor'
+ aliases = ['igor', 'igorpro']
+ filenames = ['*.ipf']
+ mimetypes = ['text/ipf']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ flowControl = (
+ 'if', 'else', 'elseif', 'endif', 'for', 'endfor', 'strswitch', 'switch',
+ 'case', 'default', 'endswitch', 'do', 'while', 'try', 'catch', 'endtry',
+ 'break', 'continue', 'return', 'AbortOnRTE', 'AbortOnValue'
+ )
+ types = (
+ 'variable', 'string', 'constant', 'strconstant', 'NVAR', 'SVAR', 'WAVE',
+ 'STRUCT', 'dfref', 'funcref', 'char', 'uchar', 'int16', 'uint16', 'int32',
+ 'uint32', 'float', 'double'
+ )
+ keywords = (
+ 'override', 'ThreadSafe', 'MultiThread', 'static', 'Proc',
+ 'Picture', 'Prompt', 'DoPrompt', 'macro', 'window', 'function', 'end',
+ 'Structure', 'EndStructure', 'EndMacro', 'Menu', 'SubMenu'
+ )
+ operations = (
+ 'Abort', 'AddFIFOData', 'AddFIFOVectData', 'AddMovieAudio',
+ 'AddMovieFrame', 'APMath', 'Append', 'AppendImage',
+ 'AppendLayoutObject', 'AppendMatrixContour', 'AppendText',
+ 'AppendToGraph', 'AppendToLayout', 'AppendToTable', 'AppendXYZContour',
+ 'AutoPositionWindow', 'BackgroundInfo', 'Beep', 'BoundingBall',
+ 'BrowseURL', 'BuildMenu', 'Button', 'cd', 'Chart', 'CheckBox',
+ 'CheckDisplayed', 'ChooseColor', 'Close', 'CloseMovie', 'CloseProc',
+ 'ColorScale', 'ColorTab2Wave', 'Concatenate', 'ControlBar',
+ 'ControlInfo', 'ControlUpdate', 'ConvexHull', 'Convolve', 'CopyFile',
+ 'CopyFolder', 'CopyScales', 'Correlate', 'CreateAliasShortcut', 'Cross',
+ 'CtrlBackground', 'CtrlFIFO', 'CtrlNamedBackground', 'Cursor',
+ 'CurveFit', 'CustomControl', 'CWT', 'Debugger', 'DebuggerOptions',
+ 'DefaultFont', 'DefaultGuiControls', 'DefaultGuiFont', 'DefineGuide',
+ 'DelayUpdate', 'DeleteFile', 'DeleteFolder', 'DeletePoints',
+ 'Differentiate', 'dir', 'Display', 'DisplayHelpTopic',
+ 'DisplayProcedure', 'DoAlert', 'DoIgorMenu', 'DoUpdate', 'DoWindow',
+ 'DoXOPIdle', 'DrawAction', 'DrawArc', 'DrawBezier', 'DrawLine',
+ 'DrawOval', 'DrawPICT', 'DrawPoly', 'DrawRect', 'DrawRRect', 'DrawText',
+ 'DSPDetrend', 'DSPPeriodogram', 'Duplicate', 'DuplicateDataFolder',
+ 'DWT', 'EdgeStats', 'Edit', 'ErrorBars', 'Execute', 'ExecuteScriptText',
+ 'ExperimentModified', 'Extract', 'FastGaussTransform', 'FastOp',
+ 'FBinRead', 'FBinWrite', 'FFT', 'FIFO2Wave', 'FIFOStatus', 'FilterFIR',
+ 'FilterIIR', 'FindLevel', 'FindLevels', 'FindPeak', 'FindPointsInPoly',
+ 'FindRoots', 'FindSequence', 'FindValue', 'FPClustering', 'fprintf',
+ 'FReadLine', 'FSetPos', 'FStatus', 'FTPDelete', 'FTPDownload',
+ 'FTPUpload', 'FuncFit', 'FuncFitMD', 'GetAxis', 'GetFileFolderInfo',
+ 'GetLastUserMenuInfo', 'GetMarquee', 'GetSelection', 'GetWindow',
+ 'GraphNormal', 'GraphWaveDraw', 'GraphWaveEdit', 'Grep', 'GroupBox',
+ 'Hanning', 'HideIgorMenus', 'HideInfo', 'HideProcedures', 'HideTools',
+ 'HilbertTransform', 'Histogram', 'IFFT', 'ImageAnalyzeParticles',
+ 'ImageBlend', 'ImageBoundaryToMask', 'ImageEdgeDetection',
+ 'ImageFileInfo', 'ImageFilter', 'ImageFocus', 'ImageGenerateROIMask',
+ 'ImageHistModification', 'ImageHistogram', 'ImageInterpolate',
+ 'ImageLineProfile', 'ImageLoad', 'ImageMorphology', 'ImageRegistration',
+ 'ImageRemoveBackground', 'ImageRestore', 'ImageRotate', 'ImageSave',
+ 'ImageSeedFill', 'ImageSnake', 'ImageStats', 'ImageThreshold',
+ 'ImageTransform', 'ImageUnwrapPhase', 'ImageWindow', 'IndexSort',
+ 'InsertPoints', 'Integrate', 'IntegrateODE', 'Interp3DPath',
+ 'Interpolate3D', 'KillBackground', 'KillControl', 'KillDataFolder',
+ 'KillFIFO', 'KillFreeAxis', 'KillPath', 'KillPICTs', 'KillStrings',
+ 'KillVariables', 'KillWaves', 'KillWindow', 'KMeans', 'Label', 'Layout',
+ 'Legend', 'LinearFeedbackShiftRegister', 'ListBox', 'LoadData',
+ 'LoadPackagePreferences', 'LoadPICT', 'LoadWave', 'Loess',
+ 'LombPeriodogram', 'Make', 'MakeIndex', 'MarkPerfTestTime',
+ 'MatrixConvolve', 'MatrixCorr', 'MatrixEigenV', 'MatrixFilter',
+ 'MatrixGaussJ', 'MatrixInverse', 'MatrixLinearSolve',
+ 'MatrixLinearSolveTD', 'MatrixLLS', 'MatrixLUBkSub', 'MatrixLUD',
+ 'MatrixMultiply', 'MatrixOP', 'MatrixSchur', 'MatrixSolve',
+ 'MatrixSVBkSub', 'MatrixSVD', 'MatrixTranspose', 'MeasureStyledText',
+ 'Modify', 'ModifyContour', 'ModifyControl', 'ModifyControlList',
+ 'ModifyFreeAxis', 'ModifyGraph', 'ModifyImage', 'ModifyLayout',
+ 'ModifyPanel', 'ModifyTable', 'ModifyWaterfall', 'MoveDataFolder',
+ 'MoveFile', 'MoveFolder', 'MoveString', 'MoveSubwindow', 'MoveVariable',
+ 'MoveWave', 'MoveWindow', 'NeuralNetworkRun', 'NeuralNetworkTrain',
+ 'NewDataFolder', 'NewFIFO', 'NewFIFOChan', 'NewFreeAxis', 'NewImage',
+ 'NewLayout', 'NewMovie', 'NewNotebook', 'NewPanel', 'NewPath',
+ 'NewWaterfall', 'Note', 'Notebook', 'NotebookAction', 'Open',
+ 'OpenNotebook', 'Optimize', 'ParseOperationTemplate', 'PathInfo',
+ 'PauseForUser', 'PauseUpdate', 'PCA', 'PlayMovie', 'PlayMovieAction',
+ 'PlaySnd', 'PlaySound', 'PopupContextualMenu', 'PopupMenu',
+ 'Preferences', 'PrimeFactors', 'Print', 'printf', 'PrintGraphs',
+ 'PrintLayout', 'PrintNotebook', 'PrintSettings', 'PrintTable',
+ 'Project', 'PulseStats', 'PutScrapText', 'pwd', 'Quit',
+ 'RatioFromNumber', 'Redimension', 'Remove', 'RemoveContour',
+ 'RemoveFromGraph', 'RemoveFromLayout', 'RemoveFromTable', 'RemoveImage',
+ 'RemoveLayoutObjects', 'RemovePath', 'Rename', 'RenameDataFolder',
+ 'RenamePath', 'RenamePICT', 'RenameWindow', 'ReorderImages',
+ 'ReorderTraces', 'ReplaceText', 'ReplaceWave', 'Resample',
+ 'ResumeUpdate', 'Reverse', 'Rotate', 'Save', 'SaveData',
+ 'SaveExperiment', 'SaveGraphCopy', 'SaveNotebook',
+ 'SavePackagePreferences', 'SavePICT', 'SaveTableCopy',
+ 'SetActiveSubwindow', 'SetAxis', 'SetBackground', 'SetDashPattern',
+ 'SetDataFolder', 'SetDimLabel', 'SetDrawEnv', 'SetDrawLayer',
+ 'SetFileFolderInfo', 'SetFormula', 'SetIgorHook', 'SetIgorMenuMode',
+ 'SetIgorOption', 'SetMarquee', 'SetProcessSleep', 'SetRandomSeed',
+ 'SetScale', 'SetVariable', 'SetWaveLock', 'SetWindow', 'ShowIgorMenus',
+ 'ShowInfo', 'ShowTools', 'Silent', 'Sleep', 'Slider', 'Smooth',
+ 'SmoothCustom', 'Sort', 'SoundInRecord', 'SoundInSet',
+ 'SoundInStartChart', 'SoundInStatus', 'SoundInStopChart',
+ 'SphericalInterpolate', 'SphericalTriangulate', 'SplitString',
+ 'sprintf', 'sscanf', 'Stack', 'StackWindows',
+ 'StatsAngularDistanceTest', 'StatsANOVA1Test', 'StatsANOVA2NRTest',
+ 'StatsANOVA2RMTest', 'StatsANOVA2Test', 'StatsChiTest',
+ 'StatsCircularCorrelationTest', 'StatsCircularMeans',
+ 'StatsCircularMoments', 'StatsCircularTwoSampleTest',
+ 'StatsCochranTest', 'StatsContingencyTable', 'StatsDIPTest',
+ 'StatsDunnettTest', 'StatsFriedmanTest', 'StatsFTest',
+ 'StatsHodgesAjneTest', 'StatsJBTest', 'StatsKendallTauTest',
+ 'StatsKSTest', 'StatsKWTest', 'StatsLinearCorrelationTest',
+ 'StatsLinearRegression', 'StatsMultiCorrelationTest',
+ 'StatsNPMCTest', 'StatsNPNominalSRTest', 'StatsQuantiles',
+ 'StatsRankCorrelationTest', 'StatsResample', 'StatsSample',
+ 'StatsScheffeTest', 'StatsSignTest', 'StatsSRTest', 'StatsTTest',
+ 'StatsTukeyTest', 'StatsVariancesTest', 'StatsWatsonUSquaredTest',
+ 'StatsWatsonWilliamsTest', 'StatsWheelerWatsonTest',
+ 'StatsWilcoxonRankTest', 'StatsWRCorrelationTest', 'String',
+ 'StructGet', 'StructPut', 'TabControl', 'Tag', 'TextBox', 'Tile',
+ 'TileWindows', 'TitleBox', 'ToCommandLine', 'ToolsGrid',
+ 'Triangulate3d', 'Unwrap', 'ValDisplay', 'Variable', 'WaveMeanStdv',
+ 'WaveStats', 'WaveTransform', 'wfprintf', 'WignerTransform',
+ 'WindowFunction',
+ )
+ functions = (
+ 'abs', 'acos', 'acosh', 'AiryA', 'AiryAD', 'AiryB', 'AiryBD', 'alog',
+ 'area', 'areaXY', 'asin', 'asinh', 'atan', 'atan2', 'atanh',
+ 'AxisValFromPixel', 'Besseli', 'Besselj', 'Besselk', 'Bessely', 'bessi',
+ 'bessj', 'bessk', 'bessy', 'beta', 'betai', 'BinarySearch',
+ 'BinarySearchInterp', 'binomial', 'binomialln', 'binomialNoise', 'cabs',
+ 'CaptureHistoryStart', 'ceil', 'cequal', 'char2num', 'chebyshev',
+ 'chebyshevU', 'CheckName', 'cmplx', 'cmpstr', 'conj', 'ContourZ', 'cos',
+ 'cosh', 'cot', 'CountObjects', 'CountObjectsDFR', 'cpowi',
+ 'CreationDate', 'csc', 'DataFolderExists', 'DataFolderRefsEqual',
+ 'DataFolderRefStatus', 'date2secs', 'datetime', 'DateToJulian',
+ 'Dawson', 'DDEExecute', 'DDEInitiate', 'DDEPokeString', 'DDEPokeWave',
+ 'DDERequestWave', 'DDEStatus', 'DDETerminate', 'defined', 'deltax', 'digamma',
+ 'DimDelta', 'DimOffset', 'DimSize', 'ei', 'enoise', 'equalWaves', 'erf',
+ 'erfc', 'exists', 'exp', 'expInt', 'expNoise', 'factorial', 'fakedata',
+ 'faverage', 'faverageXY', 'FindDimLabel', 'FindListItem', 'floor',
+ 'FontSizeHeight', 'FontSizeStringWidth', 'FresnelCos', 'FresnelSin',
+ 'gamma', 'gammaInc', 'gammaNoise', 'gammln', 'gammp', 'gammq', 'Gauss',
+ 'Gauss1D', 'Gauss2D', 'gcd', 'GetDefaultFontSize',
+ 'GetDefaultFontStyle', 'GetKeyState', 'GetRTError', 'gnoise',
+ 'GrepString', 'hcsr', 'hermite', 'hermiteGauss', 'HyperG0F1',
+ 'HyperG1F1', 'HyperG2F1', 'HyperGNoise', 'HyperGPFQ', 'IgorVersion',
+ 'ilim', 'imag', 'Inf', 'Integrate1D', 'interp', 'Interp2D', 'Interp3D',
+ 'inverseERF', 'inverseERFC', 'ItemsInList', 'jlim', 'Laguerre',
+ 'LaguerreA', 'LaguerreGauss', 'leftx', 'LegendreA', 'limit', 'ln',
+ 'log', 'logNormalNoise', 'lorentzianNoise', 'magsqr', 'MandelbrotPoint',
+ 'MarcumQ', 'MatrixDet', 'MatrixDot', 'MatrixRank', 'MatrixTrace', 'max',
+ 'mean', 'min', 'mod', 'ModDate', 'NaN', 'norm', 'NumberByKey',
+ 'numpnts', 'numtype', 'NumVarOrDefault', 'NVAR_Exists', 'p2rect',
+ 'ParamIsDefault', 'pcsr', 'Pi', 'PixelFromAxisVal', 'pnt2x',
+ 'poissonNoise', 'poly', 'poly2D', 'PolygonArea', 'qcsr', 'r2polar',
+ 'real', 'rightx', 'round', 'sawtooth', 'ScreenResolution', 'sec',
+ 'SelectNumber', 'sign', 'sin', 'sinc', 'sinh', 'SphericalBessJ',
+ 'SphericalBessJD', 'SphericalBessY', 'SphericalBessYD',
+ 'SphericalHarmonics', 'sqrt', 'StartMSTimer', 'StatsBetaCDF',
+ 'StatsBetaPDF', 'StatsBinomialCDF', 'StatsBinomialPDF',
+ 'StatsCauchyCDF', 'StatsCauchyPDF', 'StatsChiCDF', 'StatsChiPDF',
+ 'StatsCMSSDCDF', 'StatsCorrelation', 'StatsDExpCDF', 'StatsDExpPDF',
+ 'StatsErlangCDF', 'StatsErlangPDF', 'StatsErrorPDF', 'StatsEValueCDF',
+ 'StatsEValuePDF', 'StatsExpCDF', 'StatsExpPDF', 'StatsFCDF',
+ 'StatsFPDF', 'StatsFriedmanCDF', 'StatsGammaCDF', 'StatsGammaPDF',
+ 'StatsGeometricCDF', 'StatsGeometricPDF', 'StatsHyperGCDF',
+ 'StatsHyperGPDF', 'StatsInvBetaCDF', 'StatsInvBinomialCDF',
+ 'StatsInvCauchyCDF', 'StatsInvChiCDF', 'StatsInvCMSSDCDF',
+ 'StatsInvDExpCDF', 'StatsInvEValueCDF', 'StatsInvExpCDF',
+ 'StatsInvFCDF', 'StatsInvFriedmanCDF', 'StatsInvGammaCDF',
+ 'StatsInvGeometricCDF', 'StatsInvKuiperCDF', 'StatsInvLogisticCDF',
+ 'StatsInvLogNormalCDF', 'StatsInvMaxwellCDF', 'StatsInvMooreCDF',
+ 'StatsInvNBinomialCDF', 'StatsInvNCChiCDF', 'StatsInvNCFCDF',
+ 'StatsInvNormalCDF', 'StatsInvParetoCDF', 'StatsInvPoissonCDF',
+ 'StatsInvPowerCDF', 'StatsInvQCDF', 'StatsInvQpCDF',
+ 'StatsInvRayleighCDF', 'StatsInvRectangularCDF', 'StatsInvSpearmanCDF',
+ 'StatsInvStudentCDF', 'StatsInvTopDownCDF', 'StatsInvTriangularCDF',
+ 'StatsInvUsquaredCDF', 'StatsInvVonMisesCDF', 'StatsInvWeibullCDF',
+ 'StatsKuiperCDF', 'StatsLogisticCDF', 'StatsLogisticPDF',
+ 'StatsLogNormalCDF', 'StatsLogNormalPDF', 'StatsMaxwellCDF',
+ 'StatsMaxwellPDF', 'StatsMedian', 'StatsMooreCDF', 'StatsNBinomialCDF',
+ 'StatsNBinomialPDF', 'StatsNCChiCDF', 'StatsNCChiPDF', 'StatsNCFCDF',
+ 'StatsNCFPDF', 'StatsNCTCDF', 'StatsNCTPDF', 'StatsNormalCDF',
+ 'StatsNormalPDF', 'StatsParetoCDF', 'StatsParetoPDF', 'StatsPermute',
+ 'StatsPoissonCDF', 'StatsPoissonPDF', 'StatsPowerCDF',
+ 'StatsPowerNoise', 'StatsPowerPDF', 'StatsQCDF', 'StatsQpCDF',
+ 'StatsRayleighCDF', 'StatsRayleighPDF', 'StatsRectangularCDF',
+ 'StatsRectangularPDF', 'StatsRunsCDF', 'StatsSpearmanRhoCDF',
+ 'StatsStudentCDF', 'StatsStudentPDF', 'StatsTopDownCDF',
+ 'StatsTriangularCDF', 'StatsTriangularPDF', 'StatsTrimmedMean',
+ 'StatsUSquaredCDF', 'StatsVonMisesCDF', 'StatsVonMisesNoise',
+ 'StatsVonMisesPDF', 'StatsWaldCDF', 'StatsWaldPDF', 'StatsWeibullCDF',
+ 'StatsWeibullPDF', 'StopMSTimer', 'str2num', 'stringCRC', 'stringmatch',
+ 'strlen', 'strsearch', 'StudentA', 'StudentT', 'sum', 'SVAR_Exists',
+ 'TagVal', 'tan', 'tanh', 'ThreadGroupCreate', 'ThreadGroupRelease',
+ 'ThreadGroupWait', 'ThreadProcessorCount', 'ThreadReturnValue', 'ticks',
+ 'trunc', 'Variance', 'vcsr', 'WaveCRC', 'WaveDims', 'WaveExists',
+ 'WaveMax', 'WaveMin', 'WaveRefsEqual', 'WaveType', 'WhichListItem',
+ 'WinType', 'WNoise', 'x2pnt', 'xcsr', 'zcsr', 'ZernikeR',
+ )
+ functions += (
+ 'AddListItem', 'AnnotationInfo', 'AnnotationList', 'AxisInfo',
+ 'AxisList', 'CaptureHistory', 'ChildWindowList', 'CleanupName',
+ 'ContourInfo', 'ContourNameList', 'ControlNameList', 'CsrInfo',
+ 'CsrWave', 'CsrXWave', 'CTabList', 'DataFolderDir', 'date',
+ 'DDERequestString', 'FontList', 'FuncRefInfo', 'FunctionInfo',
+ 'FunctionList', 'FunctionPath', 'GetDataFolder', 'GetDefaultFont',
+ 'GetDimLabel', 'GetErrMessage', 'GetFormula',
+ 'GetIndependentModuleName', 'GetIndexedObjName', 'GetIndexedObjNameDFR',
+ 'GetRTErrMessage', 'GetRTStackInfo', 'GetScrapText', 'GetUserData',
+ 'GetWavesDataFolder', 'GrepList', 'GuideInfo', 'GuideNameList', 'Hash',
+ 'IgorInfo', 'ImageInfo', 'ImageNameList', 'IndexedDir', 'IndexedFile',
+ 'JulianToDate', 'LayoutInfo', 'ListMatch', 'LowerStr', 'MacroList',
+ 'NameOfWave', 'note', 'num2char', 'num2istr', 'num2str',
+ 'OperationList', 'PadString', 'ParseFilePath', 'PathList', 'PICTInfo',
+ 'PICTList', 'PossiblyQuoteName', 'ProcedureText', 'RemoveByKey',
+ 'RemoveEnding', 'RemoveFromList', 'RemoveListItem',
+ 'ReplaceNumberByKey', 'ReplaceString', 'ReplaceStringByKey',
+ 'Secs2Date', 'Secs2Time', 'SelectString', 'SortList',
+ 'SpecialCharacterInfo', 'SpecialCharacterList', 'SpecialDirPath',
+ 'StringByKey', 'StringFromList', 'StringList', 'StrVarOrDefault',
+ 'TableInfo', 'TextFile', 'ThreadGroupGetDF', 'time', 'TraceFromPixel',
+ 'TraceInfo', 'TraceNameList', 'UniqueName', 'UnPadString', 'UpperStr',
+ 'VariableList', 'WaveInfo', 'WaveList', 'WaveName', 'WaveUnits',
+ 'WinList', 'WinName', 'WinRecreation', 'XWaveName',
+ 'ContourNameToWaveRef', 'CsrWaveRef', 'CsrXWaveRef',
+ 'ImageNameToWaveRef', 'NewFreeWave', 'TagWaveRef', 'TraceNameToWaveRef',
+ 'WaveRefIndexed', 'XWaveRefFromTrace', 'GetDataFolderDFR',
+ 'GetWavesDataFolderDFR', 'NewFreeDataFolder', 'ThreadGroupGetDFR',
+ )
+
+ tokens = {
+ 'root': [
+ (r'//.*$', Comment.Single),
+ (r'"([^"\\]|\\.)*"', String),
+ # Flow Control.
+ (words(flowControl, prefix=r'\b', suffix=r'\b'), Keyword),
+ # Types.
+ (words(types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ # Keywords.
+ (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
+ # Built-in operations.
+ (words(operations, prefix=r'\b', suffix=r'\b'), Name.Class),
+ # Built-in functions.
+ (words(functions, prefix=r'\b', suffix=r'\b'), Name.Function),
+ # Compiler directives.
+ (r'^#(include|pragma|define|ifdef|ifndef|endif)',
+ Name.Decorator),
+ (r'[^a-z"/]+$', Text),
+ (r'.', Text),
+ ],
+ }
diff --git a/pygments/lexers/inferno.py b/pygments/lexers/inferno.py
new file mode 100644
index 00000000..bfbea571
--- /dev/null
+++ b/pygments/lexers/inferno.py
@@ -0,0 +1,96 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.inferno
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Inferno OS and related languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default
+from pygments.token import Punctuation, Text, Comment, Operator, Keyword, \
+ Name, String, Number
+
+__all__ = ['LimboLexer']
+
+
+class LimboLexer(RegexLexer):
+ """
+ Lexer for the `Limbo programming language <http://www.vitanuova.com/inferno/limbo.html>`_
+
+ TODO:
+ - maybe implement better var declaration highlighting
+ - some simple syntax error highlighting
+
+ .. versionadded:: 2.0
+ """
+ name = 'Limbo'
+ aliases = ['limbo']
+ filenames = ['*.b']
+ mimetypes = ['text/limbo']
+
+ tokens = {
+ 'whitespace': [
+ (r'^(\s*)([a-zA-Z_]\w*:(\s*)\n)',
+ bygroups(Text, Name.Label)),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'#(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
+ r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\', String), # stray backslash
+ ],
+ 'statements': [
+ (r'"', String, 'string'),
+ (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])', Number.Float),
+ (r'16r[0-9a-fA-F]+', Number.Hex),
+ (r'8r[0-7]+', Number.Oct),
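+ # Integer literals, optionally with a radix prefix (e.g. 2r101, 16r99).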
+ (r'((([1-3]\d)|([2-9]))r)?(\d+)', Number.Integer),
+ (r'[()\[\],.]', Punctuation),
+ (r'[~!%^&*+=|?:<>/-]|(->)|(<-)|(=>)|(::)', Operator),
+ (r'(alt|break|case|continue|cyclic|do|else|exit|'
+ r'for|hd|if|implement|import|include|len|load|or|'
+ r'pick|return|spawn|tagof|tl|to|while)\b', Keyword),
+ (r'(byte|int|big|real|string|array|chan|list|adt'
+ r'|fn|ref|of|module|self|type)\b', Keyword.Type),
+ (r'(con|iota|nil)\b', Keyword.Constant),
+ ('[a-zA-Z_]\w*', Name),
+ ],
+ 'statement' : [
+ include('whitespace'),
+ include('statements'),
+ ('[{}]', Punctuation),
+ (';', Punctuation, '#pop'),
+ ],
+ 'root': [
+ include('whitespace'),
+ default('statement'),
+ ],
+ }
+
+ def analyse_text(text):
+ # Any limbo module implements something
+ if re.search(r'^implement \w+;', text, re.MULTILINE):
+ return 0.7
+
+# TODO:
+# - Make lexers for:
+# - asm sources
+# - man pages
+# - mkfiles
+# - module definitions
+# - namespace definitions
+# - shell scripts
+# - maybe keyfiles and fonts
+# they all seem to be quite similar to their equivalents
+# from the Unix world, so there should not be a lot of problems
diff --git a/pygments/lexers/installers.py b/pygments/lexers/installers.py
new file mode 100644
index 00000000..c436afed
--- /dev/null
+++ b/pygments/lexers/installers.py
@@ -0,0 +1,322 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.installers
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for installer/packager DSLs and formats.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Punctuation, Generic, Number, Whitespace
+
+__all__ = ['NSISLexer', 'RPMSpecLexer', 'SourcesListLexer',
+ 'DebianControlLexer']
+
+
+class NSISLexer(RegexLexer):
+ """
+ For `NSIS <http://nsis.sourceforge.net/>`_ scripts.
+
+ .. versionadded:: 1.6
+ """
+ name = 'NSIS'
+ aliases = ['nsis', 'nsi', 'nsh']
+ filenames = ['*.nsi', '*.nsh']
+ mimetypes = ['text/x-nsis']
+
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'[;#].*\n', Comment),
+ (r"'.*?'", String.Single),
+ (r'"', String.Double, 'str_double'),
+ (r'`', String.Backtick, 'str_backtick'),
+ include('macro'),
+ include('interpol'),
+ include('basic'),
+ (r'\$\{[a-z_|][\w|]*\}', Keyword.Pseudo),
+ (r'/[a-z_]\w*', Name.Attribute),
+ ('.', Text),
+ ],
+ 'basic': [
+ (r'(\n)(Function)(\s+)([._a-z][.\w]*)\b',
+ bygroups(Text, Keyword, Text, Name.Function)),
+ (r'\b([_a-z]\w*)(::)([a-z][a-z0-9]*)\b',
+ bygroups(Keyword.Namespace, Punctuation, Name.Function)),
+ (r'\b([_a-z]\w*)(:)', bygroups(Name.Label, Punctuation)),
+ (r'(\b[ULS]|\B)([!<>=]?=|\<\>?|\>)\B', Operator),
+ (r'[|+-]', Operator),
+ (r'\\', Punctuation),
+ (r'\b(Abort|Add(?:BrandingImage|Size)|'
+ r'Allow(?:RootDirInstall|SkipFiles)|AutoCloseWindow|'
+ r'BG(?:Font|Gradient)|BrandingText|BringToFront|Call(?:InstDLL)?|'
+ r'(?:Sub)?Caption|ChangeUI|CheckBitmap|ClearErrors|CompletedText|'
+ r'ComponentText|CopyFiles|CRCCheck|'
+ r'Create(?:Directory|Font|Shortcut)|Delete(?:INI(?:Sec|Str)|'
+ r'Reg(?:Key|Value))?|DetailPrint|DetailsButtonText|'
+ r'Dir(?:Show|Text|Var|Verify)|(?:Disabled|Enabled)Bitmap|'
+ r'EnableWindow|EnumReg(?:Key|Value)|Exch|Exec(?:Shell|Wait)?|'
+ r'ExpandEnvStrings|File(?:BufSize|Close|ErrorText|Open|'
+ r'Read(?:Byte)?|Seek|Write(?:Byte)?)?|'
+ r'Find(?:Close|First|Next|Window)|FlushINI|Function(?:End)?|'
+ r'Get(?:CurInstType|CurrentAddress|DlgItem|DLLVersion(?:Local)?|'
+ r'ErrorLevel|FileTime(?:Local)?|FullPathName|FunctionAddress|'
+ r'InstDirError|LabelAddress|TempFileName)|'
+ r'Goto|HideWindow|Icon|'
+ r'If(?:Abort|Errors|FileExists|RebootFlag|Silent)|'
+ r'InitPluginsDir|Install(?:ButtonText|Colors|Dir(?:RegKey)?)|'
+ r'Inst(?:ProgressFlags|Type(?:[GS]etText)?)|Int(?:CmpU?|Fmt|Op)|'
+ r'IsWindow|LangString(?:UP)?|'
+ r'License(?:BkColor|Data|ForceSelection|LangString|Text)|'
+ r'LoadLanguageFile|LockWindow|Log(?:Set|Text)|MessageBox|'
+ r'MiscButtonText|Name|Nop|OutFile|(?:Uninst)?Page(?:Ex(?:End)?)?|'
+ r'PluginDir|Pop|Push|Quit|Read(?:(?:Env|INI|Reg)Str|RegDWORD)|'
+ r'Reboot|(?:Un)?RegDLL|Rename|RequestExecutionLevel|ReserveFile|'
+ r'Return|RMDir|SearchPath|Section(?:Divider|End|'
+ r'(?:(?:Get|Set)(?:Flags|InstTypes|Size|Text))|Group(?:End)?|In)?|'
+ r'SendMessage|Set(?:AutoClose|BrandingImage|Compress(?:ionLevel|'
+ r'or(?:DictSize)?)?|CtlColors|CurInstType|DatablockOptimize|'
+ r'DateSave|Details(?:Print|View)|Error(?:s|Level)|FileAttributes|'
+ r'Font|OutPath|Overwrite|PluginUnload|RebootFlag|ShellVarContext|'
+ r'Silent|StaticBkColor)|'
+ r'Show(?:(?:I|Uni)nstDetails|Window)|Silent(?:Un)?Install|Sleep|'
+ r'SpaceTexts|Str(?:CmpS?|Cpy|Len)|SubSection(?:End)?|'
+ r'Uninstall(?:ButtonText|(?:Sub)?Caption|EXEName|Icon|Text)|'
+ r'UninstPage|Var|VI(?:AddVersionKey|ProductVersion)|WindowIcon|'
+ r'Write(?:INIStr|Reg(:?Bin|DWORD|(?:Expand)?Str)|Uninstaller)|'
+ r'XPStyle)\b', Keyword),
+ (r'\b(CUR|END|(?:FILE_ATTRIBUTE_)?'
+ r'(?:ARCHIVE|HIDDEN|NORMAL|OFFLINE|READONLY|SYSTEM|TEMPORARY)|'
+ r'HK(CC|CR|CU|DD|LM|PD|U)|'
+ r'HKEY_(?:CLASSES_ROOT|CURRENT_(?:CONFIG|USER)|DYN_DATA|'
+ r'LOCAL_MACHINE|PERFORMANCE_DATA|USERS)|'
+ r'ID(?:ABORT|CANCEL|IGNORE|NO|OK|RETRY|YES)|'
+ r'MB_(?:ABORTRETRYIGNORE|DEFBUTTON[1-4]|'
+ r'ICON(?:EXCLAMATION|INFORMATION|QUESTION|STOP)|'
+ r'OK(?:CANCEL)?|RETRYCANCEL|RIGHT|SETFOREGROUND|TOPMOST|USERICON|'
+ r'YESNO(?:CANCEL)?)|SET|SHCTX|'
+ r'SW_(?:HIDE|SHOW(?:MAXIMIZED|MINIMIZED|NORMAL))|'
+ r'admin|all|auto|both|bottom|bzip2|checkbox|colored|current|false|'
+ r'force|hide|highest|if(?:diff|newer)|lastused|leave|left|'
+ r'listonly|lzma|nevershow|none|normal|off|on|pop|push|'
+ r'radiobuttons|right|show|silent|silentlog|smooth|textonly|top|'
+ r'true|try|user|zlib)\b', Name.Constant),
+ ],
+ 'macro': [
+ (r'\!(addincludedir(?:dir)?|addplugindir|appendfile|cd|define|'
+ r'delfile|echo(?:message)?|else|endif|error|execute|'
+ r'if(?:macro)?n?(?:def)?|include|insertmacro|macro(?:end)?|packhdr|'
+ r'search(?:parse|replace)|system|tempfile|undef|verbose|'
+ r'warning)\b', Comment.Preproc),
+ ],
+ 'interpol': [
+ (r'\$(R?[0-9])', Name.Builtin.Pseudo), # registers
+ (r'\$(ADMINTOOLS|APPDATA|CDBURN_AREA|COOKIES|COMMONFILES(?:32|64)|'
+ r'DESKTOP|DOCUMENTS|EXE(?:DIR|FILE|PATH)|FAVORITES|FONTS|HISTORY|'
+ r'HWNDPARENT|INTERNET_CACHE|LOCALAPPDATA|MUSIC|NETHOOD|PICTURES|'
+ r'PLUGINSDIR|PRINTHOOD|PROFILE|PROGRAMFILES(?:32|64)|QUICKLAUNCH|'
+ r'RECENT|RESOURCES(?:_LOCALIZED)?|SENDTO|SM(?:PROGRAMS|STARTUP)|'
+ r'STARTMENU|SYSDIR|TEMP(?:LATES)?|VIDEOS|WINDIR|\{NSISDIR\})',
+ Name.Builtin),
+ (r'\$(CMDLINE|INSTDIR|OUTDIR|LANGUAGE)', Name.Variable.Global),
+ (r'\$[a-z_]\w*', Name.Variable),
+ ],
+ 'str_double': [
+ (r'"', String, '#pop'),
+ (r'\$(\\[nrt"]|\$)', String.Escape),
+ include('interpol'),
+ (r'.', String.Double),
+ ],
+ 'str_backtick': [
+ (r'`', String, '#pop'),
+ (r'\$(\\[nrt"]|\$)', String.Escape),
+ include('interpol'),
+ (r'.', String.Double),
+ ],
+ }
+
+
+class RPMSpecLexer(RegexLexer):
+ """
+ For RPM ``.spec`` files.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'RPMSpec'
+ aliases = ['spec']
+ filenames = ['*.spec']
+ mimetypes = ['text/x-rpm-spec']
+
+ _directives = ('(?:package|prep|build|install|clean|check|pre[a-z]*|'
+ 'post[a-z]*|trigger[a-z]*|files)')
+
+ tokens = {
+ 'root': [
+ (r'#.*\n', Comment),
+ include('basic'),
+ ],
+ 'description': [
+ (r'^(%' + _directives + ')(.*)$',
+ bygroups(Name.Decorator, Text), '#pop'),
+ (r'\n', Text),
+ (r'.', Text),
+ ],
+ 'changelog': [
+ (r'\*.*\n', Generic.Subheading),
+ (r'^(%' + _directives + ')(.*)$',
+ bygroups(Name.Decorator, Text), '#pop'),
+ (r'\n', Text),
+ (r'.', Text),
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ include('interpol'),
+ (r'.', String.Double),
+ ],
+ 'basic': [
+ include('macro'),
+ (r'(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|'
+ r'Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|'
+ r'Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|'
+ r'Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$',
+ bygroups(Generic.Heading, Punctuation, using(this))),
+ (r'^%description', Name.Decorator, 'description'),
+ (r'^%changelog', Name.Decorator, 'changelog'),
+ (r'^(%' + _directives + ')(.*)$', bygroups(Name.Decorator, Text)),
+ (r'%(attr|defattr|dir|doc(?:dir)?|setup|config(?:ure)?|'
+ r'make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)',
+ Keyword),
+ include('interpol'),
+ (r"'.*?'", String.Single),
+ (r'"', String.Double, 'string'),
+ (r'.', Text),
+ ],
+ 'macro': [
+ (r'%define.*\n', Comment.Preproc),
+ (r'%\{\!\?.*%define.*\}', Comment.Preproc),
+ (r'(%(?:if(?:n?arch)?|else(?:if)?|endif))(.*)$',
+ bygroups(Comment.Preproc, Text)),
+ ],
+ 'interpol': [
+ (r'%\{?__[a-z_]+\}?', Name.Function),
+ (r'%\{?_([a-z_]+dir|[a-z_]+path|prefix)\}?', Keyword.Pseudo),
+ (r'%\{\?\w+\}', Name.Variable),
+ (r'\$\{?RPM_[A-Z0-9_]+\}?', Name.Variable.Global),
+ (r'%\{[a-zA-Z]\w+\}', Keyword.Constant),
+ ]
+ }
+
+
+class SourcesListLexer(RegexLexer):
+ """
+ Lexer that highlights debian sources.list files.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'Debian Sourcelist'
+ aliases = ['sourceslist', 'sources.list', 'debsources']
+ filenames = ['sources.list']
+ mimetypes = ['application/x-debian-sourceslist']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'#.*?$', Comment),
+ (r'^(deb(?:-src)?)(\s+)',
+ bygroups(Keyword, Text), 'distribution')
+ ],
+ 'distribution': [
+ (r'#.*?$', Comment, '#pop'),
+ (r'\$\(ARCH\)', Name.Variable),
+ (r'[^\s$[]+', String),
+ (r'\[', String.Other, 'escaped-distribution'),
+ (r'\$', String),
+ (r'\s+', Text, 'components')
+ ],
+ 'escaped-distribution': [
+ (r'\]', String.Other, '#pop'),
+ (r'\$\(ARCH\)', Name.Variable),
+ (r'[^\]$]+', String.Other),
+ (r'\$', String.Other)
+ ],
+ 'components': [
+ (r'#.*?$', Comment, '#pop:2'),
+ (r'$', Text, '#pop:2'),
+ (r'\s+', Text),
+ (r'\S+', Keyword.Pseudo),
+ ]
+ }
+
+ def analyse_text(text):
+ for line in text.splitlines():
+ line = line.strip()
+ if line.startswith('deb ') or line.startswith('deb-src '):
+ return True
+
+
+class DebianControlLexer(RegexLexer):
+ """
+ Lexer for Debian ``control`` files and ``apt-cache show <pkg>`` outputs.
+
+ .. versionadded:: 0.9
+ """
+ name = 'Debian Control file'
+ aliases = ['control', 'debcontrol']
+ filenames = ['control']
+
+ tokens = {
+ 'root': [
+ (r'^(Description)', Keyword, 'description'),
+ (r'^(Maintainer)(:\s*)', bygroups(Keyword, Text), 'maintainer'),
+ (r'^((Build-)?Depends)', Keyword, 'depends'),
+ (r'^((?:Python-)?Version)(:\s*)(\S+)$',
+ bygroups(Keyword, Text, Number)),
+ (r'^((?:Installed-)?Size)(:\s*)(\S+)$',
+ bygroups(Keyword, Text, Number)),
+ (r'^(MD5Sum|SHA1|SHA256)(:\s*)(\S+)$',
+ bygroups(Keyword, Text, Number)),
+ (r'^([a-zA-Z\-0-9\.]*?)(:\s*)(.*?)$',
+ bygroups(Keyword, Whitespace, String)),
+ ],
+ 'maintainer': [
+ (r'<[^>]+>', Generic.Strong),
+ (r'<[^>]+>$', Generic.Strong, '#pop'),
+ (r',\n?', Text),
+ (r'.', Text),
+ ],
+ 'description': [
+ (r'(.*)(Homepage)(: )(\S+)',
+ bygroups(Text, String, Name, Name.Class)),
+ (r':.*\n', Generic.Strong),
+ (r' .*\n', Text),
+ default('#pop'),
+ ],
+ 'depends': [
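+ # Comma-separated dependency list: package names, ${substvar} entries, (version constraints) and [arch] qualifiers.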
+ (r':\s*', Text),
+ (r'(\$)(\{)(\w+\s*:\s*\w+)', bygroups(Operator, Text, Name.Entity)),
+ (r'\(', Text, 'depend_vers'),
+ (r',', Text),
+ (r'\|', Operator),
+ (r'[\s]+', Text),
+ (r'[})]\s*$', Text, '#pop'),
+ (r'\}', Text),
+ (r'[^,]$', Name.Function, '#pop'),
+ (r'([+.a-zA-Z0-9-])(\s*)', bygroups(Name.Function, Text)),
+ (r'\[.*?\]', Name.Entity),
+ ],
+ 'depend_vers': [
+ (r'\),', Text, '#pop'),
+ (r'\)[^,]', Text, '#pop:2'),
+ (r'([><=]+)(\s*)([^)]+)', bygroups(Operator, Text, Number))
+ ]
+ }
diff --git a/pygments/lexers/int_fiction.py b/pygments/lexers/int_fiction.py
new file mode 100644
index 00000000..25c472b1
--- /dev/null
+++ b/pygments/lexers/int_fiction.py
@@ -0,0 +1,1342 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.int_fiction
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for interactive fiction languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, \
+ this, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error, Generic
+
+__all__ = ['Inform6Lexer', 'Inform6TemplateLexer', 'Inform7Lexer',
+ 'Tads3Lexer']
+
+
+class Inform6Lexer(RegexLexer):
+ """
+ For `Inform 6 <http://inform-fiction.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Inform 6'
+ aliases = ['inform6', 'i6']
+ filenames = ['*.inf']
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ _name = r'[a-zA-Z_]\w*'
+
+ # Inform 7 maps these four character classes to their ASCII
+ # equivalents. To support Inform 6 inclusions within Inform 7,
+ # Inform6Lexer maps them too.
+ _dash = u'\\-\u2010-\u2014'
+ _dquote = u'"\u201c\u201d'
+ _squote = u"'\u2018\u2019"
+ _newline = u'\\n\u0085\u2028\u2029'
+
+ tokens = {
+ 'root': [
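+ # Leading '!%' lines at the top of the source are ICL (compiler option) commands.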
+ (r'\A(!%%[^%s]*[%s])+' % (_newline, _newline), Comment.Preproc,
+ 'directive'),
+ default('directive')
+ ],
+ '_whitespace': [
+ (r'\s+', Text),
+ (r'![^%s]*' % _newline, Comment.Single)
+ ],
+ 'default': [
+ include('_whitespace'),
+ (r'\[', Punctuation, 'many-values'), # Array initialization
+ (r':|(?=;)', Punctuation, '#pop'),
+ (r'<', Punctuation), # Second angle bracket in an action statement
+ default(('expression', '_expression'))
+ ],
+
+ # Expressions
+ '_expression': [
+ include('_whitespace'),
+ (r'(?=sp\b)', Text, '#pop'),
+ (r'(?=[%s%s$0-9#a-zA-Z_])' % (_dquote, _squote), Text,
+ ('#pop', 'value')),
+ (r'\+\+|[%s]{1,2}(?!>)|~~?' % _dash, Operator),
+ (r'(?=[()\[%s,?@{:;])' % _dash, Text, '#pop')
+ ],
+ 'expression': [
+ include('_whitespace'),
+ (r'\(', Punctuation, ('expression', '_expression')),
+ (r'\)', Punctuation, '#pop'),
+ (r'\[', Punctuation, ('#pop', 'statements', 'locals')),
+ (r'>(?=(\s+|(![^%s]*))*[>;])' % _newline, Punctuation),
+ (r'\+\+|[%s]{2}(?!>)' % _dash, Operator),
+ (r',', Punctuation, '_expression'),
+ (r'&&?|\|\|?|[=~><]?=|[%s]{1,2}>?|\.\.?[&#]?|::|[<>+*/%%]' % _dash,
+ Operator, '_expression'),
+ (r'(has|hasnt|in|notin|ofclass|or|provides)\b', Operator.Word,
+ '_expression'),
+ (r'sp\b', Name),
+ (r'\?~?', Name.Label, 'label?'),
+ (r'[@{]', Error),
+ default('#pop')
+ ],
+ '_assembly-expression': [
+ (r'\(', Punctuation, ('#push', '_expression')),
+ (r'[\[\]]', Punctuation),
+ (r'[%s]>' % _dash, Punctuation, '_expression'),
+ (r'sp\b', Keyword.Pseudo),
+ (r';', Punctuation, '#pop:3'),
+ include('expression')
+ ],
+ '_for-expression': [
+ (r'\)', Punctuation, '#pop:2'),
+ (r':', Punctuation, '#pop'),
+ include('expression')
+ ],
+ '_keyword-expression': [
+ (r'(from|near|to)\b', Keyword, '_expression'),
+ include('expression')
+ ],
+ '_list-expression': [
+ (r',', Punctuation, '#pop'),
+ include('expression')
+ ],
+ '_object-expression': [
+ (r'has\b', Keyword.Declaration, '#pop'),
+ include('_list-expression')
+ ],
+
+ # Values
+ 'value': [
+ include('_whitespace'),
+ # Strings
+ (r'[%s][^@][%s]' % (_squote, _squote), String.Char, '#pop'),
+ (r'([%s])(@\{[0-9a-fA-F]{1,4}\})([%s])' % (_squote, _squote),
+ bygroups(String.Char, String.Escape, String.Char), '#pop'),
+ (r'([%s])(@.{2})([%s])' % (_squote, _squote),
+ bygroups(String.Char, String.Escape, String.Char), '#pop'),
+ (r'[%s]' % _squote, String.Single, ('#pop', 'dictionary-word')),
+ (r'[%s]' % _dquote, String.Double, ('#pop', 'string')),
+ # Numbers
+ (r'\$[+%s][0-9]*\.?[0-9]*([eE][+%s]?[0-9]+)?' % (_dash, _dash),
+ Number.Float, '#pop'),
+ (r'\$[0-9a-fA-F]+', Number.Hex, '#pop'),
+ (r'\$\$[01]+', Number.Bin, '#pop'),
+ (r'[0-9]+', Number.Integer, '#pop'),
+ # Values prefixed by hashes
+ (r'(##|#a\$)(%s)' % _name, bygroups(Operator, Name), '#pop'),
+ (r'(#g\$)(%s)' % _name,
+ bygroups(Operator, Name.Variable.Global), '#pop'),
+ (r'#[nw]\$', Operator, ('#pop', 'obsolete-dictionary-word')),
+ (r'(#r\$)(%s)' % _name, bygroups(Operator, Name.Function), '#pop'),
+ (r'#', Name.Builtin, ('#pop', 'system-constant')),
+ # System functions
+ (words((
+ 'child', 'children', 'elder', 'eldest', 'glk', 'indirect', 'metaclass',
+ 'parent', 'random', 'sibling', 'younger', 'youngest'), suffix=r'\b'),
+ Name.Builtin, '#pop'),
+ # Metaclasses
+ (r'(?i)(Class|Object|Routine|String)\b', Name.Builtin, '#pop'),
+ # Veneer routines
+ (words((
+ 'Box__Routine', 'CA__Pr', 'CDefArt', 'CInDefArt', 'Cl__Ms',
+ 'Copy__Primitive', 'CP__Tab', 'DA__Pr', 'DB__Pr', 'DefArt', 'Dynam__String',
+ 'EnglishNumber', 'Glk__Wrap', 'IA__Pr', 'IB__Pr', 'InDefArt', 'Main__',
+ 'Meta__class', 'OB__Move', 'OB__Remove', 'OC__Cl', 'OP__Pr', 'Print__Addr',
+ 'Print__PName', 'PrintShortName', 'RA__Pr', 'RA__Sc', 'RL__Pr', 'R_Process',
+ 'RT__ChG', 'RT__ChGt', 'RT__ChLDB', 'RT__ChLDW', 'RT__ChPR', 'RT__ChPrintA',
+ 'RT__ChPrintC', 'RT__ChPrintO', 'RT__ChPrintS', 'RT__ChPS', 'RT__ChR',
+ 'RT__ChSTB', 'RT__ChSTW', 'RT__ChT', 'RT__Err', 'RT__TrPS', 'RV__Pr',
+ 'Symb__Tab', 'Unsigned__Compare', 'WV__Pr', 'Z__Region'),
+ prefix='(?i)', suffix=r'\b'),
+ Name.Builtin, '#pop'),
+ # Other built-in symbols
+ (words((
+ 'call', 'copy', 'create', 'DEBUG', 'destroy', 'DICT_CHAR_SIZE',
+ 'DICT_ENTRY_BYTES', 'DICT_IS_UNICODE', 'DICT_WORD_SIZE', 'false',
+ 'FLOAT_INFINITY', 'FLOAT_NAN', 'FLOAT_NINFINITY', 'GOBJFIELD_CHAIN',
+ 'GOBJFIELD_CHILD', 'GOBJFIELD_NAME', 'GOBJFIELD_PARENT',
+ 'GOBJFIELD_PROPTAB', 'GOBJFIELD_SIBLING', 'GOBJ_EXT_START',
+ 'GOBJ_TOTAL_LENGTH', 'Grammar__Version', 'INDIV_PROP_START', 'INFIX',
+ 'infix__watching', 'MODULE_MODE', 'name', 'nothing', 'NUM_ATTR_BYTES', 'print',
+ 'print_to_array', 'recreate', 'remaining', 'self', 'sender', 'STRICT_MODE',
+ 'sw__var', 'sys__glob0', 'sys__glob1', 'sys__glob2', 'sys_statusline_flag',
+ 'TARGET_GLULX', 'TARGET_ZCODE', 'temp__global2', 'temp__global3',
+ 'temp__global4', 'temp_global', 'true', 'USE_MODULES', 'WORDSIZE'),
+ prefix='(?i)', suffix=r'\b'),
+ Name.Builtin, '#pop'),
+ # Other values
+ (_name, Name, '#pop')
+ ],
+ # Strings
+ 'dictionary-word': [
+ (r'[~^]+', String.Escape),
+ (r'[^~^\\@({%s]+' % _squote, String.Single),
+ (r'[({]', String.Single),
+ (r'@\{[0-9a-fA-F]{,4}\}', String.Escape),
+ (r'@.{2}', String.Escape),
+ (r'[%s]' % _squote, String.Single, '#pop')
+ ],
+ 'string': [
+ (r'[~^]+', String.Escape),
+ (r'[^~^\\@({%s]+' % _dquote, String.Double),
+ (r'[({]', String.Double),
+ (r'\\', String.Escape),
+ (r'@(\\\s*[%s]\s*)*@((\\\s*[%s]\s*)*[0-9])*' %
+ (_newline, _newline), String.Escape),
+ (r'@(\\\s*[%s]\s*)*\{((\\\s*[%s]\s*)*[0-9a-fA-F]){,4}'
+ r'(\\\s*[%s]\s*)*\}' % (_newline, _newline, _newline),
+ String.Escape),
+ (r'@(\\\s*[%s]\s*)*.(\\\s*[%s]\s*)*.' % (_newline, _newline),
+ String.Escape),
+ (r'[%s]' % _dquote, String.Double, '#pop')
+ ],
+ 'plain-string': [
+ (r'[^~^\\({\[\]%s]+' % _dquote, String.Double),
+ (r'[~^({\[\]]', String.Double),
+ (r'\\', String.Escape),
+ (r'[%s]' % _dquote, String.Double, '#pop')
+ ],
+ # Names
+ '_constant': [
+ include('_whitespace'),
+ (_name, Name.Constant, '#pop'),
+ include('value')
+ ],
+ '_global': [
+ include('_whitespace'),
+ (_name, Name.Variable.Global, '#pop'),
+ include('value')
+ ],
+ 'label?': [
+ include('_whitespace'),
+ (_name, Name.Label, '#pop'),
+ default('#pop')
+ ],
+ 'variable?': [
+ include('_whitespace'),
+ (_name, Name.Variable, '#pop'),
+ default('#pop')
+ ],
+ # Values after hashes
+ 'obsolete-dictionary-word': [
+ (r'\S\w*', String.Other, '#pop')
+ ],
+ 'system-constant': [
+ include('_whitespace'),
+ (_name, Name.Builtin, '#pop')
+ ],
+
+ # Directives
+ 'directive': [
+ include('_whitespace'),
+ (r'#', Punctuation),
+ (r';', Punctuation, '#pop'),
+ (r'\[', Punctuation,
+ ('default', 'statements', 'locals', 'routine-name?')),
+ (words((
+ 'abbreviate', 'endif', 'dictionary', 'ifdef', 'iffalse', 'ifndef', 'ifnot',
+ 'iftrue', 'ifv3', 'ifv5', 'release', 'serial', 'switches', 'system_file',
+ 'version'), prefix='(?i)', suffix=r'\b'),
+ Keyword, 'default'),
+ (r'(?i)(array|global)\b', Keyword,
+ ('default', 'directive-keyword?', '_global')),
+ (r'(?i)attribute\b', Keyword, ('default', 'alias?', '_constant')),
+ (r'(?i)class\b', Keyword,
+ ('object-body', 'duplicates', 'class-name')),
+ (r'(?i)(constant|default)\b', Keyword,
+ ('default', 'expression', '_constant')),
+ (r'(?i)(end\b)(.*)', bygroups(Keyword, Text)),
+ (r'(?i)(extend|verb)\b', Keyword, 'grammar'),
+ (r'(?i)fake_action\b', Keyword, ('default', '_constant')),
+ (r'(?i)import\b', Keyword, 'manifest'),
+ (r'(?i)(include|link)\b', Keyword,
+ ('default', 'before-plain-string')),
+ (r'(?i)(lowstring|undef)\b', Keyword, ('default', '_constant')),
+ (r'(?i)message\b', Keyword, ('default', 'diagnostic')),
+ (r'(?i)(nearby|object)\b', Keyword,
+ ('object-body', '_object-head')),
+ (r'(?i)property\b', Keyword,
+ ('default', 'alias?', '_constant', 'property-keyword*')),
+ (r'(?i)replace\b', Keyword,
+ ('default', 'routine-name?', 'routine-name?')),
+ (r'(?i)statusline\b', Keyword, ('default', 'directive-keyword?')),
+ (r'(?i)stub\b', Keyword, ('default', 'routine-name?')),
+ (r'(?i)trace\b', Keyword,
+ ('default', 'trace-keyword?', 'trace-keyword?')),
+ (r'(?i)zcharacter\b', Keyword,
+ ('default', 'directive-keyword?', 'directive-keyword?')),
+ (_name, Name.Class, ('object-body', '_object-head'))
+ ],
+ # [, Replace, Stub
+ 'routine-name?': [
+ include('_whitespace'),
+ (_name, Name.Function, '#pop'),
+ default('#pop')
+ ],
+ 'locals': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop'),
+ (r'\*', Punctuation),
+ (_name, Name.Variable)
+ ],
+ # Array
+ 'many-values': [
+ include('_whitespace'),
+ (r';', Punctuation),
+ (r'\]', Punctuation, '#pop'),
+ (r':', Error),
+ default(('expression', '_expression'))
+ ],
+ # Attribute, Property
+ 'alias?': [
+ include('_whitespace'),
+ (r'alias\b', Keyword, ('#pop', '_constant')),
+ default('#pop')
+ ],
+ # Class, Object, Nearby
+ 'class-name': [
+ include('_whitespace'),
+ (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
+ (_name, Name.Class, '#pop')
+ ],
+ 'duplicates': [
+ include('_whitespace'),
+ (r'\(', Punctuation, ('#pop', 'expression', '_expression')),
+ default('#pop')
+ ],
+ '_object-head': [
+ (r'[%s]>' % _dash, Punctuation),
+ (r'(class|has|private|with)\b', Keyword.Declaration, '#pop'),
+ include('_global')
+ ],
+ 'object-body': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop:2'),
+ (r',', Punctuation),
+ (r'class\b', Keyword.Declaration, 'class-segment'),
+ (r'(has|private|with)\b', Keyword.Declaration),
+ (r':', Error),
+ default(('_object-expression', '_expression'))
+ ],
+ 'class-segment': [
+ include('_whitespace'),
+ (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
+ (_name, Name.Class),
+ default('value')
+ ],
+ # Extend, Verb
+ 'grammar': [
+ include('_whitespace'),
+ (r'=', Punctuation, ('#pop', 'default')),
+ (r'\*', Punctuation, ('#pop', 'grammar-line')),
+ default('_directive-keyword')
+ ],
+ 'grammar-line': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop'),
+ (r'[/*]', Punctuation),
+ (r'[%s]>' % _dash, Punctuation, 'value'),
+ (r'(noun|scope)\b', Keyword, '=routine'),
+ default('_directive-keyword')
+ ],
+ '=routine': [
+ include('_whitespace'),
+ (r'=', Punctuation, 'routine-name?'),
+ default('#pop')
+ ],
+ # Import
+ 'manifest': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'(?i)global\b', Keyword, '_global'),
+ default('_global')
+ ],
+ # Include, Link, Message
+ 'diagnostic': [
+ include('_whitespace'),
+ (r'[%s]' % _dquote, String.Double, ('#pop', 'message-string')),
+ default(('#pop', 'before-plain-string', 'directive-keyword?'))
+ ],
+ 'before-plain-string': [
+ include('_whitespace'),
+ (r'[%s]' % _dquote, String.Double, ('#pop', 'plain-string'))
+ ],
+ 'message-string': [
+ (r'[~^]+', String.Escape),
+ include('plain-string')
+ ],
+
+ # Keywords used in directives
+ '_directive-keyword!': [
+ include('_whitespace'),
+ (words((
+ 'additive', 'alias', 'buffer', 'class', 'creature', 'data', 'error', 'fatalerror',
+ 'first', 'has', 'held', 'initial', 'initstr', 'last', 'long', 'meta', 'multi',
+ 'multiexcept', 'multiheld', 'multiinside', 'noun', 'number', 'only', 'private',
+ 'replace', 'reverse', 'scope', 'score', 'special', 'string', 'table', 'terminating',
+ 'time', 'topic', 'warning', 'with'), suffix=r'\b'),
+ Keyword, '#pop'),
+ (r'[%s]{1,2}>|[+=]' % _dash, Punctuation, '#pop')
+ ],
+ '_directive-keyword': [
+ include('_directive-keyword!'),
+ include('value')
+ ],
+ 'directive-keyword?': [
+ include('_directive-keyword!'),
+ default('#pop')
+ ],
+ 'property-keyword*': [
+ include('_whitespace'),
+ (r'(additive|long)\b', Keyword),
+ default('#pop')
+ ],
+ 'trace-keyword?': [
+ include('_whitespace'),
+ (words((
+ 'assembly', 'dictionary', 'expressions', 'lines', 'linker',
+ 'objects', 'off', 'on', 'symbols', 'tokens', 'verbs'), suffix=r'\b'),
+ Keyword, '#pop'),
+ default('#pop')
+ ],
+
+ # Statements
+ 'statements': [
+ include('_whitespace'),
+ (r'\]', Punctuation, '#pop'),
+ (r'[;{}]', Punctuation),
+ (words((
+ 'box', 'break', 'continue', 'default', 'give', 'inversion',
+ 'new_line', 'quit', 'read', 'remove', 'return', 'rfalse', 'rtrue',
+ 'spaces', 'string', 'until'), suffix=r'\b'),
+ Keyword, 'default'),
+ (r'(do|else)\b', Keyword),
+ (r'(font|style)\b', Keyword,
+ ('default', 'miscellaneous-keyword?')),
+ (r'for\b', Keyword, ('for', '(?')),
+ (r'(if|switch|while)', Keyword,
+ ('expression', '_expression', '(?')),
+ (r'(jump|save|restore)\b', Keyword, ('default', 'label?')),
+ (r'objectloop\b', Keyword,
+ ('_keyword-expression', 'variable?', '(?')),
+ (r'print(_ret)?\b|(?=[%s])' % _dquote, Keyword, 'print-list'),
+ (r'\.', Name.Label, 'label?'),
+ (r'@', Keyword, 'opcode'),
+ (r'#(?![agrnw]\$|#)', Punctuation, 'directive'),
+ (r'<', Punctuation, 'default'),
+ (r'move\b', Keyword,
+ ('default', '_keyword-expression', '_expression')),
+ default(('default', '_keyword-expression', '_expression'))
+ ],
+ 'miscellaneous-keyword?': [
+ include('_whitespace'),
+ (r'(bold|fixed|from|near|off|on|reverse|roman|to|underline)\b',
+ Keyword, '#pop'),
+ (r'(a|A|an|address|char|name|number|object|property|string|the|'
+ r'The)\b(?=(\s+|(![^%s]*))*\))' % _newline, Keyword.Pseudo,
+ '#pop'),
+ (r'%s(?=(\s+|(![^%s]*))*\))' % (_name, _newline), Name.Function,
+ '#pop'),
+ default('#pop')
+ ],
+ '(?': [
+ include('_whitespace'),
+ (r'\(', Punctuation, '#pop'),
+ default('#pop')
+ ],
+ 'for': [
+ include('_whitespace'),
+ (r';', Punctuation, ('_for-expression', '_expression')),
+ default(('_for-expression', '_expression'))
+ ],
+ 'print-list': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop'),
+ (r':', Error),
+ default(('_list-expression', '_expression', '_list-expression', 'form'))
+ ],
+ 'form': [
+ include('_whitespace'),
+ (r'\(', Punctuation, ('#pop', 'miscellaneous-keyword?')),
+ default('#pop')
+ ],
+
+ # Assembly
+ 'opcode': [
+ include('_whitespace'),
+ (r'[%s]' % _dquote, String.Double, ('operands', 'plain-string')),
+ (_name, Keyword, 'operands')
+ ],
+ 'operands': [
+ (r':', Error),
+ default(('_assembly-expression', '_expression'))
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text):
+ # 'in' is either a keyword or an operator.
+ # If the token two tokens after 'in' is ')', 'in' is a keyword:
+ # objectloop(a in b)
+ # Otherwise, it is an operator:
+ # objectloop(a in b && true)
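+ # The 'in' token and the tokens after it are buffered in
+ # objectloop_queue until the decision can be made, then re-emitted
+ # with 'in' re-tagged as Keyword when appropriate.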
+ objectloop_queue = []
+ objectloop_token_count = -1
+ previous_token = None
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self,
+ text):
+ if previous_token is Name.Variable and value == 'in':
+ objectloop_queue = [[index, token, value]]
+ objectloop_token_count = 2
+ elif objectloop_token_count > 0:
+ if token not in Comment and token not in Text:
+ objectloop_token_count -= 1
+ objectloop_queue.append((index, token, value))
+ else:
+ if objectloop_token_count == 0:
+ if objectloop_queue[-1][2] == ')':
+ objectloop_queue[0][1] = Keyword
+ while objectloop_queue:
+ yield objectloop_queue.pop(0)
+ objectloop_token_count = -1
+ yield index, token, value
+ if token not in Comment and token not in Text:
+ previous_token = token
+ while objectloop_queue:
+ yield objectloop_queue.pop(0)
+
+
+class Inform7Lexer(RegexLexer):
+ """
+ For `Inform 7 <http://inform7.com/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Inform 7'
+ aliases = ['inform7', 'i7']
+ filenames = ['*.ni', '*.i7x']
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ _dash = Inform6Lexer._dash
+ _dquote = Inform6Lexer._dquote
+ _newline = Inform6Lexer._newline
+ _start = r'\A|(?<=[%s])' % _newline
+
+ # There are three variants of the token definitions, differing in
+ # how at signs and braces in I6T (Inform 6 template) code are
+ # interpreted. In top-level inclusions, at signs in the first
+ # column are inweb syntax. In phrase definitions and use options,
+ # tokens in braces are treated as I7. Use options also interpret
+ # "{N}".
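+ # Which variant is used is selected through the 'i6t' lexer option
+ # (see __init__ below).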
+ tokens = {}
+ token_variants = ['+i6t-not-inline', '+i6t-inline', '+i6t-use-option']
+
+ for level in token_variants:
+ tokens[level] = {
+ '+i6-root': list(Inform6Lexer.tokens['root']),
+ '+i6t-root': [ # For Inform6TemplateLexer
+ (r'[^%s]*' % Inform6Lexer._newline, Comment.Preproc,
+ ('directive', '+p'))
+ ],
+ 'root': [
+ (r'(\|?\s)+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'[%s]' % _dquote, Generic.Heading,
+ ('+main', '+titling', '+titling-string')),
+ default(('+main', '+heading?'))
+ ],
+ '+titling-string': [
+ (r'[^%s]+' % _dquote, Generic.Heading),
+ (r'[%s]' % _dquote, Generic.Heading, '#pop')
+ ],
+ '+titling': [
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'[^%s.;:|%s]+' % (_dquote, _newline), Generic.Heading),
+ (r'[%s]' % _dquote, Generic.Heading, '+titling-string'),
+ (r'[%s]{2}|(?<=[\s%s])\|[\s%s]' % (_newline, _dquote, _dquote),
+ Text, ('#pop', '+heading?')),
+ (r'[.;:]|(?<=[\s%s])\|' % _dquote, Text, '#pop'),
+ (r'[|%s]' % _newline, Generic.Heading)
+ ],
+ '+main': [
+ (r'(?i)[^%s:a\[(|%s]+' % (_dquote, _newline), Text),
+ (r'[%s]' % _dquote, String.Double, '+text'),
+ (r':', Text, '+phrase-definition'),
+ (r'(?i)\bas\b', Text, '+use-option'),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
+ bygroups(Punctuation,
+ using(this, state=('+i6-root', 'directive'),
+ i6t='+i6t-not-inline'), Punctuation)),
+ (r'(%s|(?<=[\s;:.%s]))\|\s|[%s]{2,}' %
+ (_start, _dquote, _newline), Text, '+heading?'),
+ (r'(?i)[a(|%s]' % _newline, Text)
+ ],
+ '+phrase-definition': [
+ (r'\s+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
+ bygroups(Punctuation,
+ using(this, state=('+i6-root', 'directive',
+ 'default', 'statements'),
+ i6t='+i6t-inline'), Punctuation), '#pop'),
+ default('#pop')
+ ],
+ '+use-option': [
+ (r'\s+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
+ bygroups(Punctuation,
+ using(this, state=('+i6-root', 'directive'),
+ i6t='+i6t-use-option'), Punctuation), '#pop'),
+ default('#pop')
+ ],
+ '+comment': [
+ (r'[^\[\]]+', Comment.Multiline),
+ (r'\[', Comment.Multiline, '#push'),
+ (r'\]', Comment.Multiline, '#pop')
+ ],
+ '+text': [
+ (r'[^\[%s]+' % _dquote, String.Double),
+ (r'\[.*?\]', String.Interpol),
+ (r'[%s]' % _dquote, String.Double, '#pop')
+ ],
+ '+heading?': [
+ (r'(\|?\s)+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'[%s]{4}\s+' % _dash, Text, '+documentation-heading'),
+ (r'[%s]{1,3}' % _dash, Text),
+ (r'(?i)(volume|book|part|chapter|section)\b[^%s]*' % _newline,
+ Generic.Heading, '#pop'),
+ default('#pop')
+ ],
+ '+documentation-heading': [
+ (r'\s+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'(?i)documentation\s+', Text, '+documentation-heading2'),
+ default('#pop')
+ ],
+ '+documentation-heading2': [
+ (r'\s+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'[%s]{4}\s' % _dash, Text, '+documentation'),
+ default('#pop:2')
+ ],
+ '+documentation': [
+ (r'(?i)(%s)\s*(chapter|example)\s*:[^%s]*' %
+ (_start, _newline), Generic.Heading),
+ (r'(?i)(%s)\s*section\s*:[^%s]*' % (_start, _newline),
+ Generic.Subheading),
+ (r'((%s)\t.*?[%s])+' % (_start, _newline),
+ using(this, state='+main')),
+ (r'[^%s\[]+|[%s\[]' % (_newline, _newline), Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ ],
+ '+i6t-not-inline': [
+ (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
+ Comment.Preproc),
+ (r'(%s)@([%s]+|Purpose:)[^%s]*' % (_start, _dash, _newline),
+ Comment.Preproc),
+ (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
+ Generic.Heading, '+p')
+ ],
+ '+i6t-use-option': [
+ include('+i6t-not-inline'),
+ (r'(\{)(N)(\})', bygroups(Punctuation, Text, Punctuation))
+ ],
+ '+i6t-inline': [
+ (r'(\{)(\S[^}]*)?(\})',
+ bygroups(Punctuation, using(this, state='+main'),
+ Punctuation))
+ ],
+ '+i6t': [
+ (r'(\{[%s])(![^}]*)(\}?)' % _dash,
+ bygroups(Punctuation, Comment.Single, Punctuation)),
+ (r'(\{[%s])(lines)(:)([^}]*)(\}?)' % _dash,
+ bygroups(Punctuation, Keyword, Punctuation, Text,
+ Punctuation), '+lines'),
+ (r'(\{[%s])([^:}]*)(:?)([^}]*)(\}?)' % _dash,
+ bygroups(Punctuation, Keyword, Punctuation, Text,
+ Punctuation)),
+ (r'(\(\+)(.*?)(\+\)|\Z)',
+ bygroups(Punctuation, using(this, state='+main'),
+ Punctuation))
+ ],
+ '+p': [
+ (r'[^@]+', Comment.Preproc),
+ (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
+ Comment.Preproc, '#pop'),
+ (r'(%s)@([%s]|Purpose:)' % (_start, _dash), Comment.Preproc),
+ (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
+ Generic.Heading),
+ (r'@', Comment.Preproc)
+ ],
+ '+lines': [
+ (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
+ Comment.Preproc),
+ (r'(%s)@([%s]|Purpose:)[^%s]*' % (_start, _dash, _newline),
+ Comment.Preproc),
+ (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
+ Generic.Heading, '+p'),
+ (r'(%s)@\w*[ %s]' % (_start, _newline), Keyword),
+ (r'![^%s]*' % _newline, Comment.Single),
+ (r'(\{)([%s]endlines)(\})' % _dash,
+ bygroups(Punctuation, Keyword, Punctuation), '#pop'),
+ (r'[^@!{]+?([%s]|\Z)|.' % _newline, Text)
+ ]
+ }
+ # Inform 7 can include snippets of Inform 6 template language,
+ # so all of Inform6Lexer's states are copied here, with
+ # modifications to account for template syntax. Inform7Lexer's
+ # own states begin with '+' to avoid name conflicts. Some of
+ # Inform6Lexer's states begin with '_': these are not modified.
+ # They deal with template syntax either by including modified
+ # states, or by matching the empty string (r'') and then pushing
+ # to modified states.
+ for token in Inform6Lexer.tokens:
+ if token == 'root':
+ continue
+ tokens[level][token] = list(Inform6Lexer.tokens[token])
+ if not token.startswith('_'):
+ tokens[level][token][:0] = [include('+i6t'), include(level)]
+
+ def __init__(self, **options):
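+ # Token definitions for the requested I6T variant are processed on
+ # first use; later instances reuse the cached copy in _all_tokens.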
+ level = options.get('i6t', '+i6t-not-inline')
+ if level not in self._all_tokens:
+ self._tokens = self.__class__.process_tokendef(level)
+ else:
+ self._tokens = self._all_tokens[level]
+ RegexLexer.__init__(self, **options)
+
+
+class Inform6TemplateLexer(Inform7Lexer):
+ """
+ For `Inform 6 template
+ <http://inform7.com/sources/src/i6template/Woven/index.html>`_ code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Inform 6 template'
+ aliases = ['i6t']
+ filenames = ['*.i6t']
+
+ def get_tokens_unprocessed(self, text, stack=('+i6t-root',)):
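+ # Start in the '+i6t-root' state so whole files are lexed as I6T
+ # template code rather than as I7 source.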
+ return Inform7Lexer.get_tokens_unprocessed(self, text, stack)
+
+
+class Tads3Lexer(RegexLexer):
+ """
+ For `TADS 3 <http://www.tads.org/>`_ source code.
+ """
+
+ name = 'TADS 3'
+ aliases = ['tads3']
+ filenames = ['*.t']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ _comment_single = r'(?://(?:[^\\\n]|\\+[\w\W])*$)'
+ _comment_multiline = r'(?:/\*(?:[^*]|\*(?!/))*\*/)'
+ _escape = (r'(?:\\(?:[\n\\<>"\'^v bnrt]|u[\da-fA-F]{,4}|x[\da-fA-F]{,2}|'
+ r'[0-3]?[0-7]{1,2}))')
+ _name = r'(?:[_a-zA-Z]\w*)'
+ _no_quote = r'(?=\s|\\?>)'
+ _operator = (r'(?:&&|\|\||\+\+|--|\?\?|::|[.,@\[\]~]|'
+ r'(?:[=+\-*/%!&|^]|<<?|>>?>?)=?)')
+ _ws = r'(?:\\|\s|%s|%s)' % (_comment_single, _comment_multiline)
+ _ws_pp = r'(?:\\\n|[^\S\n]|%s|%s)' % (_comment_single, _comment_multiline)
+
+ def _make_string_state(triple, double, verbatim=None, _escape=_escape):
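+ # Builds the rules for one string state. `triple` and `double`
+ # select the delimiter; if `verbatim` names a tag ('listing' or
+ # 'xmp'), the state covers text inside that tag, where other
+ # HTML-like tags are not interpreted until the matching closing tag.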
+ if verbatim:
+ verbatim = ''.join(['(?:%s|%s)' % (re.escape(c.lower()),
+ re.escape(c.upper()))
+ for c in verbatim])
+ char = r'"' if double else r"'"
+ token = String.Double if double else String.Single
+ escaped_quotes = r'+|%s(?!%s{2})' % (char, char) if triple else r''
+ prefix = '%s%s' % ('t' if triple else '', 'd' if double else 's')
+ tag_state_name = '%sqt' % prefix
+ state = []
+ if triple:
+ state += [
+ (r'%s{3,}' % char, token, '#pop'),
+ (r'\\%s+' % char, String.Escape),
+ (char, token)
+ ]
+ else:
+ state.append((char, token, '#pop'))
+ state += [
+ include('s/verbatim'),
+ (r'[^\\<&{}%s]+' % char, token)
+ ]
+ if verbatim:
+ # This regex can't use `(?i)` because escape sequences are
+ # case-sensitive. `<\XMP>` works; `<\xmp>` doesn't.
+ state.append((r'\\?<(/|\\\\|(?!%s)\\)%s(?=[\s=>])' %
+ (_escape, verbatim),
+ Name.Tag, ('#pop', '%sqs' % prefix, tag_state_name)))
+ else:
+ state += [
+ (r'\\?<!([^><\\%s]|<(?!<)|\\%s%s|%s|\\.)*>?' %
+ (char, char, escaped_quotes, _escape), Comment.Multiline),
+ (r'(?i)\\?<listing(?=[\s=>]|\\>)', Name.Tag,
+ ('#pop', '%sqs/listing' % prefix, tag_state_name)),
+ (r'(?i)\\?<xmp(?=[\s=>]|\\>)', Name.Tag,
+ ('#pop', '%sqs/xmp' % prefix, tag_state_name)),
+ (r'\\?<([^\s=><\\%s]|<(?!<)|\\%s%s|%s|\\.)*' %
+ (char, char, escaped_quotes, _escape), Name.Tag,
+ tag_state_name),
+ include('s/entity')
+ ]
+ state += [
+ include('s/escape'),
+ (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
+ (char, char, escaped_quotes, _escape), String.Interpol),
+ (r'[\\&{}<]', token)
+ ]
+ return state
+
+ def _make_tag_state(triple, double, _escape=_escape):
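+ # Builds the rules for the inside of an HTML-like tag (attribute
+ # names and values) within a string of the given delimiter type.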
+ char = r'"' if double else r"'"
+ quantifier = r'{3,}' if triple else r''
+ state_name = '%s%sqt' % ('t' if triple else '', 'd' if double else 's')
+ token = String.Double if double else String.Single
+ escaped_quotes = r'+|%s(?!%s{2})' % (char, char) if triple else r''
+ return [
+ (r'%s%s' % (char, quantifier), token, '#pop:2'),
+ (r'(\s|\\\n)+', Text),
+ (r'(=)(\\?")', bygroups(Punctuation, String.Double),
+ 'dqs/%s' % state_name),
+ (r"(=)(\\?')", bygroups(Punctuation, String.Single),
+ 'sqs/%s' % state_name),
+ (r'=', Punctuation, 'uqs/%s' % state_name),
+ (r'\\?>', Name.Tag, '#pop'),
+ (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
+ (char, char, escaped_quotes, _escape), String.Interpol),
+ (r'([^\s=><\\%s]|<(?!<)|\\%s%s|%s|\\.)+' %
+ (char, char, escaped_quotes, _escape), Name.Attribute),
+ include('s/escape'),
+ include('s/verbatim'),
+ include('s/entity'),
+ (r'[\\{}&]', Name.Attribute)
+ ]
+
+ def _make_attribute_value_state(terminator, host_triple, host_double,
+ _escape=_escape):
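+ # Builds the rules for an attribute value delimited by `terminator`
+ # (double-quoted, single-quoted, or unquoted) inside a tag that is
+ # itself embedded in a string of the given host delimiter type.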
+ token = (String.Double if terminator == r'"' else
+ String.Single if terminator == r"'" else String.Other)
+ host_char = r'"' if host_double else r"'"
+ host_quantifier = r'{3,}' if host_triple else r''
+ host_token = String.Double if host_double else String.Single
+ escaped_quotes = (r'+|%s(?!%s{2})' % (host_char, host_char)
+ if host_triple else r'')
+ return [
+ (r'%s%s' % (host_char, host_quantifier), host_token, '#pop:3'),
+ (r'%s%s' % (r'' if token is String.Other else r'\\?', terminator),
+ token, '#pop'),
+ include('s/verbatim'),
+ include('s/entity'),
+ (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
+ (host_char, host_char, escaped_quotes, _escape), String.Interpol),
+ (r'([^\s"\'<%s{}\\&])+' % (r'>' if token is String.Other else r''),
+ token),
+ include('s/escape'),
+ (r'["\'\s&{<}\\]', token)
+ ]
+
+ tokens = {
+ 'root': [
+ (u'\ufeff', Text),
+ (r'\{', Punctuation, 'object-body'),
+ (r';+', Punctuation),
+ (r'(?=(argcount|break|case|catch|continue|default|definingobj|'
+ r'delegated|do|else|for|foreach|finally|goto|if|inherited|'
+ r'invokee|local|nil|new|operator|replaced|return|self|switch|'
+ r'targetobj|targetprop|throw|true|try|while)\b)', Text, 'block'),
+ (r'(%s)(%s*)(\()' % (_name, _ws),
+ bygroups(Name.Function, using(this, state='whitespace'),
+ Punctuation),
+ ('block?/root', 'more/parameters', 'main/parameters')),
+ include('whitespace'),
+ (r'\++', Punctuation),
+ (r'[^\s!"%-(*->@-_a-z{-~]+', Error), # Averts an infinite loop
+ (r'(?!\Z)', Text, 'main/root')
+ ],
+ 'main/root': [
+ include('main/basic'),
+ default(('#pop', 'object-body/no-braces', 'classes', 'class'))
+ ],
+ 'object-body/no-braces': [
+ (r';', Punctuation, '#pop'),
+ (r'\{', Punctuation, ('#pop', 'object-body')),
+ include('object-body')
+ ],
+ 'object-body': [
+ (r';', Punctuation),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ (r':', Punctuation, ('classes', 'class')),
+ (r'(%s?)(%s*)(\()' % (_name, _ws),
+ bygroups(Name.Function, using(this, state='whitespace'),
+ Punctuation),
+ ('block?', 'more/parameters', 'main/parameters')),
+ (r'(%s)(%s*)(\{)' % (_name, _ws),
+ bygroups(Name.Function, using(this, state='whitespace'),
+ Punctuation), 'block'),
+ (r'(%s)(%s*)(:)' % (_name, _ws),
+ bygroups(Name.Variable, using(this, state='whitespace'),
+ Punctuation),
+ ('object-body/no-braces', 'classes', 'class')),
+ include('whitespace'),
+ (r'->|%s' % _operator, Punctuation, 'main'),
+ default('main/object-body')
+ ],
+ 'main/object-body': [
+ include('main/basic'),
+ (r'(%s)(%s*)(=?)' % (_name, _ws),
+ bygroups(Name.Variable, using(this, state='whitespace'),
+ Punctuation), ('#pop', 'more', 'main')),
+ default('#pop:2')
+ ],
+ 'block?/root': [
+ (r'\{', Punctuation, ('#pop', 'block')),
+ include('whitespace'),
+ (r'(?=[[\'"<(:])', Text, # It might be a VerbRule macro.
+ ('#pop', 'object-body/no-braces', 'grammar', 'grammar-rules')),
+ # It might be a macro like DefineAction.
+ default(('#pop', 'object-body/no-braces'))
+ ],
+ 'block?': [
+ (r'\{', Punctuation, ('#pop', 'block')),
+ include('whitespace'),
+ default('#pop')
+ ],
+ 'block/basic': [
+ (r'[;:]+', Punctuation),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ (r'default\b', Keyword.Reserved),
+ (r'(%s)(%s*)(:)' % (_name, _ws),
+ bygroups(Name.Label, using(this, state='whitespace'),
+ Punctuation)),
+ include('whitespace')
+ ],
+ 'block': [
+ include('block/basic'),
+ (r'(?!\Z)', Text, ('more', 'main'))
+ ],
+ 'block/embed': [
+ (r'>>', String.Interpol, '#pop'),
+ include('block/basic'),
+ (r'(?!\Z)', Text, ('more/embed', 'main'))
+ ],
+ 'main/basic': [
+ include('whitespace'),
+ (r'\(', Punctuation, ('#pop', 'more', 'main')),
+ (r'\[', Punctuation, ('#pop', 'more/list', 'main')),
+ (r'\{', Punctuation, ('#pop', 'more/inner', 'main/inner',
+ 'more/parameters', 'main/parameters')),
+ (r'\*|\.{3}', Punctuation, '#pop'),
+ (r'(?i)0x[\da-f]+', Number.Hex, '#pop'),
+ (r'(\d+\.(?!\.)\d*|\.\d+)([eE][-+]?\d+)?|\d+[eE][-+]?\d+',
+ Number.Float, '#pop'),
+ (r'0[0-7]+', Number.Oct, '#pop'),
+ (r'\d+', Number.Integer, '#pop'),
+ (r'"""', String.Double, ('#pop', 'tdqs')),
+ (r"'''", String.Single, ('#pop', 'tsqs')),
+ (r'"', String.Double, ('#pop', 'dqs')),
+ (r"'", String.Single, ('#pop', 'sqs')),
+ (r'R"""', String.Regex, ('#pop', 'tdqr')),
+ (r"R'''", String.Regex, ('#pop', 'tsqr')),
+ (r'R"', String.Regex, ('#pop', 'dqr')),
+ (r"R'", String.Regex, ('#pop', 'sqr')),
+ # Two-token keywords
+ (r'(extern)(%s+)(object\b)' % _ws,
+ bygroups(Keyword.Reserved, using(this, state='whitespace'),
+ Keyword.Reserved)),
+ (r'(function|method)(%s*)(\()' % _ws,
+ bygroups(Keyword.Reserved, using(this, state='whitespace'),
+ Punctuation),
+ ('#pop', 'block?', 'more/parameters', 'main/parameters')),
+ (r'(modify)(%s+)(grammar\b)' % _ws,
+ bygroups(Keyword.Reserved, using(this, state='whitespace'),
+ Keyword.Reserved),
+ ('#pop', 'object-body/no-braces', ':', 'grammar')),
+ (r'(new)(%s+(?=(?:function|method)\b))' % _ws,
+ bygroups(Keyword.Reserved, using(this, state='whitespace'))),
+ (r'(object)(%s+)(template\b)' % _ws,
+ bygroups(Keyword.Reserved, using(this, state='whitespace'),
+ Keyword.Reserved), ('#pop', 'template')),
+ (r'(string)(%s+)(template\b)' % _ws,
+ bygroups(Keyword, using(this, state='whitespace'),
+ Keyword.Reserved), ('#pop', 'function-name')),
+ # Keywords
+ (r'(argcount|definingobj|invokee|replaced|targetobj|targetprop)\b',
+ Name.Builtin, '#pop'),
+ (r'(break|continue|goto)\b', Keyword.Reserved, ('#pop', 'label')),
+ (r'(case|extern|if|intrinsic|return|static|while)\b',
+ Keyword.Reserved),
+ (r'catch\b', Keyword.Reserved, ('#pop', 'catch')),
+ (r'class\b', Keyword.Reserved,
+ ('#pop', 'object-body/no-braces', 'class')),
+ (r'(default|do|else|finally|try)\b', Keyword.Reserved, '#pop'),
+ (r'(dictionary|property)\b', Keyword.Reserved,
+ ('#pop', 'constants')),
+ (r'enum\b', Keyword.Reserved, ('#pop', 'enum')),
+ (r'export\b', Keyword.Reserved, ('#pop', 'main')),
+ (r'(for|foreach)\b', Keyword.Reserved,
+ ('#pop', 'more/inner', 'main/inner')),
+ (r'(function|method)\b', Keyword.Reserved,
+ ('#pop', 'block?', 'function-name')),
+ (r'grammar\b', Keyword.Reserved,
+ ('#pop', 'object-body/no-braces', 'grammar')),
+ (r'inherited\b', Keyword.Reserved, ('#pop', 'inherited')),
+ (r'local\b', Keyword.Reserved,
+ ('#pop', 'more/local', 'main/local')),
+ (r'(modify|replace|switch|throw|transient)\b', Keyword.Reserved,
+ '#pop'),
+ (r'new\b', Keyword.Reserved, ('#pop', 'class')),
+ (r'(nil|true)\b', Keyword.Constant, '#pop'),
+ (r'object\b', Keyword.Reserved, ('#pop', 'object-body/no-braces')),
+ (r'operator\b', Keyword.Reserved, ('#pop', 'operator')),
+ (r'propertyset\b', Keyword.Reserved,
+ ('#pop', 'propertyset', 'main')),
+ (r'self\b', Name.Builtin.Pseudo, '#pop'),
+ (r'template\b', Keyword.Reserved, ('#pop', 'template')),
+ # Operators
+ (r'(__objref|defined)(%s*)(\()' % _ws,
+ bygroups(Operator.Word, using(this, state='whitespace'),
+ Operator), ('#pop', 'more/__objref', 'main')),
+ (r'delegated\b', Operator.Word),
+ # Compiler-defined macros and built-in properties
+ (r'(__DATE__|__DEBUG|__LINE__|__FILE__|'
+ r'__TADS_MACRO_FORMAT_VERSION|__TADS_SYS_\w*|__TADS_SYSTEM_NAME|'
+ r'__TADS_VERSION_MAJOR|__TADS_VERSION_MINOR|__TADS3|__TIME__|'
+ r'construct|finalize|grammarInfo|grammarTag|lexicalParent|'
+ r'miscVocab|sourceTextGroup|sourceTextGroupName|'
+ r'sourceTextGroupOrder|sourceTextOrder)\b', Name.Builtin, '#pop')
+ ],
+ 'main': [
+ include('main/basic'),
+ (_name, Name, '#pop'),
+ default('#pop')
+ ],
+ 'more/basic': [
+ (r'\(', Punctuation, ('more/list', 'main')),
+ (r'\[', Punctuation, ('more', 'main')),
+ (r'\.{3}', Punctuation),
+ (r'->|\.\.', Punctuation, 'main'),
+ (r'(?=;)|[:)\]]', Punctuation, '#pop'),
+ include('whitespace'),
+ (_operator, Operator, 'main'),
+ (r'\?', Operator, ('main', 'more/conditional', 'main')),
+ (r'(is|not)(%s+)(in\b)' % _ws,
+ bygroups(Operator.Word, using(this, state='whitespace'),
+ Operator.Word)),
+ (r'[^\s!"%-_a-z{-~]+', Error) # Averts an infinite loop
+ ],
+ 'more': [
+ include('more/basic'),
+ default('#pop')
+ ],
+ # Then expression (conditional operator)
+ 'more/conditional': [
+ (r':(?!:)', Operator, '#pop'),
+ include('more')
+ ],
+ # Embedded expressions
+ 'more/embed': [
+ (r'>>', String.Interpol, '#pop:2'),
+ include('more')
+ ],
+ # For/foreach loop initializer or short-form anonymous function
+ 'main/inner': [
+ (r'\(', Punctuation, ('#pop', 'more/inner', 'main/inner')),
+ (r'local\b', Keyword.Reserved, ('#pop', 'main/local')),
+ include('main')
+ ],
+ 'more/inner': [
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation, 'main/inner'),
+ (r'(in|step)\b', Keyword, 'main/inner'),
+ include('more')
+ ],
+ # Local
+ 'main/local': [
+ (_name, Name.Variable, '#pop'),
+ include('whitespace')
+ ],
+ 'more/local': [
+ (r',', Punctuation, 'main/local'),
+ include('more')
+ ],
+ # List
+ 'more/list': [
+ (r'[,:]', Punctuation, 'main'),
+ include('more')
+ ],
+ # Parameter list
+ 'main/parameters': [
+ (r'(%s)(%s*)(?=:)' % (_name, _ws),
+ bygroups(Name.Variable, using(this, state='whitespace')), '#pop'),
+ (r'(%s)(%s+)(%s)' % (_name, _ws, _name),
+ bygroups(Name.Class, using(this, state='whitespace'),
+ Name.Variable), '#pop'),
+ (r'\[+', Punctuation),
+ include('main/basic'),
+ (_name, Name.Variable, '#pop'),
+ default('#pop')
+ ],
+ 'more/parameters': [
+ (r'(:)(%s*(?=[?=,:)]))' % _ws,
+ bygroups(Punctuation, using(this, state='whitespace'))),
+ (r'[?\]]+', Punctuation),
+ (r'[:)]', Punctuation, ('#pop', 'multimethod?')),
+ (r',', Punctuation, 'main/parameters'),
+ (r'=', Punctuation, ('more/parameter', 'main')),
+ include('more')
+ ],
+ 'more/parameter': [
+ (r'(?=[,)])', Text, '#pop'),
+ include('more')
+ ],
+ 'multimethod?': [
+ (r'multimethod\b', Keyword, '#pop'),
+ include('whitespace'),
+ default('#pop')
+ ],
+
+ # Statements and expressions
+ 'more/__objref': [
+ (r',', Punctuation, 'mode'),
+ (r'\)', Operator, '#pop'),
+ include('more')
+ ],
+ 'mode': [
+ (r'(error|warn)\b', Keyword, '#pop'),
+ include('whitespace')
+ ],
+ 'catch': [
+ (r'\(+', Punctuation),
+ (_name, Name.Exception, ('#pop', 'variables')),
+ include('whitespace')
+ ],
+ 'enum': [
+ include('whitespace'),
+ (r'token\b', Keyword, ('#pop', 'constants')),
+ default(('#pop', 'constants'))
+ ],
+ 'grammar': [
+ (r'\)+', Punctuation),
+ (r'\(', Punctuation, 'grammar-tag'),
+ (r':', Punctuation, 'grammar-rules'),
+ (_name, Name.Class),
+ include('whitespace')
+ ],
+ 'grammar-tag': [
+ include('whitespace'),
+ (r'"""([^\\"<]|""?(?!")|\\"+|\\.|<(?!<))+("{3,}|<<)|'
+ r'R"""([^\\"]|""?(?!")|\\"+|\\.)+"{3,}|'
+ r"'''([^\\'<]|''?(?!')|\\'+|\\.|<(?!<))+('{3,}|<<)|"
+ r"R'''([^\\']|''?(?!')|\\'+|\\.)+'{3,}|"
+ r'"([^\\"<]|\\.|<(?!<))+("|<<)|R"([^\\"]|\\.)+"|'
+ r"'([^\\'<]|\\.|<(?!<))+('|<<)|R'([^\\']|\\.)+'|"
+ r"([^)\s\\/]|/(?![/*]))+|\)", String.Other, '#pop')
+ ],
+ 'grammar-rules': [
+ include('string'),
+ include('whitespace'),
+ (r'(\[)(%s*)(badness)' % _ws,
+ bygroups(Punctuation, using(this, state='whitespace'), Keyword),
+ 'main'),
+ (r'->|%s|[()]' % _operator, Punctuation),
+ (_name, Name.Constant),
+ default('#pop:2')
+ ],
+ ':': [
+ (r':', Punctuation, '#pop')
+ ],
+ 'function-name': [
+ (r'(<<([^>]|>>>|>(?!>))*>>)+', String.Interpol),
+ (r'(?=%s?%s*[({])' % (_name, _ws), Text, '#pop'),
+ (_name, Name.Function, '#pop'),
+ include('whitespace')
+ ],
+ 'inherited': [
+ (r'<', Punctuation, ('#pop', 'classes', 'class')),
+ include('whitespace'),
+ (_name, Name.Class, '#pop'),
+ default('#pop')
+ ],
+ 'operator': [
+ (r'negate\b', Operator.Word, '#pop'),
+ include('whitespace'),
+ (_operator, Operator),
+ default('#pop')
+ ],
+ 'propertyset': [
+ (r'\(', Punctuation, ('more/parameters', 'main/parameters')),
+ (r'\{', Punctuation, ('#pop', 'object-body')),
+ include('whitespace')
+ ],
+ 'template': [
+ (r'(?=;)', Text, '#pop'),
+ include('string'),
+ (r'inherited\b', Keyword.Reserved),
+ include('whitespace'),
+ (r'->|\?|%s' % _operator, Punctuation),
+ (_name, Name.Variable)
+ ],
+
+ # Identifiers
+ 'class': [
+ (r'\*|\.{3}', Punctuation, '#pop'),
+ (r'object\b', Keyword.Reserved, '#pop'),
+ (r'transient\b', Keyword.Reserved),
+ (_name, Name.Class, '#pop'),
+ include('whitespace'),
+ default('#pop')
+ ],
+ 'classes': [
+ (r'[:,]', Punctuation, 'class'),
+ include('whitespace'),
+ (r'>', Punctuation, '#pop'),
+ default('#pop')
+ ],
+ 'constants': [
+ (r',+', Punctuation),
+ (r';', Punctuation, '#pop'),
+ (r'property\b', Keyword.Reserved),
+ (_name, Name.Constant),
+ include('whitespace')
+ ],
+ 'label': [
+ (_name, Name.Label, '#pop'),
+ include('whitespace'),
+ default('#pop')
+ ],
+ 'variables': [
+ (r',+', Punctuation),
+ (r'\)', Punctuation, '#pop'),
+ include('whitespace'),
+ (_name, Name.Variable)
+ ],
+
+ # Whitespace and comments
+ 'whitespace': [
+ (r'^%s*#(%s|[^\n]|(?<=\\)\n)*\n?' % (_ws_pp, _comment_multiline),
+ Comment.Preproc),
+ (_comment_single, Comment.Single),
+ (_comment_multiline, Comment.Multiline),
+ (r'\\+\n+%s*#?|\n+|([^\S\n]|\\)+' % _ws_pp, Text)
+ ],
+
+ # Strings
+ 'string': [
+ (r'"""', String.Double, 'tdqs'),
+ (r"'''", String.Single, 'tsqs'),
+ (r'"', String.Double, 'dqs'),
+ (r"'", String.Single, 'sqs')
+ ],
+ 's/escape': [
+ (r'\{\{|\}\}|%s' % _escape, String.Escape)
+ ],
+ 's/verbatim': [
+ (r'<<\s*(as\s+decreasingly\s+likely\s+outcomes|cycling|else|end|'
+ r'first\s+time|one\s+of|only|or|otherwise|'
+ r'(sticky|(then\s+)?(purely\s+)?at)\s+random|stopping|'
+ r'(then\s+)?(half\s+)?shuffled|\|\|)\s*>>', String.Interpol),
+ (r'<<(%%(_(%s|\\?.)|[\-+ ,#]|\[\d*\]?)*\d*\.?\d*(%s|\\?.)|'
+ r'\s*((else|otherwise)\s+)?(if|unless)\b)?' % (_escape, _escape),
+ String.Interpol, ('block/embed', 'more/embed', 'main'))
+ ],
+ 's/entity': [
+ (r'(?i)&(#(x[\da-f]+|\d+)|[a-z][\da-z]*);?', Name.Entity)
+ ],
+ 'tdqs': _make_string_state(True, True),
+ 'tsqs': _make_string_state(True, False),
+ 'dqs': _make_string_state(False, True),
+ 'sqs': _make_string_state(False, False),
+ 'tdqs/listing': _make_string_state(True, True, 'listing'),
+ 'tsqs/listing': _make_string_state(True, False, 'listing'),
+ 'dqs/listing': _make_string_state(False, True, 'listing'),
+ 'sqs/listing': _make_string_state(False, False, 'listing'),
+ 'tdqs/xmp': _make_string_state(True, True, 'xmp'),
+ 'tsqs/xmp': _make_string_state(True, False, 'xmp'),
+ 'dqs/xmp': _make_string_state(False, True, 'xmp'),
+ 'sqs/xmp': _make_string_state(False, False, 'xmp'),
+
+ # Tags
+ 'tdqt': _make_tag_state(True, True),
+ 'tsqt': _make_tag_state(True, False),
+ 'dqt': _make_tag_state(False, True),
+ 'sqt': _make_tag_state(False, False),
+ 'dqs/tdqt': _make_attribute_value_state(r'"', True, True),
+ 'dqs/tsqt': _make_attribute_value_state(r'"', True, False),
+ 'dqs/dqt': _make_attribute_value_state(r'"', False, True),
+ 'dqs/sqt': _make_attribute_value_state(r'"', False, False),
+ 'sqs/tdqt': _make_attribute_value_state(r"'", True, True),
+ 'sqs/tsqt': _make_attribute_value_state(r"'", True, False),
+ 'sqs/dqt': _make_attribute_value_state(r"'", False, True),
+ 'sqs/sqt': _make_attribute_value_state(r"'", False, False),
+ 'uqs/tdqt': _make_attribute_value_state(_no_quote, True, True),
+ 'uqs/tsqt': _make_attribute_value_state(_no_quote, True, False),
+ 'uqs/dqt': _make_attribute_value_state(_no_quote, False, True),
+ 'uqs/sqt': _make_attribute_value_state(_no_quote, False, False),
+
+ # Regular expressions
+ 'tdqr': [
+ (r'[^\\"]+', String.Regex),
+ (r'\\"*', String.Regex),
+ (r'"{3,}', String.Regex, '#pop'),
+ (r'"', String.Regex)
+ ],
+ 'tsqr': [
+ (r"[^\\']+", String.Regex),
+ (r"\\'*", String.Regex),
+ (r"'{3,}", String.Regex, '#pop'),
+ (r"'", String.Regex)
+ ],
+ 'dqr': [
+ (r'[^\\"]+', String.Regex),
+ (r'\\"?', String.Regex),
+ (r'"', String.Regex, '#pop')
+ ],
+ 'sqr': [
+ (r"[^\\']+", String.Regex),
+ (r"\\'?", String.Regex),
+ (r"'", String.Regex, '#pop')
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text, **kwargs):
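+ # Track (possibly nested) #if 0 / #if nil blocks and re-tag the
+ # non-preprocessor tokens inside them as Comment.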
+ pp = r'^%s*#%s*' % (self._ws_pp, self._ws_pp)
+ if_false_level = 0
+ for index, token, value in (
+ RegexLexer.get_tokens_unprocessed(self, text, **kwargs)):
+ if if_false_level == 0: # Not in a false #if
+ if (token is Comment.Preproc and
+ re.match(r'%sif%s+(0|nil)%s*$\n?' %
+ (pp, self._ws_pp, self._ws_pp), value)):
+ if_false_level = 1
+ else: # In a false #if
+ if token is Comment.Preproc:
+ if (if_false_level == 1 and
+ re.match(r'%sel(if|se)\b' % pp, value)):
+ if_false_level = 0
+ elif re.match(r'%sif' % pp, value):
+ if_false_level += 1
+ elif re.match(r'%sendif\b' % pp, value):
+ if_false_level -= 1
+ else:
+ token = Comment
+ yield index, token, value
diff --git a/pygments/lexers/iolang.py b/pygments/lexers/iolang.py
new file mode 100644
index 00000000..e62dd434
--- /dev/null
+++ b/pygments/lexers/iolang.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.iolang
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Io language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number
+
+__all__ = ['IoLexer']
+
+
+class IoLexer(RegexLexer):
+ """
+ For `Io <http://iolanguage.com/>`_ (a small, prototype-based
+ programming language) source.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Io'
+ filenames = ['*.io']
+ aliases = ['io']
+ mimetypes = ['text/x-iosrc']
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ # Comments
+ (r'//(.*?)\n', Comment.Single),
+ (r'#(.*?)\n', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'/\+', Comment.Multiline, 'nestedcomment'),
+ # DoubleQuotedString
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # Operators
+ (r'::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}',
+ Operator),
+ # keywords
+ (r'(clone|do|doFile|doString|method|for|if|else|elseif|then)\b',
+ Keyword),
+ # constants
+ (r'(nil|false|true)\b', Name.Constant),
+ # names
+ (r'(Object|list|List|Map|args|Sequence|Coroutine|File)\b',
+ Name.Builtin),
+ ('[a-zA-Z_]\w*', Name),
+ # numbers
+ (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer)
+ ],
+ 'nestedcomment': [
+ (r'[^+/]+', Comment.Multiline),
+ (r'/\+', Comment.Multiline, '#push'),
+ (r'\+/', Comment.Multiline, '#pop'),
+ (r'[+/]', Comment.Multiline),
+ ]
+ }
diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py
new file mode 100644
index 00000000..7dcfbb4b
--- /dev/null
+++ b/pygments/lexers/javascript.py
@@ -0,0 +1,1199 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.javascript
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for JavaScript and related languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default, using, this
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Other
+from pygments.util import get_bool_opt, iteritems
+import pygments.unistring as uni
+
+__all__ = ['JavascriptLexer', 'KalLexer', 'LiveScriptLexer', 'DartLexer',
+ 'TypeScriptLexer', 'LassoLexer', 'ObjectiveJLexer',
+ 'CoffeeScriptLexer', 'MaskLexer']
+
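+# Identifier regexes: a start character (or \uXXXX escape) followed by any
+# number of continuation characters, built from the Unicode categories below.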
+JS_IDENT_START = ('(?:[$_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') +
+ ']|\\\\u[a-fA-F0-9]{4})')
+JS_IDENT_PART = ('(?:[$' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
+ 'Mn', 'Mc', 'Nd', 'Pc') +
+ u'\u200c\u200d]|\\\\u[a-fA-F0-9]{4})')
+JS_IDENT = JS_IDENT_START + '(?:' + JS_IDENT_PART + ')*'
+
+
+class JavascriptLexer(RegexLexer):
+ """
+ For JavaScript source code.
+ """
+
+ name = 'JavaScript'
+ aliases = ['js', 'javascript']
+ filenames = ['*.js', '*.jsm', ]
+ mimetypes = ['application/javascript', 'application/x-javascript',
+ 'text/x-javascript', 'text/javascript', ]
+
+ flags = re.DOTALL | re.UNICODE | re.MULTILINE
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'<!--', Comment),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'\A#! ?/.*?\n', Comment.Hashbang), # recognized by node.js
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|void|yield|'
+ r'this)\b', Keyword, 'slashstartsregex'),
+ (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
+ r'extends|final|float|goto|implements|import|int|interface|long|native|'
+ r'package|private|protected|public|short|static|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Reserved),
+ (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
+ r'window)\b', Name.Builtin),
+ (JS_IDENT, Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
+
+
+class KalLexer(RegexLexer):
+ """
+ For `Kal`_ source code.
+
+ .. _Kal: http://rzimmerman.github.io/kal
+
+
+ """
+
+ name = 'Kal'
+ aliases = ['kal']
+ filenames = ['*.kal']
+ mimetypes = ['text/kal', 'application/kal']
+
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'###[^#].*?###', Comment.Multiline),
+ (r'#(?!##[^#]).*?\n', Comment.Single),
+ ],
+ 'functiondef': [
+ (r'[$a-zA-Z_][\w$]*\s*', Name.Function, '#pop'),
+ include('commentsandwhitespace'),
+ ],
+ 'classdef': [
+ (r'\binherits\s+from\b', Keyword),
+ (r'[$a-zA-Z_][\w$]*\s*\n', Name.Class, '#pop'),
+ (r'[$a-zA-Z_][\w$]*\s*', Name.Class),
+ include('commentsandwhitespace'),
+ ],
+ 'listcomprehension': [
+ (r'\]', Punctuation, '#pop'),
+ (r'\b(property|value)\b', Keyword),
+ include('root'),
+ ],
+ 'waitfor': [
+ (r'\n', Punctuation, '#pop'),
+ (r'\bfrom\b', Keyword),
+ include('root'),
+ ],
+ 'root': [
+ include('commentsandwhitespace'),
+ (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex),
+ (r'\?|:|_(?=\n)|==?|!=|-(?!>)|[<>+*/-]=?',
+ Operator),
+ (r'\b(and|or|isnt|is|not|but|bitwise|mod|\^|xor|exists|'
+ r'doesnt\s+exist)\b', Operator.Word),
+ (r'(?:\([^()]+\))?\s*>', Name.Function),
+ (r'[{(]', Punctuation),
+ (r'\[', Punctuation, 'listcomprehension'),
+ (r'[})\].,]', Punctuation),
+ (r'\b(function|method|task)\b', Keyword.Declaration, 'functiondef'),
+ (r'\bclass\b', Keyword.Declaration, 'classdef'),
+ (r'\b(safe\s+)?wait\s+for\b', Keyword, 'waitfor'),
+ (r'\b(me|this)(\.[$a-zA-Z_][\w.$]*)?\b', Name.Variable.Instance),
+ (r'(?<![.$])(for(\s+(parallel|series))?|in|of|while|until|'
+ r'break|return|continue|'
+ r'when|if|unless|else|otherwise|except\s+when|'
+ r'throw|raise|fail\s+with|try|catch|finally|new|delete|'
+ r'typeof|instanceof|super|run\s+in\s+parallel|'
+ r'inherits\s+from)\b', Keyword),
+ (r'(?<![.$])(true|false|yes|no|on|off|null|nothing|none|'
+ r'NaN|Infinity|undefined)\b',
+ Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'eval|isFinite|isNaN|parseFloat|parseInt|document|window|'
+ r'print)\b',
+ Name.Builtin),
+ (r'[$a-zA-Z_][\w.$]*\s*(:|[+\-*/]?\=)?\b', Name.Variable),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ ('"""', String, 'tdqs'),
+ ("'''", String, 'tsqs'),
+ ('"', String, 'dqs'),
+ ("'", String, 'sqs'),
+ ],
+ 'strings': [
+ (r'[^#\\\'"]+', String),
+ # note that all Kal strings are multi-line.
+ # hashmarks, quotes and backslashes must be parsed one at a time
+ ],
+ 'interpoling_string': [
+ (r'\}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ (r'\\.|\'', String), # double-quoted strings don't need ' escapes
+ (r'#\{', String.Interpol, "interpoling_string"),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r'#|\\.|"', String), # single quoted strings don't need " escapes
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ (r'\\.|\'|"', String), # no need to escape quotes in triple-string
+ (r'#\{', String.Interpol, "interpoling_string"),
+ include('strings'),
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
+ include('strings')
+ ],
+ }
+
+
+class LiveScriptLexer(RegexLexer):
+ """
+ For `LiveScript`_ source code.
+
+ .. _LiveScript: http://gkz.github.com/LiveScript/
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'LiveScript'
+ aliases = ['live-script', 'livescript']
+ filenames = ['*.ls']
+ mimetypes = ['text/livescript']
+
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'#.*?\n', Comment.Single),
+ ],
+ 'multilineregex': [
+ include('commentsandwhitespace'),
+ (r'//([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'/', String.Regex),
+ (r'[^/#]+', String.Regex)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'//', String.Regex, ('#pop', 'multilineregex')),
+ (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ default('#pop'),
+ ],
+ 'root': [
+ # this next expr leads to an infinite loop: root -> slashstartsregex
+ # (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'(?:\([^()]+\))?[ ]*[~-]{1,2}>|'
+ r'(?:\(?[^()\n]+\)?)?[ ]*<[~-]{1,2}', Name.Function),
+ (r'\+\+|&&|(?<![.$])\b(?:and|x?or|is|isnt|not)\b|\?|:|=|'
+ r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
+ r'~(?!\~?>)|-(?!\-?>)|<(?!\[)|(?<!\])>|'
+ r'[+*`%&|^/])=?',
+ Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(?<![.$])(for|own|in|of|while|until|loop|break|'
+ r'return|continue|switch|when|then|if|unless|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
+ r'extends|this|class|by|const|var|to|til)\b', Keyword,
+ 'slashstartsregex'),
+ (r'(?<![.$])(true|false|yes|no|on|off|'
+ r'null|NaN|Infinity|undefined|void)\b',
+ Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
+ Name.Builtin),
+ (r'[$a-zA-Z_][\w.\-:$]*\s*[:=]\s', Name.Variable,
+ 'slashstartsregex'),
+ (r'@[$a-zA-Z_][\w.\-:$]*\s*[:=]\s', Name.Variable.Instance,
+ 'slashstartsregex'),
+ (r'@', Name.Other, 'slashstartsregex'),
+ (r'@?[$a-zA-Z_][\w-]*', Name.Other, 'slashstartsregex'),
+ (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?(?:[a-zA-Z_]+)?', Number.Float),
+ (r'[0-9]+(~[0-9a-z]+)?(?:[a-zA-Z_]+)?', Number.Integer),
+ ('"""', String, 'tdqs'),
+ ("'''", String, 'tsqs'),
+ ('"', String, 'dqs'),
+ ("'", String, 'sqs'),
+ (r'\\\S+', String),
+ (r'<\[.*?\]>', String),
+ ],
+ 'strings': [
+ (r'[^#\\\'"]+', String),
+ # note that all LiveScript strings are multi-line.
+ # hashmarks, quotes and backslashes must be parsed one at a time
+ ],
+ 'interpoling_string': [
+ (r'\}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ (r'\\.|\'', String), # double-quoted strings don't need ' escapes
+ (r'#\{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r'#|\\.|"', String), # single quoted strings don't need " escapes
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ (r'\\.|\'|"', String), # no need to escape quotes in triple-string
+ (r'#\{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings'),
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
+ include('strings')
+ ],
+ }
+
+
+class DartLexer(RegexLexer):
+ """
+ For `Dart <http://dartlang.org/>`_ source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Dart'
+ aliases = ['dart']
+ filenames = ['*.dart']
+ mimetypes = ['text/x-dart']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ include('string_literal'),
+ (r'#!(.*?)$', Comment.Preproc),
+ (r'\b(import|export)\b', Keyword, 'import_decl'),
+ (r'\b(library|source|part of|part)\b', Keyword),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'\b(class)\b(\s+)',
+ bygroups(Keyword.Declaration, Text), 'class'),
+ (r'\b(assert|break|case|catch|continue|default|do|else|finally|for|'
+ r'if|in|is|new|return|super|switch|this|throw|try|while)\b',
+ Keyword),
+ (r'\b(abstract|const|extends|factory|final|get|implements|'
+ r'native|operator|set|static|typedef|var)\b', Keyword.Declaration),
+ (r'\b(bool|double|Dynamic|int|num|Object|String|void)\b', Keyword.Type),
+ (r'\b(false|null|true)\b', Keyword.Constant),
+ (r'[~!%^&*+=|?:<>/-]|as\b', Operator),
+ (r'[a-zA-Z_$]\w*:', Name.Label),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'[(){}\[\],.;]', Punctuation),
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ # DIGIT+ (‘.’ DIGIT*)? EXPONENT?
+ (r'\d+(\.\d*)?([eE][+-]?\d+)?', Number),
+ (r'\.\d+([eE][+-]?\d+)?', Number), # ‘.’ DIGIT+ EXPONENT?
+ (r'\n', Text)
+ # pseudo-keyword negate intentionally left out
+ ],
+ 'class': [
+ (r'[a-zA-Z_$]\w*', Name.Class, '#pop')
+ ],
+ 'import_decl': [
+ include('string_literal'),
+ (r'\s+', Text),
+ (r'\b(as|show|hide)\b', Keyword),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'\,', Punctuation),
+ (r'\;', Punctuation, '#pop')
+ ],
+ 'string_literal': [
+ # Raw strings.
+ (r'r"""([\w\W]*?)"""', String.Double),
+ (r"r'''([\w\W]*?)'''", String.Single),
+ (r'r"(.*?)"', String.Double),
+ (r"r'(.*?)'", String.Single),
+ # Normal Strings.
+ (r'"""', String.Double, 'string_double_multiline'),
+ (r"'''", String.Single, 'string_single_multiline'),
+ (r'"', String.Double, 'string_double'),
+ (r"'", String.Single, 'string_single')
+ ],
+ 'string_common': [
+ (r"\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z'\"$\\])",
+ String.Escape),
+ (r'(\$)([a-zA-Z_]\w*)', bygroups(String.Interpol, Name)),
+ (r'(\$\{)(.*?)(\})',
+ bygroups(String.Interpol, using(this), String.Interpol))
+ ],
+ 'string_double': [
+ (r'"', String.Double, '#pop'),
+ (r'[^"$\\\n]+', String.Double),
+ include('string_common'),
+ (r'\$+', String.Double)
+ ],
+ 'string_double_multiline': [
+ (r'"""', String.Double, '#pop'),
+ (r'[^"$\\]+', String.Double),
+ include('string_common'),
+ (r'(\$|\")+', String.Double)
+ ],
+ 'string_single': [
+ (r"'", String.Single, '#pop'),
+ (r"[^'$\\\n]+", String.Single),
+ include('string_common'),
+ (r'\$+', String.Single)
+ ],
+ 'string_single_multiline': [
+ (r"'''", String.Single, '#pop'),
+ (r'[^\'$\\]+', String.Single),
+ include('string_common'),
+ (r'(\$|\')+', String.Single)
+ ]
+ }
+
+
+class TypeScriptLexer(RegexLexer):
+ """
+ For `TypeScript <http://typescriptlang.org/>`_ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'TypeScript'
+ aliases = ['ts']
+ filenames = ['*.ts']
+ mimetypes = ['text/x-typescript']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'<!--', Comment),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
+ r'this)\b', Keyword, 'slashstartsregex'),
+ (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
+ r'extends|final|float|goto|implements|import|int|interface|long|native|'
+ r'package|private|protected|public|short|static|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Reserved),
+ (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
+ r'window)\b', Name.Builtin),
+ # Match stuff like: module name {...}
+ (r'\b(module)(\s*)(\s*[\w?.$][\w?.$]*)(\s*)',
+ bygroups(Keyword.Reserved, Text, Name.Other, Text), 'slashstartsregex'),
+ # Match variable type keywords
+ (r'\b(string|bool|number)\b', Keyword.Type),
+ # Match stuff like: constructor
+ (r'\b(constructor|declare|interface|as|AS)\b', Keyword.Reserved),
+ # Match stuff like: super(argument, list)
+ (r'(super)(\s*)(\([\w,?.$\s]+\s*\))',
+ bygroups(Keyword.Reserved, Text), 'slashstartsregex'),
+ # Match stuff like: function() {...}
+ (r'([a-zA-Z_?.$][\w?.$]*)\(\) \{', Name.Other, 'slashstartsregex'),
+ # Match stuff like: (function: return type)
+ (r'([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)',
+ bygroups(Name.Other, Text, Keyword.Type)),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
+
+
+class LassoLexer(RegexLexer):
+ """
+ For `Lasso <http://www.lassosoft.com/>`_ source code, covering both Lasso 9
+ syntax and LassoScript for Lasso 8.6 and earlier. For Lasso embedded in
+ HTML, use the `LassoHtmlLexer`.
+
+ Additional options accepted:
+
+ `builtinshighlighting`
+ If given and ``True``, highlight builtin types, traits, methods, and
+ members (default: ``True``).
+ `requiredelimiters`
+ If given and ``True``, only highlight code between delimiters as Lasso
+ (default: ``False``).
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Lasso'
+ aliases = ['lasso', 'lassoscript']
+ filenames = ['*.lasso', '*.lasso[89]']
+ alias_filenames = ['*.incl', '*.inc', '*.las']
+ mimetypes = ['text/x-lasso']
+ flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'^#!.+lasso9\b', Comment.Preproc, 'lasso'),
+ (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
+ (r'\[noprocess\]', Comment.Preproc, ('delimiters', 'noprocess')),
+ (r'\[', Comment.Preproc, ('delimiters', 'squarebrackets')),
+ (r'<\?(LassoScript|lasso|=)', Comment.Preproc,
+ ('delimiters', 'anglebrackets')),
+ (r'<(!--.*?-->)?', Other, 'delimiters'),
+ (r'\s+', Other),
+ default(('delimiters', 'lassofile')),
+ ],
+ 'delimiters': [
+ (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
+ (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
+ (r'\[', Comment.Preproc, 'squarebrackets'),
+ (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
+ (r'<(!--.*?-->)?', Other),
+ (r'[^[<]+', Other),
+ ],
+ 'nosquarebrackets': [
+ (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
+ (r'<', Other),
+ (r'[^<]+', Other),
+ ],
+ 'noprocess': [
+ (r'\[/noprocess\]', Comment.Preproc, '#pop'),
+ (r'\[', Other),
+ (r'[^[]', Other),
+ ],
+ 'squarebrackets': [
+ (r'\]', Comment.Preproc, '#pop'),
+ include('lasso'),
+ ],
+ 'anglebrackets': [
+ (r'\?>', Comment.Preproc, '#pop'),
+ include('lasso'),
+ ],
+ 'lassofile': [
+ (r'\]|\?>', Comment.Preproc, '#pop'),
+ include('lasso'),
+ ],
+ 'whitespacecomments': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*\*!.*?\*/', String.Doc),
+ (r'/\*.*?\*/', Comment.Multiline),
+ ],
+ 'lasso': [
+ # whitespace/comments
+ include('whitespacecomments'),
+
+ # literals
+ (r'\d*\.\d+(e[+-]?\d+)?', Number.Float),
+ (r'0x[\da-f]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'([+-]?)(infinity|NaN)\b', bygroups(Operator, Number)),
+ (r"'", String.Single, 'singlestring'),
+ (r'"', String.Double, 'doublestring'),
+ (r'`[^`]*`', String.Backtick),
+
+ # names
+ (r'\$[a-z_][\w.]*', Name.Variable),
+ (r'#([a-z_][\w.]*|\d+)', Name.Variable.Instance),
+ (r"(\.)('[a-z_][\w.]*')",
+ bygroups(Name.Builtin.Pseudo, Name.Variable.Class)),
+ (r"(self)(\s*->\s*)('[a-z_][\w.]*')",
+ bygroups(Name.Builtin.Pseudo, Operator, Name.Variable.Class)),
+ (r'(\.\.?)([a-z_][\w.]*(=(?!=))?)',
+ bygroups(Name.Builtin.Pseudo, Name.Other.Member)),
+ (r'(->\\?\s*|&\s*)([a-z_][\w.]*(=(?!=))?)',
+ bygroups(Operator, Name.Other.Member)),
+ (r'(self|inherited)\b', Name.Builtin.Pseudo),
+ (r'-[a-z_][\w.]*', Name.Attribute),
+ (r'::\s*[a-z_][\w.]*', Name.Label),
+ (r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|'
+ r'Error_DatabaseConnectionUnavailable|Error_DatabaseTimeout|'
+ r'Error_DeleteError|Error_FieldRestriction|Error_FileNotFound|'
+ r'Error_InvalidDatabase|Error_InvalidPassword|'
+ r'Error_InvalidUsername|Error_ModuleNotFound|'
+ r'Error_NoError|Error_NoPermission|Error_OutOfMemory|'
+ r'Error_ReqColumnMissing|Error_ReqFieldMissing|'
+ r'Error_RequiredColumnMissing|Error_RequiredFieldMissing|'
+ r'Error_UpdateError)\b', Name.Exception),
+
+ # definitions
+ (r'(define)(\s+)([a-z_][\w.]*)(\s*=>\s*)(type|trait|thread)\b',
+ bygroups(Keyword.Declaration, Text, Name.Class, Operator, Keyword)),
+ (r'(define)(\s+)([a-z_][\w.]*)(\s*->\s*)([a-z_][\w.]*=?|[-+*/%])',
+ bygroups(Keyword.Declaration, Text, Name.Class, Operator,
+ Name.Function), 'signature'),
+ (r'(define)(\s+)([a-z_][\w.]*)',
+ bygroups(Keyword.Declaration, Text, Name.Function), 'signature'),
+ (r'(public|protected|private|provide)(\s+)(([a-z_][\w.]*=?|[-+*/%])'
+ r'(?=\s*\())', bygroups(Keyword, Text, Name.Function),
+ 'signature'),
+ (r'(public|protected|private|provide)(\s+)([a-z_][\w.]*)',
+ bygroups(Keyword, Text, Name.Function)),
+
+ # keywords
+ (r'(true|false|none|minimal|full|all|void)\b', Keyword.Constant),
+ (r'(local|var|variable|global|data(?=\s))\b', Keyword.Declaration),
+ (r'(array|date|decimal|duration|integer|map|pair|string|tag|xml|'
+ r'null|bytes|list|queue|set|stack|staticarray|tie)\b', Keyword.Type),
+ (r'([a-z_][\w.]*)(\s+)(in)\b', bygroups(Name, Text, Keyword)),
+ (r'(let|into)(\s+)([a-z_][\w.]*)', bygroups(Keyword, Text, Name)),
+ (r'require\b', Keyword, 'requiresection'),
+ (r'(/?)(Namespace_Using)\b', bygroups(Punctuation, Keyword.Namespace)),
+ (r'(/?)(Cache|Database_Names|Database_SchemaNames|'
+ r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|'
+ r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|'
+ r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|'
+ r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|'
+ r'Link_FirstRecord|Link_LastGroup|Link_LastRecord|Link_NextGroup|'
+ r'Link_NextRecord|Link_PrevGroup|Link_PrevRecord|Log|Loop|'
+ r'NoProcess|Output_None|Portal|Private|Protect|Records|Referer|'
+ r'Referrer|Repeating|ResultSet|Rows|Search_Args|Search_Arguments|'
+ r'Select|Sort_Args|Sort_Arguments|Thread_Atomic|Value_List|While|'
+ r'Abort|Case|Else|If_Empty|If_False|If_Null|If_True|Loop_Abort|'
+ r'Loop_Continue|Loop_Count|Params|Params_Up|Return|Return_Value|'
+ r'Run_Children|SOAP_DefineTag|SOAP_LastRequest|SOAP_LastResponse|'
+ r'Tag_Name|ascending|average|by|define|descending|do|equals|'
+ r'frozen|group|handle_failure|import|in|into|join|let|match|max|'
+ r'min|on|order|parent|protected|provide|public|require|returnhome|'
+ r'skip|split_thread|sum|take|thread|to|trait|type|where|with|'
+ r'yield|yieldhome)\b',
+ bygroups(Punctuation, Keyword)),
+
+ # other
+ (r',', Punctuation, 'commamember'),
+ (r'(and|or|not)\b', Operator.Word),
+ (r'([a-z_][\w.]*)(\s*::\s*[a-z_][\w.]*)?(\s*=(?!=))',
+ bygroups(Name, Name.Label, Operator)),
+ (r'(/?)([\w.]+)', bygroups(Punctuation, Name.Other)),
+ (r'(=)(n?bw|n?ew|n?cn|lte?|gte?|n?eq|n?rx|ft)\b',
+ bygroups(Operator, Operator.Word)),
+ (r':=|[-+*/%=<>&|!?\\]+', Operator),
+ (r'[{}():;,@^]', Punctuation),
+ ],
+ 'singlestring': [
+ (r"'", String.Single, '#pop'),
+ (r"[^'\\]+", String.Single),
+ include('escape'),
+ (r"\\", String.Single),
+ ],
+ 'doublestring': [
+ (r'"', String.Double, '#pop'),
+ (r'[^"\\]+', String.Double),
+ include('escape'),
+ (r'\\', String.Double),
+ ],
+ 'escape': [
+ (r'\\(U[\da-f]{8}|u[\da-f]{4}|x[\da-f]{1,2}|[0-7]{1,3}|:[^:]+:|'
+ r'[abefnrtv?"\'\\]|$)', String.Escape),
+ ],
+ 'signature': [
+ (r'=>', Operator, '#pop'),
+ (r'\)', Punctuation, '#pop'),
+ (r'[(,]', Punctuation, 'parameter'),
+ include('lasso'),
+ ],
+ 'parameter': [
+ (r'\)', Punctuation, '#pop'),
+ (r'-?[a-z_][\w.]*', Name.Attribute, '#pop'),
+ (r'\.\.\.', Name.Builtin.Pseudo),
+ include('lasso'),
+ ],
+ 'requiresection': [
+ (r'(([a-z_][\w.]*=?|[-+*/%])(?=\s*\())', Name, 'requiresignature'),
+ (r'(([a-z_][\w.]*=?|[-+*/%])(?=(\s*::\s*[\w.]+)?\s*,))', Name),
+ (r'[a-z_][\w.]*=?|[-+*/%]', Name, '#pop'),
+ (r'::\s*[a-z_][\w.]*', Name.Label),
+ (r',', Punctuation),
+ include('whitespacecomments'),
+ ],
+ 'requiresignature': [
+ (r'(\)(?=(\s*::\s*[\w.]+)?\s*,))', Punctuation, '#pop'),
+ (r'\)', Punctuation, '#pop:2'),
+ (r'-?[a-z_][\w.]*', Name.Attribute),
+ (r'::\s*[a-z_][\w.]*', Name.Label),
+ (r'\.\.\.', Name.Builtin.Pseudo),
+ (r'[(,]', Punctuation),
+ include('whitespacecomments'),
+ ],
+ 'commamember': [
+ (r'(([a-z_][\w.]*=?|[-+*/%])'
+ r'(?=\s*(\(([^()]*\([^()]*\))*[^)]*\)\s*)?(::[\w.\s]+)?=>))',
+ Name.Function, 'signature'),
+ include('whitespacecomments'),
+ default('#pop'),
+ ],
+ }
+
+ def __init__(self, **options):
+ self.builtinshighlighting = get_bool_opt(
+ options, 'builtinshighlighting', True)
+ self.requiredelimiters = get_bool_opt(
+ options, 'requiredelimiters', False)
+
+ self._builtins = set()
+ self._members = set()
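+ # The built-in and member name lists are only loaded when builtin
+ # highlighting is requested.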
+ if self.builtinshighlighting:
+ from pygments.lexers._lasso_builtins import BUILTINS, MEMBERS
+ for key, value in iteritems(BUILTINS):
+ self._builtins.update(value)
+ for key, value in iteritems(MEMBERS):
+ self._members.update(value)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
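+ # Re-tag plain names that match known built-in or member names as
+ # Name.Builtin.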
+ stack = ['root']
+ if self.requiredelimiters:
+ stack.append('delimiters')
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if (token is Name.Other and value.lower() in self._builtins or
+ token is Name.Other.Member and
+ value.lower().rstrip('=') in self._members):
+ yield index, Name.Builtin, value
+ continue
+ yield index, token, value
+
+ def analyse_text(text):
+ rv = 0.0
+ if 'bin/lasso9' in text:
+ rv += 0.8
+ if re.search(r'<\?lasso', text, re.I):
+ rv += 0.4
+ if re.search(r'local\(', text, re.I):
+ rv += 0.4
+ return rv
+
+
+class ObjectiveJLexer(RegexLexer):
+ """
+ For Objective-J source code with preprocessor directives.
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Objective-J'
+ aliases = ['objective-j', 'objectivej', 'obj-j', 'objj']
+ filenames = ['*.j']
+ mimetypes = ['text/x-objective-j']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)*'
+
+ flags = re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+
+ # function definition
+ (r'^(' + _ws + r'[+-]' + _ws + r')([(a-zA-Z_].*?[^(])(' + _ws + r'\{)',
+ bygroups(using(this), using(this, state='function_signature'),
+ using(this))),
+
+ # class definition
+ (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
+ 'classname'),
+ (r'(@class|@protocol)(\s*)', bygroups(Keyword, Text),
+ 'forward_classname'),
+ (r'(\s*)(@end)(\s*)', bygroups(Text, Keyword, Text)),
+
+ include('statements'),
+ ('[{()}]', Punctuation),
+ (';', Punctuation),
+ ],
+ 'whitespace': [
+ (r'(@import)(\s+)("(?:\\\\|\\"|[^"])*")',
+ bygroups(Comment.Preproc, Text, String.Double)),
+ (r'(@import)(\s+)(<(?:\\\\|\\>|[^>])*>)',
+ bygroups(Comment.Preproc, Text, String.Double)),
+ (r'(#(?:include|import))(\s+)("(?:\\\\|\\"|[^"])*")',
+ bygroups(Comment.Preproc, Text, String.Double)),
+ (r'(#(?:include|import))(\s+)(<(?:\\\\|\\>|[^>])*>)',
+ bygroups(Comment.Preproc, Text, String.Double)),
+
+ (r'#if\s+0', Comment.Preproc, 'if0'),
+ (r'#', Comment.Preproc, 'macro'),
+
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'<!--', Comment),
+ ],
+ 'slashstartsregex': [
+ include('whitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop'),
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop'),
+ ],
+ 'statements': [
+ (r'(L|@)?"', String, 'string'),
+ (r"(L|@)?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
+ String.Char),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'0[0-7]+[Ll]?', Number.Oct),
+ (r'\d+[Ll]?', Number.Integer),
+
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?',
+ Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|'
+ r'else|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
+ r'prototype|__proto__)\b', Keyword, 'slashstartsregex'),
+
+ (r'(var|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+
+ (r'(@selector|@private|@protected|@public|@encode|'
+ r'@synchronized|@try|@throw|@catch|@finally|@end|@property|'
+ r'@synthesize|@dynamic|@for|@accessors|new)\b', Keyword),
+
+ (r'(int|long|float|short|double|char|unsigned|signed|void|'
+ r'id|BOOL|bool|boolean|IBOutlet|IBAction|SEL|@outlet|@action)\b',
+ Keyword.Type),
+
+ (r'(self|super)\b', Name.Builtin),
+
+ (r'(TRUE|YES|FALSE|NO|Nil|nil|NULL)\b', Keyword.Constant),
+ (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
+ (r'(ABS|ASIN|ACOS|ATAN|ATAN2|SIN|COS|TAN|EXP|POW|CEIL|FLOOR|ROUND|'
+ r'MIN|MAX|RAND|SQRT|E|LN2|LN10|LOG2E|LOG10E|PI|PI2|PI_2|SQRT1_2|'
+ r'SQRT2)\b', Keyword.Constant),
+
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
+ r'window)\b', Name.Builtin),
+
+ (r'([$a-zA-Z_]\w*)(' + _ws + r')(?=\()',
+ bygroups(Name.Function, using(this))),
+
+ (r'[$a-zA-Z_]\w*', Name),
+ ],
+ 'classname': [
+ # interface definition that inherits
+ (r'([a-zA-Z_]\w*)(' + _ws + r':' + _ws +
+ r')([a-zA-Z_]\w*)?',
+ bygroups(Name.Class, using(this), Name.Class), '#pop'),
+ # interface definition for a category
+ (r'([a-zA-Z_]\w*)(' + _ws + r'\()([a-zA-Z_]\w*)(\))',
+ bygroups(Name.Class, using(this), Name.Label, Text), '#pop'),
+ # simple interface / implementation
+ (r'([a-zA-Z_]\w*)', Name.Class, '#pop'),
+ ],
+ 'forward_classname': [
+ (r'([a-zA-Z_]\w*)(\s*,\s*)',
+ bygroups(Name.Class, Text), '#push'),
+ (r'([a-zA-Z_]\w*)(\s*;?)',
+ bygroups(Name.Class, Text), '#pop'),
+ ],
+ 'function_signature': [
+ include('whitespace'),
+
+ # start of a selector w/ parameters
+ (r'(\(' + _ws + r')' # open paren
+ r'([a-zA-Z_]\w+)' # return type
+ r'(' + _ws + r'\)' + _ws + r')' # close paren
+ r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
+ bygroups(using(this), Keyword.Type, using(this),
+ Name.Function), 'function_parameters'),
+
+ # no-param function
+ (r'(\(' + _ws + r')' # open paren
+ r'([a-zA-Z_]\w+)' # return type
+ r'(' + _ws + r'\)' + _ws + r')' # close paren
+ r'([$a-zA-Z_]\w+)', # function name
+ bygroups(using(this), Keyword.Type, using(this),
+ Name.Function), "#pop"),
+
+ # no return type given, start of a selector w/ parameters
+ (r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
+ bygroups(Name.Function), 'function_parameters'),
+
+ # no return type given, no-param function
+ (r'([$a-zA-Z_]\w+)', # function name
+ bygroups(Name.Function), "#pop"),
+
+ default('#pop'),
+ ],
+ 'function_parameters': [
+ include('whitespace'),
+
+ # parameters
+ (r'(\(' + _ws + ')' # open paren
+ r'([^)]+)' # type
+ r'(' + _ws + r'\)' + _ws + r')' # close paren
+ r'([$a-zA-Z_]\w+)', # param name
+ bygroups(using(this), Keyword.Type, using(this), Text)),
+
+ # one piece of a selector name
+ (r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
+ Name.Function),
+
+ # smallest possible selector piece
+ (r'(:)', Name.Function),
+
+ # var args
+ (r'(,' + _ws + r'\.\.\.)', using(this)),
+
+ # param name
+ (r'([$a-zA-Z_]\w+)', Text),
+ ],
+ 'expression': [
+ (r'([$a-zA-Z_]\w*)(\()', bygroups(Name.Function,
+ Punctuation)),
+ (r'(\))', Punctuation, "#pop"),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
+
+ def analyse_text(text):
+ if re.search('^\s*@import\s+[<"]', text, re.MULTILINE):
+ # special directive found in most Objective-J files
+ return True
+ return False
+
+
+class CoffeeScriptLexer(RegexLexer):
+ """
+ For `CoffeeScript`_ source code.
+
+ .. _CoffeeScript: http://coffeescript.org
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'CoffeeScript'
+ aliases = ['coffee-script', 'coffeescript', 'coffee']
+ filenames = ['*.coffee']
+ mimetypes = ['text/coffeescript']
+
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'###[^#].*?###', Comment.Multiline),
+ (r'#(?!##[^#]).*?\n', Comment.Single),
+ ],
+ 'multilineregex': [
+ (r'[^/#]+', String.Regex),
+ (r'///([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'#\{', String.Interpol, 'interpoling_string'),
+ (r'[/#]', String.Regex),
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'///', String.Regex, ('#pop', 'multilineregex')),
+ (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ default('#pop'),
+ ],
+ 'root': [
+            # this next expr leads to an infinite loop: root -> slashstartsregex
+ # (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
+ r'\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&|^/])=?',
+ Operator, 'slashstartsregex'),
+ (r'(?:\([^()]*\))?\s*[=-]>', Name.Function),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(?<![.$])(for|own|in|of|while|until|'
+ r'loop|break|return|continue|'
+ r'switch|when|then|if|unless|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
+ r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
+ (r'(?<![.$])(true|false|yes|no|on|off|null|'
+ r'NaN|Infinity|undefined)\b',
+ Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
+ Name.Builtin),
+ (r'[$a-zA-Z_][\w.:$]*\s*[:=]\s', Name.Variable,
+ 'slashstartsregex'),
+ (r'@[$a-zA-Z_][\w.:$]*\s*[:=]\s', Name.Variable.Instance,
+ 'slashstartsregex'),
+ (r'@', Name.Other, 'slashstartsregex'),
+ (r'@?[$a-zA-Z_][\w$]*', Name.Other, 'slashstartsregex'),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ ('"""', String, 'tdqs'),
+ ("'''", String, 'tsqs'),
+ ('"', String, 'dqs'),
+ ("'", String, 'sqs'),
+ ],
+ 'strings': [
+ (r'[^#\\\'"]+', String),
+            # note that all CoffeeScript strings are multi-line;
+            # hash marks, quotes and backslashes must be parsed one at a time
+ ],
+ 'interpoling_string': [
+ (r'\}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+            (r'\\.|\'', String),  # double-quoted strings don't need ' escapes
+ (r'#\{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+            (r'#|\\.|"', String),  # single-quoted strings don't need " escapes
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ (r'\\.|\'|"', String), # no need to escape quotes in triple-string
+ (r'#\{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings'),
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
+ include('strings')
+ ],
+ }
+
+
+class MaskLexer(RegexLexer):
+ """
+ For `Mask <http://github.com/atmajs/MaskJS>`__ markup.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Mask'
+ aliases = ['mask']
+ filenames = ['*.mask']
+ mimetypes = ['text/x-mask']
+
+ flags = re.MULTILINE | re.IGNORECASE | re.DOTALL
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'[{};>]', Punctuation),
+ (r"'''", String, 'string-trpl-single'),
+ (r'"""', String, 'string-trpl-double'),
+ (r"'", String, 'string-single'),
+ (r'"', String, 'string-double'),
+ (r'([\w-]+)', Name.Tag, 'node'),
+ (r'([^.#;{>\s]+)', Name.Class, 'node'),
+ (r'(#[\w-]+)', Name.Function, 'node'),
+ (r'(\.[\w-]+)', Name.Variable.Class, 'node')
+ ],
+ 'string-base': [
+ (r'\\.', String.Escape),
+ (r'~\[', String.Interpol, 'interpolation'),
+ (r'.', String.Single),
+ ],
+ 'string-single': [
+ (r"'", String.Single, '#pop'),
+ include('string-base')
+ ],
+ 'string-double': [
+ (r'"', String.Single, '#pop'),
+ include('string-base')
+ ],
+ 'string-trpl-single': [
+ (r"'''", String.Single, '#pop'),
+ include('string-base')
+ ],
+ 'string-trpl-double': [
+ (r'"""', String.Single, '#pop'),
+ include('string-base')
+ ],
+ 'interpolation': [
+ (r'\]', String.Interpol, '#pop'),
+ (r'\s*:', String.Interpol, 'expression'),
+ (r'\s*\w+:', Name.Other),
+ (r'[^\]]+', String.Interpol)
+ ],
+ 'expression': [
+ (r'[^\]]+', using(JavascriptLexer), '#pop')
+ ],
+ 'node': [
+ (r'\s+', Text),
+ (r'\.', Name.Variable.Class, 'node-class'),
+ (r'\#', Name.Function, 'node-id'),
+ (r'style[ \t]*=', Name.Attribute, 'node-attr-style-value'),
+ (r'[\w:-]+[ \t]*=', Name.Attribute, 'node-attr-value'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'[>{;]', Punctuation, '#pop')
+ ],
+ 'node-class': [
+ (r'[\w-]+', Name.Variable.Class),
+ (r'~\[', String.Interpol, 'interpolation'),
+ default('#pop')
+ ],
+ 'node-id': [
+ (r'[\w-]+', Name.Function),
+ (r'~\[', String.Interpol, 'interpolation'),
+ default('#pop')
+ ],
+ 'node-attr-value': [
+ (r'\s+', Text),
+ (r'\w+', Name.Variable, '#pop'),
+ (r"'", String, 'string-single-pop2'),
+ (r'"', String, 'string-double-pop2'),
+ default('#pop')
+ ],
+ 'node-attr-style-value': [
+ (r'\s+', Text),
+ (r"'", String.Single, 'css-single-end'),
+ (r'"', String.Single, 'css-double-end'),
+ include('node-attr-value')
+ ],
+ 'css-base': [
+ (r'\s+', Text),
+ (r";", Punctuation),
+ (r"[\w\-]+\s*:", Name.Builtin)
+ ],
+ 'css-single-end': [
+ include('css-base'),
+ (r"'", String.Single, '#pop:2'),
+ (r"[^;']+", Name.Entity)
+ ],
+ 'css-double-end': [
+ include('css-base'),
+ (r'"', String.Single, '#pop:2'),
+ (r'[^;"]+', Name.Entity)
+ ],
+ 'string-single-pop2': [
+ (r"'", String.Single, '#pop:2'),
+ include('string-base')
+ ],
+ 'string-double-pop2': [
+ (r'"', String.Single, '#pop:2'),
+ include('string-base')
+ ],
+ }
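
The Lasso lexer additions above wire two boolean options, `builtinshighlighting` and `requiredelimiters`, through `get_bool_opt` in `__init__` and use them in `get_tokens_unprocessed` to promote known builtins/members to `Name.Builtin` and to start lexing on top of the `delimiters` state. A minimal usage sketch (not part of the patch; it assumes the lexer's `lasso` alias, which is declared with the class header earlier in this file, and uses an illustrative code snippet):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

# illustrative input only; '<?lasso' and 'local(' are the hints analyse_text looks for
code = "<?lasso local(greeting = 'hello') ?>"

# 'builtinshighlighting' loads BUILTINS/MEMBERS from _lasso_builtins and remaps
# matching Name.Other tokens to Name.Builtin; 'requiredelimiters' pushes the
# 'delimiters' state on top of 'root' before lexing starts.
lexer = get_lexer_by_name('lasso',          # alias assumed from the class definition
                          builtinshighlighting=True,
                          requiredelimiters=False)
print(highlight(code, lexer, TerminalFormatter()))
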
diff --git a/pygments/lexers/julia.py b/pygments/lexers/julia.py
new file mode 100644
index 00000000..1304b395
--- /dev/null
+++ b/pygments/lexers/julia.py
@@ -0,0 +1,196 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.julia
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Julia language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, combined, do_insertions
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+from pygments.util import shebang_matches
+
+__all__ = ['JuliaLexer', 'JuliaConsoleLexer']
+
+
+class JuliaLexer(RegexLexer):
+ """
+ For `Julia <http://julialang.org/>`_ source code.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Julia'
+ aliases = ['julia', 'jl']
+ filenames = ['*.jl']
+ mimetypes = ['text/x-julia', 'application/x-julia']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ builtins = [
+ 'exit', 'whos', 'edit', 'load', 'is', 'isa', 'isequal', 'typeof', 'tuple',
+ 'ntuple', 'uid', 'hash', 'finalizer', 'convert', 'promote', 'subtype',
+ 'typemin', 'typemax', 'realmin', 'realmax', 'sizeof', 'eps', 'promote_type',
+ 'method_exists', 'applicable', 'invoke', 'dlopen', 'dlsym', 'system',
+        'error', 'throw', 'assert', 'new', 'Inf', 'NaN', 'pi', 'im',
+ ]
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+ (r'#=', Comment.Multiline, "blockcomment"),
+ (r'#.*$', Comment),
+ (r'[]{}:(),;[@]', Punctuation),
+ (r'\\\n', Text),
+ (r'\\', Text),
+
+ # keywords
+ (r'(begin|while|for|in|return|break|continue|'
+ r'macro|quote|let|if|elseif|else|try|catch|end|'
+ r'bitstype|ccall|do|using|module|import|export|'
+ r'importall|baremodule|immutable)\b', Keyword),
+ (r'(local|global|const)\b', Keyword.Declaration),
+ (r'(Bool|Int|Int8|Int16|Int32|Int64|Uint|Uint8|Uint16|Uint32|Uint64'
+ r'|Float32|Float64|Complex64|Complex128|Any|Nothing|None)\b',
+ Keyword.Type),
+
+ # functions
+ (r'(function)((?:\s|\\\s)+)',
+ bygroups(Keyword, Name.Function), 'funcname'),
+
+ # types
+ (r'(type|typealias|abstract)((?:\s|\\\s)+)',
+ bygroups(Keyword, Name.Class), 'typename'),
+
+ # operators
+ (r'==|!=|<=|>=|->|&&|\|\||::|<:|[-~+/*%=<>&^|.?!$]', Operator),
+ (r'\.\*|\.\^|\.\\|\.\/|\\', Operator),
+
+ # builtins
+ ('(' + '|'.join(builtins) + r')\b', Name.Builtin),
+
+ # backticks
+ (r'`(?s).*?`', String.Backtick),
+
+ # chars
+ (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|"
+ r"\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'", String.Char),
+
+ # try to match trailing transpose
+ (r'(?<=[.\w)\]])\'+', Operator),
+
+ # strings
+ (r'(?:[IL])"', String, 'string'),
+ (r'[E]?"', String, combined('stringescape', 'string')),
+
+ # names
+ (r'@[\w.]+', Name.Decorator),
+ (u'[a-zA-Z_\u00A1-\U0010FFFF][a-zA-Z_0-9\u00A1-\U0010FFFF]*!*', Name),
+
+ # numbers
+ (r'(\d+(_\d+)+\.\d*|\d*\.\d+(_\d+)+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+(_\d+)+[eEf][+-]?[0-9]+', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+ (r'0b[01]+(_[01]+)+', Number.Bin),
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+(_[0-7]+)+', Number.Oct),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[a-fA-F0-9]+(_[a-fA-F0-9]+)+', Number.Hex),
+ (r'0x[a-fA-F0-9]+', Number.Hex),
+ (r'\d+(_\d+)+', Number.Integer),
+ (r'\d+', Number.Integer)
+ ],
+
+ 'funcname': [
+ ('[a-zA-Z_]\w*', Name.Function, '#pop'),
+ ('\([^\s\w{]{1,2}\)', Operator, '#pop'),
+ ('[^\s\w{]{1,2}', Operator, '#pop'),
+ ],
+
+ 'typename': [
+ ('[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ "blockcomment": [
+ (r'[^=#]', Comment.Multiline),
+ (r'#=', Comment.Multiline, '#push'),
+ (r'=#', Comment.Multiline, '#pop'),
+ (r'[=#]', Comment.Multiline),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
+ (r'\$(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?',
+ String.Interpol),
+ (r'[^\\"$]+', String),
+ # quotes, dollar signs, and backslashes must be parsed one at a time
+ (r'["\\]', String),
+ # unhandled string formatting sign
+ (r'\$', String)
+ ],
+ }
+
+ def analyse_text(text):
+ return shebang_matches(text, r'julia')
+
+
+line_re = re.compile('.*?\n')
+
+
+class JuliaConsoleLexer(Lexer):
+ """
+ For Julia console sessions. Modeled after MatlabSessionLexer.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Julia console'
+ aliases = ['jlcon']
+
+ def get_tokens_unprocessed(self, text):
+ jllexer = JuliaLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+
+ for match in line_re.finditer(text):
+ line = match.group()
+
+ if line.startswith('julia>'):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:6])]))
+ curcode += line[6:]
+
+ elif line.startswith(' '):
+
+ idx = len(curcode)
+
+                # without the added newline, the error would be shown on the
+                # same line as the previous output
+ line = "\n" + line
+ token = (0, Generic.Traceback, line)
+ insertions.append((idx, [token]))
+
+ else:
+ if curcode:
+ for item in do_insertions(
+ insertions, jllexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+
+ yield match.start(), Generic.Output, line
+
+        if curcode:
+ for item in do_insertions(
+ insertions, jllexer.get_tokens_unprocessed(curcode)):
+ yield item
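
The `JuliaConsoleLexer` above walks the transcript line by line: `julia>` prompts are emitted as `Generic.Prompt` and the code after them is buffered and re-lexed with `JuliaLexer` via `do_insertions`, indented lines become `Generic.Traceback`, and everything else is `Generic.Output`. A minimal sketch of driving it directly (the transcript below is made up for illustration; the import path matches the new pygments/lexers/julia.py module added here):

from pygments.lexers.julia import JuliaConsoleLexer

# a short illustrative console transcript: one prompt with plain output,
# one prompt followed by an indented error line
session = (
    "julia> 1 + 1\n"
    "2\n"
    "julia> sqrt(-1)\n"
    "    ERROR: DomainError\n"
)

lexer = JuliaConsoleLexer()
for index, token, value in lexer.get_tokens_unprocessed(session):
    print(index, token, repr(value))
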
diff --git a/pygments/lexers/jvm.py b/pygments/lexers/jvm.py
index ed4d257c..4d3c9159 100644
--- a/pygments/lexers/jvm.py
+++ b/pygments/lexers/jvm.py
@@ -5,23 +5,23 @@
Pygments lexers for JVM languages.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
- this
+ this, combined, default, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-from pygments.util import get_choice_opt
+ Number, Punctuation
+from pygments.util import shebang_matches
from pygments import unistring as uni
-
__all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
- 'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'KotlinLexer',
- 'XtendLexer', 'AspectJLexer', 'CeylonLexer']
+ 'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'ClojureScriptLexer',
+ 'KotlinLexer', 'XtendLexer', 'AspectJLexer', 'CeylonLexer',
+ 'PigLexer', 'GoloLexer', 'JasminLexer']
class JavaLexer(RegexLexer):
@@ -34,47 +34,49 @@ class JavaLexer(RegexLexer):
filenames = ['*.java']
mimetypes = ['text/x-java']
- flags = re.MULTILINE | re.DOTALL
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
tokens = {
'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]<>]*\s+)+?)' # return arguments
- r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
+ # keywords: go before method names to avoid lexing "throw new XYZ"
+ # as a method signature
(r'(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|goto|instanceof|new|return|switch|this|throw|try|while)\b',
Keyword),
+ # method names
+ (r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments
+ r'((?:[^\W\d]|\$)[\w$]*)' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'@[^\W\d][\w.]*', Name.Decorator),
(r'(abstract|const|enum|extends|final|implements|native|private|'
r'protected|public|static|strictfp|super|synchronized|throws|'
r'transient|volatile)\b', Keyword.Declaration),
(r'(boolean|byte|char|double|float|int|long|short|void)\b',
Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text), 'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
- (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
- (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
- (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
+ (r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Operator, Name.Attribute)),
+ (r'^\s*([^\W\d]|\$)[\w$]*:', Name.Label),
+ (r'([^\W\d]|\$)[\w$]*', Name),
+ (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
+ (r'[0-9]+(_+[0-9]+)*L?', Number.Integer),
(r'\n', Text)
],
'class': [
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+ (r'([^\W\d]|\$)[\w$]*', Name.Class, '#pop')
],
'import': [
- (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
],
}
@@ -83,7 +85,7 @@ class AspectJLexer(JavaLexer):
"""
For `AspectJ <http://www.eclipse.org/aspectj/>`_ source code.
- *New in Pygments 1.6.*
+ .. versionadded:: 1.6
"""
name = 'AspectJ'
@@ -91,7 +93,7 @@ class AspectJLexer(JavaLexer):
filenames = ['*.aj']
mimetypes = ['text/x-aspectj']
- aj_keywords = [
+ aj_keywords = set((
'aspect', 'pointcut', 'privileged', 'call', 'execution',
'initialization', 'preinitialization', 'handler', 'get', 'set',
'staticinitialization', 'target', 'args', 'within', 'withincode',
@@ -101,9 +103,9 @@ class AspectJLexer(JavaLexer):
'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart',
'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow',
'pertypewithin', 'lock', 'unlock', 'thisAspectInstance'
- ]
- aj_inter_type = ['parents:', 'warning:', 'error:', 'soft:', 'precedence:']
- aj_inter_type_annotation = ['@type', '@method', '@constructor', '@field']
+ ))
+ aj_inter_type = set(('parents:', 'warning:', 'error:', 'soft:', 'precedence:'))
+ aj_inter_type_annotation = set(('@type', '@method', '@constructor', '@field'))
def get_tokens_unprocessed(self, text):
for index, token, value in JavaLexer.get_tokens_unprocessed(self, text):
@@ -243,37 +245,41 @@ class ScalaLexer(RegexLexer):
u'\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b'
u'\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]')
- idrest = ur'%s(?:%s|[0-9])*(?:(?<=_)%s)?' % (letter, letter, op)
+ idrest = u'%s(?:%s|[0-9])*(?:(?<=_)%s)?' % (letter, letter, op)
+ letter_letter_digit = u'%s(?:%s|\d)*' % (letter, letter)
tokens = {
'root': [
# method names
(r'(class|trait|object)(\s+)', bygroups(Keyword, Text), 'class'),
- (ur"'%s" % idrest, Text.Symbol),
+ (u"'%s" % idrest, Text.Symbol),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*', Comment.Multiline, 'comment'),
- (ur'@%s' % idrest, Name.Decorator),
- (ur'(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|'
- ur'f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|'
- ur'lazy|match|new|override|pr(?:ivate|otected)'
- ur'|re(?:quires|turn)|s(?:ealed|uper)|'
- ur't(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\b|'
+ (u'@%s' % idrest, Name.Decorator),
+ (u'(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|'
+ u'f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|'
+ u'lazy|match|new|override|pr(?:ivate|otected)'
+ u'|re(?:quires|turn)|s(?:ealed|uper)|'
+ u't(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\\b|'
u'(<[%:-]|=>|>:|[#=@_\u21D2\u2190])(\\b|(?=\\s)|$)', Keyword),
- (ur':(?!%s)' % op, Keyword, 'type'),
- (ur'%s%s\b' % (upper, idrest), Name.Class),
+ (u':(?!%s)' % op, Keyword, 'type'),
+ (u'%s%s\\b' % (upper, idrest), Name.Class),
(r'(true|false|null)\b', Keyword.Constant),
(r'(import|package)(\s+)', bygroups(Keyword, Text), 'import'),
(r'(type)(\s+)', bygroups(Keyword, Text), 'type'),
(r'""".*?"""(?!")', String),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
-# (ur'(\.)(%s|%s|`[^`]+`)' % (idrest, op), bygroups(Operator,
-# Name.Attribute)),
+ (r'[fs]"""', String, 'interptriplestring'), # interpolated strings
+ (r'[fs]"', String, 'interpstring'), # interpolated strings
+ (r'raw"(\\\\|\\"|[^"])*"', String), # raw strings
+ # (ur'(\.)(%s|%s|`[^`]+`)' % (idrest, op), bygroups(Operator,
+ # Name.Attribute)),
(idrest, Name),
(r'`[^`]+`', Name),
(r'\[', Operator, 'typeparam'),
- (r'[\(\)\{\};,.#]', Operator),
+ (r'[(){};,.#]', Operator),
(op, Operator),
(r'([0-9][0-9]*\.[0-9]*|\.[0-9]+)([eE][+-]?[0-9]+)?[fFdD]?',
Number.Float),
@@ -282,43 +288,64 @@ class ScalaLexer(RegexLexer):
(r'\n', Text)
],
'class': [
- (ur'(%s|%s|`[^`]+`)(\s*)(\[)' % (idrest, op),
+ (u'(%s|%s|`[^`]+`)(\\s*)(\\[)' % (idrest, op),
bygroups(Name.Class, Text, Operator), 'typeparam'),
(r'\s+', Text),
- (r'{', Operator, '#pop'),
+ (r'\{', Operator, '#pop'),
(r'\(', Operator, '#pop'),
(r'//.*?\n', Comment.Single, '#pop'),
- (ur'%s|%s|`[^`]+`' % (idrest, op), Name.Class, '#pop'),
+ (u'%s|%s|`[^`]+`' % (idrest, op), Name.Class, '#pop'),
],
'type': [
(r'\s+', Text),
- (u'<[%:]|>:|[#_\u21D2]|forSome|type', Keyword),
- (r'([,\);}]|=>|=)(\s*)', bygroups(Operator, Text), '#pop'),
- (r'[\(\{]', Operator, '#push'),
- (ur'((?:%s|%s|`[^`]+`)(?:\.(?:%s|%s|`[^`]+`))*)(\s*)(\[)' %
+ (r'<[%:]|>:|[#_]|forSome|type', Keyword),
+ (u'([,);}]|=>|=|\u21d2)(\\s*)', bygroups(Operator, Text), '#pop'),
+ (r'[({]', Operator, '#push'),
+ (u'((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)(\\[)' %
(idrest, op, idrest, op),
bygroups(Keyword.Type, Text, Operator), ('#pop', 'typeparam')),
- (ur'((?:%s|%s|`[^`]+`)(?:\.(?:%s|%s|`[^`]+`))*)(\s*)$' %
+ (u'((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)$' %
(idrest, op, idrest, op),
bygroups(Keyword.Type, Text), '#pop'),
(r'//.*?\n', Comment.Single, '#pop'),
- (ur'\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
+ (u'\\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
],
'typeparam': [
(r'[\s,]+', Text),
(u'<[%:]|=>|>:|[#_\u21D2]|forSome|type', Keyword),
- (r'([\]\)\}])', Operator, '#pop'),
- (r'[\(\[\{]', Operator, '#push'),
- (ur'\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
+ (r'([\])}])', Operator, '#pop'),
+ (r'[(\[{]', Operator, '#push'),
+ (u'\\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
],
'comment': [
- (r'[^/\*]+', Comment.Multiline),
+ (r'[^/*]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline)
],
'import': [
- (ur'(%s|\.)+' % idrest, Name.Namespace, '#pop')
+ (u'(%s|\\.)+' % idrest, Name.Namespace, '#pop')
+ ],
+ 'interpstringcommon': [
+ (r'[^"$\\]+', String),
+ (r'\$\$', String),
+ (r'\$' + letter_letter_digit, String.Interpol),
+ (r'\$\{', String.Interpol, 'interpbrace'),
+ (r'\\.', String),
+ ],
+ 'interptriplestring': [
+ (r'"""(?!")', String, '#pop'),
+ (r'"', String),
+ include('interpstringcommon'),
+ ],
+ 'interpstring': [
+ (r'"', String, '#pop'),
+ include('interpstringcommon'),
+ ],
+ 'interpbrace': [
+ (r'\}', String.Interpol, '#pop'),
+ (r'\{', String.Interpol, '#push'),
+ include('root'),
],
}
@@ -327,7 +354,7 @@ class GosuLexer(RegexLexer):
"""
For Gosu source code.
- *New in Pygments 1.5.*
+ .. versionadded:: 1.5
"""
name = 'Gosu'
@@ -340,14 +367,14 @@ class GosuLexer(RegexLexer):
tokens = {
'root': [
# method names
- (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # modifiers etc.
- r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
- r'(\s*)(\()', # signature start
+ (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # modifiers etc.
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
+ (r'@[a-zA-Z_][\w.]*', Name.Decorator),
(r'(in|as|typeof|statictypeof|typeis|typeas|if|else|foreach|for|'
r'index|while|do|continue|break|return|try|catch|finally|this|'
r'throw|new|switch|case|default|eval|super|outer|classpath|'
@@ -360,44 +387,44 @@ class GosuLexer(RegexLexer):
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(true|false|null|NaN|Infinity)\b', Keyword.Constant),
- (r'(class|interface|enhancement|enum)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
+ (r'(class|interface|enhancement|enum)(\s+)([a-zA-Z_]\w*)',
bygroups(Keyword.Declaration, Text, Name.Class)),
- (r'(uses)(\s+)([a-zA-Z0-9_.]+\*?)',
+ (r'(uses)(\s+)([\w.]+\*?)',
bygroups(Keyword.Namespace, Text, Name.Namespace)),
(r'"', String, 'string'),
- (r'(\??[\.#])([a-zA-Z_][a-zA-Z0-9_]*)',
+ (r'(\??[.#])([a-zA-Z_]\w*)',
bygroups(Operator, Name.Attribute)),
- (r'(:)([a-zA-Z_][a-zA-Z0-9_]*)',
+ (r'(:)([a-zA-Z_]\w*)',
bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
- (r'and|or|not|[\\~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'and|or|not|[\\~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'[0-9]+', Number.Integer),
(r'\n', Text)
],
'templateText': [
- (r'(\\<)|(\\\$)', String),
- (r'(<%@\s+)(extends|params)',
- bygroups(Operator, Name.Decorator), 'stringTemplate'),
- (r'<%!--.*?--%>', Comment.Multiline),
- (r'(<%)|(<%=)', Operator, 'stringTemplate'),
- (r'\$\{', Operator, 'stringTemplateShorthand'),
- (r'.', String)
+ (r'(\\<)|(\\\$)', String),
+ (r'(<%@\s+)(extends|params)',
+ bygroups(Operator, Name.Decorator), 'stringTemplate'),
+ (r'<%!--.*?--%>', Comment.Multiline),
+ (r'(<%)|(<%=)', Operator, 'stringTemplate'),
+ (r'\$\{', Operator, 'stringTemplateShorthand'),
+ (r'.', String)
],
'string': [
- (r'"', String, '#pop'),
- include('templateText')
+ (r'"', String, '#pop'),
+ include('templateText')
],
'stringTemplate': [
- (r'"', String, 'string'),
- (r'%>', Operator, '#pop'),
- include('root')
+ (r'"', String, 'string'),
+ (r'%>', Operator, '#pop'),
+ include('root')
],
'stringTemplateShorthand': [
- (r'"', String, 'string'),
- (r'\{', Operator, 'stringTemplateShorthand'),
- (r'\}', Operator, '#pop'),
- include('root')
+ (r'"', String, 'string'),
+ (r'\{', Operator, 'stringTemplateShorthand'),
+ (r'\}', Operator, '#pop'),
+ include('root')
],
}
@@ -406,18 +433,18 @@ class GosuTemplateLexer(Lexer):
"""
For Gosu templates.
- *New in Pygments 1.5.*
+ .. versionadded:: 1.5
"""
name = 'Gosu Template'
aliases = ['gst']
filenames = ['*.gst']
mimetypes = ['text/x-gosu-template']
- lexer = GosuLexer()
def get_tokens_unprocessed(self, text):
+ lexer = GosuLexer()
stack = ['templateText']
- for item in self.lexer.get_tokens_unprocessed(text, stack):
+ for item in lexer.get_tokens_unprocessed(text, stack):
yield item
@@ -425,27 +452,32 @@ class GroovyLexer(RegexLexer):
"""
For `Groovy <http://groovy.codehaus.org/>`_ source code.
- *New in Pygments 1.5.*
+ .. versionadded:: 1.5
"""
name = 'Groovy'
aliases = ['groovy']
- filenames = ['*.groovy']
+    filenames = ['*.groovy', '*.gradle']
mimetypes = ['text/x-groovy']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
+ # Groovy allows a file to start with a shebang
+ (r'#!(.*?)$', Comment.Preproc, 'base'),
+ default('base'),
+ ],
+ 'base': [
# method names
- (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
- r'(\s*)(\()', # signature start
+ (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
+ (r'@[a-zA-Z_][\w.]*', Name.Decorator),
(r'(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b',
Keyword),
@@ -459,35 +491,40 @@ class GroovyLexer(RegexLexer):
(r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'""".*?"""', String.Double),
+ (r"'''.*?'''", String.Single),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r'\$/((?!/\$).)*/\$', String),
(r'/(\\\\|\\"|[^/])*/', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
- (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
- (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
- (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
+ (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text)
],
'class': [
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
],
'import': [
- (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
],
}
+ def analyse_text(text):
+ return shebang_matches(text, r'groovy')
+
class IokeLexer(RegexLexer):
"""
For `Ioke <http://ioke.org/>`_ (a strongly typed, dynamic,
prototype based programming language) source.
- *New in Pygments 1.4.*
+ .. versionadded:: 1.4
"""
name = 'Ioke'
filenames = ['*.ik']
@@ -497,45 +534,45 @@ class IokeLexer(RegexLexer):
'interpolatableText': [
(r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
r'|\\[0-3]?[0-7]?[0-7])', String.Escape),
- (r'#{', Punctuation, 'textInterpolationRoot')
- ],
+ (r'#\{', Punctuation, 'textInterpolationRoot')
+ ],
'text': [
(r'(?<!\\)"', String, '#pop'),
include('interpolatableText'),
(r'[^"]', String)
- ],
+ ],
'documentation': [
(r'(?<!\\)"', String.Doc, '#pop'),
include('interpolatableText'),
(r'[^"]', String.Doc)
- ],
+ ],
'textInterpolationRoot': [
- (r'}', Punctuation, '#pop'),
+ (r'\}', Punctuation, '#pop'),
include('root')
- ],
+ ],
'slashRegexp': [
(r'(?<!\\)/[oxpniums]*', String.Regex, '#pop'),
include('interpolatableText'),
(r'\\/', String.Regex),
(r'[^/]', String.Regex)
- ],
+ ],
'squareRegexp': [
(r'(?<!\\)][oxpniums]*', String.Regex, '#pop'),
include('interpolatableText'),
(r'\\]', String.Regex),
(r'[^\]]', String.Regex)
- ],
+ ],
'squareText': [
(r'(?<!\\)]', String, '#pop'),
include('interpolatableText'),
(r'[^\]]', String)
- ],
+ ],
'root': [
(r'\n', Text),
@@ -545,86 +582,86 @@ class IokeLexer(RegexLexer):
(r';(.*?)\n', Comment),
(r'\A#!(.*?)\n', Comment),
- #Regexps
+ # Regexps
(r'#/', String.Regex, 'slashRegexp'),
(r'#r\[', String.Regex, 'squareRegexp'),
- #Symbols
- (r':[a-zA-Z0-9_!:?]+', String.Symbol),
- (r'[a-zA-Z0-9_!:?]+:(?![a-zA-Z0-9_!?])', String.Other),
+ # Symbols
+ (r':[\w!:?]+', String.Symbol),
+ (r'[\w!:?]+:(?![\w!?])', String.Other),
(r':"(\\\\|\\"|[^"])*"', String.Symbol),
- #Documentation
+ # Documentation
(r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
r'|(?<=syntax\()|(?<=dmacro\()|(?<=dlecro\()|(?<=dlecrox\()'
r'|(?<=dsyntax\())\s*"', String.Doc, 'documentation'),
- #Text
+ # Text
(r'"', String, 'text'),
(r'#\[', String, 'squareText'),
- #Mimic
- (r'[a-zA-Z0-9_][a-zA-Z0-9!?_:]+(?=\s*=.*mimic\s)', Name.Entity),
+ # Mimic
+ (r'\w[\w!:?]+(?=\s*=.*mimic\s)', Name.Entity),
- #Assignment
- (r'[a-zA-Z_][a-zA-Z0-9_!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))',
+ # Assignment
+ (r'[a-zA-Z_][\w!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))',
Name.Variable),
# keywords
(r'(break|cond|continue|do|ensure|for|for:dict|for:set|if|let|'
r'loop|p:for|p:for:dict|p:for:set|return|unless|until|while|'
- r'with)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+ r'with)(?![\w!:?])', Keyword.Reserved),
# Origin
- (r'(eval|mimic|print|println)(?![a-zA-Z0-9!:_?])', Keyword),
+ (r'(eval|mimic|print|println)(?![\w!:?])', Keyword),
# Base
(r'(cell\?|cellNames|cellOwner\?|cellOwner|cells|cell|'
r'documentation|hash|identity|mimic|removeCell\!|undefineCell\!)'
- r'(?![a-zA-Z0-9!:_?])', Keyword),
+ r'(?![\w!:?])', Keyword),
# Ground
- (r'(stackTraceAsText)(?![a-zA-Z0-9!:_?])', Keyword),
+ (r'(stackTraceAsText)(?![\w!:?])', Keyword),
- #DefaultBehaviour Literals
- (r'(dict|list|message|set)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+ # DefaultBehaviour Literals
+ (r'(dict|list|message|set)(?![\w!:?])', Keyword.Reserved),
- #DefaultBehaviour Case
+ # DefaultBehaviour Case
(r'(case|case:and|case:else|case:nand|case:nor|case:not|case:or|'
- r'case:otherwise|case:xor)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+ r'case:otherwise|case:xor)(?![\w!:?])', Keyword.Reserved),
- #DefaultBehaviour Reflection
+ # DefaultBehaviour Reflection
(r'(asText|become\!|derive|freeze\!|frozen\?|in\?|is\?|kind\?|'
r'mimic\!|mimics|mimics\?|prependMimic\!|removeAllMimics\!|'
r'removeMimic\!|same\?|send|thaw\!|uniqueHexId)'
- r'(?![a-zA-Z0-9!:_?])', Keyword),
+ r'(?![\w!:?])', Keyword),
- #DefaultBehaviour Aspects
- (r'(after|around|before)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+ # DefaultBehaviour Aspects
+ (r'(after|around|before)(?![\w!:?])', Keyword.Reserved),
# DefaultBehaviour
(r'(kind|cellDescriptionDict|cellSummary|genSym|inspect|notice)'
- r'(?![a-zA-Z0-9!:_?])', Keyword),
+ r'(?![\w!:?])', Keyword),
(r'(use|destructuring)', Keyword.Reserved),
- #DefaultBehavior BaseBehavior
+ # DefaultBehavior BaseBehavior
(r'(cell\?|cellOwner\?|cellOwner|cellNames|cells|cell|'
r'documentation|identity|removeCell!|undefineCell)'
- r'(?![a-zA-Z0-9!:_?])', Keyword),
+ r'(?![\w!:?])', Keyword),
- #DefaultBehavior Internal
+ # DefaultBehavior Internal
(r'(internal:compositeRegexp|internal:concatenateText|'
r'internal:createDecimal|internal:createNumber|'
r'internal:createRegexp|internal:createText)'
- r'(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+ r'(?![\w!:?])', Keyword.Reserved),
- #DefaultBehaviour Conditions
+ # DefaultBehaviour Conditions
(r'(availableRestarts|bind|error\!|findRestart|handle|'
r'invokeRestart|rescue|restart|signal\!|warn\!)'
- r'(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
+ r'(?![\w!:?])', Keyword.Reserved),
# constants
- (r'(nil|false|true)(?![a-zA-Z0-9!:_?])', Name.Constant),
+ (r'(nil|false|true)(?![\w!:?])', Name.Constant),
# names
(r'(Arity|Base|Call|Condition|DateTime|Aspects|Pointcut|'
@@ -636,12 +673,12 @@ class IokeLexer(RegexLexer):
r'LexicalBlock|LexicalMacro|List|Message|Method|Mixins|'
r'NativeMethod|Number|Origin|Pair|Range|Reflector|Regexp Match|'
r'Regexp|Rescue|Restart|Runtime|Sequence|Set|Symbol|'
- r'System|Text|Tuple)(?![a-zA-Z0-9!:_?])', Name.Builtin),
+ r'System|Text|Tuple)(?![\w!:?])', Name.Builtin),
# functions
- (ur'(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
- ur'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
- ur'(?![a-zA-Z0-9!:_?])', Name.Function),
+ (u'(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
+ u'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
+ u'(?![\w!:?])', Name.Function),
# Numbers
(r'-?0[xX][0-9a-fA-F]+', Number.Hex),
@@ -650,25 +687,25 @@ class IokeLexer(RegexLexer):
(r'#\(', Punctuation),
- # Operators
- (ur'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
- ur'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
- ur'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
- ur'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
- ur'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
- ur'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
- ur'\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
- (r'(and|nand|or|xor|nor|return|import)(?![a-zA-Z0-9_!?])',
+ # Operators
+ (r'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
+ r'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
+ r'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
+ r'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
+ r'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
+ r'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
+ u'\\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
+ (r'(and|nand|or|xor|nor|return|import)(?![\w!?])',
Operator),
# Punctuation
- (r'(\`\`|\`|\'\'|\'|\.|\,|@@|@|\[|\]|\(|\)|{|})', Punctuation),
+ (r'(\`\`|\`|\'\'|\'|\.|\,|@@|@|\[|\]|\(|\)|\{|\})', Punctuation),
- #kinds
- (r'[A-Z][a-zA-Z0-9_!:?]*', Name.Class),
+ # kinds
+ (r'[A-Z][\w!:?]*', Name.Class),
- #default cellnames
- (r'[a-z_][a-zA-Z0-9_!:?]*', Name)
+ # default cellnames
+ (r'[a-z_][\w!:?]*', Name)
]
}
@@ -677,26 +714,26 @@ class ClojureLexer(RegexLexer):
"""
Lexer for `Clojure <http://clojure.org/>`_ source code.
- *New in Pygments 0.11.*
+ .. versionadded:: 0.11
"""
name = 'Clojure'
aliases = ['clojure', 'clj']
filenames = ['*.clj']
mimetypes = ['text/x-clojure', 'application/x-clojure']
- special_forms = [
+ special_forms = (
'.', 'def', 'do', 'fn', 'if', 'let', 'new', 'quote', 'var', 'loop'
- ]
+ )
# It's safe to consider 'ns' a declaration thing because it defines a new
# namespace.
- declarations = [
+ declarations = (
'def-', 'defn', 'defn-', 'defmacro', 'defmulti', 'defmethod',
'defstruct', 'defonce', 'declare', 'definline', 'definterface',
'defprotocol', 'defrecord', 'deftype', 'defproject', 'ns'
- ]
+ )
- builtins = [
+ builtins = (
'*', '+', '-', '->', '/', '<', '<=', '=', '==', '>', '>=', '..',
'accessor', 'agent', 'agent-errors', 'aget', 'alength', 'all-ns',
'alter', 'and', 'append-child', 'apply', 'array-map', 'aset',
@@ -745,7 +782,7 @@ class ClojureLexer(RegexLexer):
'val', 'vals', 'var-get', 'var-set', 'var?', 'vector', 'vector-zip',
'vector?', 'when', 'when-first', 'when-let', 'when-not',
'with-local-vars', 'with-meta', 'with-open', 'with-out-str',
- 'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper']
+ 'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper')
# valid names for identifiers
# well, names can only not consist fully of numbers
@@ -755,9 +792,6 @@ class ClojureLexer(RegexLexer):
# but that's hard, so just pretend / is part of the name
valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
- def _multi_escape(entries):
- return '(%s)' % ('|'.join(re.escape(entry) + ' ' for entry in entries))
-
tokens = {
'root': [
# the comments - always starting with semicolon
@@ -778,23 +812,23 @@ class ClojureLexer(RegexLexer):
(r"\\(.|[a-z]+)", String.Char),
# keywords
- (r'::?' + valid_name, String.Symbol),
+ (r'::?#?' + valid_name, String.Symbol),
# special operators
(r'~@|[`\'#^~&@]', Operator),
# highlight the special forms
- (_multi_escape(special_forms), Keyword),
+ (words(special_forms, suffix=' '), Keyword),
# Technically, only the special forms are 'keywords'. The problem
# is that only treating them as keywords means that things like
# 'defn' and 'ns' need to be highlighted as builtins. This is ugly
# and weird for most styles. So, as a compromise we're going to
# highlight them as Keyword.Declarations.
- (_multi_escape(declarations), Keyword.Declaration),
+ (words(declarations, suffix=' '), Keyword.Declaration),
# highlight the builtins
- (_multi_escape(builtins), Name.Builtin),
+ (words(builtins, suffix=' '), Name.Builtin),
# the remaining functions
(r'(?<=\()' + valid_name, Name.Function),
@@ -814,12 +848,25 @@ class ClojureLexer(RegexLexer):
}
+class ClojureScriptLexer(ClojureLexer):
+ """
+ Lexer for `ClojureScript <http://clojure.org/clojurescript>`_
+ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'ClojureScript'
+ aliases = ['clojurescript', 'cljs']
+ filenames = ['*.cljs']
+ mimetypes = ['text/x-clojurescript', 'application/x-clojurescript']
+
+
class TeaLangLexer(RegexLexer):
"""
For `Tea <http://teatrove.org/>`_ source code. Only used within a
TeaTemplateLexer.
- *New in Pygments 1.5.*
+ .. versionadded:: 1.5
"""
flags = re.MULTILINE | re.DOTALL
@@ -827,14 +874,14 @@ class TeaLangLexer(RegexLexer):
tokens = {
'root': [
# method names
- (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
- r'(\s*)(\()', # signature start
+ (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
+ (r'@[a-zA-Z_][\w\.]*', Name.Decorator),
(r'(and|break|else|foreach|if|in|not|or|reverse)\b',
Keyword),
(r'(as|call|define)\b', Keyword.Declaration),
@@ -843,9 +890,9 @@ class TeaLangLexer(RegexLexer):
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'"(\\\\|\\"|[^"])*"', String),
(r'\'(\\\\|\\\'|[^\'])*\'', String),
- (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
- (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
+ (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_\$]\w*', Name),
(r'(isa|[.]{3}|[.]{2}|[=#!<>+-/%&;,.\*\\\(\)\[\]\{\}])', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
@@ -853,10 +900,10 @@ class TeaLangLexer(RegexLexer):
(r'\n', Text)
],
'template': [
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
],
'import': [
- (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
],
}
@@ -865,7 +912,7 @@ class CeylonLexer(RegexLexer):
"""
For `Ceylon <http://ceylon-lang.org/>`_ source code.
- *New in Pygments 1.6.*
+ .. versionadded:: 1.6
"""
name = 'Ceylon'
@@ -881,24 +928,24 @@ class CeylonLexer(RegexLexer):
tokens = {
'root': [
# method names
- (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
- r'(\s*)(\()', # signature start
+ (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'(variable|shared|abstract|doc|by|formal|actual|late|native)',
- Name.Decorator),
- (r'(break|case|catch|continue|default|else|finally|for|in|'
- r'variable|if|return|switch|this|throw|try|while|is|exists|dynamic|'
- r'nonempty|then|outer|assert)\b', Keyword),
- (r'(abstracts|extends|satisfies|adapts|'
- r'super|given|of|out|assign|'
- r'transient|volatile)\b', Keyword.Declaration),
- (r'(function|value|void)\b',
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'(shared|abstract|formal|default|actual|variable|deprecated|small|'
+ r'late|literal|doc|by|see|throws|optional|license|tagged|final|native|'
+ r'annotation|sealed)\b', Name.Decorator),
+ (r'(break|case|catch|continue|else|finally|for|in|'
+ r'if|return|switch|this|throw|try|while|is|exists|dynamic|'
+ r'nonempty|then|outer|assert|let)\b', Keyword),
+ (r'(abstracts|extends|satisfies|'
+ r'super|given|of|out|assign)\b', Keyword.Declaration),
+ (r'(function|value|void|new)\b',
Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+ (r'(assembly|module|package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface|object|alias)(\s+)',
bygroups(Keyword.Declaration, Text), 'class'),
@@ -906,11 +953,11 @@ class CeylonLexer(RegexLexer):
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\\{#[0-9a-fA-F]{4}\}'", String.Char),
(r'".*``.*``.*"', String.Interpol),
- (r'(\.)([a-z_][a-zA-Z0-9_]*)',
+ (r'(\.)([a-z_]\w*)',
bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
- (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_]\w*', Name),
+ (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
(r'\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
(r'\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
Number.Float),
@@ -919,139 +966,96 @@ class CeylonLexer(RegexLexer):
Number.Float),
(r'#([0-9a-fA-F]{4})(_[0-9a-fA-F]{4})+', Number.Hex),
(r'#[0-9a-fA-F]+', Number.Hex),
- (r'\$([01]{4})(_[01]{4})+', Number.Integer),
- (r'\$[01]+', Number.Integer),
+ (r'\$([01]{4})(_[01]{4})+', Number.Bin),
+ (r'\$[01]+', Number.Bin),
(r'\d{1,3}(_\d{3})+[kMGTP]?', Number.Integer),
(r'[0-9]+[kMGTP]?', Number.Integer),
(r'\n', Text)
],
'class': [
- (r'[A-Za-z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+ (r'[A-Za-z_]\w*', Name.Class, '#pop')
],
'import': [
- (r'[a-z][a-zA-Z0-9_.]*',
+ (r'[a-z][\w.]*',
Name.Namespace, '#pop')
],
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
}
class KotlinLexer(RegexLexer):
"""
- For `Kotlin <http://confluence.jetbrains.net/display/Kotlin/>`_
+ For `Kotlin <http://kotlin.jetbrains.org/>`_
source code.
- Additional options accepted:
-
- `unicodelevel`
- Determines which Unicode characters this lexer allows for identifiers.
- The possible values are:
-
- * ``none`` -- only the ASCII letters and numbers are allowed. This
- is the fastest selection.
- * ``basic`` -- all Unicode characters from the specification except
- category ``Lo`` are allowed.
- * ``full`` -- all Unicode characters as specified in the C# specs
- are allowed. Note that this means a considerable slowdown since the
- ``Lo`` category has more than 40,000 characters in it!
-
- The default value is ``basic``.
-
- *New in Pygments 1.5.*
+ .. versionadded:: 1.5
"""
name = 'Kotlin'
aliases = ['kotlin']
filenames = ['*.kt']
- mimetypes = ['text/x-kotlin'] # inferred
+ mimetypes = ['text/x-kotlin']
flags = re.MULTILINE | re.DOTALL | re.UNICODE
- # for the range of allowed unicode characters in identifiers,
- # see http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
-
- levels = {
- 'none': '@?[_a-zA-Z][a-zA-Z0-9_]*',
- 'basic': ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
- '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
- uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
- 'full': ('@?(?:_|[^' +
- uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
- + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
- 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
- }
+ kt_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
+ '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
+ 'Mn', 'Mc') + ']*')
+ kt_id = '(' + kt_name + '|`' + kt_name + '`)'
- tokens = {}
- token_variants = True
-
- for levelname, cs_ident in levels.items():
- tokens[levelname] = {
- 'root': [
- # method names
- (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
- r'(' + cs_ident + ')' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Punctuation)),
- (r'^\s*\[.*?\]', Name.Attribute),
- (r'[^\S\n]+', Text),
- (r'\\\n', Text), # line continuation
- (r'//.*?\n', Comment.Single),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'\n', Text),
- (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
- (r'[{}]', Punctuation),
- (r'@"(""|[^"])*"', String),
- (r'"(\\\\|\\"|[^"\n])*["\n]', String),
- (r"'\\.'|'[^\\]'", String.Char),
- (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
- r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
- (r'#[ \t]*(if|endif|else|elif|define|undef|'
- r'line|error|warning|region|endregion|pragma)\b.*?\n',
- Comment.Preproc),
- (r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
- Keyword)),
- (r'(abstract|as|break|catch|'
- r'fun|continue|default|delegate|'
- r'do|else|enum|extern|false|finally|'
- r'fixed|for|goto|if|implicit|in|interface|'
- r'internal|is|lock|null|'
- r'out|override|private|protected|public|readonly|'
- r'ref|return|sealed|sizeof|'
- r'when|this|throw|true|try|typeof|'
- r'unchecked|unsafe|virtual|void|while|'
- r'get|set|new|partial|yield|val|var)\b', Keyword),
- (r'(global)(::)', bygroups(Keyword, Punctuation)),
- (r'(bool|byte|char|decimal|double|dynamic|float|int|long|'
- r'short)\b\??', Keyword.Type),
- (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'class'),
- (r'(package|using)(\s+)', bygroups(Keyword, Text), 'package'),
- (cs_ident, Name),
- ],
- 'class': [
- (cs_ident, Name.Class, '#pop')
- ],
- 'package': [
- (r'(?=\()', Text, '#pop'), # using (resource)
- ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
- ]
- }
-
- def __init__(self, **options):
- level = get_choice_opt(options, 'unicodelevel', self.tokens.keys(),
- 'basic')
- if level not in self._all_tokens:
- # compile the regexes now
- self._tokens = self.__class__.process_tokendef(level)
- else:
- self._tokens = self._all_tokens[level]
-
- RegexLexer.__init__(self, **options)
+ tokens = {
+ 'root': [
+ (r'^\s*\[.*?\]', Name.Attribute),
+ (r'[^\S\n]+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//.*?\n', Comment.Single),
+ (r'/[*].*?[*]/', Comment.Multiline),
+ (r'\n', Text),
+ (r'::|!!|\?[:.]', Operator),
+ (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
+ (r'[{}]', Punctuation),
+ (r'@"(""|[^"])*"', String),
+ (r'"(\\\\|\\"|[^"\n])*["\n]', String),
+ (r"'\\.'|'[^\\]'", String.Char),
+ (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFL]?|"
+ r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ (r'(class)(\s+)(object)', bygroups(Keyword, Text, Keyword)),
+ (r'(class|trait|object)(\s+)', bygroups(Keyword, Text), 'class'),
+ (r'(package|import)(\s+)', bygroups(Keyword, Text), 'package'),
+ (r'(val|var)(\s+)', bygroups(Keyword, Text), 'property'),
+ (r'(fun)(\s+)', bygroups(Keyword, Text), 'function'),
+ (r'(abstract|annotation|as|break|by|catch|class|continue|do|else|'
+ r'enum|false|final|finally|for|fun|get|if|import|in|inner|'
+ r'internal|is|null|object|open|out|override|package|private|'
+ r'protected|public|reified|return|set|super|this|throw|trait|'
+ r'true|try|type|val|var|vararg|when|where|while|This)\b', Keyword),
+ (kt_id, Name),
+ ],
+ 'package': [
+ (r'\S+', Name.Namespace, '#pop')
+ ],
+ 'class': [
+ (kt_id, Name.Class, '#pop')
+ ],
+ 'property': [
+ (kt_id, Name.Property, '#pop')
+ ],
+ 'function': [
+ (kt_id, Name.Function, '#pop')
+ ],
+ }
class XtendLexer(RegexLexer):
"""
For `Xtend <http://xtend-lang.org/>`_ source code.
- *New in Pygments 1.6.*
+ .. versionadded:: 1.6
"""
name = 'Xtend'
@@ -1064,14 +1068,14 @@ class XtendLexer(RegexLexer):
tokens = {
'root': [
# method names
- (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_$][a-zA-Z0-9_$]*)' # method name
- r'(\s*)(\()', # signature start
+ (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_$][\w$]*)' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
+ (r'@[a-zA-Z_][\w.]*', Name.Decorator),
(r'(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|goto|instanceof|new|return|switch|this|throw|try|while|IF|'
r'ELSE|ELSEIF|ENDIF|FOR|ENDFOR|SEPARATOR|BEFORE|AFTER)\b',
@@ -1087,26 +1091,470 @@ class XtendLexer(RegexLexer):
'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r"(''')", String, 'template'),
- (ur"(\u00BB)", String, 'template'),
+ (u'(\u00BB)', String, 'template'),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'(\\\\|\\'|[^'])*'", String),
- (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
- (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
- (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text)
],
'class': [
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
],
'import': [
- (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
],
'template': [
(r"'''", String, '#pop'),
- (ur"\u00AB", String, '#pop'),
+ (u'\u00AB', String, '#pop'),
(r'.', String)
],
}
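The method-name rule at the top of 'root' leans on bygroups() to assign each capturing group its own token type (with using(this) re-lexing the return type through the same lexer). A toy illustration of the bygroups() part, a sketch only, using a hypothetical standalone RegexLexer subclass rather than the Xtend rules themselves:

    # Hedged sketch of the bygroups() idiom: each capturing group gets its own
    # token type, as the Xtend method-name rule above does for 'name('.
    from pygments.lexer import RegexLexer, bygroups
    from pygments.token import Name, Punctuation, Text

    class TinyLexer(RegexLexer):
        tokens = {
            'root': [
                (r'([a-zA-Z_]\w*)(\s*)(\()', bygroups(Name.Function, Text, Punctuation)),
                (r'\s+', Text),
            ]
        }

    for token, value in TinyLexer().get_tokens('greet ('):
        print(token, repr(value))
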
+
+
+class PigLexer(RegexLexer):
+ """
+ For `Pig Latin <https://pig.apache.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Pig'
+ aliases = ['pig']
+ filenames = ['*.pig']
+ mimetypes = ['text/x-pig']
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'--.*', Comment),
+ (r'/\*[\w\W]*?\*/', Comment.Multiline),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ (r'\'(?:\\[ntbrf\\\']|\\u[0-9a-f]{4}|[^\'\\\n\r])*\'', String),
+ include('keywords'),
+ include('types'),
+ include('builtins'),
+ include('punct'),
+ include('operators'),
+ (r'[0-9]*\.[0-9]+(e[0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text),
+ (r'([a-z_]\w*)(\s*)(\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'[()#:]', Text),
+ (r'[^(:#\'")\s]+', Text),
+ (r'\S+\s+', Text) # TODO: make tests pass without \s+
+ ],
+ 'keywords': [
+ (r'(assert|and|any|all|arrange|as|asc|bag|by|cache|CASE|cat|cd|cp|'
+ r'%declare|%default|define|dense|desc|describe|distinct|du|dump|'
+ r'eval|exex|explain|filter|flatten|foreach|full|generate|group|'
+ r'help|if|illustrate|import|inner|input|into|is|join|kill|left|'
+ r'limit|load|ls|map|matches|mkdir|mv|not|null|onschema|or|order|'
+ r'outer|output|parallel|pig|pwd|quit|register|returns|right|rm|'
+ r'rmf|rollup|run|sample|set|ship|split|stderr|stdin|stdout|store|'
+ r'stream|through|union|using|void)\b', Keyword)
+ ],
+ 'builtins': [
+ (r'(AVG|BinStorage|cogroup|CONCAT|copyFromLocal|copyToLocal|COUNT|'
+ r'cross|DIFF|MAX|MIN|PigDump|PigStorage|SIZE|SUM|TextLoader|'
+ r'TOKENIZE)\b', Name.Builtin)
+ ],
+ 'types': [
+ (r'(bytearray|BIGINTEGER|BIGDECIMAL|chararray|datetime|double|float|'
+ r'int|long|tuple)\b', Keyword.Type)
+ ],
+ 'punct': [
+ (r'[;(){}\[\]]', Punctuation),
+ ],
+ 'operators': [
+ (r'[#=,./%+\-?]', Operator),
+ (r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
+ (r'(==|<=|<|>=|>|!=)', Operator),
+ ],
+ }
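Note that 'root' pulls in the 'keywords', 'types', 'builtins', 'punct' and 'operators' states via include(), which splices those rule lists in place instead of pushing a new state. A quick, hedged way to see the combined effect, assuming this lexer ends up importable as pygments.lexers.PigLexer:

    # Hedged usage sketch: dump the token stream for a one-line Pig script.
    from pygments.lexers import PigLexer

    for token, value in PigLexer().get_tokens("a = LOAD 'data' USING PigStorage(',');"):
        print(token, repr(value))
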
+
+
+class GoloLexer(RegexLexer):
+ """
+ For `Golo <http://golo-lang.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Golo'
+ filenames = ['*.golo']
+ aliases = ['golo']
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+
+ (r'#.*$', Comment),
+
+ (r'(\^|\.\.\.|:|\?:|->|==|!=|=|\+|\*|%|/|<=|<|>=|>|=|\.)',
+ Operator),
+ (r'(?<=[^-])(-)(?=[^-])', Operator),
+
+ (r'(?<=[^`])(is|isnt|and|or|not|oftype|in|orIfNull)\b', Operator.Word),
+ (r'[]{}|(),[]', Punctuation),
+
+ (r'(module|import)(\s+)',
+ bygroups(Keyword.Namespace, Text),
+ 'modname'),
+ (r'\b([a-zA-Z_][\w$.]*)(::)', bygroups(Name.Namespace, Punctuation)),
+ (r'\b([a-zA-Z_][\w$]*(?:\.[a-zA-Z_][\w$]*)+)\b', Name.Namespace),
+
+ (r'(let|var)(\s+)',
+ bygroups(Keyword.Declaration, Text),
+ 'varname'),
+ (r'(struct)(\s+)',
+ bygroups(Keyword.Declaration, Text),
+ 'structname'),
+ (r'(function)(\s+)',
+ bygroups(Keyword.Declaration, Text),
+ 'funcname'),
+
+ (r'(null|true|false)\b', Keyword.Constant),
+ (r'(augment|pimp'
+ r'|if|else|case|match|return'
+ r'|case|when|then|otherwise'
+ r'|while|for|foreach'
+ r'|try|catch|finally|throw'
+ r'|local'
+ r'|continue|break)\b', Keyword),
+
+ (r'(map|array|list|set|vector|tuple)(\[)',
+ bygroups(Name.Builtin, Punctuation)),
+ (r'(print|println|readln|raise|fun'
+ r'|asInterfaceInstance)\b', Name.Builtin),
+ (r'(`?[a-zA-Z_][\w$]*)(\()',
+ bygroups(Name.Function, Punctuation)),
+
+ (r'-?[\d_]*\.[\d_]*([eE][+-]?\d[\d_]*)?F?', Number.Float),
+ (r'0[0-7]+j?', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'-?\d[\d_]*L', Number.Integer.Long),
+ (r'-?\d[\d_]*', Number.Integer),
+
+ ('`?[a-zA-Z_][\w$]*', Name),
+ (r'@[a-zA-Z_][\w$.]*', Name.Decorator),
+
+ (r'"""', String, combined('stringescape', 'triplestring')),
+ (r'"', String, combined('stringescape', 'doublestring')),
+ (r"'", String, combined('stringescape', 'singlestring')),
+ (r'----((.|\n)*?)----', String.Doc)
+
+ ],
+
+ 'funcname': [
+ (r'`?[a-zA-Z_][\w$]*', Name.Function, '#pop'),
+ ],
+ 'modname': [
+ (r'[a-zA-Z_][\w$.]*\*?', Name.Namespace, '#pop')
+ ],
+ 'structname': [
+ (r'`?[\w.]+\*?', Name.Class, '#pop')
+ ],
+ 'varname': [
+ (r'`?[a-zA-Z_][\w$]*', Name.Variable, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\\'"\n]+', String),
+ (r'[\'"\\]', String)
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'triplestring': [
+ (r'"""', String, '#pop'),
+ include('string'),
+ (r'\n', String),
+ ],
+ 'doublestring': [
+ (r'"', String.Double, '#pop'),
+ include('string'),
+ ],
+ 'singlestring': [
+ (r"'", String, '#pop'),
+ include('string'),
+ ],
+ 'operators': [
+ (r'[#=,./%+\-?]', Operator),
+ (r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
+ (r'(==|<=|<|>=|>|!=)', Operator),
+ ],
+ }
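The string rules above use combined('stringescape', ...) to build anonymous states that merge escape handling with each string flavour. A hedged end-to-end usage sketch, assuming GoloLexer is registered by this patch and using the standard highlight()/TerminalFormatter API:

    # Hedged usage sketch: highlight a small Golo function on the terminal.
    from pygments import highlight
    from pygments.lexers import GoloLexer
    from pygments.formatters import TerminalFormatter

    code = 'function main = |args| {\n  println("Hello from Golo")\n}\n'
    print(highlight(code, GoloLexer(), TerminalFormatter()))
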
+
+
+class JasminLexer(RegexLexer):
+ """
+ For `Jasmin <http://jasmin.sourceforge.net/>`_ assembly code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Jasmin'
+ aliases = ['jasmin', 'jasminxt']
+ filenames = ['*.j']
+
+ _whitespace = r' \n\t\r'
+ _ws = r'(?:[%s]+)' % _whitespace
+ _separator = r'%s:=' % _whitespace
+ _break = r'(?=[%s]|$)' % _separator
+ _name = r'[^%s]+' % _separator
+ _unqualified_name = r'(?:[^%s.;\[/]+)' % _separator
+
+ tokens = {
+ 'default': [
+ (r'\n', Text, '#pop'),
+ (r"'", String.Single, ('#pop', 'quote')),
+ (r'"', String.Double, 'string'),
+ (r'=', Punctuation),
+ (r':', Punctuation, 'label'),
+ (_ws, Text),
+ (r';.*', Comment.Single),
+ (r'(\$[-+])?0x-?[\da-fA-F]+%s' % _break, Number.Hex),
+ (r'(\$[-+]|\+)?-?\d+%s' % _break, Number.Integer),
+ (r'-?(\d+\.\d*|\.\d+)([eE][-+]?\d+)?[fFdD]?'
+ r'[\x00-\x08\x0b\x0c\x0e-\x1f]*%s' % _break, Number.Float),
+ (r'\$%s' % _name, Name.Variable),
+
+ # Directives
+ (r'\.annotation%s' % _break, Keyword.Reserved, 'annotation'),
+ (r'(\.attribute|\.bytecode|\.debug|\.deprecated|\.enclosing|'
+ r'\.interface|\.line|\.signature|\.source|\.stack|\.var|abstract|'
+ r'annotation|bridge|class|default|enum|field|final|fpstrict|'
+ r'interface|native|private|protected|public|signature|static|'
+ r'synchronized|synthetic|transient|varargs|volatile)%s' % _break,
+ Keyword.Reserved),
+ (r'\.catch%s' % _break, Keyword.Reserved, 'caught-exception'),
+ (r'(\.class|\.implements|\.inner|\.super|inner|invisible|'
+ r'invisibleparam|outer|visible|visibleparam)%s' % _break,
+ Keyword.Reserved, 'class/convert-dots'),
+ (r'\.field%s' % _break, Keyword.Reserved,
+ ('descriptor/convert-dots', 'field')),
+ (r'(\.end|\.limit|use)%s' % _break, Keyword.Reserved,
+ 'no-verification'),
+ (r'\.method%s' % _break, Keyword.Reserved, 'method'),
+ (r'\.set%s' % _break, Keyword.Reserved, 'var'),
+ (r'\.throws%s' % _break, Keyword.Reserved, 'exception'),
+ (r'(from|offset|to|using)%s' % _break, Keyword.Reserved, 'label'),
+ (r'is%s' % _break, Keyword.Reserved,
+ ('descriptor/convert-dots', 'var')),
+ (r'(locals|stack)%s' % _break, Keyword.Reserved, 'verification'),
+ (r'method%s' % _break, Keyword.Reserved, 'enclosing-method'),
+
+ # Instructions
+ (words((
+ 'aaload', 'aastore', 'aconst_null', 'aload', 'aload_0', 'aload_1', 'aload_2',
+ 'aload_3', 'aload_w', 'areturn', 'arraylength', 'astore', 'astore_0', 'astore_1',
+ 'astore_2', 'astore_3', 'astore_w', 'athrow', 'baload', 'bastore', 'bipush',
+ 'breakpoint', 'caload', 'castore', 'd2f', 'd2i', 'd2l', 'dadd', 'daload', 'dastore',
+ 'dcmpg', 'dcmpl', 'dconst_0', 'dconst_1', 'ddiv', 'dload', 'dload_0', 'dload_1',
+ 'dload_2', 'dload_3', 'dload_w', 'dmul', 'dneg', 'drem', 'dreturn', 'dstore', 'dstore_0',
+ 'dstore_1', 'dstore_2', 'dstore_3', 'dstore_w', 'dsub', 'dup', 'dup2', 'dup2_x1',
+ 'dup2_x2', 'dup_x1', 'dup_x2', 'f2d', 'f2i', 'f2l', 'fadd', 'faload', 'fastore', 'fcmpg',
+ 'fcmpl', 'fconst_0', 'fconst_1', 'fconst_2', 'fdiv', 'fload', 'fload_0', 'fload_1',
+ 'fload_2', 'fload_3', 'fload_w', 'fmul', 'fneg', 'frem', 'freturn', 'fstore', 'fstore_0',
+ 'fstore_1', 'fstore_2', 'fstore_3', 'fstore_w', 'fsub', 'i2b', 'i2c', 'i2d', 'i2f', 'i2l',
+ 'i2s', 'iadd', 'iaload', 'iand', 'iastore', 'iconst_0', 'iconst_1', 'iconst_2',
+ 'iconst_3', 'iconst_4', 'iconst_5', 'iconst_m1', 'idiv', 'iinc', 'iinc_w', 'iload',
+ 'iload_0', 'iload_1', 'iload_2', 'iload_3', 'iload_w', 'imul', 'ineg', 'int2byte',
+ 'int2char', 'int2short', 'ior', 'irem', 'ireturn', 'ishl', 'ishr', 'istore', 'istore_0',
+ 'istore_1', 'istore_2', 'istore_3', 'istore_w', 'isub', 'iushr', 'ixor', 'l2d', 'l2f',
+ 'l2i', 'ladd', 'laload', 'land', 'lastore', 'lcmp', 'lconst_0', 'lconst_1', 'ldc2_w',
+ 'ldiv', 'lload', 'lload_0', 'lload_1', 'lload_2', 'lload_3', 'lload_w', 'lmul', 'lneg',
+ 'lookupswitch', 'lor', 'lrem', 'lreturn', 'lshl', 'lshr', 'lstore', 'lstore_0',
+ 'lstore_1', 'lstore_2', 'lstore_3', 'lstore_w', 'lsub', 'lushr', 'lxor',
+ 'monitorenter', 'monitorexit', 'nop', 'pop', 'pop2', 'ret', 'ret_w', 'return', 'saload',
+ 'sastore', 'sipush', 'swap'), suffix=_break), Keyword.Reserved),
+ (r'(anewarray|checkcast|instanceof|ldc|ldc_w|new)%s' % _break,
+ Keyword.Reserved, 'class/no-dots'),
+ (r'invoke(dynamic|interface|nonvirtual|special|'
+ r'static|virtual)%s' % _break, Keyword.Reserved,
+ 'invocation'),
+ (r'(getfield|putfield)%s' % _break, Keyword.Reserved,
+ ('descriptor/no-dots', 'field')),
+ (r'(getstatic|putstatic)%s' % _break, Keyword.Reserved,
+ ('descriptor/no-dots', 'static')),
+ (words((
+ 'goto', 'goto_w', 'if_acmpeq', 'if_acmpne', 'if_icmpeq',
+ 'if_icmpge', 'if_icmpgt', 'if_icmple', 'if_icmplt', 'if_icmpne',
+ 'ifeq', 'ifge', 'ifgt', 'ifle', 'iflt', 'ifne', 'ifnonnull',
+ 'ifnull', 'jsr', 'jsr_w'), suffix=_break),
+ Keyword.Reserved, 'label'),
+ (r'(multianewarray|newarray)%s' % _break, Keyword.Reserved,
+ 'descriptor/convert-dots'),
+ (r'tableswitch%s' % _break, Keyword.Reserved, 'table')
+ ],
+ 'quote': [
+ (r"'", String.Single, '#pop'),
+ (r'\\u[\da-fA-F]{4}', String.Escape),
+ (r"[^'\\]+", String.Single)
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'\\([nrtfb"\'\\]|u[\da-fA-F]{4}|[0-3]?[0-7]{1,2})',
+ String.Escape),
+ (r'[^"\\]+', String.Double)
+ ],
+ 'root': [
+ (r'\n+', Text),
+ (r"'", String.Single, 'quote'),
+ include('default'),
+ (r'(%s)([ \t\r]*)(:)' % _name,
+ bygroups(Name.Label, Text, Punctuation)),
+ (_name, String.Other)
+ ],
+ 'annotation': [
+ (r'\n', Text, ('#pop', 'annotation-body')),
+ (r'default%s' % _break, Keyword.Reserved,
+ ('#pop', 'annotation-default')),
+ include('default')
+ ],
+ 'annotation-body': [
+ (r'\n+', Text),
+ (r'\.end%s' % _break, Keyword.Reserved, '#pop'),
+ include('default'),
+ (_name, String.Other, ('annotation-items', 'descriptor/no-dots'))
+ ],
+ 'annotation-default': [
+ (r'\n+', Text),
+ (r'\.end%s' % _break, Keyword.Reserved, '#pop'),
+ include('default'),
+ default(('annotation-items', 'descriptor/no-dots'))
+ ],
+ 'annotation-items': [
+ (r"'", String.Single, 'quote'),
+ include('default'),
+ (_name, String.Other)
+ ],
+ 'caught-exception': [
+ (r'all%s' % _break, Keyword, '#pop'),
+ include('exception')
+ ],
+ 'class/convert-dots': [
+ include('default'),
+ (r'(L)((?:%s[/.])*)(%s)(;)' % (_unqualified_name, _name),
+ bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
+ '#pop'),
+ (r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Class), '#pop')
+ ],
+ 'class/no-dots': [
+ include('default'),
+ (r'\[+', Punctuation, ('#pop', 'descriptor/no-dots')),
+ (r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
+ bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
+ '#pop'),
+ (r'((?:%s/)*)(%s)' % (_unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Class), '#pop')
+ ],
+ 'descriptor/convert-dots': [
+ include('default'),
+ (r'\[+', Punctuation),
+ (r'(L)((?:%s[/.])*)(%s?)(;)' % (_unqualified_name, _name),
+ bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
+ '#pop'),
+ (r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
+ default('#pop')
+ ],
+ 'descriptor/no-dots': [
+ include('default'),
+ (r'\[+', Punctuation),
+ (r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
+ bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
+ '#pop'),
+ (r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
+ default('#pop')
+ ],
+ 'descriptors/convert-dots': [
+ (r'\)', Punctuation, '#pop'),
+ default('descriptor/convert-dots')
+ ],
+ 'enclosing-method': [
+ (_ws, Text),
+ (r'(?=[^%s]*\()' % _separator, Text, ('#pop', 'invocation')),
+ default(('#pop', 'class/convert-dots'))
+ ],
+ 'exception': [
+ include('default'),
+ (r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Exception), '#pop')
+ ],
+ 'field': [
+ (r'static%s' % _break, Keyword.Reserved, ('#pop', 'static')),
+ include('default'),
+ (r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
+ (_unqualified_name, _separator, _unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Class, Name.Variable.Instance),
+ '#pop')
+ ],
+ 'invocation': [
+ include('default'),
+ (r'((?:%s[/.](?=[^%s(]*[/.]))*)(%s[/.])?(%s)(\()' %
+ (_unqualified_name, _separator, _unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Class, Name.Function, Punctuation),
+ ('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
+ 'descriptor/convert-dots'))
+ ],
+ 'label': [
+ include('default'),
+ (_name, Name.Label, '#pop')
+ ],
+ 'method': [
+ include('default'),
+ (r'(%s)(\()' % _name, bygroups(Name.Function, Punctuation),
+ ('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
+ 'descriptor/convert-dots'))
+ ],
+ 'no-verification': [
+ (r'(locals|method|stack)%s' % _break, Keyword.Reserved, '#pop'),
+ include('default')
+ ],
+ 'static': [
+ include('default'),
+ (r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
+ (_unqualified_name, _separator, _unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Class, Name.Variable.Class), '#pop')
+ ],
+ 'table': [
+ (r'\n+', Text),
+ (r'default%s' % _break, Keyword.Reserved, '#pop'),
+ include('default'),
+ (_name, Name.Label)
+ ],
+ 'var': [
+ include('default'),
+ (_name, Name.Variable, '#pop')
+ ],
+ 'verification': [
+ include('default'),
+ (r'(Double|Float|Integer|Long|Null|Top|UninitializedThis)%s' %
+ _break, Keyword, '#pop'),
+ (r'Object%s' % _break, Keyword, ('#pop', 'class/no-dots')),
+ (r'Uninitialized%s' % _break, Keyword, ('#pop', 'label'))
+ ]
+ }
+
+ def analyse_text(text):
+ score = 0
+ if re.search(r'^\s*\.class\s', text, re.MULTILINE):
+ score += 0.5
+ if re.search(r'^\s*[a-z]+_[a-z]+\b', text, re.MULTILINE):
+ score += 0.3
+ if re.search(r'^\s*\.(attribute|bytecode|debug|deprecated|enclosing|'
+ r'inner|interface|limit|set|signature|stack)\b', text,
+ re.MULTILINE):
+ score += 0.6
+ return score
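Every directive and instruction rule above is suffixed with _break, a lookahead assembled from _separator so that mnemonics only match when followed by whitespace, ':', '=' or end of line. A small sketch re-deriving that behaviour from the definitions near the top of the class:

    # Hedged sketch: rebuild _break as defined above and probe the lookahead.
    import re

    _whitespace = r' \n\t\r'
    _separator = r'%s:=' % _whitespace
    _break = r'(?=[%s]|$)' % _separator

    print(bool(re.match(r'\.class%s' % _break, '.class public Foo')))  # True: followed by a space
    print(bool(re.match(r'\.class%s' % _break, '.classfile Foo')))     # False: 'f' is not a separator
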
diff --git a/pygments/lexers/lisp.py b/pygments/lexers/lisp.py
new file mode 100644
index 00000000..729916e3
--- /dev/null
+++ b/pygments/lexers/lisp.py
@@ -0,0 +1,2123 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.lisp
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Lispy languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, words, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal, Error
+
+from pygments.lexers.python import PythonLexer
+
+__all__ = ['SchemeLexer', 'CommonLispLexer',
+ 'HyLexer', 'RacketLexer',
+ 'NewLispLexer', 'EmacsLispLexer', ]
+
+class SchemeLexer(RegexLexer):
+ """
+ A Scheme lexer, parsing a stream and outputting the tokens
+    needed to highlight Scheme code.
+    This lexer could most probably be easily subclassed to parse
+    other Lisp dialects like Common Lisp, Emacs Lisp or AutoLisp.
+
+ This parser is checked with pastes from the LISP pastebin
+ at http://paste.lisp.org/ to cover as much syntax as possible.
+
+ It supports the full Scheme syntax as defined in R5RS.
+
+ .. versionadded:: 0.6
+ """
+ name = 'Scheme'
+ aliases = ['scheme', 'scm']
+ filenames = ['*.scm', '*.ss']
+ mimetypes = ['text/x-scheme', 'application/x-scheme']
+
+    # list of known keywords and builtins taken from vim 6.4 scheme.vim
+ # syntax file.
+ keywords = (
+ 'lambda', 'define', 'if', 'else', 'cond', 'and', 'or', 'case', 'let',
+ 'let*', 'letrec', 'begin', 'do', 'delay', 'set!', '=>', 'quote',
+ 'quasiquote', 'unquote', 'unquote-splicing', 'define-syntax',
+ 'let-syntax', 'letrec-syntax', 'syntax-rules'
+ )
+ builtins = (
+ '*', '+', '-', '/', '<', '<=', '=', '>', '>=', 'abs', 'acos', 'angle',
+ 'append', 'apply', 'asin', 'assoc', 'assq', 'assv', 'atan',
+ 'boolean?', 'caaaar', 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr',
+ 'caar', 'cadaar', 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr',
+ 'cadr', 'call-with-current-continuation', 'call-with-input-file',
+ 'call-with-output-file', 'call-with-values', 'call/cc', 'car',
+ 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
+ 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr',
+ 'cdr', 'ceiling', 'char->integer', 'char-alphabetic?', 'char-ci<=?',
+ 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?', 'char-downcase',
+ 'char-lower-case?', 'char-numeric?', 'char-ready?', 'char-upcase',
+ 'char-upper-case?', 'char-whitespace?', 'char<=?', 'char<?', 'char=?',
+ 'char>=?', 'char>?', 'char?', 'close-input-port', 'close-output-port',
+ 'complex?', 'cons', 'cos', 'current-input-port', 'current-output-port',
+ 'denominator', 'display', 'dynamic-wind', 'eof-object?', 'eq?',
+ 'equal?', 'eqv?', 'eval', 'even?', 'exact->inexact', 'exact?', 'exp',
+ 'expt', 'floor', 'for-each', 'force', 'gcd', 'imag-part',
+ 'inexact->exact', 'inexact?', 'input-port?', 'integer->char',
+ 'integer?', 'interaction-environment', 'lcm', 'length', 'list',
+ 'list->string', 'list->vector', 'list-ref', 'list-tail', 'list?',
+ 'load', 'log', 'magnitude', 'make-polar', 'make-rectangular',
+ 'make-string', 'make-vector', 'map', 'max', 'member', 'memq', 'memv',
+ 'min', 'modulo', 'negative?', 'newline', 'not', 'null-environment',
+ 'null?', 'number->string', 'number?', 'numerator', 'odd?',
+ 'open-input-file', 'open-output-file', 'output-port?', 'pair?',
+ 'peek-char', 'port?', 'positive?', 'procedure?', 'quotient',
+ 'rational?', 'rationalize', 'read', 'read-char', 'real-part', 'real?',
+ 'remainder', 'reverse', 'round', 'scheme-report-environment',
+ 'set-car!', 'set-cdr!', 'sin', 'sqrt', 'string', 'string->list',
+ 'string->number', 'string->symbol', 'string-append', 'string-ci<=?',
+ 'string-ci<?', 'string-ci=?', 'string-ci>=?', 'string-ci>?',
+ 'string-copy', 'string-fill!', 'string-length', 'string-ref',
+ 'string-set!', 'string<=?', 'string<?', 'string=?', 'string>=?',
+ 'string>?', 'string?', 'substring', 'symbol->string', 'symbol?',
+ 'tan', 'transcript-off', 'transcript-on', 'truncate', 'values',
+ 'vector', 'vector->list', 'vector-fill!', 'vector-length',
+ 'vector-ref', 'vector-set!', 'vector?', 'with-input-from-file',
+ 'with-output-to-file', 'write', 'write-char', 'zero?'
+ )
+
+ # valid names for identifiers
+    # well, names just can't consist entirely of numbers,
+ # but this should be good enough for now
+ valid_name = r'[\w!$%&*+,/:<=>?@^~|-]+'
+
+ tokens = {
+ 'root': [
+ # the comments
+ # and going to the end of the line
+ (r';.*$', Comment.Single),
+ # multi-line comment
+ (r'#\|', Comment.Multiline, 'multiline-comment'),
+            # commented form (entire sexpr following)
+ (r'#;\s*\(', Comment, 'commented-form'),
+ # signifies that the program text that follows is written with the
+ # lexical and datum syntax described in r6rs
+ (r'#!r6rs', Comment),
+
+ # whitespaces - usually not relevant
+ (r'\s+', Text),
+
+ # numbers
+ (r'-?\d+\.\d+', Number.Float),
+ (r'-?\d+', Number.Integer),
+ # support for uncommon kinds of numbers -
+ # have to figure out what the characters mean
+ # (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
+
+ # strings, symbols and characters
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'" + valid_name, String.Symbol),
+ (r"#\\([()/'\"._!§$%& ?=+-]|[a-zA-Z0-9]+)", String.Char),
+
+ # constants
+ (r'(#t|#f)', Name.Constant),
+
+ # special operators
+ (r"('|#|`|,@|,|\.)", Operator),
+
+ # highlight the keywords
+ ('(%s)' % '|'.join(re.escape(entry) + ' ' for entry in keywords),
+ Keyword),
+
+ # first variable in a quoted string like
+ # '(this is syntactic sugar)
+ (r"(?<='\()" + valid_name, Name.Variable),
+ (r"(?<=#\()" + valid_name, Name.Variable),
+
+ # highlight the builtins
+ ("(?<=\()(%s)" % '|'.join(re.escape(entry) + ' ' for entry in builtins),
+ Name.Builtin),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Function),
+ # find the remaining variables
+ (valid_name, Name.Variable),
+
+ # the famous parentheses!
+ (r'(\(|\))', Punctuation),
+ (r'(\[|\])', Punctuation),
+ ],
+ 'multiline-comment': [
+ (r'#\|', Comment.Multiline, '#push'),
+ (r'\|#', Comment.Multiline, '#pop'),
+ (r'[^|#]+', Comment.Multiline),
+ (r'[|#]', Comment.Multiline),
+ ],
+ 'commented-form': [
+ (r'\(', Comment, '#push'),
+ (r'\)', Comment, '#pop'),
+ (r'[^()]+', Comment),
+ ],
+ }
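As the comment above valid_name notes, the identifier pattern is deliberately permissive; numbers are kept separate only because the number rules come first in 'root'. A hedged check of what that character class accepts, using the same pattern copied out of the class:

    # Hedged sketch: the Scheme valid_name pattern accepts typical Scheme
    # identifiers, including ones containing '!', '->' and '?'.
    import re

    valid_name = r'[\w!$%&*+,/:<=>?@^~|-]+'
    for candidate in ('define', 'set!', 'string->list', 'null?', '+'):
        print(candidate, bool(re.fullmatch(valid_name, candidate)))
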
+
+
+class CommonLispLexer(RegexLexer):
+ """
+ A Common Lisp lexer.
+
+ .. versionadded:: 0.9
+ """
+ name = 'Common Lisp'
+ aliases = ['common-lisp', 'cl', 'lisp']
+ filenames = ['*.cl', '*.lisp']
+ mimetypes = ['text/x-common-lisp']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ # couple of useful regexes
+
+ # characters that are not macro-characters and can be used to begin a symbol
+ nonmacro = r'\\.|[\w!$%&*+-/<=>?@\[\]^{}~]'
+ constituent = nonmacro + '|[#.:]'
+ terminated = r'(?=[ "()\'\n,;`])' # whitespace or terminating macro characters
+
+ # symbol token, reverse-engineered from hyperspec
+ # Take a deep breath...
+ symbol = r'(\|[^|]+\||(?:%s)(?:%s)*)' % (nonmacro, constituent)
+
+ def __init__(self, **options):
+ from pygments.lexers._cl_builtins import BUILTIN_FUNCTIONS, \
+ SPECIAL_FORMS, MACROS, LAMBDA_LIST_KEYWORDS, DECLARATIONS, \
+ BUILTIN_TYPES, BUILTIN_CLASSES
+ self.builtin_function = BUILTIN_FUNCTIONS
+ self.special_forms = SPECIAL_FORMS
+ self.macros = MACROS
+ self.lambda_list_keywords = LAMBDA_LIST_KEYWORDS
+ self.declarations = DECLARATIONS
+ self.builtin_types = BUILTIN_TYPES
+ self.builtin_classes = BUILTIN_CLASSES
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if token is Name.Variable:
+ if value in self.builtin_function:
+ yield index, Name.Builtin, value
+ continue
+ if value in self.special_forms:
+ yield index, Keyword, value
+ continue
+ if value in self.macros:
+ yield index, Name.Builtin, value
+ continue
+ if value in self.lambda_list_keywords:
+ yield index, Keyword, value
+ continue
+ if value in self.declarations:
+ yield index, Keyword, value
+ continue
+ if value in self.builtin_types:
+ yield index, Keyword.Type, value
+ continue
+ if value in self.builtin_classes:
+ yield index, Name.Class, value
+ continue
+ yield index, token, value
+
+ tokens = {
+ 'root': [
+ default('body'),
+ ],
+ 'multiline-comment': [
+ (r'#\|', Comment.Multiline, '#push'), # (cf. Hyperspec 2.4.8.19)
+ (r'\|#', Comment.Multiline, '#pop'),
+ (r'[^|#]+', Comment.Multiline),
+ (r'[|#]', Comment.Multiline),
+ ],
+ 'commented-form': [
+ (r'\(', Comment.Preproc, '#push'),
+ (r'\)', Comment.Preproc, '#pop'),
+ (r'[^()]+', Comment.Preproc),
+ ],
+ 'body': [
+ # whitespace
+ (r'\s+', Text),
+
+ # single-line comment
+ (r';.*$', Comment.Single),
+
+ # multi-line comment
+ (r'#\|', Comment.Multiline, 'multiline-comment'),
+
+ # encoding comment (?)
+ (r'#\d*Y.*$', Comment.Special),
+
+ # strings and characters
+ (r'"(\\.|\\\n|[^"\\])*"', String),
+ # quoting
+ (r":" + symbol, String.Symbol),
+ (r"::" + symbol, String.Symbol),
+ (r":#" + symbol, String.Symbol),
+ (r"'" + symbol, String.Symbol),
+ (r"'", Operator),
+ (r"`", Operator),
+
+ # decimal numbers
+ (r'[-+]?\d+\.?' + terminated, Number.Integer),
+ (r'[-+]?\d+/\d+' + terminated, Number),
+ (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)'
+ + terminated, Number.Float),
+
+ # sharpsign strings and characters
+ (r"#\\." + terminated, String.Char),
+ (r"#\\" + symbol, String.Char),
+
+ # vector
+ (r'#\(', Operator, 'body'),
+
+ # bitstring
+ (r'#\d*\*[01]*', Literal.Other),
+
+ # uninterned symbol
+ (r'#:' + symbol, String.Symbol),
+
+ # read-time and load-time evaluation
+ (r'#[.,]', Operator),
+
+ # function shorthand
+ (r'#\'', Name.Function),
+
+ # binary rational
+ (r'#b[+-]?[01]+(/[01]+)?', Number.Bin),
+
+ # octal rational
+ (r'#o[+-]?[0-7]+(/[0-7]+)?', Number.Oct),
+
+ # hex rational
+ (r'#x[+-]?[0-9a-f]+(/[0-9a-f]+)?', Number.Hex),
+
+ # radix rational
+ (r'#\d+r[+-]?[0-9a-z]+(/[0-9a-z]+)?', Number),
+
+ # complex
+ (r'(#c)(\()', bygroups(Number, Punctuation), 'body'),
+
+ # array
+ (r'(#\d+a)(\()', bygroups(Literal.Other, Punctuation), 'body'),
+
+ # structure
+ (r'(#s)(\()', bygroups(Literal.Other, Punctuation), 'body'),
+
+ # path
+ (r'#p?"(\\.|[^"])*"', Literal.Other),
+
+ # reference
+ (r'#\d+=', Operator),
+ (r'#\d+#', Operator),
+
+ # read-time comment
+ (r'#+nil' + terminated + '\s*\(', Comment.Preproc, 'commented-form'),
+
+ # read-time conditional
+ (r'#[+-]', Operator),
+
+ # special operators that should have been parsed already
+ (r'(,@|,|\.)', Operator),
+
+ # special constants
+ (r'(t|nil)' + terminated, Name.Constant),
+
+ # functions and variables
+ (r'\*' + symbol + '\*', Name.Variable.Global),
+ (symbol, Name.Variable),
+
+ # parentheses
+ (r'\(', Punctuation, 'body'),
+ (r'\)', Punctuation, '#pop'),
+ ],
+ }
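Rather than enumerating every builtin in the regex table, the lexer tags bare symbols as Name.Variable and then reclassifies them in get_tokens_unprocessed against the sets loaded from _cl_builtins. A hedged sketch of observing that second pass, assuming the lexer is importable as pygments.lexers.CommonLispLexer:

    # Hedged sketch: symbols such as 'defun' and 'format' should be re-tagged
    # from Name.Variable to builtin/keyword types by the pass above.
    from pygments.lexers import CommonLispLexer

    for token, value in CommonLispLexer().get_tokens('(defun hello () (format t "hi"))\n'):
        print(token, repr(value))
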
+
+
+class HyLexer(RegexLexer):
+ """
+ Lexer for `Hy <http://hylang.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Hy'
+ aliases = ['hylang']
+ filenames = ['*.hy']
+ mimetypes = ['text/x-hy', 'application/x-hy']
+
+ special_forms = (
+ 'cond', 'for', '->', '->>', 'car',
+ 'cdr', 'first', 'rest', 'let', 'when', 'unless',
+ 'import', 'do', 'progn', 'get', 'slice', 'assoc', 'with-decorator',
+ ',', 'list_comp', 'kwapply', '~', 'is', 'in', 'is-not', 'not-in',
+ 'quasiquote', 'unquote', 'unquote-splice', 'quote', '|', '<<=', '>>=',
+ 'foreach', 'while',
+ 'eval-and-compile', 'eval-when-compile'
+ )
+
+ declarations = (
+ 'def', 'defn', 'defun', 'defmacro', 'defclass', 'lambda', 'fn', 'setv'
+ )
+
+ hy_builtins = ()
+
+ hy_core = (
+ 'cycle', 'dec', 'distinct', 'drop', 'even?', 'filter', 'inc',
+ 'instance?', 'iterable?', 'iterate', 'iterator?', 'neg?',
+ 'none?', 'nth', 'numeric?', 'odd?', 'pos?', 'remove', 'repeat',
+ 'repeatedly', 'take', 'take_nth', 'take_while', 'zero?'
+ )
+
+ builtins = hy_builtins + hy_core
+
+ # valid names for identifiers
+    # well, names just can't consist entirely of numbers,
+ # but this should be good enough for now
+ valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
+
+ def _multi_escape(entries):
+ return words(entries, suffix=' ')
+
+ tokens = {
+ 'root': [
+ # the comments - always starting with semicolon
+ # and going to the end of the line
+ (r';.*$', Comment.Single),
+
+ # whitespaces - usually not relevant
+ (r'[,\s]+', Text),
+
+ # numbers
+ (r'-?\d+\.\d+', Number.Float),
+ (r'-?\d+', Number.Integer),
+ (r'0[0-7]+j?', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+
+ # strings, symbols and characters
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'" + valid_name, String.Symbol),
+ (r"\\(.|[a-z]+)", String.Char),
+ (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
+ (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
+
+ # keywords
+ (r'::?' + valid_name, String.Symbol),
+
+ # special operators
+ (r'~@|[`\'#^~&@]', Operator),
+
+ include('py-keywords'),
+ include('py-builtins'),
+
+ # highlight the special forms
+ (_multi_escape(special_forms), Keyword),
+
+ # Technically, only the special forms are 'keywords'. The problem
+ # is that only treating them as keywords means that things like
+ # 'defn' and 'ns' need to be highlighted as builtins. This is ugly
+ # and weird for most styles. So, as a compromise we're going to
+ # highlight them as Keyword.Declarations.
+ (_multi_escape(declarations), Keyword.Declaration),
+
+ # highlight the builtins
+ (_multi_escape(builtins), Name.Builtin),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Function),
+
+ # find the remaining variables
+ (valid_name, Name.Variable),
+
+ # Hy accepts vector notation
+ (r'(\[|\])', Punctuation),
+
+ # Hy accepts map notation
+ (r'(\{|\})', Punctuation),
+
+ # the famous parentheses!
+ (r'(\(|\))', Punctuation),
+
+ ],
+ 'py-keywords': PythonLexer.tokens['keywords'],
+ 'py-builtins': PythonLexer.tokens['builtins'],
+ }
+
+ def analyse_text(text):
+ if '(import ' in text or '(defn ' in text:
+ return 0.9
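analyse_text is what pygments.lexers.guess_lexer() consults when no filename is available, so returning 0.9 for text containing '(import ' or '(defn ' makes Hy a strong candidate. A hedged sketch of that lookup:

    # Hedged sketch: with no filename to go on, guess_lexer() scores each
    # lexer's analyse_text(); Hy-looking source should rank highly here.
    from pygments.lexers import guess_lexer

    lexer = guess_lexer('(import os)\n(defn main [] (print "hi"))\n')
    print(lexer.name)
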
+
+
+class RacketLexer(RegexLexer):
+ """
+ Lexer for `Racket <http://racket-lang.org/>`_ source code (formerly
+ known as PLT Scheme).
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Racket'
+ aliases = ['racket', 'rkt']
+ filenames = ['*.rkt', '*.rktd', '*.rktl']
+ mimetypes = ['text/x-racket', 'application/x-racket']
+
+ # Generated by example.rkt
+ _keywords = (
+ '#%app', '#%datum', '#%declare', '#%expression', '#%module-begin',
+ '#%plain-app', '#%plain-lambda', '#%plain-module-begin',
+ '#%printing-module-begin', '#%provide', '#%require',
+ '#%stratified-body', '#%top', '#%top-interaction',
+ '#%variable-reference', '->', '->*', '->*m', '->d', '->dm', '->i',
+ '->m', '...', ':do-in', '==', '=>', '_', 'absent', 'abstract',
+ 'all-defined-out', 'all-from-out', 'and', 'any', 'augment', 'augment*',
+ 'augment-final', 'augment-final*', 'augride', 'augride*', 'begin',
+ 'begin-for-syntax', 'begin0', 'case', 'case->', 'case->m',
+ 'case-lambda', 'class', 'class*', 'class-field-accessor',
+ 'class-field-mutator', 'class/c', 'class/derived', 'combine-in',
+ 'combine-out', 'command-line', 'compound-unit', 'compound-unit/infer',
+ 'cond', 'contract', 'contract-out', 'contract-struct', 'contracted',
+ 'define', 'define-compound-unit', 'define-compound-unit/infer',
+ 'define-contract-struct', 'define-custom-hash-types',
+ 'define-custom-set-types', 'define-for-syntax',
+ 'define-local-member-name', 'define-logger', 'define-match-expander',
+ 'define-member-name', 'define-module-boundary-contract',
+ 'define-namespace-anchor', 'define-opt/c', 'define-sequence-syntax',
+ 'define-serializable-class', 'define-serializable-class*',
+ 'define-signature', 'define-signature-form', 'define-struct',
+ 'define-struct/contract', 'define-struct/derived', 'define-syntax',
+ 'define-syntax-rule', 'define-syntaxes', 'define-unit',
+ 'define-unit-binding', 'define-unit-from-context',
+ 'define-unit/contract', 'define-unit/new-import-export',
+ 'define-unit/s', 'define-values', 'define-values-for-export',
+ 'define-values-for-syntax', 'define-values/invoke-unit',
+ 'define-values/invoke-unit/infer', 'define/augment',
+ 'define/augment-final', 'define/augride', 'define/contract',
+ 'define/final-prop', 'define/match', 'define/overment',
+ 'define/override', 'define/override-final', 'define/private',
+ 'define/public', 'define/public-final', 'define/pubment',
+ 'define/subexpression-pos-prop', 'delay', 'delay/idle', 'delay/name',
+ 'delay/strict', 'delay/sync', 'delay/thread', 'do', 'else', 'except',
+ 'except-in', 'except-out', 'export', 'extends', 'failure-cont',
+ 'false', 'false/c', 'field', 'field-bound?', 'file',
+ 'flat-murec-contract', 'flat-rec-contract', 'for', 'for*', 'for*/and',
+ 'for*/first', 'for*/fold', 'for*/fold/derived', 'for*/hash',
+ 'for*/hasheq', 'for*/hasheqv', 'for*/last', 'for*/list', 'for*/lists',
+ 'for*/mutable-set', 'for*/mutable-seteq', 'for*/mutable-seteqv',
+ 'for*/or', 'for*/product', 'for*/set', 'for*/seteq', 'for*/seteqv',
+ 'for*/sum', 'for*/vector', 'for*/weak-set', 'for*/weak-seteq',
+ 'for*/weak-seteqv', 'for-label', 'for-meta', 'for-syntax',
+ 'for-template', 'for/and', 'for/first', 'for/fold', 'for/fold/derived',
+ 'for/hash', 'for/hasheq', 'for/hasheqv', 'for/last', 'for/list',
+ 'for/lists', 'for/mutable-set', 'for/mutable-seteq',
+ 'for/mutable-seteqv', 'for/or', 'for/product', 'for/set', 'for/seteq',
+ 'for/seteqv', 'for/sum', 'for/vector', 'for/weak-set',
+ 'for/weak-seteq', 'for/weak-seteqv', 'gen:custom-write', 'gen:dict',
+ 'gen:equal+hash', 'gen:set', 'gen:stream', 'generic', 'get-field',
+ 'if', 'implies', 'import', 'include', 'include-at/relative-to',
+ 'include-at/relative-to/reader', 'include/reader', 'inherit',
+ 'inherit-field', 'inherit/inner', 'inherit/super', 'init',
+ 'init-depend', 'init-field', 'init-rest', 'inner', 'inspect',
+ 'instantiate', 'interface', 'interface*', 'invoke-unit',
+ 'invoke-unit/infer', 'lambda', 'lazy', 'let', 'let*', 'let*-values',
+ 'let-syntax', 'let-syntaxes', 'let-values', 'let/cc', 'let/ec',
+ 'letrec', 'letrec-syntax', 'letrec-syntaxes', 'letrec-syntaxes+values',
+ 'letrec-values', 'lib', 'link', 'local', 'local-require', 'log-debug',
+ 'log-error', 'log-fatal', 'log-info', 'log-warning', 'match', 'match*',
+ 'match*/derived', 'match-define', 'match-define-values',
+ 'match-lambda', 'match-lambda*', 'match-lambda**', 'match-let',
+ 'match-let*', 'match-let*-values', 'match-let-values', 'match-letrec',
+ 'match/derived', 'match/values', 'member-name-key', 'method-contract?',
+ 'mixin', 'module', 'module*', 'module+', 'nand', 'new', 'nor',
+ 'object-contract', 'object/c', 'only', 'only-in', 'only-meta-in',
+ 'open', 'opt/c', 'or', 'overment', 'overment*', 'override',
+ 'override*', 'override-final', 'override-final*', 'parameterize',
+ 'parameterize*', 'parameterize-break', 'parametric->/c', 'place',
+ 'place*', 'planet', 'prefix', 'prefix-in', 'prefix-out', 'private',
+ 'private*', 'prompt-tag/c', 'protect-out', 'provide',
+ 'provide-signature-elements', 'provide/contract', 'public', 'public*',
+ 'public-final', 'public-final*', 'pubment', 'pubment*', 'quasiquote',
+ 'quasisyntax', 'quasisyntax/loc', 'quote', 'quote-syntax',
+ 'quote-syntax/prune', 'recontract-out', 'recursive-contract',
+ 'relative-in', 'rename', 'rename-in', 'rename-inner', 'rename-out',
+ 'rename-super', 'require', 'send', 'send*', 'send+', 'send-generic',
+ 'send/apply', 'send/keyword-apply', 'set!', 'set!-values',
+ 'set-field!', 'shared', 'stream', 'stream-cons', 'struct', 'struct*',
+ 'struct-copy', 'struct-field-index', 'struct-out', 'struct/c',
+ 'struct/ctc', 'struct/dc', 'submod', 'super', 'super-instantiate',
+ 'super-make-object', 'super-new', 'syntax', 'syntax-case',
+ 'syntax-case*', 'syntax-id-rules', 'syntax-rules', 'syntax/loc', 'tag',
+ 'this', 'this%', 'thunk', 'thunk*', 'time', 'unconstrained-domain->',
+ 'unit', 'unit-from-context', 'unit/c', 'unit/new-import-export',
+ 'unit/s', 'unless', 'unquote', 'unquote-splicing', 'unsyntax',
+ 'unsyntax-splicing', 'values/drop', 'when', 'with-continuation-mark',
+ 'with-contract', 'with-handlers', 'with-handlers*', 'with-method',
+ 'with-syntax', u'λ'
+ )
+
+ # Generated by example.rkt
+ _builtins = (
+ '*', '+', '-', '/', '<', '</c', '<=', '<=/c', '=', '=/c', '>', '>/c',
+ '>=', '>=/c', 'abort-current-continuation', 'abs', 'absolute-path?',
+ 'acos', 'add-between', 'add1', 'alarm-evt', 'always-evt', 'and/c',
+ 'andmap', 'angle', 'any/c', 'append', 'append*', 'append-map', 'apply',
+ 'argmax', 'argmin', 'arithmetic-shift', 'arity-at-least',
+ 'arity-at-least-value', 'arity-at-least?', 'arity-checking-wrapper',
+ 'arity-includes?', 'arity=?', 'asin', 'assf', 'assoc', 'assq', 'assv',
+ 'atan', 'bad-number-of-results', 'banner', 'base->-doms/c',
+ 'base->-rngs/c', 'base->?', 'between/c', 'bitwise-and',
+ 'bitwise-bit-field', 'bitwise-bit-set?', 'bitwise-ior', 'bitwise-not',
+ 'bitwise-xor', 'blame-add-car-context', 'blame-add-cdr-context',
+ 'blame-add-context', 'blame-add-missing-party',
+ 'blame-add-nth-arg-context', 'blame-add-or-context',
+ 'blame-add-range-context', 'blame-add-unknown-context',
+ 'blame-context', 'blame-contract', 'blame-fmt->-string',
+ 'blame-negative', 'blame-original?', 'blame-positive',
+ 'blame-replace-negative', 'blame-source', 'blame-swap',
+ 'blame-swapped?', 'blame-update', 'blame-value', 'blame?', 'boolean=?',
+ 'boolean?', 'bound-identifier=?', 'box', 'box-cas!', 'box-immutable',
+ 'box-immutable/c', 'box/c', 'box?', 'break-enabled', 'break-thread',
+ 'build-chaperone-contract-property', 'build-compound-type-name',
+ 'build-contract-property', 'build-flat-contract-property',
+ 'build-list', 'build-path', 'build-path/convention-type',
+ 'build-string', 'build-vector', 'byte-pregexp', 'byte-pregexp?',
+ 'byte-ready?', 'byte-regexp', 'byte-regexp?', 'byte?', 'bytes',
+ 'bytes->immutable-bytes', 'bytes->list', 'bytes->path',
+ 'bytes->path-element', 'bytes->string/latin-1', 'bytes->string/locale',
+ 'bytes->string/utf-8', 'bytes-append', 'bytes-append*',
+ 'bytes-close-converter', 'bytes-convert', 'bytes-convert-end',
+ 'bytes-converter?', 'bytes-copy', 'bytes-copy!',
+ 'bytes-environment-variable-name?', 'bytes-fill!', 'bytes-join',
+ 'bytes-length', 'bytes-no-nuls?', 'bytes-open-converter', 'bytes-ref',
+ 'bytes-set!', 'bytes-utf-8-index', 'bytes-utf-8-length',
+ 'bytes-utf-8-ref', 'bytes<?', 'bytes=?', 'bytes>?', 'bytes?', 'caaaar',
+ 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar',
+ 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr', 'cadr',
+ 'call-in-nested-thread', 'call-with-atomic-output-file',
+ 'call-with-break-parameterization',
+ 'call-with-composable-continuation', 'call-with-continuation-barrier',
+ 'call-with-continuation-prompt', 'call-with-current-continuation',
+ 'call-with-default-reading-parameterization',
+ 'call-with-escape-continuation', 'call-with-exception-handler',
+ 'call-with-file-lock/timeout', 'call-with-immediate-continuation-mark',
+ 'call-with-input-bytes', 'call-with-input-file',
+ 'call-with-input-file*', 'call-with-input-string',
+ 'call-with-output-bytes', 'call-with-output-file',
+ 'call-with-output-file*', 'call-with-output-string',
+ 'call-with-parameterization', 'call-with-semaphore',
+ 'call-with-semaphore/enable-break', 'call-with-values', 'call/cc',
+ 'call/ec', 'car', 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr',
+ 'cdadr', 'cdar', 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr',
+ 'cdddr', 'cddr', 'cdr', 'ceiling', 'channel-get', 'channel-put',
+ 'channel-put-evt', 'channel-put-evt?', 'channel-try-get', 'channel/c',
+ 'channel?', 'chaperone-box', 'chaperone-channel',
+ 'chaperone-continuation-mark-key', 'chaperone-contract-property?',
+ 'chaperone-contract?', 'chaperone-evt', 'chaperone-hash',
+ 'chaperone-of?', 'chaperone-procedure', 'chaperone-prompt-tag',
+ 'chaperone-struct', 'chaperone-struct-type', 'chaperone-vector',
+ 'chaperone?', 'char->integer', 'char-alphabetic?', 'char-blank?',
+ 'char-ci<=?', 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?',
+ 'char-downcase', 'char-foldcase', 'char-general-category',
+ 'char-graphic?', 'char-iso-control?', 'char-lower-case?',
+ 'char-numeric?', 'char-punctuation?', 'char-ready?', 'char-symbolic?',
+ 'char-title-case?', 'char-titlecase', 'char-upcase',
+ 'char-upper-case?', 'char-utf-8-length', 'char-whitespace?', 'char<=?',
+ 'char<?', 'char=?', 'char>=?', 'char>?', 'char?',
+ 'check-duplicate-identifier', 'checked-procedure-check-and-extract',
+ 'choice-evt', 'class->interface', 'class-info', 'class?',
+ 'cleanse-path', 'close-input-port', 'close-output-port',
+ 'coerce-chaperone-contract', 'coerce-chaperone-contracts',
+ 'coerce-contract', 'coerce-contract/f', 'coerce-contracts',
+ 'coerce-flat-contract', 'coerce-flat-contracts', 'collect-garbage',
+ 'collection-file-path', 'collection-path', 'compile',
+ 'compile-allow-set!-undefined', 'compile-context-preservation-enabled',
+ 'compile-enforce-module-constants', 'compile-syntax',
+ 'compiled-expression?', 'compiled-module-expression?',
+ 'complete-path?', 'complex?', 'compose', 'compose1', 'conjugate',
+ 'cons', 'cons/c', 'cons?', 'const', 'continuation-mark-key/c',
+ 'continuation-mark-key?', 'continuation-mark-set->context',
+ 'continuation-mark-set->list', 'continuation-mark-set->list*',
+ 'continuation-mark-set-first', 'continuation-mark-set?',
+ 'continuation-marks', 'continuation-prompt-available?',
+ 'continuation-prompt-tag?', 'continuation?',
+ 'contract-continuation-mark-key', 'contract-first-order',
+ 'contract-first-order-passes?', 'contract-name', 'contract-proc',
+ 'contract-projection', 'contract-property?',
+ 'contract-random-generate', 'contract-stronger?',
+ 'contract-struct-exercise', 'contract-struct-generate',
+ 'contract-val-first-projection', 'contract?', 'convert-stream',
+ 'copy-directory/files', 'copy-file', 'copy-port', 'cos', 'cosh',
+ 'count', 'current-blame-format', 'current-break-parameterization',
+ 'current-code-inspector', 'current-command-line-arguments',
+ 'current-compile', 'current-compiled-file-roots',
+ 'current-continuation-marks', 'current-contract-region',
+ 'current-custodian', 'current-directory', 'current-directory-for-user',
+ 'current-drive', 'current-environment-variables', 'current-error-port',
+ 'current-eval', 'current-evt-pseudo-random-generator',
+ 'current-future', 'current-gc-milliseconds',
+ 'current-get-interaction-input-port', 'current-inexact-milliseconds',
+ 'current-input-port', 'current-inspector',
+ 'current-library-collection-links', 'current-library-collection-paths',
+ 'current-load', 'current-load-extension',
+ 'current-load-relative-directory', 'current-load/use-compiled',
+ 'current-locale', 'current-logger', 'current-memory-use',
+ 'current-milliseconds', 'current-module-declare-name',
+ 'current-module-declare-source', 'current-module-name-resolver',
+ 'current-module-path-for-load', 'current-namespace',
+ 'current-output-port', 'current-parameterization',
+ 'current-preserved-thread-cell-values', 'current-print',
+ 'current-process-milliseconds', 'current-prompt-read',
+ 'current-pseudo-random-generator', 'current-read-interaction',
+ 'current-reader-guard', 'current-readtable', 'current-seconds',
+ 'current-security-guard', 'current-subprocess-custodian-mode',
+ 'current-thread', 'current-thread-group',
+ 'current-thread-initial-stack-size',
+ 'current-write-relative-directory', 'curry', 'curryr',
+ 'custodian-box-value', 'custodian-box?', 'custodian-limit-memory',
+ 'custodian-managed-list', 'custodian-memory-accounting-available?',
+ 'custodian-require-memory', 'custodian-shutdown-all', 'custodian?',
+ 'custom-print-quotable-accessor', 'custom-print-quotable?',
+ 'custom-write-accessor', 'custom-write-property-proc', 'custom-write?',
+ 'date', 'date*', 'date*-nanosecond', 'date*-time-zone-name', 'date*?',
+ 'date-day', 'date-dst?', 'date-hour', 'date-minute', 'date-month',
+ 'date-second', 'date-time-zone-offset', 'date-week-day', 'date-year',
+ 'date-year-day', 'date?', 'datum->syntax', 'datum-intern-literal',
+ 'default-continuation-prompt-tag', 'degrees->radians',
+ 'delete-directory', 'delete-directory/files', 'delete-file',
+ 'denominator', 'dict->list', 'dict-can-functional-set?',
+ 'dict-can-remove-keys?', 'dict-clear', 'dict-clear!', 'dict-copy',
+ 'dict-count', 'dict-empty?', 'dict-for-each', 'dict-has-key?',
+ 'dict-implements/c', 'dict-implements?', 'dict-iter-contract',
+ 'dict-iterate-first', 'dict-iterate-key', 'dict-iterate-next',
+ 'dict-iterate-value', 'dict-key-contract', 'dict-keys', 'dict-map',
+ 'dict-mutable?', 'dict-ref', 'dict-ref!', 'dict-remove',
+ 'dict-remove!', 'dict-set', 'dict-set!', 'dict-set*', 'dict-set*!',
+ 'dict-update', 'dict-update!', 'dict-value-contract', 'dict-values',
+ 'dict?', 'directory-exists?', 'directory-list', 'display',
+ 'display-lines', 'display-lines-to-file', 'display-to-file',
+ 'displayln', 'double-flonum?', 'drop', 'drop-right', 'dropf',
+ 'dropf-right', 'dump-memory-stats', 'dup-input-port',
+ 'dup-output-port', 'dynamic-get-field', 'dynamic-place',
+ 'dynamic-place*', 'dynamic-require', 'dynamic-require-for-syntax',
+ 'dynamic-send', 'dynamic-set-field!', 'dynamic-wind', 'eighth',
+ 'empty', 'empty-sequence', 'empty-stream', 'empty?',
+ 'environment-variables-copy', 'environment-variables-names',
+ 'environment-variables-ref', 'environment-variables-set!',
+ 'environment-variables?', 'eof', 'eof-evt', 'eof-object?',
+ 'ephemeron-value', 'ephemeron?', 'eprintf', 'eq-contract-val',
+ 'eq-contract?', 'eq-hash-code', 'eq?', 'equal-contract-val',
+ 'equal-contract?', 'equal-hash-code', 'equal-secondary-hash-code',
+ 'equal<%>', 'equal?', 'equal?/recur', 'eqv-hash-code', 'eqv?', 'error',
+ 'error-display-handler', 'error-escape-handler',
+ 'error-print-context-length', 'error-print-source-location',
+ 'error-print-width', 'error-value->string-handler', 'eval',
+ 'eval-jit-enabled', 'eval-syntax', 'even?', 'evt/c', 'evt?',
+ 'exact->inexact', 'exact-ceiling', 'exact-floor', 'exact-integer?',
+ 'exact-nonnegative-integer?', 'exact-positive-integer?', 'exact-round',
+ 'exact-truncate', 'exact?', 'executable-yield-handler', 'exit',
+ 'exit-handler', 'exn', 'exn-continuation-marks', 'exn-message',
+ 'exn:break', 'exn:break-continuation', 'exn:break:hang-up',
+ 'exn:break:hang-up?', 'exn:break:terminate', 'exn:break:terminate?',
+ 'exn:break?', 'exn:fail', 'exn:fail:contract',
+ 'exn:fail:contract:arity', 'exn:fail:contract:arity?',
+ 'exn:fail:contract:blame', 'exn:fail:contract:blame-object',
+ 'exn:fail:contract:blame?', 'exn:fail:contract:continuation',
+ 'exn:fail:contract:continuation?', 'exn:fail:contract:divide-by-zero',
+ 'exn:fail:contract:divide-by-zero?',
+ 'exn:fail:contract:non-fixnum-result',
+ 'exn:fail:contract:non-fixnum-result?', 'exn:fail:contract:variable',
+ 'exn:fail:contract:variable-id', 'exn:fail:contract:variable?',
+ 'exn:fail:contract?', 'exn:fail:filesystem',
+ 'exn:fail:filesystem:errno', 'exn:fail:filesystem:errno-errno',
+ 'exn:fail:filesystem:errno?', 'exn:fail:filesystem:exists',
+ 'exn:fail:filesystem:exists?', 'exn:fail:filesystem:missing-module',
+ 'exn:fail:filesystem:missing-module-path',
+ 'exn:fail:filesystem:missing-module?', 'exn:fail:filesystem:version',
+ 'exn:fail:filesystem:version?', 'exn:fail:filesystem?',
+ 'exn:fail:network', 'exn:fail:network:errno',
+ 'exn:fail:network:errno-errno', 'exn:fail:network:errno?',
+ 'exn:fail:network?', 'exn:fail:object', 'exn:fail:object?',
+ 'exn:fail:out-of-memory', 'exn:fail:out-of-memory?', 'exn:fail:read',
+ 'exn:fail:read-srclocs', 'exn:fail:read:eof', 'exn:fail:read:eof?',
+ 'exn:fail:read:non-char', 'exn:fail:read:non-char?', 'exn:fail:read?',
+ 'exn:fail:syntax', 'exn:fail:syntax-exprs',
+ 'exn:fail:syntax:missing-module',
+ 'exn:fail:syntax:missing-module-path',
+ 'exn:fail:syntax:missing-module?', 'exn:fail:syntax:unbound',
+ 'exn:fail:syntax:unbound?', 'exn:fail:syntax?', 'exn:fail:unsupported',
+ 'exn:fail:unsupported?', 'exn:fail:user', 'exn:fail:user?',
+ 'exn:fail?', 'exn:misc:match?', 'exn:missing-module-accessor',
+ 'exn:missing-module?', 'exn:srclocs-accessor', 'exn:srclocs?', 'exn?',
+ 'exp', 'expand', 'expand-once', 'expand-syntax', 'expand-syntax-once',
+ 'expand-syntax-to-top-form', 'expand-to-top-form', 'expand-user-path',
+ 'explode-path', 'expt', 'externalizable<%>', 'false?', 'field-names',
+ 'fifth', 'file->bytes', 'file->bytes-lines', 'file->lines',
+ 'file->list', 'file->string', 'file->value', 'file-exists?',
+ 'file-name-from-path', 'file-or-directory-identity',
+ 'file-or-directory-modify-seconds', 'file-or-directory-permissions',
+ 'file-position', 'file-position*', 'file-size',
+ 'file-stream-buffer-mode', 'file-stream-port?', 'file-truncate',
+ 'filename-extension', 'filesystem-change-evt',
+ 'filesystem-change-evt-cancel', 'filesystem-change-evt?',
+ 'filesystem-root-list', 'filter', 'filter-map', 'filter-not',
+ 'filter-read-input-port', 'find-executable-path', 'find-files',
+ 'find-library-collection-links', 'find-library-collection-paths',
+ 'find-relative-path', 'find-system-path', 'findf', 'first', 'fixnum?',
+ 'flat-contract', 'flat-contract-predicate', 'flat-contract-property?',
+ 'flat-contract?', 'flat-named-contract', 'flatten',
+ 'floating-point-bytes->real', 'flonum?', 'floor', 'flush-output',
+ 'fold-files', 'foldl', 'foldr', 'for-each', 'force', 'format',
+ 'fourth', 'fprintf', 'free-identifier=?', 'free-label-identifier=?',
+ 'free-template-identifier=?', 'free-transformer-identifier=?',
+ 'fsemaphore-count', 'fsemaphore-post', 'fsemaphore-try-wait?',
+ 'fsemaphore-wait', 'fsemaphore?', 'future', 'future?',
+ 'futures-enabled?', 'gcd', 'generate-member-key',
+ 'generate-temporaries', 'generic-set?', 'generic?', 'gensym',
+ 'get-output-bytes', 'get-output-string', 'get-preference',
+ 'get/build-val-first-projection', 'getenv',
+ 'global-port-print-handler', 'group-execute-bit', 'group-read-bit',
+ 'group-write-bit', 'guard-evt', 'handle-evt', 'handle-evt?',
+ 'has-contract?', 'hash', 'hash->list', 'hash-clear', 'hash-clear!',
+ 'hash-copy', 'hash-copy-clear', 'hash-count', 'hash-empty?',
+ 'hash-eq?', 'hash-equal?', 'hash-eqv?', 'hash-for-each',
+ 'hash-has-key?', 'hash-iterate-first', 'hash-iterate-key',
+ 'hash-iterate-next', 'hash-iterate-value', 'hash-keys', 'hash-map',
+ 'hash-placeholder?', 'hash-ref', 'hash-ref!', 'hash-remove',
+ 'hash-remove!', 'hash-set', 'hash-set!', 'hash-set*', 'hash-set*!',
+ 'hash-update', 'hash-update!', 'hash-values', 'hash-weak?', 'hash/c',
+ 'hash?', 'hasheq', 'hasheqv', 'identifier-binding',
+ 'identifier-binding-symbol', 'identifier-label-binding',
+ 'identifier-prune-lexical-context',
+ 'identifier-prune-to-source-module',
+ 'identifier-remove-from-definition-context',
+ 'identifier-template-binding', 'identifier-transformer-binding',
+ 'identifier?', 'identity', 'imag-part', 'immutable?',
+ 'impersonate-box', 'impersonate-channel',
+ 'impersonate-continuation-mark-key', 'impersonate-hash',
+ 'impersonate-procedure', 'impersonate-prompt-tag',
+ 'impersonate-struct', 'impersonate-vector', 'impersonator-contract?',
+ 'impersonator-ephemeron', 'impersonator-of?',
+ 'impersonator-prop:application-mark', 'impersonator-prop:contracted',
+ 'impersonator-property-accessor-procedure?', 'impersonator-property?',
+ 'impersonator?', 'implementation?', 'implementation?/c', 'in-bytes',
+ 'in-bytes-lines', 'in-cycle', 'in-dict', 'in-dict-keys',
+ 'in-dict-pairs', 'in-dict-values', 'in-directory', 'in-hash',
+ 'in-hash-keys', 'in-hash-pairs', 'in-hash-values', 'in-indexed',
+ 'in-input-port-bytes', 'in-input-port-chars', 'in-lines', 'in-list',
+ 'in-mlist', 'in-naturals', 'in-parallel', 'in-permutations', 'in-port',
+ 'in-producer', 'in-range', 'in-sequences', 'in-set', 'in-stream',
+ 'in-string', 'in-value', 'in-values*-sequence', 'in-values-sequence',
+ 'in-vector', 'inexact->exact', 'inexact-real?', 'inexact?',
+ 'infinite?', 'input-port-append', 'input-port?', 'inspector?',
+ 'instanceof/c', 'integer->char', 'integer->integer-bytes',
+ 'integer-bytes->integer', 'integer-in', 'integer-length',
+ 'integer-sqrt', 'integer-sqrt/remainder', 'integer?',
+ 'interface->method-names', 'interface-extension?', 'interface?',
+ 'internal-definition-context-seal', 'internal-definition-context?',
+ 'is-a?', 'is-a?/c', 'keyword->string', 'keyword-apply', 'keyword<?',
+ 'keyword?', 'keywords-match', 'kill-thread', 'last', 'last-pair',
+ 'lcm', 'length', 'liberal-define-context?', 'link-exists?', 'list',
+ 'list*', 'list->bytes', 'list->mutable-set', 'list->mutable-seteq',
+ 'list->mutable-seteqv', 'list->set', 'list->seteq', 'list->seteqv',
+ 'list->string', 'list->vector', 'list->weak-set', 'list->weak-seteq',
+ 'list->weak-seteqv', 'list-ref', 'list-tail', 'list/c', 'list?',
+ 'listof', 'load', 'load-extension', 'load-on-demand-enabled',
+ 'load-relative', 'load-relative-extension', 'load/cd',
+ 'load/use-compiled', 'local-expand', 'local-expand/capture-lifts',
+ 'local-transformer-expand', 'local-transformer-expand/capture-lifts',
+ 'locale-string-encoding', 'log', 'log-level?', 'log-max-level',
+ 'log-message', 'log-receiver?', 'logger-name', 'logger?', 'magnitude',
+ 'make-arity-at-least', 'make-base-empty-namespace',
+ 'make-base-namespace', 'make-bytes', 'make-channel',
+ 'make-chaperone-contract', 'make-continuation-mark-key',
+ 'make-continuation-prompt-tag', 'make-contract', 'make-custodian',
+ 'make-custodian-box', 'make-custom-hash', 'make-custom-hash-types',
+ 'make-custom-set', 'make-custom-set-types', 'make-date', 'make-date*',
+ 'make-derived-parameter', 'make-directory', 'make-directory*',
+ 'make-do-sequence', 'make-empty-namespace',
+ 'make-environment-variables', 'make-ephemeron', 'make-exn',
+ 'make-exn:break', 'make-exn:break:hang-up', 'make-exn:break:terminate',
+ 'make-exn:fail', 'make-exn:fail:contract',
+ 'make-exn:fail:contract:arity', 'make-exn:fail:contract:blame',
+ 'make-exn:fail:contract:continuation',
+ 'make-exn:fail:contract:divide-by-zero',
+ 'make-exn:fail:contract:non-fixnum-result',
+ 'make-exn:fail:contract:variable', 'make-exn:fail:filesystem',
+ 'make-exn:fail:filesystem:errno', 'make-exn:fail:filesystem:exists',
+ 'make-exn:fail:filesystem:missing-module',
+ 'make-exn:fail:filesystem:version', 'make-exn:fail:network',
+ 'make-exn:fail:network:errno', 'make-exn:fail:object',
+ 'make-exn:fail:out-of-memory', 'make-exn:fail:read',
+ 'make-exn:fail:read:eof', 'make-exn:fail:read:non-char',
+ 'make-exn:fail:syntax', 'make-exn:fail:syntax:missing-module',
+ 'make-exn:fail:syntax:unbound', 'make-exn:fail:unsupported',
+ 'make-exn:fail:user', 'make-file-or-directory-link',
+ 'make-flat-contract', 'make-fsemaphore', 'make-generic',
+ 'make-handle-get-preference-locked', 'make-hash',
+ 'make-hash-placeholder', 'make-hasheq', 'make-hasheq-placeholder',
+ 'make-hasheqv', 'make-hasheqv-placeholder',
+ 'make-immutable-custom-hash', 'make-immutable-hash',
+ 'make-immutable-hasheq', 'make-immutable-hasheqv',
+ 'make-impersonator-property', 'make-input-port',
+ 'make-input-port/read-to-peek', 'make-inspector',
+ 'make-keyword-procedure', 'make-known-char-range-list',
+ 'make-limited-input-port', 'make-list', 'make-lock-file-name',
+ 'make-log-receiver', 'make-logger', 'make-mixin-contract',
+ 'make-mutable-custom-set', 'make-none/c', 'make-object',
+ 'make-output-port', 'make-parameter', 'make-phantom-bytes',
+ 'make-pipe', 'make-pipe-with-specials', 'make-placeholder',
+ 'make-polar', 'make-prefab-struct', 'make-primitive-class',
+ 'make-proj-contract', 'make-pseudo-random-generator',
+ 'make-reader-graph', 'make-readtable', 'make-rectangular',
+ 'make-rename-transformer', 'make-resolved-module-path',
+ 'make-security-guard', 'make-semaphore', 'make-set!-transformer',
+ 'make-shared-bytes', 'make-sibling-inspector', 'make-special-comment',
+ 'make-srcloc', 'make-string', 'make-struct-field-accessor',
+ 'make-struct-field-mutator', 'make-struct-type',
+ 'make-struct-type-property', 'make-syntax-delta-introducer',
+ 'make-syntax-introducer', 'make-temporary-file',
+ 'make-tentative-pretty-print-output-port', 'make-thread-cell',
+ 'make-thread-group', 'make-vector', 'make-weak-box',
+ 'make-weak-custom-hash', 'make-weak-custom-set', 'make-weak-hash',
+ 'make-weak-hasheq', 'make-weak-hasheqv', 'make-will-executor', 'map',
+ 'match-equality-test', 'matches-arity-exactly?', 'max', 'mcar', 'mcdr',
+ 'mcons', 'member', 'member-name-key-hash-code', 'member-name-key=?',
+ 'member-name-key?', 'memf', 'memq', 'memv', 'merge-input',
+ 'method-in-interface?', 'min', 'mixin-contract', 'module->exports',
+ 'module->imports', 'module->language-info', 'module->namespace',
+ 'module-compiled-cross-phase-persistent?', 'module-compiled-exports',
+ 'module-compiled-imports', 'module-compiled-language-info',
+ 'module-compiled-name', 'module-compiled-submodules',
+ 'module-declared?', 'module-path-index-join',
+ 'module-path-index-resolve', 'module-path-index-split',
+ 'module-path-index-submodule', 'module-path-index?', 'module-path?',
+ 'module-predefined?', 'module-provide-protected?', 'modulo', 'mpair?',
+ 'mutable-set', 'mutable-seteq', 'mutable-seteqv', 'n->th',
+ 'nack-guard-evt', 'namespace-anchor->empty-namespace',
+ 'namespace-anchor->namespace', 'namespace-anchor?',
+ 'namespace-attach-module', 'namespace-attach-module-declaration',
+ 'namespace-base-phase', 'namespace-mapped-symbols',
+ 'namespace-module-identifier', 'namespace-module-registry',
+ 'namespace-require', 'namespace-require/constant',
+ 'namespace-require/copy', 'namespace-require/expansion-time',
+ 'namespace-set-variable-value!', 'namespace-symbol->identifier',
+ 'namespace-syntax-introduce', 'namespace-undefine-variable!',
+ 'namespace-unprotect-module', 'namespace-variable-value', 'namespace?',
+ 'nan?', 'natural-number/c', 'negate', 'negative?', 'never-evt',
+ u'new-∀/c', u'new-∃/c', 'newline', 'ninth', 'non-empty-listof',
+ 'none/c', 'normal-case-path', 'normalize-arity', 'normalize-path',
+ 'normalized-arity?', 'not', 'not/c', 'null', 'null?', 'number->string',
+ 'number?', 'numerator', 'object%', 'object->vector', 'object-info',
+ 'object-interface', 'object-method-arity-includes?', 'object-name',
+ 'object=?', 'object?', 'odd?', 'one-of/c', 'open-input-bytes',
+ 'open-input-file', 'open-input-output-file', 'open-input-string',
+ 'open-output-bytes', 'open-output-file', 'open-output-nowhere',
+ 'open-output-string', 'or/c', 'order-of-magnitude', 'ormap',
+ 'other-execute-bit', 'other-read-bit', 'other-write-bit',
+ 'output-port?', 'pair?', 'parameter-procedure=?', 'parameter/c',
+ 'parameter?', 'parameterization?', 'parse-command-line', 'partition',
+ 'path->bytes', 'path->complete-path', 'path->directory-path',
+ 'path->string', 'path-add-suffix', 'path-convention-type',
+ 'path-element->bytes', 'path-element->string', 'path-element?',
+ 'path-for-some-system?', 'path-list-string->path-list', 'path-only',
+ 'path-replace-suffix', 'path-string?', 'path<?', 'path?',
+ 'pathlist-closure', 'peek-byte', 'peek-byte-or-special', 'peek-bytes',
+ 'peek-bytes!', 'peek-bytes!-evt', 'peek-bytes-avail!',
+ 'peek-bytes-avail!*', 'peek-bytes-avail!-evt',
+ 'peek-bytes-avail!/enable-break', 'peek-bytes-evt', 'peek-char',
+ 'peek-char-or-special', 'peek-string', 'peek-string!',
+ 'peek-string!-evt', 'peek-string-evt', 'peeking-input-port',
+ 'permutations', 'phantom-bytes?', 'pi', 'pi.f', 'pipe-content-length',
+ 'place-break', 'place-channel', 'place-channel-get',
+ 'place-channel-put', 'place-channel-put/get', 'place-channel?',
+ 'place-dead-evt', 'place-enabled?', 'place-kill', 'place-location?',
+ 'place-message-allowed?', 'place-sleep', 'place-wait', 'place?',
+ 'placeholder-get', 'placeholder-set!', 'placeholder?',
+ 'poll-guard-evt', 'port->bytes', 'port->bytes-lines', 'port->lines',
+ 'port->list', 'port->string', 'port-closed-evt', 'port-closed?',
+ 'port-commit-peeked', 'port-count-lines!', 'port-count-lines-enabled',
+ 'port-counts-lines?', 'port-display-handler', 'port-file-identity',
+ 'port-file-unlock', 'port-next-location', 'port-print-handler',
+ 'port-progress-evt', 'port-provides-progress-evts?',
+ 'port-read-handler', 'port-try-file-lock?', 'port-write-handler',
+ 'port-writes-atomic?', 'port-writes-special?', 'port?', 'positive?',
+ 'predicate/c', 'prefab-key->struct-type', 'prefab-key?',
+ 'prefab-struct-key', 'preferences-lock-file-mode', 'pregexp',
+ 'pregexp?', 'pretty-display', 'pretty-format', 'pretty-print',
+ 'pretty-print-.-symbol-without-bars',
+ 'pretty-print-abbreviate-read-macros', 'pretty-print-columns',
+ 'pretty-print-current-style-table', 'pretty-print-depth',
+ 'pretty-print-exact-as-decimal', 'pretty-print-extend-style-table',
+ 'pretty-print-handler', 'pretty-print-newline',
+ 'pretty-print-post-print-hook', 'pretty-print-pre-print-hook',
+ 'pretty-print-print-hook', 'pretty-print-print-line',
+ 'pretty-print-remap-stylable', 'pretty-print-show-inexactness',
+ 'pretty-print-size-hook', 'pretty-print-style-table?',
+ 'pretty-printing', 'pretty-write', 'primitive-closure?',
+ 'primitive-result-arity', 'primitive?', 'print', 'print-as-expression',
+ 'print-boolean-long-form', 'print-box', 'print-graph',
+ 'print-hash-table', 'print-mpair-curly-braces',
+ 'print-pair-curly-braces', 'print-reader-abbreviations',
+ 'print-struct', 'print-syntax-width', 'print-unreadable',
+ 'print-vector-length', 'printable/c', 'printable<%>', 'printf',
+ 'procedure->method', 'procedure-arity', 'procedure-arity-includes/c',
+ 'procedure-arity-includes?', 'procedure-arity?',
+ 'procedure-closure-contents-eq?', 'procedure-extract-target',
+ 'procedure-keywords', 'procedure-reduce-arity',
+ 'procedure-reduce-keyword-arity', 'procedure-rename',
+ 'procedure-struct-type?', 'procedure?', 'process', 'process*',
+ 'process*/ports', 'process/ports', 'processor-count', 'progress-evt?',
+ 'promise-forced?', 'promise-running?', 'promise/c', 'promise?',
+ 'prop:arity-string', 'prop:chaperone-contract',
+ 'prop:checked-procedure', 'prop:contract', 'prop:contracted',
+ 'prop:custom-print-quotable', 'prop:custom-write', 'prop:dict',
+ 'prop:dict/contract', 'prop:equal+hash', 'prop:evt',
+ 'prop:exn:missing-module', 'prop:exn:srclocs', 'prop:flat-contract',
+ 'prop:impersonator-of', 'prop:input-port',
+ 'prop:liberal-define-context', 'prop:opt-chaperone-contract',
+ 'prop:opt-chaperone-contract-get-test', 'prop:opt-chaperone-contract?',
+ 'prop:output-port', 'prop:place-location', 'prop:procedure',
+ 'prop:rename-transformer', 'prop:sequence', 'prop:set!-transformer',
+ 'prop:stream', 'proper-subset?', 'pseudo-random-generator->vector',
+ 'pseudo-random-generator-vector?', 'pseudo-random-generator?',
+ 'put-preferences', 'putenv', 'quotient', 'quotient/remainder',
+ 'radians->degrees', 'raise', 'raise-argument-error',
+ 'raise-arguments-error', 'raise-arity-error', 'raise-blame-error',
+ 'raise-contract-error', 'raise-mismatch-error',
+ 'raise-not-cons-blame-error', 'raise-range-error',
+ 'raise-result-error', 'raise-syntax-error', 'raise-type-error',
+ 'raise-user-error', 'random', 'random-seed', 'range', 'rational?',
+ 'rationalize', 'read', 'read-accept-bar-quote', 'read-accept-box',
+ 'read-accept-compiled', 'read-accept-dot', 'read-accept-graph',
+ 'read-accept-infix-dot', 'read-accept-lang', 'read-accept-quasiquote',
+ 'read-accept-reader', 'read-byte', 'read-byte-or-special',
+ 'read-bytes', 'read-bytes!', 'read-bytes!-evt', 'read-bytes-avail!',
+ 'read-bytes-avail!*', 'read-bytes-avail!-evt',
+ 'read-bytes-avail!/enable-break', 'read-bytes-evt', 'read-bytes-line',
+ 'read-bytes-line-evt', 'read-case-sensitive', 'read-char',
+ 'read-char-or-special', 'read-curly-brace-as-paren',
+ 'read-decimal-as-inexact', 'read-eval-print-loop', 'read-language',
+ 'read-line', 'read-line-evt', 'read-on-demand-source',
+ 'read-square-bracket-as-paren', 'read-string', 'read-string!',
+ 'read-string!-evt', 'read-string-evt', 'read-syntax',
+ 'read-syntax/recursive', 'read/recursive', 'readtable-mapping',
+ 'readtable?', 'real->decimal-string', 'real->double-flonum',
+ 'real->floating-point-bytes', 'real->single-flonum', 'real-in',
+ 'real-part', 'real?', 'reencode-input-port', 'reencode-output-port',
+ 'regexp', 'regexp-match', 'regexp-match*', 'regexp-match-evt',
+ 'regexp-match-exact?', 'regexp-match-peek',
+ 'regexp-match-peek-immediate', 'regexp-match-peek-positions',
+ 'regexp-match-peek-positions*',
+ 'regexp-match-peek-positions-immediate',
+ 'regexp-match-peek-positions-immediate/end',
+ 'regexp-match-peek-positions/end', 'regexp-match-positions',
+ 'regexp-match-positions*', 'regexp-match-positions/end',
+ 'regexp-match/end', 'regexp-match?', 'regexp-max-lookbehind',
+ 'regexp-quote', 'regexp-replace', 'regexp-replace*',
+ 'regexp-replace-quote', 'regexp-replaces', 'regexp-split',
+ 'regexp-try-match', 'regexp?', 'relative-path?', 'relocate-input-port',
+ 'relocate-output-port', 'remainder', 'remove', 'remove*',
+ 'remove-duplicates', 'remq', 'remq*', 'remv', 'remv*',
+ 'rename-file-or-directory', 'rename-transformer-target',
+ 'rename-transformer?', 'reroot-path', 'resolve-path',
+ 'resolved-module-path-name', 'resolved-module-path?', 'rest',
+ 'reverse', 'round', 'second', 'seconds->date', 'security-guard?',
+ 'semaphore-peek-evt', 'semaphore-peek-evt?', 'semaphore-post',
+ 'semaphore-try-wait?', 'semaphore-wait', 'semaphore-wait/enable-break',
+ 'semaphore?', 'sequence->list', 'sequence->stream',
+ 'sequence-add-between', 'sequence-andmap', 'sequence-append',
+ 'sequence-count', 'sequence-filter', 'sequence-fold',
+ 'sequence-for-each', 'sequence-generate', 'sequence-generate*',
+ 'sequence-length', 'sequence-map', 'sequence-ormap', 'sequence-ref',
+ 'sequence-tail', 'sequence?', 'set', 'set!-transformer-procedure',
+ 'set!-transformer?', 'set->list', 'set->stream', 'set-add', 'set-add!',
+ 'set-box!', 'set-clear', 'set-clear!', 'set-copy', 'set-copy-clear',
+ 'set-count', 'set-empty?', 'set-eq?', 'set-equal?', 'set-eqv?',
+ 'set-first', 'set-for-each', 'set-implements/c', 'set-implements?',
+ 'set-intersect', 'set-intersect!', 'set-map', 'set-mcar!', 'set-mcdr!',
+ 'set-member?', 'set-mutable?', 'set-phantom-bytes!',
+ 'set-port-next-location!', 'set-remove', 'set-remove!', 'set-rest',
+ 'set-subtract', 'set-subtract!', 'set-symmetric-difference',
+ 'set-symmetric-difference!', 'set-union', 'set-union!', 'set-weak?',
+ 'set/c', 'set=?', 'set?', 'seteq', 'seteqv', 'seventh', 'sgn',
+ 'shared-bytes', 'shell-execute', 'shrink-path-wrt', 'shuffle',
+ 'simple-form-path', 'simplify-path', 'sin', 'single-flonum?', 'sinh',
+ 'sixth', 'skip-projection-wrapper?', 'sleep',
+ 'some-system-path->string', 'sort', 'special-comment-value',
+ 'special-comment?', 'special-filter-input-port', 'split-at',
+ 'split-at-right', 'split-path', 'splitf-at', 'splitf-at-right', 'sqr',
+ 'sqrt', 'srcloc', 'srcloc->string', 'srcloc-column', 'srcloc-line',
+ 'srcloc-position', 'srcloc-source', 'srcloc-span', 'srcloc?',
+ 'stop-after', 'stop-before', 'stream->list', 'stream-add-between',
+ 'stream-andmap', 'stream-append', 'stream-count', 'stream-empty?',
+ 'stream-filter', 'stream-first', 'stream-fold', 'stream-for-each',
+ 'stream-length', 'stream-map', 'stream-ormap', 'stream-ref',
+ 'stream-rest', 'stream-tail', 'stream?', 'string',
+ 'string->bytes/latin-1', 'string->bytes/locale', 'string->bytes/utf-8',
+ 'string->immutable-string', 'string->keyword', 'string->list',
+ 'string->number', 'string->path', 'string->path-element',
+ 'string->some-system-path', 'string->symbol',
+ 'string->uninterned-symbol', 'string->unreadable-symbol',
+ 'string-append', 'string-append*', 'string-ci<=?', 'string-ci<?',
+ 'string-ci=?', 'string-ci>=?', 'string-ci>?', 'string-copy',
+ 'string-copy!', 'string-downcase', 'string-environment-variable-name?',
+ 'string-fill!', 'string-foldcase', 'string-join', 'string-len/c',
+ 'string-length', 'string-locale-ci<?', 'string-locale-ci=?',
+ 'string-locale-ci>?', 'string-locale-downcase', 'string-locale-upcase',
+ 'string-locale<?', 'string-locale=?', 'string-locale>?',
+ 'string-no-nuls?', 'string-normalize-nfc', 'string-normalize-nfd',
+ 'string-normalize-nfkc', 'string-normalize-nfkd',
+ 'string-normalize-spaces', 'string-ref', 'string-replace',
+ 'string-set!', 'string-split', 'string-titlecase', 'string-trim',
+ 'string-upcase', 'string-utf-8-length', 'string<=?', 'string<?',
+ 'string=?', 'string>=?', 'string>?', 'string?', 'struct->vector',
+ 'struct-accessor-procedure?', 'struct-constructor-procedure?',
+ 'struct-info', 'struct-mutator-procedure?',
+ 'struct-predicate-procedure?', 'struct-type-info',
+ 'struct-type-make-constructor', 'struct-type-make-predicate',
+ 'struct-type-property-accessor-procedure?', 'struct-type-property/c',
+ 'struct-type-property?', 'struct-type?', 'struct:arity-at-least',
+ 'struct:date', 'struct:date*', 'struct:exn', 'struct:exn:break',
+ 'struct:exn:break:hang-up', 'struct:exn:break:terminate',
+ 'struct:exn:fail', 'struct:exn:fail:contract',
+ 'struct:exn:fail:contract:arity', 'struct:exn:fail:contract:blame',
+ 'struct:exn:fail:contract:continuation',
+ 'struct:exn:fail:contract:divide-by-zero',
+ 'struct:exn:fail:contract:non-fixnum-result',
+ 'struct:exn:fail:contract:variable', 'struct:exn:fail:filesystem',
+ 'struct:exn:fail:filesystem:errno',
+ 'struct:exn:fail:filesystem:exists',
+ 'struct:exn:fail:filesystem:missing-module',
+ 'struct:exn:fail:filesystem:version', 'struct:exn:fail:network',
+ 'struct:exn:fail:network:errno', 'struct:exn:fail:object',
+ 'struct:exn:fail:out-of-memory', 'struct:exn:fail:read',
+ 'struct:exn:fail:read:eof', 'struct:exn:fail:read:non-char',
+ 'struct:exn:fail:syntax', 'struct:exn:fail:syntax:missing-module',
+ 'struct:exn:fail:syntax:unbound', 'struct:exn:fail:unsupported',
+ 'struct:exn:fail:user', 'struct:srcloc',
+ 'struct:wrapped-extra-arg-arrow', 'struct?', 'sub1', 'subbytes',
+ 'subclass?', 'subclass?/c', 'subprocess', 'subprocess-group-enabled',
+ 'subprocess-kill', 'subprocess-pid', 'subprocess-status',
+ 'subprocess-wait', 'subprocess?', 'subset?', 'substring',
+ 'symbol->string', 'symbol-interned?', 'symbol-unreadable?', 'symbol<?',
+ 'symbol=?', 'symbol?', 'symbols', 'sync', 'sync/enable-break',
+ 'sync/timeout', 'sync/timeout/enable-break', 'syntax->datum',
+ 'syntax->list', 'syntax-arm', 'syntax-column', 'syntax-disarm',
+ 'syntax-e', 'syntax-line', 'syntax-local-bind-syntaxes',
+ 'syntax-local-certifier', 'syntax-local-context',
+ 'syntax-local-expand-expression', 'syntax-local-get-shadower',
+ 'syntax-local-introduce', 'syntax-local-lift-context',
+ 'syntax-local-lift-expression',
+ 'syntax-local-lift-module-end-declaration',
+ 'syntax-local-lift-provide', 'syntax-local-lift-require',
+ 'syntax-local-lift-values-expression',
+ 'syntax-local-make-definition-context',
+ 'syntax-local-make-delta-introducer',
+ 'syntax-local-module-defined-identifiers',
+ 'syntax-local-module-exports',
+ 'syntax-local-module-required-identifiers', 'syntax-local-name',
+ 'syntax-local-phase-level', 'syntax-local-submodules',
+ 'syntax-local-transforming-module-provides?', 'syntax-local-value',
+ 'syntax-local-value/immediate', 'syntax-original?', 'syntax-position',
+ 'syntax-property', 'syntax-property-symbol-keys', 'syntax-protect',
+ 'syntax-rearm', 'syntax-recertify', 'syntax-shift-phase-level',
+ 'syntax-source', 'syntax-source-module', 'syntax-span', 'syntax-taint',
+ 'syntax-tainted?', 'syntax-track-origin',
+ 'syntax-transforming-module-expression?', 'syntax-transforming?',
+ 'syntax/c', 'syntax?', 'system', 'system*', 'system*/exit-code',
+ 'system-big-endian?', 'system-idle-evt', 'system-language+country',
+ 'system-library-subpath', 'system-path-convention-type', 'system-type',
+ 'system/exit-code', 'tail-marks-match?', 'take', 'take-right', 'takef',
+ 'takef-right', 'tan', 'tanh', 'tcp-abandon-port', 'tcp-accept',
+ 'tcp-accept-evt', 'tcp-accept-ready?', 'tcp-accept/enable-break',
+ 'tcp-addresses', 'tcp-close', 'tcp-connect',
+ 'tcp-connect/enable-break', 'tcp-listen', 'tcp-listener?', 'tcp-port?',
+ 'tentative-pretty-print-port-cancel',
+ 'tentative-pretty-print-port-transfer', 'tenth', 'terminal-port?',
+ 'the-unsupplied-arg', 'third', 'thread', 'thread-cell-ref',
+ 'thread-cell-set!', 'thread-cell-values?', 'thread-cell?',
+ 'thread-dead-evt', 'thread-dead?', 'thread-group?', 'thread-receive',
+ 'thread-receive-evt', 'thread-resume', 'thread-resume-evt',
+ 'thread-rewind-receive', 'thread-running?', 'thread-send',
+ 'thread-suspend', 'thread-suspend-evt', 'thread-try-receive',
+ 'thread-wait', 'thread/suspend-to-kill', 'thread?', 'time-apply',
+ 'touch', 'transplant-input-port', 'transplant-output-port', 'true',
+ 'truncate', 'udp-addresses', 'udp-bind!', 'udp-bound?', 'udp-close',
+ 'udp-connect!', 'udp-connected?', 'udp-multicast-interface',
+ 'udp-multicast-join-group!', 'udp-multicast-leave-group!',
+ 'udp-multicast-loopback?', 'udp-multicast-set-interface!',
+ 'udp-multicast-set-loopback!', 'udp-multicast-set-ttl!',
+ 'udp-multicast-ttl', 'udp-open-socket', 'udp-receive!',
+ 'udp-receive!*', 'udp-receive!-evt', 'udp-receive!/enable-break',
+ 'udp-receive-ready-evt', 'udp-send', 'udp-send*', 'udp-send-evt',
+ 'udp-send-ready-evt', 'udp-send-to', 'udp-send-to*', 'udp-send-to-evt',
+ 'udp-send-to/enable-break', 'udp-send/enable-break', 'udp?', 'unbox',
+ 'uncaught-exception-handler', 'unit?', 'unspecified-dom',
+ 'unsupplied-arg?', 'use-collection-link-paths',
+ 'use-compiled-file-paths', 'use-user-specific-search-paths',
+ 'user-execute-bit', 'user-read-bit', 'user-write-bit',
+ 'value-contract', 'values', 'variable-reference->empty-namespace',
+ 'variable-reference->module-base-phase',
+ 'variable-reference->module-declaration-inspector',
+ 'variable-reference->module-path-index',
+ 'variable-reference->module-source', 'variable-reference->namespace',
+ 'variable-reference->phase',
+ 'variable-reference->resolved-module-path',
+ 'variable-reference-constant?', 'variable-reference?', 'vector',
+ 'vector->immutable-vector', 'vector->list',
+ 'vector->pseudo-random-generator', 'vector->pseudo-random-generator!',
+ 'vector->values', 'vector-append', 'vector-argmax', 'vector-argmin',
+ 'vector-copy', 'vector-copy!', 'vector-count', 'vector-drop',
+ 'vector-drop-right', 'vector-fill!', 'vector-filter',
+ 'vector-filter-not', 'vector-immutable', 'vector-immutable/c',
+ 'vector-immutableof', 'vector-length', 'vector-map', 'vector-map!',
+ 'vector-member', 'vector-memq', 'vector-memv', 'vector-ref',
+ 'vector-set!', 'vector-set*!', 'vector-set-performance-stats!',
+ 'vector-split-at', 'vector-split-at-right', 'vector-take',
+ 'vector-take-right', 'vector/c', 'vector?', 'vectorof', 'version',
+ 'void', 'void?', 'weak-box-value', 'weak-box?', 'weak-set',
+ 'weak-seteq', 'weak-seteqv', 'will-execute', 'will-executor?',
+ 'will-register', 'will-try-execute', 'with-input-from-bytes',
+ 'with-input-from-file', 'with-input-from-string',
+ 'with-output-to-bytes', 'with-output-to-file', 'with-output-to-string',
+ 'would-be-future', 'wrap-evt', 'wrapped-extra-arg-arrow',
+ 'wrapped-extra-arg-arrow-extra-neg-party-argument',
+ 'wrapped-extra-arg-arrow-real-func', 'wrapped-extra-arg-arrow?',
+ 'writable<%>', 'write', 'write-byte', 'write-bytes',
+ 'write-bytes-avail', 'write-bytes-avail*', 'write-bytes-avail-evt',
+ 'write-bytes-avail/enable-break', 'write-char', 'write-special',
+ 'write-special-avail*', 'write-special-evt', 'write-string',
+ 'write-to-file', 'xor', 'zero?', '~.a', '~.s', '~.v', '~a', '~e', '~r',
+ '~s', '~v'
+ )
+
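+ # Helper regex fragments for Racket's numeric tower; they are composed
+ # into the number rules of the 'datum' state below.  A "#" inside a
+ # numeral is the reader's placeholder digit and makes the literal
+ # inexact, which is why the *_no_hashes variants exist.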
+ _opening_parenthesis = r'[([{]'
+ _closing_parenthesis = r'[)\]}]'
+ _delimiters = r'()[\]{}",\'`;\s'
+ _symbol = r'(?u)(?:\|[^|]*\||\\[\w\W]|[^|\\%s]+)+' % _delimiters
+ _exact_decimal_prefix = r'(?:#e)?(?:#d)?(?:#e)?'
+ _exponent = r'(?:[defls][-+]?\d+)'
+ _inexact_simple_no_hashes = r'(?:\d+(?:/\d+|\.\d*)?|\.\d+)'
+ _inexact_simple = (r'(?:%s|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|'
+ r'\d+(?:\.\d*#+|/\d+#+)))' % _inexact_simple_no_hashes)
+ _inexact_normal_no_hashes = r'(?:%s%s?)' % (_inexact_simple_no_hashes,
+ _exponent)
+ _inexact_normal = r'(?:%s%s?)' % (_inexact_simple, _exponent)
+ _inexact_special = r'(?:(?:inf|nan)\.[0f])'
+ _inexact_real = r'(?:[-+]?%s|[-+]%s)' % (_inexact_normal,
+ _inexact_special)
+ _inexact_unsigned = r'(?:%s|%s)' % (_inexact_normal, _inexact_special)
+
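+ # State layout: each *-datum state generally lexes one datum and then
+ # pops, while the *-list states pop on a closing delimiter and push a
+ # fresh *-datum state per element, so whether a datum is quoted,
+ # quasiquoted or unquoted is carried entirely by the state stack.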
+ tokens = {
+ 'root': [
+ (_closing_parenthesis, Error),
+ (r'(?!\Z)', Text, 'unquoted-datum')
+ ],
+ 'datum': [
+ (r'(?s)#;|#![ /]([^\\\n]|\\.)*', Comment),
+ (u';[^\\n\\r\x85\u2028\u2029]*', Comment.Single),
+ (r'#\|', Comment.Multiline, 'block-comment'),
+
+ # Whitespaces
+ (r'(?u)\s+', Text),
+
+ # Numbers: Keep in mind Racket reader hash prefixes, which
+ # can denote the base or the type. These don't map neatly
+ # onto Pygments token types; some judgment calls here.
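+ # The #e/#i prefixes select exactness and #b/#o/#d/#x select the base;
+ # they can be combined, e.g. 42, #d-1.5, #e1/3, 2+3i, #x1f, #i5 and
+ # +inf.0 are all valid numeric literals.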
+
+ # #d or no prefix
+ (r'(?i)%s[-+]?\d+(?=[%s])' % (_exact_decimal_prefix, _delimiters),
+ Number.Integer, '#pop'),
+ (r'(?i)%s[-+]?(\d+(\.\d*)?|\.\d+)([deflst][-+]?\d+)?(?=[%s])' %
+ (_exact_decimal_prefix, _delimiters), Number.Float, '#pop'),
+ (r'(?i)%s[-+]?(%s([-+]%s?i)?|[-+]%s?i)(?=[%s])' %
+ (_exact_decimal_prefix, _inexact_normal_no_hashes,
+ _inexact_normal_no_hashes, _inexact_normal_no_hashes,
+ _delimiters), Number, '#pop'),
+
+ # Inexact without explicit #i
+ (r'(?i)(#d)?(%s([-+]%s?i)?|[-+]%s?i|%s@%s)(?=[%s])' %
+ (_inexact_real, _inexact_unsigned, _inexact_unsigned,
+ _inexact_real, _inexact_real, _delimiters), Number.Float,
+ '#pop'),
+
+ # The remaining extflonums
+ (r'(?i)(([-+]?%st[-+]?\d+)|[-+](inf|nan)\.t)(?=[%s])' %
+ (_inexact_simple, _delimiters), Number.Float, '#pop'),
+
+ # #b
+ (r'(?i)(#[ei])?#b%s' % _symbol, Number.Bin, '#pop'),
+
+ # #o
+ (r'(?i)(#[ei])?#o%s' % _symbol, Number.Oct, '#pop'),
+
+ # #x
+ (r'(?i)(#[ei])?#x%s' % _symbol, Number.Hex, '#pop'),
+
+ # #i is always inexact, i.e. float
+ (r'(?i)(#d)?#i%s' % _symbol, Number.Float, '#pop'),
+
+ # Strings and characters
+ (r'#?"', String.Double, ('#pop', 'string')),
+ (r'#<<(.+)\n(^(?!\1$).*$\n)*^\1$', String.Heredoc, '#pop'),
+ (r'#\\(u[\da-fA-F]{1,4}|U[\da-fA-F]{1,8})', String.Char, '#pop'),
+ (r'(?is)#\\([0-7]{3}|[a-z]+|.)', String.Char, '#pop'),
+ (r'(?s)#[pr]x#?"(\\?.)*?"', String.Regex, '#pop'),
+
+ # Constants
+ (r'#(true|false|[tTfF])', Name.Constant, '#pop'),
+
+ # Keyword argument names (e.g. #:keyword)
+ (r'#:%s' % _symbol, Keyword.Declaration, '#pop'),
+
+ # Reader extensions
+ (r'(#lang |#!)(\S+)',
+ bygroups(Keyword.Namespace, Name.Namespace)),
+ (r'#reader', Keyword.Namespace, 'quoted-datum'),
+
+ # Other syntax
+ (r"(?i)\.(?=[%s])|#c[is]|#['`]|#,@?" % _delimiters, Operator),
+ (r"'|#[s&]|#hash(eqv?)?|#\d*(?=%s)" % _opening_parenthesis,
+ Operator, ('#pop', 'quoted-datum'))
+ ],
+ 'datum*': [
+ (r'`|,@?', Operator),
+ (_symbol, String.Symbol, '#pop'),
+ (r'[|\\]', Error),
+ default('#pop')
+ ],
+ 'list': [
+ (_closing_parenthesis, Punctuation, '#pop')
+ ],
+ 'unquoted-datum': [
+ include('datum'),
+ (r'quote(?=[%s])' % _delimiters, Keyword,
+ ('#pop', 'quoted-datum')),
+ (r'`', Operator, ('#pop', 'quasiquoted-datum')),
+ (r'quasiquote(?=[%s])' % _delimiters, Keyword,
+ ('#pop', 'quasiquoted-datum')),
+ (_opening_parenthesis, Punctuation, ('#pop', 'unquoted-list')),
+ (words(_keywords, prefix='(?u)', suffix='(?=[%s])' % _delimiters),
+ Keyword, '#pop'),
+ (words(_builtins, prefix='(?u)', suffix='(?=[%s])' % _delimiters),
+ Name.Builtin, '#pop'),
+ (_symbol, Name, '#pop'),
+ include('datum*')
+ ],
+ 'unquoted-list': [
+ include('list'),
+ (r'(?!\Z)', Text, 'unquoted-datum')
+ ],
+ 'quasiquoted-datum': [
+ include('datum'),
+ (r',@?', Operator, ('#pop', 'unquoted-datum')),
+ (r'unquote(-splicing)?(?=[%s])' % _delimiters, Keyword,
+ ('#pop', 'unquoted-datum')),
+ (_opening_parenthesis, Punctuation, ('#pop', 'quasiquoted-list')),
+ include('datum*')
+ ],
+ 'quasiquoted-list': [
+ include('list'),
+ (r'(?!\Z)', Text, 'quasiquoted-datum')
+ ],
+ 'quoted-datum': [
+ include('datum'),
+ (_opening_parenthesis, Punctuation, ('#pop', 'quoted-list')),
+ include('datum*')
+ ],
+ 'quoted-list': [
+ include('list'),
+ (r'(?!\Z)', Text, 'quoted-datum')
+ ],
+ 'block-comment': [
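+ # Racket block comments nest, hence the #push on the opening #|.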
+ (r'#\|', Comment.Multiline, '#push'),
+ (r'\|#', Comment.Multiline, '#pop'),
+ (r'[^#|]+|.', Comment.Multiline)
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'(?s)\\([0-7]{1,3}|x[\da-fA-F]{1,2}|u[\da-fA-F]{1,4}|'
+ r'U[\da-fA-F]{1,8}|.)', String.Escape),
+ (r'[^\\"]+', String.Double)
+ ]
+ }
+
+
+class NewLispLexer(RegexLexer):
+ """
+ For `newLISP <http://www.newlisp.org/>`_ source code (version 10.3.0).
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'NewLisp'
+ aliases = ['newlisp']
+ filenames = ['*.lsp', '*.nl']
+ mimetypes = ['text/x-newlisp', 'application/x-newlisp']
+
+ flags = re.IGNORECASE | re.MULTILINE | re.UNICODE
+
+ # list of built-in functions for newLISP version 10.3
+ builtins = (
+ '^', '--', '-', ':', '!', '!=', '?', '@', '*', '/', '&', '%', '+', '++',
+ '<', '<<', '<=', '=', '>', '>=', '>>', '|', '~', '$', '$0', '$1', '$10',
+ '$11', '$12', '$13', '$14', '$15', '$2', '$3', '$4', '$5', '$6', '$7',
+ '$8', '$9', '$args', '$idx', '$it', '$main-args', 'abort', 'abs',
+ 'acos', 'acosh', 'add', 'address', 'amb', 'and', 'append-file',
+ 'append', 'apply', 'args', 'array-list', 'array?', 'array', 'asin',
+ 'asinh', 'assoc', 'atan', 'atan2', 'atanh', 'atom?', 'base64-dec',
+ 'base64-enc', 'bayes-query', 'bayes-train', 'begin',
+ 'beta', 'betai', 'bind', 'binomial', 'bits', 'callback',
+ 'case', 'catch', 'ceil', 'change-dir', 'char', 'chop', 'Class', 'clean',
+ 'close', 'command-event', 'cond', 'cons', 'constant',
+ 'context?', 'context', 'copy-file', 'copy', 'cos', 'cosh', 'count',
+ 'cpymem', 'crc32', 'crit-chi2', 'crit-z', 'current-line', 'curry',
+ 'date-list', 'date-parse', 'date-value', 'date', 'debug', 'dec',
+ 'def-new', 'default', 'define-macro', 'define',
+ 'delete-file', 'delete-url', 'delete', 'destroy', 'det', 'device',
+ 'difference', 'directory?', 'directory', 'div', 'do-until', 'do-while',
+ 'doargs', 'dolist', 'dostring', 'dotimes', 'dotree', 'dump', 'dup',
+ 'empty?', 'encrypt', 'ends-with', 'env', 'erf', 'error-event',
+ 'eval-string', 'eval', 'exec', 'exists', 'exit', 'exp', 'expand',
+ 'explode', 'extend', 'factor', 'fft', 'file-info', 'file?', 'filter',
+ 'find-all', 'find', 'first', 'flat', 'float?', 'float', 'floor', 'flt',
+ 'fn', 'for-all', 'for', 'fork', 'format', 'fv', 'gammai', 'gammaln',
+ 'gcd', 'get-char', 'get-float', 'get-int', 'get-long', 'get-string',
+ 'get-url', 'global?', 'global', 'if-not', 'if', 'ifft', 'import', 'inc',
+ 'index', 'inf?', 'int', 'integer?', 'integer', 'intersect', 'invert',
+ 'irr', 'join', 'lambda-macro', 'lambda?', 'lambda', 'last-error',
+ 'last', 'legal?', 'length', 'let', 'letex', 'letn',
+ 'list?', 'list', 'load', 'local', 'log', 'lookup',
+ 'lower-case', 'macro?', 'main-args', 'MAIN', 'make-dir', 'map', 'mat',
+ 'match', 'max', 'member', 'min', 'mod', 'module', 'mul', 'multiply',
+ 'NaN?', 'net-accept', 'net-close', 'net-connect', 'net-error',
+ 'net-eval', 'net-interface', 'net-ipv', 'net-listen', 'net-local',
+ 'net-lookup', 'net-packet', 'net-peek', 'net-peer', 'net-ping',
+ 'net-receive-from', 'net-receive-udp', 'net-receive', 'net-select',
+ 'net-send-to', 'net-send-udp', 'net-send', 'net-service',
+ 'net-sessions', 'new', 'nil?', 'nil', 'normal', 'not', 'now', 'nper',
+ 'npv', 'nth', 'null?', 'number?', 'open', 'or', 'ostype', 'pack',
+ 'parse-date', 'parse', 'peek', 'pipe', 'pmt', 'pop-assoc', 'pop',
+ 'post-url', 'pow', 'prefix', 'pretty-print', 'primitive?', 'print',
+ 'println', 'prob-chi2', 'prob-z', 'process', 'prompt-event',
+ 'protected?', 'push', 'put-url', 'pv', 'quote?', 'quote', 'rand',
+ 'random', 'randomize', 'read', 'read-char', 'read-expr', 'read-file',
+ 'read-key', 'read-line', 'read-utf8', 'reader-event',
+ 'real-path', 'receive', 'ref-all', 'ref', 'regex-comp', 'regex',
+ 'remove-dir', 'rename-file', 'replace', 'reset', 'rest', 'reverse',
+ 'rotate', 'round', 'save', 'search', 'seed', 'seek', 'select', 'self',
+ 'semaphore', 'send', 'sequence', 'series', 'set-locale', 'set-ref-all',
+ 'set-ref', 'set', 'setf', 'setq', 'sgn', 'share', 'signal', 'silent',
+ 'sin', 'sinh', 'sleep', 'slice', 'sort', 'source', 'spawn', 'sqrt',
+ 'starts-with', 'string?', 'string', 'sub', 'swap', 'sym', 'symbol?',
+ 'symbols', 'sync', 'sys-error', 'sys-info', 'tan', 'tanh', 'term',
+ 'throw-error', 'throw', 'time-of-day', 'time', 'timer', 'title-case',
+ 'trace-highlight', 'trace', 'transpose', 'Tree', 'trim', 'true?',
+ 'true', 'unicode', 'unify', 'unique', 'unless', 'unpack', 'until',
+ 'upper-case', 'utf8', 'utf8len', 'uuid', 'wait-pid', 'when', 'while',
+ 'write', 'write-char', 'write-file', 'write-line',
+ 'xfer-event', 'xml-error', 'xml-parse', 'xml-type-tags', 'zero?',
+ )
+
+ # valid names
+ valid_name = r'([\w!$%&*+.,/<=>?@^~|-])+|(\[.*?\])+'
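+ # accepts names such as my-func, $idx or *counter*, as well as
+ # [bracket]-wrapped forms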
+
+ tokens = {
+ 'root': [
+ # shebang
+ (r'#!(.*?)$', Comment.Preproc),
+ # comments starting with semicolon
+ (r';.*$', Comment.Single),
+ # comments starting with #
+ (r'#.*$', Comment.Single),
+
+ # whitespace
+ (r'\s+', Text),
+
+ # strings, symbols and characters
+ (r'"(\\\\|\\"|[^"])*"', String),
+
+ # braces
+ (r'\{', String, "bracestring"),
+
+ # [text] ... [/text] delimited strings
+ (r'\[text\]*', String, "tagstring"),
+
+ # 'special' operators...
+ (r"('|:)", Operator),
+
+ # highlight the builtins
+ (words(builtins, suffix=r'\b'),
+ Keyword),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Variable),
+
+ # the remaining variables
+ (valid_name, String.Symbol),
+
+ # parentheses
+ (r'(\(|\))', Punctuation),
+ ],
+
+ # braced strings...
+ 'bracestring': [
+ (r'\{', String, "#push"),
+ (r'\}', String, "#pop"),
+ ('[^{}]+', String),
+ ],
+
+ # tagged [text]...[/text] delimited strings...
+ 'tagstring': [
+ (r'(?s)(.*?)(\[/text\])', String, '#pop'),
+ ],
+ }
+
+
+class EmacsLispLexer(RegexLexer):
+ """
+ An ELisp lexer, parsing a stream and outputting the tokens
+ needed to highlight Emacs Lisp code.
+
+ .. versionadded:: 2.1
+ """
+ name = 'EmacsLisp'
+ aliases = ['emacs', 'elisp']
+ filenames = ['*.el']
+ mimetypes = ['text/x-elisp', 'application/x-elisp']
+
+ flags = re.MULTILINE
+
+ # couple of useful regexes
+
+ # characters that are not macro-characters and can be used to begin a symbol
+ nonmacro = r'\\.|[\w!$%&*+-/<=>?@^{}~|]'
+ constituent = nonmacro + '|[#.:]'
+ terminated = r'(?=[ "()\]\'\n,;`])' # whitespace or terminating macro characters
+
+ # symbol token, reverse-engineered from the Common Lisp HyperSpec
+ # Take a deep breath...
+ symbol = r'((?:%s)(?:%s)*)' % (nonmacro, constituent)
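+ # e.g. foo-bar, 1+, string= and cl-defstruct each match as one symbol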
+
+ macros = set((
+ 'atomic-change-group', 'case', 'block', 'cl-block', 'cl-callf', 'cl-callf2',
+ 'cl-case', 'cl-decf', 'cl-declaim', 'cl-declare',
+ 'cl-define-compiler-macro', 'cl-defmacro', 'cl-defstruct',
+ 'cl-defsubst', 'cl-deftype', 'cl-defun', 'cl-destructuring-bind',
+ 'cl-do', 'cl-do*', 'cl-do-all-symbols', 'cl-do-symbols', 'cl-dolist',
+ 'cl-dotimes', 'cl-ecase', 'cl-etypecase', 'eval-when', 'cl-eval-when', 'cl-flet',
+ 'cl-flet*', 'cl-function', 'cl-incf', 'cl-labels', 'cl-letf',
+ 'cl-letf*', 'cl-load-time-value', 'cl-locally', 'cl-loop',
+ 'cl-macrolet', 'cl-multiple-value-bind', 'cl-multiple-value-setq',
+ 'cl-progv', 'cl-psetf', 'cl-psetq', 'cl-pushnew', 'cl-remf',
+ 'cl-return', 'cl-return-from', 'cl-rotatef', 'cl-shiftf',
+ 'cl-symbol-macrolet', 'cl-tagbody', 'cl-the', 'cl-typecase',
+ 'combine-after-change-calls', 'condition-case-unless-debug', 'decf',
+ 'declaim', 'declare', 'declare-function', 'def-edebug-spec',
+ 'defadvice', 'defclass', 'defcustom', 'defface', 'defgeneric',
+ 'defgroup', 'define-advice', 'define-alternatives',
+ 'define-compiler-macro', 'define-derived-mode', 'define-generic-mode',
+ 'define-global-minor-mode', 'define-globalized-minor-mode',
+ 'define-minor-mode', 'define-modify-macro',
+ 'define-obsolete-face-alias', 'define-obsolete-function-alias',
+ 'define-obsolete-variable-alias', 'define-setf-expander',
+ 'define-skeleton', 'defmacro', 'defmethod', 'defsetf', 'defstruct',
+ 'defsubst', 'deftheme', 'deftype', 'defun', 'defvar-local',
+ 'delay-mode-hooks', 'destructuring-bind', 'do', 'do*',
+ 'do-all-symbols', 'do-symbols', 'dolist', 'dont-compile', 'dotimes',
+ 'dotimes-with-progress-reporter', 'ecase', 'ert-deftest', 'etypecase',
+ 'eval-and-compile', 'eval-when-compile', 'flet', 'ignore-errors',
+ 'incf', 'labels', 'lambda', 'letrec', 'lexical-let', 'lexical-let*',
+ 'loop', 'multiple-value-bind', 'multiple-value-setq', 'noreturn',
+ 'oref', 'oref-default', 'oset', 'oset-default', 'pcase',
+ 'pcase-defmacro', 'pcase-dolist', 'pcase-exhaustive', 'pcase-let',
+ 'pcase-let*', 'pop', 'psetf', 'psetq', 'push', 'pushnew', 'remf',
+ 'return', 'rotatef', 'rx', 'save-match-data', 'save-selected-window',
+ 'save-window-excursion', 'setf', 'setq-local', 'shiftf',
+ 'track-mouse', 'typecase', 'unless', 'use-package', 'when',
+ 'while-no-input', 'with-case-table', 'with-category-table',
+ 'with-coding-priority', 'with-current-buffer', 'with-demoted-errors',
+ 'with-eval-after-load', 'with-file-modes', 'with-local-quit',
+ 'with-output-to-string', 'with-output-to-temp-buffer',
+ 'with-parsed-tramp-file-name', 'with-selected-frame',
+ 'with-selected-window', 'with-silent-modifications', 'with-slots',
+ 'with-syntax-table', 'with-temp-buffer', 'with-temp-file',
+ 'with-temp-message', 'with-timeout', 'with-tramp-connection-property',
+ 'with-tramp-file-property', 'with-tramp-progress-reporter',
+ 'with-wrapper-hook', 'load-time-value', 'locally', 'macrolet', 'progv', 'return-from'
+ ))
+
+ special_forms = set((
+ 'and', 'catch', 'cond', 'condition-case', 'defconst', 'defvar',
+ 'function', 'if', 'interactive', 'let', 'let*', 'or', 'prog1',
+ 'prog2', 'progn', 'quote', 'save-current-buffer', 'save-excursion',
+ 'save-restriction', 'setq', 'setq-default', 'subr-arity',
+ 'unwind-protect', 'while',
+ ))
+
+ builtin_function = set((
+ '%', '*', '+', '-', '/', '/=', '1+', '1-', '<', '<=', '=', '>', '>=',
+ 'Snarf-documentation', 'abort-recursive-edit', 'abs',
+ 'accept-process-output', 'access-file', 'accessible-keymaps', 'acos',
+ 'active-minibuffer-window', 'add-face-text-property',
+ 'add-name-to-file', 'add-text-properties', 'all-completions',
+ 'append', 'apply', 'apropos-internal', 'aref', 'arrayp', 'aset',
+ 'ash', 'asin', 'assoc', 'assoc-string', 'assq', 'atan', 'atom',
+ 'autoload', 'autoload-do-load', 'backtrace', 'backtrace--locals',
+ 'backtrace-debug', 'backtrace-eval', 'backtrace-frame',
+ 'backward-char', 'backward-prefix-chars', 'barf-if-buffer-read-only',
+ 'base64-decode-region', 'base64-decode-string',
+ 'base64-encode-region', 'base64-encode-string', 'beginning-of-line',
+ 'bidi-find-overridden-directionality', 'bidi-resolved-levels',
+ 'bitmap-spec-p', 'bobp', 'bolp', 'bool-vector',
+ 'bool-vector-count-consecutive', 'bool-vector-count-population',
+ 'bool-vector-exclusive-or', 'bool-vector-intersection',
+ 'bool-vector-not', 'bool-vector-p', 'bool-vector-set-difference',
+ 'bool-vector-subsetp', 'bool-vector-union', 'boundp',
+ 'buffer-base-buffer', 'buffer-chars-modified-tick',
+ 'buffer-enable-undo', 'buffer-file-name', 'buffer-has-markers-at',
+ 'buffer-list', 'buffer-live-p', 'buffer-local-value',
+ 'buffer-local-variables', 'buffer-modified-p', 'buffer-modified-tick',
+ 'buffer-name', 'buffer-size', 'buffer-string', 'buffer-substring',
+ 'buffer-substring-no-properties', 'buffer-swap-text', 'bufferp',
+ 'bury-buffer-internal', 'byte-code', 'byte-code-function-p',
+ 'byte-to-position', 'byte-to-string', 'byteorder',
+ 'call-interactively', 'call-last-kbd-macro', 'call-process',
+ 'call-process-region', 'cancel-kbd-macro-events', 'capitalize',
+ 'capitalize-region', 'capitalize-word', 'car', 'car-less-than-car',
+ 'car-safe', 'case-table-p', 'category-docstring',
+ 'category-set-mnemonics', 'category-table', 'category-table-p',
+ 'ccl-execute', 'ccl-execute-on-string', 'ccl-program-p', 'cdr',
+ 'cdr-safe', 'ceiling', 'char-after', 'char-before',
+ 'char-category-set', 'char-charset', 'char-equal', 'char-or-string-p',
+ 'char-resolve-modifiers', 'char-syntax', 'char-table-extra-slot',
+ 'char-table-p', 'char-table-parent', 'char-table-range',
+ 'char-table-subtype', 'char-to-string', 'char-width', 'characterp',
+ 'charset-after', 'charset-id-internal', 'charset-plist',
+ 'charset-priority-list', 'charsetp', 'check-coding-system',
+ 'check-coding-systems-region', 'clear-buffer-auto-save-failure',
+ 'clear-charset-maps', 'clear-face-cache', 'clear-font-cache',
+ 'clear-image-cache', 'clear-string', 'clear-this-command-keys',
+ 'close-font', 'clrhash', 'coding-system-aliases',
+ 'coding-system-base', 'coding-system-eol-type', 'coding-system-p',
+ 'coding-system-plist', 'coding-system-priority-list',
+ 'coding-system-put', 'color-distance', 'color-gray-p',
+ 'color-supported-p', 'combine-after-change-execute',
+ 'command-error-default-function', 'command-remapping', 'commandp',
+ 'compare-buffer-substrings', 'compare-strings',
+ 'compare-window-configurations', 'completing-read',
+ 'compose-region-internal', 'compose-string-internal',
+ 'composition-get-gstring', 'compute-motion', 'concat', 'cons',
+ 'consp', 'constrain-to-field', 'continue-process',
+ 'controlling-tty-p', 'coordinates-in-window-p', 'copy-alist',
+ 'copy-category-table', 'copy-file', 'copy-hash-table', 'copy-keymap',
+ 'copy-marker', 'copy-sequence', 'copy-syntax-table', 'copysign',
+ 'cos', 'current-active-maps', 'current-bidi-paragraph-direction',
+ 'current-buffer', 'current-case-table', 'current-column',
+ 'current-global-map', 'current-idle-time', 'current-indentation',
+ 'current-input-mode', 'current-local-map', 'current-message',
+ 'current-minor-mode-maps', 'current-time', 'current-time-string',
+ 'current-time-zone', 'current-window-configuration',
+ 'cygwin-convert-file-name-from-windows',
+ 'cygwin-convert-file-name-to-windows', 'daemon-initialized',
+ 'daemonp', 'dbus--init-bus', 'dbus-get-unique-name',
+ 'dbus-message-internal', 'debug-timer-check', 'declare-equiv-charset',
+ 'decode-big5-char', 'decode-char', 'decode-coding-region',
+ 'decode-coding-string', 'decode-sjis-char', 'decode-time',
+ 'default-boundp', 'default-file-modes', 'default-printer-name',
+ 'default-toplevel-value', 'default-value', 'define-category',
+ 'define-charset-alias', 'define-charset-internal',
+ 'define-coding-system-alias', 'define-coding-system-internal',
+ 'define-fringe-bitmap', 'define-hash-table-test', 'define-key',
+ 'define-prefix-command', 'delete',
+ 'delete-all-overlays', 'delete-and-extract-region', 'delete-char',
+ 'delete-directory-internal', 'delete-field', 'delete-file',
+ 'delete-frame', 'delete-other-windows-internal', 'delete-overlay',
+ 'delete-process', 'delete-region', 'delete-terminal',
+ 'delete-window-internal', 'delq', 'describe-buffer-bindings',
+ 'describe-vector', 'destroy-fringe-bitmap', 'detect-coding-region',
+ 'detect-coding-string', 'ding', 'directory-file-name',
+ 'directory-files', 'directory-files-and-attributes', 'discard-input',
+ 'display-supports-face-attributes-p', 'do-auto-save', 'documentation',
+ 'documentation-property', 'downcase', 'downcase-region',
+ 'downcase-word', 'draw-string', 'dump-colors', 'dump-emacs',
+ 'dump-face', 'dump-frame-glyph-matrix', 'dump-glyph-matrix',
+ 'dump-glyph-row', 'dump-redisplay-history', 'dump-tool-bar-row',
+ 'elt', 'emacs-pid', 'encode-big5-char', 'encode-char',
+ 'encode-coding-region', 'encode-coding-string', 'encode-sjis-char',
+ 'encode-time', 'end-kbd-macro', 'end-of-line', 'eobp', 'eolp', 'eq',
+ 'eql', 'equal', 'equal-including-properties', 'erase-buffer',
+ 'error-message-string', 'eval', 'eval-buffer', 'eval-region',
+ 'event-convert-list', 'execute-kbd-macro', 'exit-recursive-edit',
+ 'exp', 'expand-file-name', 'expt', 'external-debugging-output',
+ 'face-attribute-relative-p', 'face-attributes-as-vector', 'face-font',
+ 'fboundp', 'fceiling', 'fetch-bytecode', 'ffloor',
+ 'field-beginning', 'field-end', 'field-string',
+ 'field-string-no-properties', 'file-accessible-directory-p',
+ 'file-acl', 'file-attributes', 'file-attributes-lessp',
+ 'file-directory-p', 'file-executable-p', 'file-exists-p',
+ 'file-locked-p', 'file-modes', 'file-name-absolute-p',
+ 'file-name-all-completions', 'file-name-as-directory',
+ 'file-name-completion', 'file-name-directory',
+ 'file-name-nondirectory', 'file-newer-than-file-p', 'file-readable-p',
+ 'file-regular-p', 'file-selinux-context', 'file-symlink-p',
+ 'file-system-info', 'file-system-info', 'file-writable-p',
+ 'fillarray', 'find-charset-region', 'find-charset-string',
+ 'find-coding-systems-region-internal', 'find-composition-internal',
+ 'find-file-name-handler', 'find-font', 'find-operation-coding-system',
+ 'float', 'float-time', 'floatp', 'floor', 'fmakunbound',
+ 'following-char', 'font-at', 'font-drive-otf', 'font-face-attributes',
+ 'font-family-list', 'font-get', 'font-get-glyphs',
+ 'font-get-system-font', 'font-get-system-normal-font', 'font-info',
+ 'font-match-p', 'font-otf-alternates', 'font-put',
+ 'font-shape-gstring', 'font-spec', 'font-variation-glyphs',
+ 'font-xlfd-name', 'fontp', 'fontset-font', 'fontset-info',
+ 'fontset-list', 'fontset-list-all', 'force-mode-line-update',
+ 'force-window-update', 'format', 'format-mode-line',
+ 'format-network-address', 'format-time-string', 'forward-char',
+ 'forward-comment', 'forward-line', 'forward-word',
+ 'frame-border-width', 'frame-bottom-divider-width',
+ 'frame-can-run-window-configuration-change-hook', 'frame-char-height',
+ 'frame-char-width', 'frame-face-alist', 'frame-first-window',
+ 'frame-focus', 'frame-font-cache', 'frame-fringe-width', 'frame-list',
+ 'frame-live-p', 'frame-or-buffer-changed-p', 'frame-parameter',
+ 'frame-parameters', 'frame-pixel-height', 'frame-pixel-width',
+ 'frame-pointer-visible-p', 'frame-right-divider-width',
+ 'frame-root-window', 'frame-scroll-bar-height',
+ 'frame-scroll-bar-width', 'frame-selected-window', 'frame-terminal',
+ 'frame-text-cols', 'frame-text-height', 'frame-text-lines',
+ 'frame-text-width', 'frame-total-cols', 'frame-total-lines',
+ 'frame-visible-p', 'framep', 'frexp', 'fringe-bitmaps-at-pos',
+ 'fround', 'fset', 'ftruncate', 'funcall', 'funcall-interactively',
+ 'function-equal', 'functionp', 'gap-position', 'gap-size',
+ 'garbage-collect', 'gc-status', 'generate-new-buffer-name', 'get',
+ 'get-buffer', 'get-buffer-create', 'get-buffer-process',
+ 'get-buffer-window', 'get-byte', 'get-char-property',
+ 'get-char-property-and-overlay', 'get-file-buffer', 'get-file-char',
+ 'get-internal-run-time', 'get-load-suffixes', 'get-pos-property',
+ 'get-process', 'get-screen-color', 'get-text-property',
+ 'get-unicode-property-internal', 'get-unused-category',
+ 'get-unused-iso-final-char', 'getenv-internal', 'gethash',
+ 'gfile-add-watch', 'gfile-rm-watch', 'global-key-binding',
+ 'gnutls-available-p', 'gnutls-boot', 'gnutls-bye', 'gnutls-deinit',
+ 'gnutls-error-fatalp', 'gnutls-error-string', 'gnutls-errorp',
+ 'gnutls-get-initstage', 'gnutls-peer-status',
+ 'gnutls-peer-status-warning-describe', 'goto-char', 'gpm-mouse-start',
+ 'gpm-mouse-stop', 'group-gid', 'group-real-gid',
+ 'handle-save-session', 'handle-switch-frame', 'hash-table-count',
+ 'hash-table-p', 'hash-table-rehash-size',
+ 'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
+ 'hash-table-weakness', 'iconify-frame', 'identity', 'image-flush',
+ 'image-mask-p', 'image-metadata', 'image-size', 'imagemagick-types',
+ 'imagep', 'indent-to', 'indirect-function', 'indirect-variable',
+ 'init-image-library', 'inotify-add-watch', 'inotify-rm-watch',
+ 'input-pending-p', 'insert', 'insert-and-inherit',
+ 'insert-before-markers', 'insert-before-markers-and-inherit',
+ 'insert-buffer-substring', 'insert-byte', 'insert-char',
+ 'insert-file-contents', 'insert-startup-screen', 'int86',
+ 'integer-or-marker-p', 'integerp', 'interactive-form', 'intern',
+ 'intern-soft', 'internal--track-mouse', 'internal-char-font',
+ 'internal-complete-buffer', 'internal-copy-lisp-face',
+ 'internal-default-process-filter',
+ 'internal-default-process-sentinel', 'internal-describe-syntax-value',
+ 'internal-event-symbol-parse-modifiers',
+ 'internal-face-x-get-resource', 'internal-get-lisp-face-attribute',
+ 'internal-lisp-face-attribute-values', 'internal-lisp-face-empty-p',
+ 'internal-lisp-face-equal-p', 'internal-lisp-face-p',
+ 'internal-make-lisp-face', 'internal-make-var-non-special',
+ 'internal-merge-in-global-face',
+ 'internal-set-alternative-font-family-alist',
+ 'internal-set-alternative-font-registry-alist',
+ 'internal-set-font-selection-order',
+ 'internal-set-lisp-face-attribute',
+ 'internal-set-lisp-face-attribute-from-resource',
+ 'internal-show-cursor', 'internal-show-cursor-p', 'interrupt-process',
+ 'invisible-p', 'invocation-directory', 'invocation-name', 'isnan',
+ 'iso-charset', 'key-binding', 'key-description',
+ 'keyboard-coding-system', 'keymap-parent', 'keymap-prompt', 'keymapp',
+ 'keywordp', 'kill-all-local-variables', 'kill-buffer', 'kill-emacs',
+ 'kill-local-variable', 'kill-process', 'last-nonminibuffer-frame',
+ 'lax-plist-get', 'lax-plist-put', 'ldexp', 'length',
+ 'libxml-parse-html-region', 'libxml-parse-xml-region',
+ 'line-beginning-position', 'line-end-position', 'line-pixel-height',
+ 'list', 'list-fonts', 'list-system-processes', 'listp', 'load',
+ 'load-average', 'local-key-binding', 'local-variable-if-set-p',
+ 'local-variable-p', 'locale-info', 'locate-file-internal',
+ 'lock-buffer', 'log', 'logand', 'logb', 'logior', 'lognot', 'logxor',
+ 'looking-at', 'lookup-image', 'lookup-image-map', 'lookup-key',
+ 'lower-frame', 'lsh', 'macroexpand', 'make-bool-vector',
+ 'make-byte-code', 'make-category-set', 'make-category-table',
+ 'make-char', 'make-char-table', 'make-directory-internal',
+ 'make-frame-invisible', 'make-frame-visible', 'make-hash-table',
+ 'make-indirect-buffer', 'make-keymap', 'make-list',
+ 'make-local-variable', 'make-marker', 'make-network-process',
+ 'make-overlay', 'make-serial-process', 'make-sparse-keymap',
+ 'make-string', 'make-symbol', 'make-symbolic-link', 'make-temp-name',
+ 'make-terminal-frame', 'make-variable-buffer-local',
+ 'make-variable-frame-local', 'make-vector', 'makunbound',
+ 'map-char-table', 'map-charset-chars', 'map-keymap',
+ 'map-keymap-internal', 'mapatoms', 'mapc', 'mapcar', 'mapconcat',
+ 'maphash', 'mark-marker', 'marker-buffer', 'marker-insertion-type',
+ 'marker-position', 'markerp', 'match-beginning', 'match-data',
+ 'match-end', 'matching-paren', 'max', 'max-char', 'md5', 'member',
+ 'memory-info', 'memory-limit', 'memory-use-counts', 'memq', 'memql',
+ 'menu-bar-menu-at-x-y', 'menu-or-popup-active-p',
+ 'menu-or-popup-active-p', 'merge-face-attribute', 'message',
+ 'message-box', 'message-or-box', 'min',
+ 'minibuffer-completion-contents', 'minibuffer-contents',
+ 'minibuffer-contents-no-properties', 'minibuffer-depth',
+ 'minibuffer-prompt', 'minibuffer-prompt-end',
+ 'minibuffer-selected-window', 'minibuffer-window', 'minibufferp',
+ 'minor-mode-key-binding', 'mod', 'modify-category-entry',
+ 'modify-frame-parameters', 'modify-syntax-entry',
+ 'mouse-pixel-position', 'mouse-position', 'move-overlay',
+ 'move-point-visually', 'move-to-column', 'move-to-window-line',
+ 'msdos-downcase-filename', 'msdos-long-file-names', 'msdos-memget',
+ 'msdos-memput', 'msdos-mouse-disable', 'msdos-mouse-enable',
+ 'msdos-mouse-init', 'msdos-mouse-p', 'msdos-remember-default-colors',
+ 'msdos-set-keyboard', 'msdos-set-mouse-buttons',
+ 'multibyte-char-to-unibyte', 'multibyte-string-p', 'narrow-to-region',
+ 'natnump', 'nconc', 'network-interface-info',
+ 'network-interface-list', 'new-fontset', 'newline-cache-check',
+ 'next-char-property-change', 'next-frame', 'next-overlay-change',
+ 'next-property-change', 'next-read-file-uses-dialog-p',
+ 'next-single-char-property-change', 'next-single-property-change',
+ 'next-window', 'nlistp', 'nreverse', 'nth', 'nthcdr', 'null',
+ 'number-or-marker-p', 'number-to-string', 'numberp',
+ 'open-dribble-file', 'open-font', 'open-termscript',
+ 'optimize-char-table', 'other-buffer', 'other-window-for-scrolling',
+ 'overlay-buffer', 'overlay-end', 'overlay-get', 'overlay-lists',
+ 'overlay-properties', 'overlay-put', 'overlay-recenter',
+ 'overlay-start', 'overlayp', 'overlays-at', 'overlays-in',
+ 'parse-partial-sexp', 'play-sound-internal', 'plist-get',
+ 'plist-member', 'plist-put', 'point', 'point-marker', 'point-max',
+ 'point-max-marker', 'point-min', 'point-min-marker',
+ 'pos-visible-in-window-p', 'position-bytes', 'posix-looking-at',
+ 'posix-search-backward', 'posix-search-forward', 'posix-string-match',
+ 'posn-at-point', 'posn-at-x-y', 'preceding-char',
+ 'prefix-numeric-value', 'previous-char-property-change',
+ 'previous-frame', 'previous-overlay-change',
+ 'previous-property-change', 'previous-single-char-property-change',
+ 'previous-single-property-change', 'previous-window', 'prin1',
+ 'prin1-to-string', 'princ', 'print', 'process-attributes',
+ 'process-buffer', 'process-coding-system', 'process-command',
+ 'process-connection', 'process-contact', 'process-datagram-address',
+ 'process-exit-status', 'process-filter', 'process-filter-multibyte-p',
+ 'process-id', 'process-inherit-coding-system-flag', 'process-list',
+ 'process-mark', 'process-name', 'process-plist',
+ 'process-query-on-exit-flag', 'process-running-child-p',
+ 'process-send-eof', 'process-send-region', 'process-send-string',
+ 'process-sentinel', 'process-status', 'process-tty-name',
+ 'process-type', 'processp', 'profiler-cpu-log',
+ 'profiler-cpu-running-p', 'profiler-cpu-start', 'profiler-cpu-stop',
+ 'profiler-memory-log', 'profiler-memory-running-p',
+ 'profiler-memory-start', 'profiler-memory-stop', 'propertize',
+ 'purecopy', 'put', 'put-text-property',
+ 'put-unicode-property-internal', 'puthash', 'query-font',
+ 'query-fontset', 'quit-process', 'raise-frame', 'random', 'rassoc',
+ 'rassq', 're-search-backward', 're-search-forward', 'read',
+ 'read-buffer', 'read-char', 'read-char-exclusive',
+ 'read-coding-system', 'read-command', 'read-event',
+ 'read-from-minibuffer', 'read-from-string', 'read-function',
+ 'read-key-sequence', 'read-key-sequence-vector',
+ 'read-no-blanks-input', 'read-non-nil-coding-system', 'read-string',
+ 'read-variable', 'recent-auto-save-p', 'recent-doskeys',
+ 'recent-keys', 'recenter', 'recursion-depth', 'recursive-edit',
+ 'redirect-debugging-output', 'redirect-frame-focus', 'redisplay',
+ 'redraw-display', 'redraw-frame', 'regexp-quote', 'region-beginning',
+ 'region-end', 'register-ccl-program', 'register-code-conversion-map',
+ 'remhash', 'remove-list-of-text-properties', 'remove-text-properties',
+ 'rename-buffer', 'rename-file', 'replace-match',
+ 'reset-this-command-lengths', 'resize-mini-window-internal',
+ 'restore-buffer-modified-p', 'resume-tty', 'reverse', 'round',
+ 'run-hook-with-args', 'run-hook-with-args-until-failure',
+ 'run-hook-with-args-until-success', 'run-hook-wrapped', 'run-hooks',
+ 'run-window-configuration-change-hook', 'run-window-scroll-functions',
+ 'safe-length', 'scan-lists', 'scan-sexps', 'scroll-down',
+ 'scroll-left', 'scroll-other-window', 'scroll-right', 'scroll-up',
+ 'search-backward', 'search-forward', 'secure-hash', 'select-frame',
+ 'select-window', 'selected-frame', 'selected-window',
+ 'self-insert-command', 'send-string-to-terminal', 'sequencep',
+ 'serial-process-configure', 'set', 'set-buffer',
+ 'set-buffer-auto-saved', 'set-buffer-major-mode',
+ 'set-buffer-modified-p', 'set-buffer-multibyte', 'set-case-table',
+ 'set-category-table', 'set-char-table-extra-slot',
+ 'set-char-table-parent', 'set-char-table-range', 'set-charset-plist',
+ 'set-charset-priority', 'set-coding-system-priority',
+ 'set-cursor-size', 'set-default', 'set-default-file-modes',
+ 'set-default-toplevel-value', 'set-file-acl', 'set-file-modes',
+ 'set-file-selinux-context', 'set-file-times', 'set-fontset-font',
+ 'set-frame-height', 'set-frame-position', 'set-frame-selected-window',
+ 'set-frame-size', 'set-frame-width', 'set-fringe-bitmap-face',
+ 'set-input-interrupt-mode', 'set-input-meta-mode', 'set-input-mode',
+ 'set-keyboard-coding-system-internal', 'set-keymap-parent',
+ 'set-marker', 'set-marker-insertion-type', 'set-match-data',
+ 'set-message-beep', 'set-minibuffer-window',
+ 'set-mouse-pixel-position', 'set-mouse-position',
+ 'set-network-process-option', 'set-output-flow-control',
+ 'set-process-buffer', 'set-process-coding-system',
+ 'set-process-datagram-address', 'set-process-filter',
+ 'set-process-filter-multibyte',
+ 'set-process-inherit-coding-system-flag', 'set-process-plist',
+ 'set-process-query-on-exit-flag', 'set-process-sentinel',
+ 'set-process-window-size', 'set-quit-char',
+ 'set-safe-terminal-coding-system-internal', 'set-screen-color',
+ 'set-standard-case-table', 'set-syntax-table',
+ 'set-terminal-coding-system-internal', 'set-terminal-local-value',
+ 'set-terminal-parameter', 'set-text-properties', 'set-time-zone-rule',
+ 'set-visited-file-modtime', 'set-window-buffer',
+ 'set-window-combination-limit', 'set-window-configuration',
+ 'set-window-dedicated-p', 'set-window-display-table',
+ 'set-window-fringes', 'set-window-hscroll', 'set-window-margins',
+ 'set-window-new-normal', 'set-window-new-pixel',
+ 'set-window-new-total', 'set-window-next-buffers',
+ 'set-window-parameter', 'set-window-point', 'set-window-prev-buffers',
+ 'set-window-redisplay-end-trigger', 'set-window-scroll-bars',
+ 'set-window-start', 'set-window-vscroll', 'setcar', 'setcdr',
+ 'setplist', 'show-face-resources', 'signal', 'signal-process', 'sin',
+ 'single-key-description', 'skip-chars-backward', 'skip-chars-forward',
+ 'skip-syntax-backward', 'skip-syntax-forward', 'sleep-for', 'sort',
+ 'sort-charsets', 'special-variable-p', 'split-char',
+ 'split-window-internal', 'sqrt', 'standard-case-table',
+ 'standard-category-table', 'standard-syntax-table', 'start-kbd-macro',
+ 'start-process', 'stop-process', 'store-kbd-macro-event', 'string',
+ 'string-as-multibyte', 'string-as-unibyte', 'string-bytes',
+ 'string-collate-equalp', 'string-collate-lessp', 'string-equal',
+ 'string-lessp', 'string-make-multibyte', 'string-make-unibyte',
+ 'string-match', 'string-to-char', 'string-to-multibyte',
+ 'string-to-number', 'string-to-syntax', 'string-to-unibyte',
+ 'string-width', 'stringp', 'subr-name', 'subrp',
+ 'subst-char-in-region', 'substitute-command-keys',
+ 'substitute-in-file-name', 'substring', 'substring-no-properties',
+ 'suspend-emacs', 'suspend-tty', 'suspicious-object', 'sxhash',
+ 'symbol-function', 'symbol-name', 'symbol-plist', 'symbol-value',
+ 'symbolp', 'syntax-table', 'syntax-table-p', 'system-groups',
+ 'system-move-file-to-trash', 'system-name', 'system-users', 'tan',
+ 'terminal-coding-system', 'terminal-list', 'terminal-live-p',
+ 'terminal-local-value', 'terminal-name', 'terminal-parameter',
+ 'terminal-parameters', 'terpri', 'test-completion',
+ 'text-char-description', 'text-properties-at', 'text-property-any',
+ 'text-property-not-all', 'this-command-keys',
+ 'this-command-keys-vector', 'this-single-command-keys',
+ 'this-single-command-raw-keys', 'time-add', 'time-less-p',
+ 'time-subtract', 'tool-bar-get-system-style', 'tool-bar-height',
+ 'tool-bar-pixel-width', 'top-level', 'trace-redisplay',
+ 'trace-to-stderr', 'translate-region-internal', 'transpose-regions',
+ 'truncate', 'try-completion', 'tty-display-color-cells',
+ 'tty-display-color-p', 'tty-no-underline',
+ 'tty-suppress-bold-inverse-default-colors', 'tty-top-frame',
+ 'tty-type', 'type-of', 'undo-boundary', 'unencodable-char-position',
+ 'unhandled-file-name-directory', 'unibyte-char-to-multibyte',
+ 'unibyte-string', 'unicode-property-table-internal', 'unify-charset',
+ 'unintern', 'unix-sync', 'unlock-buffer', 'upcase', 'upcase-initials',
+ 'upcase-initials-region', 'upcase-region', 'upcase-word',
+ 'use-global-map', 'use-local-map', 'user-full-name',
+ 'user-login-name', 'user-real-login-name', 'user-real-uid',
+ 'user-uid', 'variable-binding-locus', 'vconcat', 'vector',
+ 'vector-or-char-table-p', 'vectorp', 'verify-visited-file-modtime',
+ 'vertical-motion', 'visible-frame-list', 'visited-file-modtime',
+ 'w16-get-clipboard-data', 'w16-selection-exists-p',
+ 'w16-set-clipboard-data', 'w32-battery-status',
+ 'w32-default-color-map', 'w32-define-rgb-color',
+ 'w32-display-monitor-attributes-list', 'w32-frame-menu-bar-size',
+ 'w32-frame-rect', 'w32-get-clipboard-data',
+ 'w32-get-codepage-charset', 'w32-get-console-codepage',
+ 'w32-get-console-output-codepage', 'w32-get-current-locale-id',
+ 'w32-get-default-locale-id', 'w32-get-keyboard-layout',
+ 'w32-get-locale-info', 'w32-get-valid-codepages',
+ 'w32-get-valid-keyboard-layouts', 'w32-get-valid-locale-ids',
+ 'w32-has-winsock', 'w32-long-file-name', 'w32-reconstruct-hot-key',
+ 'w32-register-hot-key', 'w32-registered-hot-keys',
+ 'w32-selection-exists-p', 'w32-send-sys-command',
+ 'w32-set-clipboard-data', 'w32-set-console-codepage',
+ 'w32-set-console-output-codepage', 'w32-set-current-locale',
+ 'w32-set-keyboard-layout', 'w32-set-process-priority',
+ 'w32-shell-execute', 'w32-short-file-name', 'w32-toggle-lock-key',
+ 'w32-unload-winsock', 'w32-unregister-hot-key', 'w32-window-exists-p',
+ 'w32notify-add-watch', 'w32notify-rm-watch',
+ 'waiting-for-user-input-p', 'where-is-internal', 'widen',
+ 'widget-apply', 'widget-get', 'widget-put',
+ 'window-absolute-pixel-edges', 'window-at', 'window-body-height',
+ 'window-body-width', 'window-bottom-divider-width', 'window-buffer',
+ 'window-combination-limit', 'window-configuration-frame',
+ 'window-configuration-p', 'window-dedicated-p',
+ 'window-display-table', 'window-edges', 'window-end', 'window-frame',
+ 'window-fringes', 'window-header-line-height', 'window-hscroll',
+ 'window-inside-absolute-pixel-edges', 'window-inside-edges',
+ 'window-inside-pixel-edges', 'window-left-child',
+ 'window-left-column', 'window-line-height', 'window-list',
+ 'window-list-1', 'window-live-p', 'window-margins',
+ 'window-minibuffer-p', 'window-mode-line-height', 'window-new-normal',
+ 'window-new-pixel', 'window-new-total', 'window-next-buffers',
+ 'window-next-sibling', 'window-normal-size', 'window-old-point',
+ 'window-parameter', 'window-parameters', 'window-parent',
+ 'window-pixel-edges', 'window-pixel-height', 'window-pixel-left',
+ 'window-pixel-top', 'window-pixel-width', 'window-point',
+ 'window-prev-buffers', 'window-prev-sibling',
+ 'window-redisplay-end-trigger', 'window-resize-apply',
+ 'window-resize-apply-total', 'window-right-divider-width',
+ 'window-scroll-bar-height', 'window-scroll-bar-width',
+ 'window-scroll-bars', 'window-start', 'window-system',
+ 'window-text-height', 'window-text-pixel-size', 'window-text-width',
+ 'window-top-child', 'window-top-line', 'window-total-height',
+ 'window-total-width', 'window-use-time', 'window-valid-p',
+ 'window-vscroll', 'windowp', 'write-char', 'write-region',
+ 'x-backspace-delete-keys-p', 'x-change-window-property',
+ 'x-change-window-property', 'x-close-connection',
+ 'x-close-connection', 'x-create-frame', 'x-create-frame',
+ 'x-delete-window-property', 'x-delete-window-property',
+ 'x-disown-selection-internal', 'x-display-backing-store',
+ 'x-display-backing-store', 'x-display-color-cells',
+ 'x-display-color-cells', 'x-display-grayscale-p',
+ 'x-display-grayscale-p', 'x-display-list', 'x-display-list',
+ 'x-display-mm-height', 'x-display-mm-height', 'x-display-mm-width',
+ 'x-display-mm-width', 'x-display-monitor-attributes-list',
+ 'x-display-pixel-height', 'x-display-pixel-height',
+ 'x-display-pixel-width', 'x-display-pixel-width', 'x-display-planes',
+ 'x-display-planes', 'x-display-save-under', 'x-display-save-under',
+ 'x-display-screens', 'x-display-screens', 'x-display-visual-class',
+ 'x-display-visual-class', 'x-family-fonts', 'x-file-dialog',
+ 'x-file-dialog', 'x-file-dialog', 'x-focus-frame', 'x-frame-geometry',
+ 'x-frame-geometry', 'x-get-atom-name', 'x-get-resource',
+ 'x-get-selection-internal', 'x-hide-tip', 'x-hide-tip',
+ 'x-list-fonts', 'x-load-color-file', 'x-menu-bar-open-internal',
+ 'x-menu-bar-open-internal', 'x-open-connection', 'x-open-connection',
+ 'x-own-selection-internal', 'x-parse-geometry', 'x-popup-dialog',
+ 'x-popup-menu', 'x-register-dnd-atom', 'x-select-font',
+ 'x-select-font', 'x-selection-exists-p', 'x-selection-owner-p',
+ 'x-send-client-message', 'x-server-max-request-size',
+ 'x-server-max-request-size', 'x-server-vendor', 'x-server-vendor',
+ 'x-server-version', 'x-server-version', 'x-show-tip', 'x-show-tip',
+ 'x-synchronize', 'x-synchronize', 'x-uses-old-gtk-dialog',
+ 'x-window-property', 'x-window-property', 'x-wm-set-size-hint',
+ 'xw-color-defined-p', 'xw-color-defined-p', 'xw-color-values',
+ 'xw-color-values', 'xw-display-color-p', 'xw-display-color-p',
+ 'yes-or-no-p', 'zlib-available-p', 'zlib-decompress-region',
+ 'forward-point',
+ ))
+
+ builtin_function_highlighted = set((
+ 'defvaralias', 'provide', 'require',
+ 'with-no-warnings', 'define-widget', 'with-electric-help',
+ 'throw', 'defalias', 'featurep'
+ ))
+
+ lambda_list_keywords = set((
+ '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
+ '&rest', '&whole',
+ ))
+
+ error_keywords = set((
+ 'cl-assert', 'cl-check-type', 'error', 'signal',
+ 'user-error', 'warn',
+ ))
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if token is Name.Variable:
+ if value in EmacsLispLexer.builtin_function:
+ yield index, Name.Function, value
+ continue
+ if value in EmacsLispLexer.special_forms:
+ yield index, Keyword, value
+ continue
+ if value in EmacsLispLexer.error_keywords:
+ yield index, Name.Exception, value
+ continue
+ if value in EmacsLispLexer.builtin_function_highlighted:
+ yield index, Name.Builtin, value
+ continue
+ if value in EmacsLispLexer.macros:
+ yield index, Name.Builtin, value
+ continue
+ if value in EmacsLispLexer.lambda_list_keywords:
+ yield index, Keyword.Pseudo, value
+ continue
+ yield index, token, value
+
+ tokens = {
+ 'root': [
+ default('body'),
+ ],
+ 'body': [
+ # whitespace
+ (r'\s+', Text),
+
+ # single-line comment
+ (r';.*$', Comment.Single),
+
+ # strings and characters
+ (r'"', String, 'string'),
+ (r'\?([^\\]|\\.)', String.Char),
+ # quoting
+ (r":" + symbol, Name.Builtin),
+ (r"::" + symbol, String.Symbol),
+ (r"'" + symbol, String.Symbol),
+ (r"'", Operator),
+ (r"`", Operator),
+
+ # decimal numbers
+ (r'[-+]?\d+\.?' + terminated, Number.Integer),
+ (r'[-+]?\d+/\d+' + terminated, Number),
+ (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)'
+ + terminated, Number.Float),
+
+ # vectors
+ (r'\[|\]', Punctuation),
+
+ # uninterned symbol
+ (r'#:' + symbol, String.Symbol),
+
+ # read syntax for char tables
+ (r'#\^\^?', Operator),
+
+ # function shorthand
+ (r'#\'', Name.Function),
+
+ # binary rational
+ (r'#[bB][+-]?[01]+(/[01]+)?', Number.Bin),
+
+ # octal rational
+ (r'#[oO][+-]?[0-7]+(/[0-7]+)?', Number.Oct),
+
+ # hex rational
+ (r'#[xX][+-]?[0-9a-fA-F]+(/[0-9a-fA-F]+)?', Number.Hex),
+
+ # radix rational
+ (r'#\d+r[+-]?[0-9a-zA-Z]+(/[0-9a-zA-Z]+)?', Number),
+
+ # reference
+ (r'#\d+=', Operator),
+ (r'#\d+#', Operator),
+
+ # special operators that should have been parsed already
+ (r'(,@|,|\.|:)', Operator),
+
+ # special constants
+ (r'(t|nil)' + terminated, Name.Constant),
+
+ # functions and variables
+ (r'\*' + symbol + r'\*', Name.Variable.Global),
+ (symbol, Name.Variable),
+
+ # parentheses
+ (r'#\(', Operator, 'body'),
+ (r'\(', Punctuation, 'body'),
+ (r'\)', Punctuation, '#pop'),
+ ],
+ 'string': [
+ (r'[^"\\`]+', String),
+ (r'`%s\'' % symbol, String.Symbol),
+ (r'`', String),
+ (r'\\.', String),
+ (r'\\\n', String),
+ (r'"', String, '#pop'),
+ ],
+ }
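The get_tokens_unprocessed override in the Emacs Lisp lexer above filters the RegexLexer stream and re-tags Name.Variable tokens according to the keyword sets. A minimal sketch of that post-processing pattern, using a hypothetical two-rule lexer instead of the Emacs Lisp rules (not part of the patch):

    from pygments.lexer import RegexLexer
    from pygments.token import Keyword, Name, Text

    class TinyLexer(RegexLexer):
        # Hypothetical lexer: every word is first tagged as Name.Variable.
        name = 'Tiny'
        aliases = ['tiny']
        tokens = {
            'root': [
                (r'\s+', Text),
                (r'\S+', Name.Variable),
            ],
        }

        special_forms = set(('if', 'while'))

        def get_tokens_unprocessed(self, text):
            # Selectively re-tag tokens, the same trick as in the lexer above.
            for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
                if token is Name.Variable and value in self.special_forms:
                    yield index, Keyword, value
                else:
                    yield index, token, value

    print(list(TinyLexer().get_tokens_unprocessed('if foo')))
    # roughly: [(0, Token.Keyword, 'if'), (2, Token.Text, ' '), (3, Token.Name.Variable, 'foo')]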
diff --git a/pygments/lexers/make.py b/pygments/lexers/make.py
new file mode 100644
index 00000000..473b1aff
--- /dev/null
+++ b/pygments/lexers/make.py
@@ -0,0 +1,201 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.make
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Makefiles and similar.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, \
+ do_insertions, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Punctuation
+from pygments.lexers.shell import BashLexer
+
+__all__ = ['MakefileLexer', 'BaseMakefileLexer', 'CMakeLexer']
+
+
+class MakefileLexer(Lexer):
+ """
+ Lexer for BSD and GNU make extensions (lenient enough to handle both,
+ even in the same file).
+
+ *Rewritten in Pygments 0.10.*
+ """
+
+ name = 'Makefile'
+ aliases = ['make', 'makefile', 'mf', 'bsdmake']
+ filenames = ['*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile']
+ mimetypes = ['text/x-makefile']
+
+ r_special = re.compile(
+ r'^(?:'
+ # BSD Make
+ r'\.\s*(include|undef|error|warning|if|else|elif|endif|for|endfor)|'
+ # GNU Make
+ r'\s*(ifeq|ifneq|ifdef|ifndef|else|endif|-?include|define|endef|:|vpath)|'
+ # GNU Automake
+ r'\s*(if|else|endif))(?=\s)')
+ r_comment = re.compile(r'^\s*@?#')
+
+ def get_tokens_unprocessed(self, text):
+ ins = []
+ lines = text.splitlines(True)
+ done = ''
+ lex = BaseMakefileLexer(**self.options)
+ backslashflag = False
+ for line in lines:
+ if self.r_special.match(line) or backslashflag:
+ ins.append((len(done), [(0, Comment.Preproc, line)]))
+ backslashflag = line.strip().endswith('\\')
+ elif self.r_comment.match(line):
+ ins.append((len(done), [(0, Comment, line)]))
+ else:
+ done += line
+ for item in do_insertions(ins, lex.get_tokens_unprocessed(done)):
+ yield item
+
+ def analyse_text(text):
+ # Many makefiles have $(BIG_CAPS) style variables
+ if re.search(r'\$\([A-Z_]+\)', text):
+ return 0.1
+
+
+class BaseMakefileLexer(RegexLexer):
+ """
+ Lexer for simple Makefiles (no preprocessing).
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'Base Makefile'
+ aliases = ['basemake']
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ # recipes (need to allow spaces because of expandtabs)
+ (r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)),
+ # special variables
+ (r'\$[<@$+%?|*]', Keyword),
+ (r'\s+', Text),
+ (r'#.*?\n', Comment),
+ (r'(export)(\s+)(?=[\w${}\t -]+\n)',
+ bygroups(Keyword, Text), 'export'),
+ (r'export\s+', Keyword),
+ # assignment
+ (r'([\w${}.-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
+ bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
+ # strings
+ (r'(?s)"(\\\\|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\.|[^'\\])*'", String.Single),
+ # targets
+ (r'([^\n:]+)(:+)([ \t]*)', bygroups(Name.Function, Operator, Text),
+ 'block-header'),
+ # expansions
+ (r'\$\(', Keyword, 'expansion'),
+ ],
+ 'expansion': [
+ (r'[^$a-zA-Z_)]+', Text),
+ (r'[a-zA-Z_]+', Name.Variable),
+ (r'\$', Keyword),
+ (r'\(', Keyword, '#push'),
+ (r'\)', Keyword, '#pop'),
+ ],
+ 'export': [
+ (r'[\w${}-]+', Name.Variable),
+ (r'\n', Text, '#pop'),
+ (r'\s+', Text),
+ ],
+ 'block-header': [
+ (r'[,|]', Punctuation),
+ (r'#.*?\n', Comment, '#pop'),
+ (r'\\\n', Text), # line continuation
+ (r'\$\(', Keyword, 'expansion'),
+ (r'[a-zA-Z_]+', Name),
+ (r'\n', Text, '#pop'),
+ (r'.', Text),
+ ],
+ }
+
+
+class CMakeLexer(RegexLexer):
+ """
+ Lexer for `CMake <http://cmake.org/Wiki/CMake>`_ files.
+
+ .. versionadded:: 1.2
+ """
+ name = 'CMake'
+ aliases = ['cmake']
+ filenames = ['*.cmake', 'CMakeLists.txt']
+ mimetypes = ['text/x-cmake']
+
+ tokens = {
+ 'root': [
+ # (r'(ADD_CUSTOM_COMMAND|ADD_CUSTOM_TARGET|ADD_DEFINITIONS|'
+ # r'ADD_DEPENDENCIES|ADD_EXECUTABLE|ADD_LIBRARY|ADD_SUBDIRECTORY|'
+ # r'ADD_TEST|AUX_SOURCE_DIRECTORY|BUILD_COMMAND|BUILD_NAME|'
+ # r'CMAKE_MINIMUM_REQUIRED|CONFIGURE_FILE|CREATE_TEST_SOURCELIST|'
+ # r'ELSE|ELSEIF|ENABLE_LANGUAGE|ENABLE_TESTING|ENDFOREACH|'
+ # r'ENDFUNCTION|ENDIF|ENDMACRO|ENDWHILE|EXEC_PROGRAM|'
+ # r'EXECUTE_PROCESS|EXPORT_LIBRARY_DEPENDENCIES|FILE|FIND_FILE|'
+ # r'FIND_LIBRARY|FIND_PACKAGE|FIND_PATH|FIND_PROGRAM|FLTK_WRAP_UI|'
+ # r'FOREACH|FUNCTION|GET_CMAKE_PROPERTY|GET_DIRECTORY_PROPERTY|'
+ # r'GET_FILENAME_COMPONENT|GET_SOURCE_FILE_PROPERTY|'
+ # r'GET_TARGET_PROPERTY|GET_TEST_PROPERTY|IF|INCLUDE|'
+ # r'INCLUDE_DIRECTORIES|INCLUDE_EXTERNAL_MSPROJECT|'
+ # r'INCLUDE_REGULAR_EXPRESSION|INSTALL|INSTALL_FILES|'
+ # r'INSTALL_PROGRAMS|INSTALL_TARGETS|LINK_DIRECTORIES|'
+ # r'LINK_LIBRARIES|LIST|LOAD_CACHE|LOAD_COMMAND|MACRO|'
+ # r'MAKE_DIRECTORY|MARK_AS_ADVANCED|MATH|MESSAGE|OPTION|'
+ # r'OUTPUT_REQUIRED_FILES|PROJECT|QT_WRAP_CPP|QT_WRAP_UI|REMOVE|'
+ # r'REMOVE_DEFINITIONS|SEPARATE_ARGUMENTS|SET|'
+ # r'SET_DIRECTORY_PROPERTIES|SET_SOURCE_FILES_PROPERTIES|'
+ # r'SET_TARGET_PROPERTIES|SET_TESTS_PROPERTIES|SITE_NAME|'
+ # r'SOURCE_GROUP|STRING|SUBDIR_DEPENDS|SUBDIRS|'
+ # r'TARGET_LINK_LIBRARIES|TRY_COMPILE|TRY_RUN|UNSET|'
+ # r'USE_MANGLED_MESA|UTILITY_SOURCE|VARIABLE_REQUIRES|'
+ # r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|'
+ # r'VTK_WRAP_TCL|WHILE|WRITE_FILE|'
+ # r'COUNTARGS)\b', Name.Builtin, 'args'),
+ (r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
+ Punctuation), 'args'),
+ include('keywords'),
+ include('ws')
+ ],
+ 'args': [
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop'),
+ (r'(\$\{)(.+?)(\})', bygroups(Operator, Name.Variable, Operator)),
+ (r'(\$<)(.+?)(>)', bygroups(Operator, Name.Variable, Operator)),
+ (r'(?s)".*?"', String.Double),
+ (r'\\\S+', String),
+ (r'[^)$"# \t\n]+', String),
+ (r'\n', Text), # explicitly legal
+ include('keywords'),
+ include('ws')
+ ],
+ 'string': [
+
+ ],
+ 'keywords': [
+ (r'\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|'
+ r'MSVC70|MSVC71|MSVC80|MSVC90)\b', Keyword),
+ ],
+ 'ws': [
+ (r'[ \t]+', Text),
+ (r'#.*\n', Comment),
+ ]
+ }
+
+ def analyse_text(text):
+ exp = r'^ *CMAKE_MINIMUM_REQUIRED *\( *VERSION *\d(\.\d)* *( FATAL_ERROR)? *\) *$'
+ if re.search(exp, text, flags=re.MULTILINE | re.IGNORECASE):
+ return 0.8
+ return 0.0
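MakefileLexer above tokenizes only the preprocessor-style and comment lines itself; everything else is buffered and handed to BaseMakefileLexer, and do_insertions splices the two streams back together. A quick way to observe this (a sketch; the snippet and its expected result are illustrative, not part of the patch):

    from pygments.lexers import MakefileLexer

    code = (
        "ifeq ($(CC),gcc)\n"
        "CFLAGS = -O2\n"
        "endif\n"
    )

    for index, token, value in MakefileLexer().get_tokens_unprocessed(code):
        print(index, token, repr(value))

    # The ifeq/endif lines should come back whole as Comment.Preproc (handled
    # by the wrapping lexer), while the assignment line is split into
    # Name.Variable, Operator and a Bash-lexed value by BaseMakefileLexer.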
diff --git a/pygments/lexers/markup.py b/pygments/lexers/markup.py
new file mode 100644
index 00000000..aac8d27e
--- /dev/null
+++ b/pygments/lexers/markup.py
@@ -0,0 +1,502 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.markup
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for non-HTML markup languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexers.html import HtmlLexer, XmlLexer
+from pygments.lexers.javascript import JavascriptLexer
+from pygments.lexers.css import CssLexer
+
+from pygments.lexer import RegexLexer, DelegatingLexer, include, bygroups, \
+ using, this, do_insertions, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Other
+from pygments.util import get_bool_opt, ClassNotFound
+
+__all__ = ['BBCodeLexer', 'MoinWikiLexer', 'RstLexer', 'TexLexer', 'GroffLexer',
+ 'MozPreprocHashLexer', 'MozPreprocPercentLexer',
+ 'MozPreprocXulLexer', 'MozPreprocJavascriptLexer',
+ 'MozPreprocCssLexer']
+
+
+class BBCodeLexer(RegexLexer):
+ """
+ A lexer that highlights BBCode(-like) syntax.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'BBCode'
+ aliases = ['bbcode']
+ mimetypes = ['text/x-bbcode']
+
+ tokens = {
+ 'root': [
+ (r'[^[]+', Text),
+ # tag/end tag begin
+ (r'\[/?\w+', Keyword, 'tag'),
+ # stray bracket
+ (r'\[', Text),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ # attribute with value
+ (r'(\w+)(=)("?[^\s"\]]+"?)',
+ bygroups(Name.Attribute, Operator, String)),
+ # tag argument (a la [color=green])
+ (r'(=)("?[^\s"\]]+"?)',
+ bygroups(Operator, String)),
+ # tag end
+ (r'\]', Keyword, '#pop'),
+ ],
+ }
+
+
+class MoinWikiLexer(RegexLexer):
+ """
+ For MoinMoin (and Trac) Wiki markup.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'MoinMoin/Trac Wiki markup'
+ aliases = ['trac-wiki', 'moin']
+ filenames = []
+ mimetypes = ['text/x-trac-wiki']
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'^#.*$', Comment),
+ (r'(!)(\S+)', bygroups(Keyword, Text)), # Ignore-next
+ # Titles
+ (r'^(=+)([^=]+)(=+)(\s*#.+)?$',
+ bygroups(Generic.Heading, using(this), Generic.Heading, String)),
+ # Literal code blocks, with optional shebang
+ (r'(\{\{\{)(\n#!.+)?', bygroups(Name.Builtin, Name.Namespace), 'codeblock'),
+ (r'(\'\'\'?|\|\||`|__|~~|\^|,,|::)', Comment), # Formatting
+ # Lists
+ (r'^( +)([.*-])( )', bygroups(Text, Name.Builtin, Text)),
+ (r'^( +)([a-z]{1,5}\.)( )', bygroups(Text, Name.Builtin, Text)),
+ # Other Formatting
+ (r'\[\[\w+.*?\]\]', Keyword), # Macro
+ (r'(\[[^\s\]]+)(\s+[^\]]+?)?(\])',
+ bygroups(Keyword, String, Keyword)), # Link
+ (r'^----+$', Keyword), # Horizontal rules
+ (r'[^\n\'\[{!_~^,|]+', Text),
+ (r'\n', Text),
+ (r'.', Text),
+ ],
+ 'codeblock': [
+ (r'\}\}\}', Name.Builtin, '#pop'),
+ # these blocks are allowed to be nested in Trac, but not MoinMoin
+ (r'\{\{\{', Text, '#push'),
+ (r'[^{}]+', Comment.Preproc), # slurp boring text
+ (r'.', Comment.Preproc), # allow loose { or }
+ ],
+ }
+
+
+class RstLexer(RegexLexer):
+ """
+ For `reStructuredText <http://docutils.sf.net/rst.html>`_ markup.
+
+ .. versionadded:: 0.7
+
+ Additional options accepted:
+
+ `handlecodeblocks`
+ Highlight the contents of ``.. sourcecode:: language``,
+ ``.. code:: language`` and ``.. code-block:: language``
+ directives with a lexer for the given language (default:
+ ``True``).
+
+ .. versionadded:: 0.8
+ """
+ name = 'reStructuredText'
+ aliases = ['rst', 'rest', 'restructuredtext']
+ filenames = ['*.rst', '*.rest']
+ mimetypes = ["text/x-rst", "text/prs.fallenstein.rst"]
+ flags = re.MULTILINE
+
+ def _handle_sourcecode(self, match):
+ from pygments.lexers import get_lexer_by_name
+
+ # section header
+ yield match.start(1), Punctuation, match.group(1)
+ yield match.start(2), Text, match.group(2)
+ yield match.start(3), Operator.Word, match.group(3)
+ yield match.start(4), Punctuation, match.group(4)
+ yield match.start(5), Text, match.group(5)
+ yield match.start(6), Keyword, match.group(6)
+ yield match.start(7), Text, match.group(7)
+
+ # lookup lexer if wanted and existing
+ lexer = None
+ if self.handlecodeblocks:
+ try:
+ lexer = get_lexer_by_name(match.group(6).strip())
+ except ClassNotFound:
+ pass
+ indention = match.group(8)
+ indention_size = len(indention)
+ code = (indention + match.group(9) + match.group(10) + match.group(11))
+
+ # no lexer found for this language; emit the directive body as a plain code block
+ if lexer is None:
+ yield match.start(8), String, code
+ return
+
+ # highlight the lines with the lexer.
+ ins = []
+ codelines = code.splitlines(True)
+ code = ''
+ for line in codelines:
+ if len(line) > indention_size:
+ ins.append((len(code), [(0, Text, line[:indention_size])]))
+ code += line[indention_size:]
+ else:
+ code += line
+ for item in do_insertions(ins, lexer.get_tokens_unprocessed(code)):
+ yield item
+
+ # from docutils.parsers.rst.states
+ closers = u'\'")]}>\u2019\u201d\xbb!?'
+ unicode_delimiters = u'\u2010\u2011\u2012\u2013\u2014\u00a0'
+ end_string_suffix = (r'((?=$)|(?=[-/:.,; \n\x00%s%s]))'
+ % (re.escape(unicode_delimiters),
+ re.escape(closers)))
+
+ tokens = {
+ 'root': [
+ # Heading with overline
+ (r'^(=+|-+|`+|:+|\.+|\'+|"+|~+|\^+|_+|\*+|\++|#+)([ \t]*\n)'
+ r'(.+)(\n)(\1)(\n)',
+ bygroups(Generic.Heading, Text, Generic.Heading,
+ Text, Generic.Heading, Text)),
+ # Plain heading
+ (r'^(\S.*)(\n)(={3,}|-{3,}|`{3,}|:{3,}|\.{3,}|\'{3,}|"{3,}|'
+ r'~{3,}|\^{3,}|_{3,}|\*{3,}|\+{3,}|#{3,})(\n)',
+ bygroups(Generic.Heading, Text, Generic.Heading, Text)),
+ # Bulleted lists
+ (r'^(\s*)([-*+])( .+\n(?:\1 .+\n)*)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ # Numbered lists
+ (r'^(\s*)([0-9#ivxlcmIVXLCM]+\.)( .+\n(?:\1 .+\n)*)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ (r'^(\s*)(\(?[0-9#ivxlcmIVXLCM]+\))( .+\n(?:\1 .+\n)*)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ # Numbered, but keep words at BOL from becoming lists
+ (r'^(\s*)([A-Z]+\.)( .+\n(?:\1 .+\n)+)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ (r'^(\s*)(\(?[A-Za-z]+\))( .+\n(?:\1 .+\n)+)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ # Line blocks
+ (r'^(\s*)(\|)( .+\n(?:\| .+\n)*)',
+ bygroups(Text, Operator, using(this, state='inline'))),
+ # Sourcecode directives
+ (r'^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)'
+ r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*|)\n)+)',
+ _handle_sourcecode),
+ # A directive
+ (r'^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
+ bygroups(Punctuation, Text, Operator.Word, Punctuation, Text,
+ using(this, state='inline'))),
+ # A reference target
+ (r'^( *\.\.)(\s*)(_(?:[^:\\]|\\.)+:)(.*?)$',
+ bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
+ # A footnote/citation target
+ (r'^( *\.\.)(\s*)(\[.+\])(.*?)$',
+ bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
+ # A substitution def
+ (r'^( *\.\.)(\s*)(\|.+\|)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
+ bygroups(Punctuation, Text, Name.Tag, Text, Operator.Word,
+ Punctuation, Text, using(this, state='inline'))),
+ # Comments
+ (r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc),
+ # Field list
+ (r'^( *)(:[a-zA-Z-]+:)(\s*)$', bygroups(Text, Name.Class, Text)),
+ (r'^( *)(:.*?:)([ \t]+)(.*?)$',
+ bygroups(Text, Name.Class, Text, Name.Function)),
+ # Definition list
+ (r'^(\S.*(?<!::)\n)((?:(?: +.*)\n)+)',
+ bygroups(using(this, state='inline'), using(this, state='inline'))),
+ # Code blocks
+ (r'(::)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\3.*|)\n)+)',
+ bygroups(String.Escape, Text, String, String, Text, String)),
+ include('inline'),
+ ],
+ 'inline': [
+ (r'\\.', Text), # escape
+ (r'``', String, 'literal'), # code
+ (r'(`.+?)(<.+?>)(`__?)', # reference with inline target
+ bygroups(String, String.Interpol, String)),
+ (r'`.+?`__?', String), # reference
+ (r'(`.+?`)(:[a-zA-Z0-9:-]+?:)?',
+ bygroups(Name.Variable, Name.Attribute)), # role
+ (r'(:[a-zA-Z0-9:-]+?:)(`.+?`)',
+ bygroups(Name.Attribute, Name.Variable)), # role (content first)
+ (r'\*\*.+?\*\*', Generic.Strong), # Strong emphasis
+ (r'\*.+?\*', Generic.Emph), # Emphasis
+ (r'\[.*?\]_', String), # Footnote or citation
+ (r'<.+?>', Name.Tag), # Hyperlink
+ (r'[^\\\n\[*`:]+', Text),
+ (r'.', Text),
+ ],
+ 'literal': [
+ (r'[^`]+', String),
+ (r'``' + end_string_suffix, String, '#pop'),
+ (r'`', String),
+ ]
+ }
+
+ def __init__(self, **options):
+ self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
+ RegexLexer.__init__(self, **options)
+
+ def analyse_text(text):
+ if text[:2] == '..' and text[2:3] != '.':
+ return 0.3
+ p1 = text.find("\n")
+ p2 = text.find("\n", p1 + 1)
+ if (p2 > -1 and # has two lines
+ p1 * 2 + 1 == p2 and # they are the same length
+ text[p1+1] in '-=' and # the underline uses '-' or '='
+ text[p1+1] == text[p2-1]): # and starts and ends with the same character
+ return 0.5
+
+
+class TexLexer(RegexLexer):
+ """
+ Lexer for the TeX and LaTeX typesetting languages.
+ """
+
+ name = 'TeX'
+ aliases = ['tex', 'latex']
+ filenames = ['*.tex', '*.aux', '*.toc']
+ mimetypes = ['text/x-tex', 'text/x-latex']
+
+ tokens = {
+ 'general': [
+ (r'%.*?\n', Comment),
+ (r'[{}]', Name.Builtin),
+ (r'[&_^]', Name.Builtin),
+ ],
+ 'root': [
+ (r'\\\[', String.Backtick, 'displaymath'),
+ (r'\\\(', String, 'inlinemath'),
+ (r'\$\$', String.Backtick, 'displaymath'),
+ (r'\$', String, 'inlinemath'),
+ (r'\\([a-zA-Z]+|.)', Keyword, 'command'),
+ (r'\\$', Keyword),
+ include('general'),
+ (r'[^\\$%&_^{}]+', Text),
+ ],
+ 'math': [
+ (r'\\([a-zA-Z]+|.)', Name.Variable),
+ include('general'),
+ (r'[0-9]+', Number),
+ (r'[-=!+*/()\[\]]', Operator),
+ (r'[^=!+*/()\[\]\\$%&_^{}0-9-]+', Name.Builtin),
+ ],
+ 'inlinemath': [
+ (r'\\\)', String, '#pop'),
+ (r'\$', String, '#pop'),
+ include('math'),
+ ],
+ 'displaymath': [
+ (r'\\\]', String, '#pop'),
+ (r'\$\$', String, '#pop'),
+ (r'\$', Name.Builtin),
+ include('math'),
+ ],
+ 'command': [
+ (r'\[.*?\]', Name.Attribute),
+ (r'\*', Keyword),
+ default('#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ for start in ("\\documentclass", "\\input", "\\documentstyle",
+ "\\relax"):
+ if text[:len(start)] == start:
+ return True
+
+
+class GroffLexer(RegexLexer):
+ """
+ Lexer for the (g)roff typesetting language, supporting groff
+ extensions. Mainly useful for highlighting manpage sources.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'Groff'
+ aliases = ['groff', 'nroff', 'man']
+ filenames = ['*.[1234567]', '*.man']
+ mimetypes = ['application/x-troff', 'text/troff']
+
+ tokens = {
+ 'root': [
+ (r'(\.)(\w+)', bygroups(Text, Keyword), 'request'),
+ (r'\.', Punctuation, 'request'),
+ # Regular characters, slurp till we find a backslash or newline
+ (r'[^\\\n]+', Text, 'textline'),
+ default('textline'),
+ ],
+ 'textline': [
+ include('escapes'),
+ (r'[^\\\n]+', Text),
+ (r'\n', Text, '#pop'),
+ ],
+ 'escapes': [
+ # groff has many ways to write escapes.
+ (r'\\"[^\n]*', Comment),
+ (r'\\[fn]\w', String.Escape),
+ (r'\\\(.{2}', String.Escape),
+ (r'\\.\[.*\]', String.Escape),
+ (r'\\.', String.Escape),
+ (r'\\\n', Text, 'request'),
+ ],
+ 'request': [
+ (r'\n', Text, '#pop'),
+ include('escapes'),
+ (r'"[^\n"]+"', String.Double),
+ (r'\d+', Number),
+ (r'\S+', String),
+ (r'\s+', Text),
+ ],
+ }
+
+ def analyse_text(text):
+ if text[:1] != '.':
+ return False
+ if text[:3] == '.\\"':
+ return True
+ if text[:4] == '.TH ':
+ return True
+ if text[1:3].isalnum() and text[3].isspace():
+ return 0.9
+
+
+class MozPreprocHashLexer(RegexLexer):
+ """
+ Lexer for Mozilla Preprocessor files (with '#' as the marker).
+
+ Other data is left untouched.
+
+ .. versionadded:: 2.0
+ """
+ name = 'mozhashpreproc'
+ aliases = [name]
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'^#', Comment.Preproc, ('expr', 'exprstart')),
+ (r'.+', Other),
+ ],
+ 'exprstart': [
+ (r'(literal)(.*)', bygroups(Comment.Preproc, Text), '#pop:2'),
+ (words((
+ 'define', 'undef', 'if', 'ifdef', 'ifndef', 'else', 'elif',
+ 'elifdef', 'elifndef', 'endif', 'expand', 'filter', 'unfilter',
+ 'include', 'includesubst', 'error')),
+ Comment.Preproc, '#pop'),
+ ],
+ 'expr': [
+ (words(('!', '!=', '==', '&&', '||')), Operator),
+ (r'(defined)(\()', bygroups(Keyword, Punctuation)),
+ (r'\)', Punctuation),
+ (r'[0-9]+', Number.Decimal),
+ (r'__\w+?__', Name.Variable),
+ (r'@\w+?@', Name.Class),
+ (r'\w+', Name),
+ (r'\n', Text, '#pop'),
+ (r'\s+', Text),
+ (r'\S', Punctuation),
+ ],
+ }
+
+
+class MozPreprocPercentLexer(MozPreprocHashLexer):
+ """
+ Lexer for Mozilla Preprocessor files (with '%' as the marker).
+
+ Other data is left untouched.
+
+ .. versionadded:: 2.0
+ """
+ name = 'mozpercentpreproc'
+ aliases = [name]
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'^%', Comment.Preproc, ('expr', 'exprstart')),
+ (r'.+', Other),
+ ],
+ }
+
+
+class MozPreprocXulLexer(DelegatingLexer):
+ """
+ Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
+ `XmlLexer`.
+
+ .. versionadded:: 2.0
+ """
+ name = "XUL+mozpreproc"
+ aliases = ['xul+mozpreproc']
+ filenames = ['*.xul.in']
+ mimetypes = []
+
+ def __init__(self, **options):
+ super(MozPreprocXulLexer, self).__init__(
+ XmlLexer, MozPreprocHashLexer, **options)
+
+
+class MozPreprocJavascriptLexer(DelegatingLexer):
+ """
+ Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
+ `JavascriptLexer`.
+
+ .. versionadded:: 2.0
+ """
+ name = "Javascript+mozpreproc"
+ aliases = ['javascript+mozpreproc']
+ filenames = ['*.js.in']
+ mimetypes = []
+
+ def __init__(self, **options):
+ super(MozPreprocJavascriptLexer, self).__init__(
+ JavascriptLexer, MozPreprocHashLexer, **options)
+
+
+class MozPreprocCssLexer(DelegatingLexer):
+ """
+ Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
+ `CssLexer`.
+
+ .. versionadded:: 2.0
+ """
+ name = "CSS+mozpreproc"
+ aliases = ['css+mozpreproc']
+ filenames = ['*.css.in']
+ mimetypes = []
+
+ def __init__(self, **options):
+ super(MozPreprocCssLexer, self).__init__(
+ CssLexer, MozPreprocPercentLexer, **options)
+
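RstLexer's handlecodeblocks option (described in its docstring above) decides whether the body of a code/code-block/sourcecode directive is re-lexed with the named language's lexer or emitted as a plain String chunk. A small usage sketch (illustrative only):

    from pygments.lexers import RstLexer

    rst = (
        ".. code-block:: python\n"
        "\n"
        "    print('hi')\n"
        "\n"
    )

    nested = list(RstLexer().get_tokens(rst))
    plain = list(RstLexer(handlecodeblocks=False).get_tokens(rst))

    # With the default (True) the directive body is delegated to PythonLexer;
    # with handlecodeblocks=False it comes back as a single String token, so
    # the second stream is noticeably shorter.
    print(len(nested), len(plain))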
diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py
index f0e49fef..7a92f5bb 100644
--- a/pygments/lexers/math.py
+++ b/pygments/lexers/math.py
@@ -3,1916 +3,19 @@
pygments.lexers.math
~~~~~~~~~~~~~~~~~~~~
- Lexers for math languages.
+ Just export lexers that were contained in this module.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
+from pygments.lexers.python import NumPyLexer
+from pygments.lexers.matlab import MatlabLexer, MatlabSessionLexer, \
+ OctaveLexer, ScilabLexer
+from pygments.lexers.julia import JuliaLexer, JuliaConsoleLexer
+from pygments.lexers.r import RConsoleLexer, SLexer, RdLexer
+from pygments.lexers.modeling import BugsLexer, JagsLexer, StanLexer
+from pygments.lexers.idl import IDLLexer
+from pygments.lexers.algebra import MuPADLexer
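After this rewrite pygments/lexers/math.py contains only re-exports, so imports written against the old module path keep resolving to the classes that moved. A minimal compatibility check (a sketch, not part of the patch):

    from pygments.lexers.math import MatlabLexer as from_old_path
    from pygments.lexers.matlab import MatlabLexer as from_new_path

    # Both paths refer to the same class object after the split.
    assert from_old_path is from_new_path
    print(from_old_path.name, from_old_path.aliases)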
-from pygments.util import shebang_matches
-from pygments.lexer import Lexer, RegexLexer, bygroups, include, \
- combined, do_insertions
-from pygments.token import Comment, String, Punctuation, Keyword, Name, \
- Operator, Number, Text, Generic
-
-from pygments.lexers.agile import PythonLexer
-from pygments.lexers import _scilab_builtins
-from pygments.lexers import _stan_builtins
-
-__all__ = ['JuliaLexer', 'JuliaConsoleLexer', 'MuPADLexer', 'MatlabLexer',
- 'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer', 'NumPyLexer',
- 'RConsoleLexer', 'SLexer', 'JagsLexer', 'BugsLexer', 'StanLexer',
- 'IDLLexer', 'RdLexer', 'IgorLexer']
-
-
-class JuliaLexer(RegexLexer):
- """
- For `Julia <http://julialang.org/>`_ source code.
-
- *New in Pygments 1.6.*
- """
- name = 'Julia'
- aliases = ['julia','jl']
- filenames = ['*.jl']
- mimetypes = ['text/x-julia','application/x-julia']
-
- builtins = [
- 'exit','whos','edit','load','is','isa','isequal','typeof','tuple',
- 'ntuple','uid','hash','finalizer','convert','promote','subtype',
- 'typemin','typemax','realmin','realmax','sizeof','eps','promote_type',
- 'method_exists','applicable','invoke','dlopen','dlsym','system',
- 'error','throw','assert','new','Inf','Nan','pi','im',
- ]
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'#.*$', Comment),
- (r'[]{}:(),;[@]', Punctuation),
- (r'\\\n', Text),
- (r'\\', Text),
-
- # keywords
- (r'(begin|while|for|in|return|break|continue|'
- r'macro|quote|let|if|elseif|else|try|catch|end|'
- r'bitstype|ccall|do|using|module|import|export|'
- r'importall|baremodule|immutable)\b', Keyword),
- (r'(local|global|const)\b', Keyword.Declaration),
- (r'(Bool|Int|Int8|Int16|Int32|Int64|Uint|Uint8|Uint16|Uint32|Uint64'
- r'|Float32|Float64|Complex64|Complex128|Any|Nothing|None)\b',
- Keyword.Type),
-
- # functions
- (r'(function)((?:\s|\\\s)+)',
- bygroups(Keyword,Name.Function), 'funcname'),
-
- # types
- (r'(type|typealias|abstract)((?:\s|\\\s)+)',
- bygroups(Keyword,Name.Class), 'typename'),
-
- # operators
- (r'==|!=|<=|>=|->|&&|\|\||::|<:|[-~+/*%=<>&^|.?!$]', Operator),
- (r'\.\*|\.\^|\.\\|\.\/|\\', Operator),
-
- # builtins
- ('(' + '|'.join(builtins) + r')\b', Name.Builtin),
-
- # backticks
- (r'`(?s).*?`', String.Backtick),
-
- # chars
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|"
- r"\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'", String.Char),
-
- # try to match trailing transpose
- (r'(?<=[.\w\)\]])\'+', Operator),
-
- # strings
- (r'(?:[IL])"', String, 'string'),
- (r'[E]?"', String, combined('stringescape', 'string')),
-
- # names
- (r'@[a-zA-Z0-9_.]+', Name.Decorator),
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
-
- # numbers
- (r'(\d+(_\d+)+\.\d*|\d*\.\d+(_\d+)+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+(_\d+)+[eEf][+-]?[0-9]+', Number.Float),
- (r'\d+[eEf][+-]?[0-9]+', Number.Float),
- (r'0b[01]+(_[01]+)+', Number.Binary),
- (r'0b[01]+', Number.Binary),
- (r'0o[0-7]+(_[0-7]+)+', Number.Oct),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[a-fA-F0-9]+(_[a-fA-F0-9]+)+', Number.Hex),
- (r'0x[a-fA-F0-9]+', Number.Hex),
- (r'\d+(_\d+)+', Number.Integer),
- (r'\d+', Number.Integer)
- ],
-
- 'funcname': [
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop'),
- ('\([^\s\w{]{1,2}\)', Operator, '#pop'),
- ('[^\s\w{]{1,2}', Operator, '#pop'),
- ],
-
- 'typename': [
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
- ],
-
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
-
- 'string': [
- (r'"', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
- (r'\$(\([a-zA-Z0-9_]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?',
- String.Interpol),
- (r'[^\\"$]+', String),
- # quotes, dollar signs, and backslashes must be parsed one at a time
- (r'["\\]', String),
- # unhandled string formatting sign
- (r'\$', String)
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'julia')
-
-
-line_re = re.compile('.*?\n')
-
-class JuliaConsoleLexer(Lexer):
- """
- For Julia console sessions. Modeled after MatlabSessionLexer.
-
- *New in Pygments 1.6.*
- """
- name = 'Julia console'
- aliases = ['jlcon']
-
- def get_tokens_unprocessed(self, text):
- jllexer = JuliaLexer(**self.options)
-
- curcode = ''
- insertions = []
-
- for match in line_re.finditer(text):
- line = match.group()
-
- if line.startswith('julia>'):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:3])]))
- curcode += line[3:]
-
- elif line.startswith(' '):
-
- idx = len(curcode)
-
- # without is showing error on same line as before...?
- line = "\n" + line
- token = (0, Generic.Traceback, line)
- insertions.append((idx, [token]))
-
- else:
- if curcode:
- for item in do_insertions(
- insertions, jllexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
-
- yield match.start(), Generic.Output, line
-
- if curcode: # or item:
- for item in do_insertions(
- insertions, jllexer.get_tokens_unprocessed(curcode)):
- yield item
-
-
-class MuPADLexer(RegexLexer):
- """
- A `MuPAD <http://www.mupad.com>`_ lexer.
- Contributed by Christopher Creutzig <christopher@creutzig.de>.
-
- *New in Pygments 0.8.*
- """
- name = 'MuPAD'
- aliases = ['mupad']
- filenames = ['*.mu']
-
- tokens = {
- 'root' : [
- (r'//.*?$', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"(?:[^"\\]|\\.)*"', String),
- (r'\(|\)|\[|\]|\{|\}', Punctuation),
- (r'''(?x)\b(?:
- next|break|end|
- axiom|end_axiom|category|end_category|domain|end_domain|inherits|
- if|%if|then|elif|else|end_if|
- case|of|do|otherwise|end_case|
- while|end_while|
- repeat|until|end_repeat|
- for|from|to|downto|step|end_for|
- proc|local|option|save|begin|end_proc|
- delete|frame
- )\b''', Keyword),
- (r'''(?x)\b(?:
- DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR|
- DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT|
- DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC|
- DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR
- )\b''', Name.Class),
- (r'''(?x)\b(?:
- PI|EULER|E|CATALAN|
- NIL|FAIL|undefined|infinity|
- TRUE|FALSE|UNKNOWN
- )\b''',
- Name.Constant),
- (r'\b(?:dom|procname)\b', Name.Builtin.Pseudo),
- (r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator),
- (r'''(?x)\b(?:
- and|or|not|xor|
- assuming|
- div|mod|
- union|minus|intersect|in|subset
- )\b''',
- Operator.Word),
- (r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number),
- #(r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin),
- (r'''(?x)
- ((?:[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)
- (?:::[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)*)(\s*)([(])''',
- bygroups(Name.Function, Text, Punctuation)),
- (r'''(?x)
- (?:[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)
- (?:::[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)*''', Name.Variable),
- (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
- (r'\.[0-9]+(?:e[0-9]+)?', Number),
- (r'.', Text)
- ],
- 'comment' : [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ]
- }
-
-
-class MatlabLexer(RegexLexer):
- """
- For Matlab source code.
-
- *New in Pygments 0.10.*
- """
- name = 'Matlab'
- aliases = ['matlab']
- filenames = ['*.m']
- mimetypes = ['text/matlab']
-
- #
- # These lists are generated automatically.
- # Run the following in bash shell:
- #
- # for f in elfun specfun elmat; do
- # echo -n "$f = "
- # matlab -nojvm -r "help $f;exit;" | perl -ne \
- # 'push(@c,$1) if /^ (\w+)\s+-/; END {print q{["}.join(q{","},@c).qq{"]\n};}'
- # done
- #
- # elfun: Elementary math functions
- # specfun: Special Math functions
- # elmat: Elementary matrices and matrix manipulation
- #
- # taken from Matlab version 7.4.0.336 (R2007a)
- #
- elfun = ["sin","sind","sinh","asin","asind","asinh","cos","cosd","cosh",
- "acos","acosd","acosh","tan","tand","tanh","atan","atand","atan2",
- "atanh","sec","secd","sech","asec","asecd","asech","csc","cscd",
- "csch","acsc","acscd","acsch","cot","cotd","coth","acot","acotd",
- "acoth","hypot","exp","expm1","log","log1p","log10","log2","pow2",
- "realpow","reallog","realsqrt","sqrt","nthroot","nextpow2","abs",
- "angle","complex","conj","imag","real","unwrap","isreal","cplxpair",
- "fix","floor","ceil","round","mod","rem","sign"]
- specfun = ["airy","besselj","bessely","besselh","besseli","besselk","beta",
- "betainc","betaln","ellipj","ellipke","erf","erfc","erfcx",
- "erfinv","expint","gamma","gammainc","gammaln","psi","legendre",
- "cross","dot","factor","isprime","primes","gcd","lcm","rat",
- "rats","perms","nchoosek","factorial","cart2sph","cart2pol",
- "pol2cart","sph2cart","hsv2rgb","rgb2hsv"]
- elmat = ["zeros","ones","eye","repmat","rand","randn","linspace","logspace",
- "freqspace","meshgrid","accumarray","size","length","ndims","numel",
- "disp","isempty","isequal","isequalwithequalnans","cat","reshape",
- "diag","blkdiag","tril","triu","fliplr","flipud","flipdim","rot90",
- "find","end","sub2ind","ind2sub","bsxfun","ndgrid","permute",
- "ipermute","shiftdim","circshift","squeeze","isscalar","isvector",
- "ans","eps","realmax","realmin","pi","i","inf","nan","isnan",
- "isinf","isfinite","j","why","compan","gallery","hadamard","hankel",
- "hilb","invhilb","magic","pascal","rosser","toeplitz","vander",
- "wilkinson"]
-
- tokens = {
- 'root': [
- # line starting with '!' is sent as a system command. not sure what
- # label to use...
- (r'^!.*', String.Other),
- (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
- (r'%.*$', Comment),
- (r'^\s*function', Keyword, 'deffunc'),
-
- # from 'iskeyword' on version 7.11 (R2010):
- (r'(break|case|catch|classdef|continue|else|elseif|end|enumerated|'
- r'events|for|function|global|if|methods|otherwise|parfor|'
- r'persistent|properties|return|spmd|switch|try|while)\b', Keyword),
-
- ("(" + "|".join(elfun+specfun+elmat) + r')\b', Name.Builtin),
-
- # line continuation with following comment:
- (r'\.\.\..*$', Comment),
-
- # operators:
- (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
- # operators requiring escape for re:
- (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
- # punctuation:
- (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
- (r'=|:|;', Punctuation),
-
- # quote can be transpose, instead of string:
- # (not great, but handles common cases...)
- (r'(?<=[\w\)\]])\'', Operator),
-
- (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eEf][+-]?[0-9]+', Number.Float),
- (r'\d+', Number.Integer),
-
- (r'(?<![\w\)\]])\'', String, 'string'),
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
- (r'.', Text),
- ],
- 'string': [
- (r'[^\']*\'', String, '#pop')
- ],
- 'blockcomment': [
- (r'^\s*%\}', Comment.Multiline, '#pop'),
- (r'^.*\n', Comment.Multiline),
- (r'.', Comment.Multiline),
- ],
- 'deffunc': [
- (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
- bygroups(Text.Whitespace, Text, Text.Whitespace, Punctuation,
- Text.Whitespace, Name.Function, Punctuation, Text,
- Punctuation, Text.Whitespace), '#pop'),
- ],
- }
-
- def analyse_text(text):
- if re.match('^\s*%', text, re.M): # comment
- return 0.9
- elif re.match('^!\w+', text, re.M): # system cmd
- return 0.9
- return 0.1
-
-
-line_re = re.compile('.*?\n')
-
-class MatlabSessionLexer(Lexer):
- """
- For Matlab sessions. Modeled after PythonConsoleLexer.
- Contributed by Ken Schutte <kschutte@csail.mit.edu>.
-
- *New in Pygments 0.10.*
- """
- name = 'Matlab session'
- aliases = ['matlabsession']
-
- def get_tokens_unprocessed(self, text):
- mlexer = MatlabLexer(**self.options)
-
- curcode = ''
- insertions = []
-
- for match in line_re.finditer(text):
- line = match.group()
-
- if line.startswith('>>'):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:3])]))
- curcode += line[3:]
-
- elif line.startswith('???'):
-
- idx = len(curcode)
-
- # without is showing error on same line as before...?
- line = "\n" + line
- token = (0, Generic.Traceback, line)
- insertions.append((idx, [token]))
-
- else:
- if curcode:
- for item in do_insertions(
- insertions, mlexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
-
- yield match.start(), Generic.Output, line
-
- if curcode: # or item:
- for item in do_insertions(
- insertions, mlexer.get_tokens_unprocessed(curcode)):
- yield item
-
-
-class OctaveLexer(RegexLexer):
- """
- For GNU Octave source code.
-
- *New in Pygments 1.5.*
- """
- name = 'Octave'
- aliases = ['octave']
- filenames = ['*.m']
- mimetypes = ['text/octave']
-
- # These lists are generated automatically.
- # Run the following in bash shell:
- #
- # First dump all of the Octave manual into a plain text file:
- #
- # $ info octave --subnodes -o octave-manual
- #
- # Now grep through it:
-
- # for i in \
- # "Built-in Function" "Command" "Function File" \
- # "Loadable Function" "Mapping Function";
- # do
- # perl -e '@name = qw('"$i"');
- # print lc($name[0]),"_kw = [\n"';
- #
- # perl -n -e 'print "\"$1\",\n" if /-- '"$i"': .* (\w*) \(/;' \
- # octave-manual | sort | uniq ;
- # echo "]" ;
- # echo;
- # done
-
- # taken from Octave Mercurial changeset 8cc154f45e37 (30-jan-2011)
-
- builtin_kw = [ "addlistener", "addpath", "addproperty", "all",
- "and", "any", "argnames", "argv", "assignin",
- "atexit", "autoload",
- "available_graphics_toolkits", "beep_on_error",
- "bitand", "bitmax", "bitor", "bitshift", "bitxor",
- "cat", "cell", "cellstr", "char", "class", "clc",
- "columns", "command_line_path",
- "completion_append_char", "completion_matches",
- "complex", "confirm_recursive_rmdir", "cputime",
- "crash_dumps_octave_core", "ctranspose", "cumprod",
- "cumsum", "debug_on_error", "debug_on_interrupt",
- "debug_on_warning", "default_save_options",
- "dellistener", "diag", "diff", "disp",
- "doc_cache_file", "do_string_escapes", "double",
- "drawnow", "e", "echo_executing_commands", "eps",
- "eq", "errno", "errno_list", "error", "eval",
- "evalin", "exec", "exist", "exit", "eye", "false",
- "fclear", "fclose", "fcntl", "fdisp", "feof",
- "ferror", "feval", "fflush", "fgetl", "fgets",
- "fieldnames", "file_in_loadpath", "file_in_path",
- "filemarker", "filesep", "find_dir_in_path",
- "fixed_point_format", "fnmatch", "fopen", "fork",
- "formula", "fprintf", "fputs", "fread", "freport",
- "frewind", "fscanf", "fseek", "fskipl", "ftell",
- "functions", "fwrite", "ge", "genpath", "get",
- "getegid", "getenv", "geteuid", "getgid",
- "getpgrp", "getpid", "getppid", "getuid", "glob",
- "gt", "gui_mode", "history_control",
- "history_file", "history_size",
- "history_timestamp_format_string", "home",
- "horzcat", "hypot", "ifelse",
- "ignore_function_time_stamp", "inferiorto",
- "info_file", "info_program", "inline", "input",
- "intmax", "intmin", "ipermute",
- "is_absolute_filename", "isargout", "isbool",
- "iscell", "iscellstr", "ischar", "iscomplex",
- "isempty", "isfield", "isfloat", "isglobal",
- "ishandle", "isieee", "isindex", "isinteger",
- "islogical", "ismatrix", "ismethod", "isnull",
- "isnumeric", "isobject", "isreal",
- "is_rooted_relative_filename", "issorted",
- "isstruct", "isvarname", "kbhit", "keyboard",
- "kill", "lasterr", "lasterror", "lastwarn",
- "ldivide", "le", "length", "link", "linspace",
- "logical", "lstat", "lt", "make_absolute_filename",
- "makeinfo_program", "max_recursion_depth", "merge",
- "methods", "mfilename", "minus", "mislocked",
- "mkdir", "mkfifo", "mkstemp", "mldivide", "mlock",
- "mouse_wheel_zoom", "mpower", "mrdivide", "mtimes",
- "munlock", "nargin", "nargout",
- "native_float_format", "ndims", "ne", "nfields",
- "nnz", "norm", "not", "numel", "nzmax",
- "octave_config_info", "octave_core_file_limit",
- "octave_core_file_name",
- "octave_core_file_options", "ones", "or",
- "output_max_field_width", "output_precision",
- "page_output_immediately", "page_screen_output",
- "path", "pathsep", "pause", "pclose", "permute",
- "pi", "pipe", "plus", "popen", "power",
- "print_empty_dimensions", "printf",
- "print_struct_array_contents", "prod",
- "program_invocation_name", "program_name",
- "putenv", "puts", "pwd", "quit", "rats", "rdivide",
- "readdir", "readlink", "read_readline_init_file",
- "realmax", "realmin", "rehash", "rename",
- "repelems", "re_read_readline_init_file", "reset",
- "reshape", "resize", "restoredefaultpath",
- "rethrow", "rmdir", "rmfield", "rmpath", "rows",
- "save_header_format_string", "save_precision",
- "saving_history", "scanf", "set", "setenv",
- "shell_cmd", "sighup_dumps_octave_core",
- "sigterm_dumps_octave_core", "silent_functions",
- "single", "size", "size_equal", "sizemax",
- "sizeof", "sleep", "source", "sparse_auto_mutate",
- "split_long_rows", "sprintf", "squeeze", "sscanf",
- "stat", "stderr", "stdin", "stdout", "strcmp",
- "strcmpi", "string_fill_char", "strncmp",
- "strncmpi", "struct", "struct_levels_to_print",
- "strvcat", "subsasgn", "subsref", "sum", "sumsq",
- "superiorto", "suppress_verbose_help_message",
- "symlink", "system", "tic", "tilde_expand",
- "times", "tmpfile", "tmpnam", "toc", "toupper",
- "transpose", "true", "typeinfo", "umask", "uminus",
- "uname", "undo_string_escapes", "unlink", "uplus",
- "upper", "usage", "usleep", "vec", "vectorize",
- "vertcat", "waitpid", "warning", "warranty",
- "whos_line_format", "yes_or_no", "zeros",
- "inf", "Inf", "nan", "NaN"]
-
- command_kw = [ "close", "load", "who", "whos", ]
-
- function_kw = [ "accumarray", "accumdim", "acosd", "acotd",
- "acscd", "addtodate", "allchild", "ancestor",
- "anova", "arch_fit", "arch_rnd", "arch_test",
- "area", "arma_rnd", "arrayfun", "ascii", "asctime",
- "asecd", "asind", "assert", "atand",
- "autoreg_matrix", "autumn", "axes", "axis", "bar",
- "barh", "bartlett", "bartlett_test", "beep",
- "betacdf", "betainv", "betapdf", "betarnd",
- "bicgstab", "bicubic", "binary", "binocdf",
- "binoinv", "binopdf", "binornd", "bitcmp",
- "bitget", "bitset", "blackman", "blanks",
- "blkdiag", "bone", "box", "brighten", "calendar",
- "cast", "cauchy_cdf", "cauchy_inv", "cauchy_pdf",
- "cauchy_rnd", "caxis", "celldisp", "center", "cgs",
- "chisquare_test_homogeneity",
- "chisquare_test_independence", "circshift", "cla",
- "clabel", "clf", "clock", "cloglog", "closereq",
- "colon", "colorbar", "colormap", "colperm",
- "comet", "common_size", "commutation_matrix",
- "compan", "compare_versions", "compass",
- "computer", "cond", "condest", "contour",
- "contourc", "contourf", "contrast", "conv",
- "convhull", "cool", "copper", "copyfile", "cor",
- "corrcoef", "cor_test", "cosd", "cotd", "cov",
- "cplxpair", "cross", "cscd", "cstrcat", "csvread",
- "csvwrite", "ctime", "cumtrapz", "curl", "cut",
- "cylinder", "date", "datenum", "datestr",
- "datetick", "datevec", "dblquad", "deal",
- "deblank", "deconv", "delaunay", "delaunayn",
- "delete", "demo", "detrend", "diffpara", "diffuse",
- "dir", "discrete_cdf", "discrete_inv",
- "discrete_pdf", "discrete_rnd", "display",
- "divergence", "dlmwrite", "dos", "dsearch",
- "dsearchn", "duplication_matrix", "durbinlevinson",
- "ellipsoid", "empirical_cdf", "empirical_inv",
- "empirical_pdf", "empirical_rnd", "eomday",
- "errorbar", "etime", "etreeplot", "example",
- "expcdf", "expinv", "expm", "exppdf", "exprnd",
- "ezcontour", "ezcontourf", "ezmesh", "ezmeshc",
- "ezplot", "ezpolar", "ezsurf", "ezsurfc", "factor",
- "factorial", "fail", "fcdf", "feather", "fftconv",
- "fftfilt", "fftshift", "figure", "fileattrib",
- "fileparts", "fill", "findall", "findobj",
- "findstr", "finv", "flag", "flipdim", "fliplr",
- "flipud", "fpdf", "fplot", "fractdiff", "freqz",
- "freqz_plot", "frnd", "fsolve",
- "f_test_regression", "ftp", "fullfile", "fzero",
- "gamcdf", "gaminv", "gampdf", "gamrnd", "gca",
- "gcbf", "gcbo", "gcf", "genvarname", "geocdf",
- "geoinv", "geopdf", "geornd", "getfield", "ginput",
- "glpk", "gls", "gplot", "gradient",
- "graphics_toolkit", "gray", "grid", "griddata",
- "griddatan", "gtext", "gunzip", "gzip", "hadamard",
- "hamming", "hankel", "hanning", "hggroup",
- "hidden", "hilb", "hist", "histc", "hold", "hot",
- "hotelling_test", "housh", "hsv", "hurst",
- "hygecdf", "hygeinv", "hygepdf", "hygernd",
- "idivide", "ifftshift", "image", "imagesc",
- "imfinfo", "imread", "imshow", "imwrite", "index",
- "info", "inpolygon", "inputname", "interpft",
- "interpn", "intersect", "invhilb", "iqr", "isa",
- "isdefinite", "isdir", "is_duplicate_entry",
- "isequal", "isequalwithequalnans", "isfigure",
- "ishermitian", "ishghandle", "is_leap_year",
- "isletter", "ismac", "ismember", "ispc", "isprime",
- "isprop", "isscalar", "issquare", "isstrprop",
- "issymmetric", "isunix", "is_valid_file_id",
- "isvector", "jet", "kendall",
- "kolmogorov_smirnov_cdf",
- "kolmogorov_smirnov_test", "kruskal_wallis_test",
- "krylov", "kurtosis", "laplace_cdf", "laplace_inv",
- "laplace_pdf", "laplace_rnd", "legend", "legendre",
- "license", "line", "linkprop", "list_primes",
- "loadaudio", "loadobj", "logistic_cdf",
- "logistic_inv", "logistic_pdf", "logistic_rnd",
- "logit", "loglog", "loglogerr", "logm", "logncdf",
- "logninv", "lognpdf", "lognrnd", "logspace",
- "lookfor", "ls_command", "lsqnonneg", "magic",
- "mahalanobis", "manova", "matlabroot",
- "mcnemar_test", "mean", "meansq", "median", "menu",
- "mesh", "meshc", "meshgrid", "meshz", "mexext",
- "mget", "mkpp", "mode", "moment", "movefile",
- "mpoles", "mput", "namelengthmax", "nargchk",
- "nargoutchk", "nbincdf", "nbininv", "nbinpdf",
- "nbinrnd", "nchoosek", "ndgrid", "newplot", "news",
- "nonzeros", "normcdf", "normest", "norminv",
- "normpdf", "normrnd", "now", "nthroot", "null",
- "ocean", "ols", "onenormest", "optimget",
- "optimset", "orderfields", "orient", "orth",
- "pack", "pareto", "parseparams", "pascal", "patch",
- "pathdef", "pcg", "pchip", "pcolor", "pcr",
- "peaks", "periodogram", "perl", "perms", "pie",
- "pink", "planerot", "playaudio", "plot",
- "plotmatrix", "plotyy", "poisscdf", "poissinv",
- "poisspdf", "poissrnd", "polar", "poly",
- "polyaffine", "polyarea", "polyderiv", "polyfit",
- "polygcd", "polyint", "polyout", "polyreduce",
- "polyval", "polyvalm", "postpad", "powerset",
- "ppder", "ppint", "ppjumps", "ppplot", "ppval",
- "pqpnonneg", "prepad", "primes", "print",
- "print_usage", "prism", "probit", "qp", "qqplot",
- "quadcc", "quadgk", "quadl", "quadv", "quiver",
- "qzhess", "rainbow", "randi", "range", "rank",
- "ranks", "rat", "reallog", "realpow", "realsqrt",
- "record", "rectangle_lw", "rectangle_sw",
- "rectint", "refresh", "refreshdata",
- "regexptranslate", "repmat", "residue", "ribbon",
- "rindex", "roots", "rose", "rosser", "rotdim",
- "rref", "run", "run_count", "rundemos", "run_test",
- "runtests", "saveas", "saveaudio", "saveobj",
- "savepath", "scatter", "secd", "semilogx",
- "semilogxerr", "semilogy", "semilogyerr",
- "setaudio", "setdiff", "setfield", "setxor",
- "shading", "shift", "shiftdim", "sign_test",
- "sinc", "sind", "sinetone", "sinewave", "skewness",
- "slice", "sombrero", "sortrows", "spaugment",
- "spconvert", "spdiags", "spearman", "spectral_adf",
- "spectral_xdf", "specular", "speed", "spencer",
- "speye", "spfun", "sphere", "spinmap", "spline",
- "spones", "sprand", "sprandn", "sprandsym",
- "spring", "spstats", "spy", "sqp", "stairs",
- "statistics", "std", "stdnormal_cdf",
- "stdnormal_inv", "stdnormal_pdf", "stdnormal_rnd",
- "stem", "stft", "strcat", "strchr", "strjust",
- "strmatch", "strread", "strsplit", "strtok",
- "strtrim", "strtrunc", "structfun", "studentize",
- "subplot", "subsindex", "subspace", "substr",
- "substruct", "summer", "surf", "surface", "surfc",
- "surfl", "surfnorm", "svds", "swapbytes",
- "sylvester_matrix", "symvar", "synthesis", "table",
- "tand", "tar", "tcdf", "tempdir", "tempname",
- "test", "text", "textread", "textscan", "tinv",
- "title", "toeplitz", "tpdf", "trace", "trapz",
- "treelayout", "treeplot", "triangle_lw",
- "triangle_sw", "tril", "trimesh", "triplequad",
- "triplot", "trisurf", "triu", "trnd", "tsearchn",
- "t_test", "t_test_regression", "type", "unidcdf",
- "unidinv", "unidpdf", "unidrnd", "unifcdf",
- "unifinv", "unifpdf", "unifrnd", "union", "unique",
- "unix", "unmkpp", "unpack", "untabify", "untar",
- "unwrap", "unzip", "u_test", "validatestring",
- "vander", "var", "var_test", "vech", "ver",
- "version", "view", "voronoi", "voronoin",
- "waitforbuttonpress", "wavread", "wavwrite",
- "wblcdf", "wblinv", "wblpdf", "wblrnd", "weekday",
- "welch_test", "what", "white", "whitebg",
- "wienrnd", "wilcoxon_test", "wilkinson", "winter",
- "xlabel", "xlim", "ylabel", "yulewalker", "zip",
- "zlabel", "z_test", ]
-
- loadable_kw = [ "airy", "amd", "balance", "besselh", "besseli",
- "besselj", "besselk", "bessely", "bitpack",
- "bsxfun", "builtin", "ccolamd", "cellfun",
- "cellslices", "chol", "choldelete", "cholinsert",
- "cholinv", "cholshift", "cholupdate", "colamd",
- "colloc", "convhulln", "convn", "csymamd",
- "cummax", "cummin", "daspk", "daspk_options",
- "dasrt", "dasrt_options", "dassl", "dassl_options",
- "dbclear", "dbdown", "dbstack", "dbstatus",
- "dbstop", "dbtype", "dbup", "dbwhere", "det",
- "dlmread", "dmperm", "dot", "eig", "eigs",
- "endgrent", "endpwent", "etree", "fft", "fftn",
- "fftw", "filter", "find", "full", "gcd",
- "getgrent", "getgrgid", "getgrnam", "getpwent",
- "getpwnam", "getpwuid", "getrusage", "givens",
- "gmtime", "gnuplot_binary", "hess", "ifft",
- "ifftn", "inv", "isdebugmode", "issparse", "kron",
- "localtime", "lookup", "lsode", "lsode_options",
- "lu", "luinc", "luupdate", "matrix_type", "max",
- "min", "mktime", "pinv", "qr", "qrdelete",
- "qrinsert", "qrshift", "qrupdate", "quad",
- "quad_options", "qz", "rand", "rande", "randg",
- "randn", "randp", "randperm", "rcond", "regexp",
- "regexpi", "regexprep", "schur", "setgrent",
- "setpwent", "sort", "spalloc", "sparse", "spparms",
- "sprank", "sqrtm", "strfind", "strftime",
- "strptime", "strrep", "svd", "svd_driver", "syl",
- "symamd", "symbfact", "symrcm", "time", "tsearch",
- "typecast", "urlread", "urlwrite", ]
-
- mapping_kw = [ "abs", "acos", "acosh", "acot", "acoth", "acsc",
- "acsch", "angle", "arg", "asec", "asech", "asin",
- "asinh", "atan", "atanh", "beta", "betainc",
- "betaln", "bincoeff", "cbrt", "ceil", "conj", "cos",
- "cosh", "cot", "coth", "csc", "csch", "erf", "erfc",
- "erfcx", "erfinv", "exp", "finite", "fix", "floor",
- "fmod", "gamma", "gammainc", "gammaln", "imag",
- "isalnum", "isalpha", "isascii", "iscntrl",
- "isdigit", "isfinite", "isgraph", "isinf",
- "islower", "isna", "isnan", "isprint", "ispunct",
- "isspace", "isupper", "isxdigit", "lcm", "lgamma",
- "log", "lower", "mod", "real", "rem", "round",
- "roundb", "sec", "sech", "sign", "sin", "sinh",
- "sqrt", "tan", "tanh", "toascii", "tolower", "xor",
- ]
-
- builtin_consts = [ "EDITOR", "EXEC_PATH", "I", "IMAGE_PATH", "NA",
- "OCTAVE_HOME", "OCTAVE_VERSION", "PAGER",
- "PAGER_FLAGS", "SEEK_CUR", "SEEK_END", "SEEK_SET",
- "SIG", "S_ISBLK", "S_ISCHR", "S_ISDIR", "S_ISFIFO",
- "S_ISLNK", "S_ISREG", "S_ISSOCK", "WCONTINUE",
- "WCOREDUMP", "WEXITSTATUS", "WIFCONTINUED",
- "WIFEXITED", "WIFSIGNALED", "WIFSTOPPED", "WNOHANG",
- "WSTOPSIG", "WTERMSIG", "WUNTRACED", ]
-
- tokens = {
- 'root': [
- #We should look into multiline comments
- (r'[%#].*$', Comment),
- (r'^\s*function', Keyword, 'deffunc'),
-
- # from 'iskeyword' on hg changeset 8cc154f45e37
- (r'(__FILE__|__LINE__|break|case|catch|classdef|continue|do|else|'
- r'elseif|end|end_try_catch|end_unwind_protect|endclassdef|'
- r'endevents|endfor|endfunction|endif|endmethods|endproperties|'
- r'endswitch|endwhile|events|for|function|get|global|if|methods|'
- r'otherwise|persistent|properties|return|set|static|switch|try|'
- r'until|unwind_protect|unwind_protect_cleanup|while)\b', Keyword),
-
- ("(" + "|".join( builtin_kw + command_kw
- + function_kw + loadable_kw
- + mapping_kw) + r')\b', Name.Builtin),
-
- ("(" + "|".join(builtin_consts) + r')\b', Name.Constant),
-
- # operators in Octave but not Matlab:
- (r'-=|!=|!|/=|--', Operator),
- # operators:
- (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
- # operators in Octave but not Matlab requiring escape for re:
- (r'\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*',Operator),
- # operators requiring escape for re:
- (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
-
- # punctuation:
- (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
- (r'=|:|;', Punctuation),
-
- (r'"[^"]*"', String),
-
- (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eEf][+-]?[0-9]+', Number.Float),
- (r'\d+', Number.Integer),
-
- # quote can be transpose, instead of string:
- # (not great, but handles common cases...)
- (r'(?<=[\w\)\]])\'', Operator),
- (r'(?<![\w\)\]])\'', String, 'string'),
-
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
- (r'.', Text),
- ],
- 'string': [
- (r"[^']*'", String, '#pop'),
- ],
- 'deffunc': [
- (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
- bygroups(Text.Whitespace, Text, Text.Whitespace, Punctuation,
- Text.Whitespace, Name.Function, Punctuation, Text,
- Punctuation, Text.Whitespace), '#pop'),
- ],
- }
-
- def analyse_text(text):
- if re.match('^\s*[%#]', text, re.M): #Comment
- return 0.1
-
-
-class ScilabLexer(RegexLexer):
- """
- For Scilab source code.
-
- *New in Pygments 1.5.*
- """
- name = 'Scilab'
- aliases = ['scilab']
- filenames = ['*.sci', '*.sce', '*.tst']
- mimetypes = ['text/scilab']
-
- tokens = {
- 'root': [
- (r'//.*?$', Comment.Single),
- (r'^\s*function', Keyword, 'deffunc'),
-
- (r'(__FILE__|__LINE__|break|case|catch|classdef|continue|do|else|'
- r'elseif|end|end_try_catch|end_unwind_protect|endclassdef|'
- r'endevents|endfor|endfunction|endif|endmethods|endproperties|'
- r'endswitch|endwhile|events|for|function|get|global|if|methods|'
- r'otherwise|persistent|properties|return|set|static|switch|try|'
- r'until|unwind_protect|unwind_protect_cleanup|while)\b', Keyword),
-
- ("(" + "|".join(_scilab_builtins.functions_kw +
- _scilab_builtins.commands_kw +
- _scilab_builtins.macros_kw
- ) + r')\b', Name.Builtin),
-
- (r'(%s)\b' % "|".join(map(re.escape, _scilab_builtins.builtin_consts)),
- Name.Constant),
-
- # operators:
- (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
- # operators requiring escape for re:
- (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
- # punctuation:
- (r'[\[\](){}@.,=:;]', Punctuation),
-
- (r'"[^"]*"', String),
-
- # quote can be transpose, instead of string:
- # (not great, but handles common cases...)
- (r'(?<=[\w\)\]])\'', Operator),
- (r'(?<![\w\)\]])\'', String, 'string'),
-
- (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eEf][+-]?[0-9]+', Number.Float),
- (r'\d+', Number.Integer),
-
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
- (r'.', Text),
- ],
- 'string': [
- (r"[^']*'", String, '#pop'),
- (r'.', String, '#pop'),
- ],
- 'deffunc': [
- (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
- bygroups(Text.Whitespace, Text, Text.Whitespace, Punctuation,
- Text.Whitespace, Name.Function, Punctuation, Text,
- Punctuation, Text.Whitespace), '#pop'),
- ],
- }
-
-
-class NumPyLexer(PythonLexer):
- """
- A Python lexer recognizing Numerical Python builtins.
-
- *New in Pygments 0.10.*
- """
-
- name = 'NumPy'
- aliases = ['numpy']
-
- # override the mimetypes to not inherit them from python
- mimetypes = []
- filenames = []
-
- EXTRA_KEYWORDS = set([
- 'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
- 'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
- 'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh',
- 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'argmax', 'argmin',
- 'argsort', 'argwhere', 'around', 'array', 'array2string', 'array_equal',
- 'array_equiv', 'array_repr', 'array_split', 'array_str', 'arrayrange',
- 'asanyarray', 'asarray', 'asarray_chkfinite', 'ascontiguousarray',
- 'asfarray', 'asfortranarray', 'asmatrix', 'asscalar', 'astype',
- 'atleast_1d', 'atleast_2d', 'atleast_3d', 'average', 'bartlett',
- 'base_repr', 'beta', 'binary_repr', 'bincount', 'binomial',
- 'bitwise_and', 'bitwise_not', 'bitwise_or', 'bitwise_xor', 'blackman',
- 'bmat', 'broadcast', 'byte_bounds', 'bytes', 'byteswap', 'c_',
- 'can_cast', 'ceil', 'choose', 'clip', 'column_stack', 'common_type',
- 'compare_chararrays', 'compress', 'concatenate', 'conj', 'conjugate',
- 'convolve', 'copy', 'corrcoef', 'correlate', 'cos', 'cosh', 'cov',
- 'cross', 'cumprod', 'cumproduct', 'cumsum', 'delete', 'deprecate',
- 'diag', 'diagflat', 'diagonal', 'diff', 'digitize', 'disp', 'divide',
- 'dot', 'dsplit', 'dstack', 'dtype', 'dump', 'dumps', 'ediff1d', 'empty',
- 'empty_like', 'equal', 'exp', 'expand_dims', 'expm1', 'extract', 'eye',
- 'fabs', 'fastCopyAndTranspose', 'fft', 'fftfreq', 'fftshift', 'fill',
- 'finfo', 'fix', 'flat', 'flatnonzero', 'flatten', 'fliplr', 'flipud',
- 'floor', 'floor_divide', 'fmod', 'frexp', 'fromarrays', 'frombuffer',
- 'fromfile', 'fromfunction', 'fromiter', 'frompyfunc', 'fromstring',
- 'generic', 'get_array_wrap', 'get_include', 'get_numarray_include',
- 'get_numpy_include', 'get_printoptions', 'getbuffer', 'getbufsize',
- 'geterr', 'geterrcall', 'geterrobj', 'getfield', 'gradient', 'greater',
- 'greater_equal', 'gumbel', 'hamming', 'hanning', 'histogram',
- 'histogram2d', 'histogramdd', 'hsplit', 'hstack', 'hypot', 'i0',
- 'identity', 'ifft', 'imag', 'index_exp', 'indices', 'inf', 'info',
- 'inner', 'insert', 'int_asbuffer', 'interp', 'intersect1d',
- 'intersect1d_nu', 'inv', 'invert', 'iscomplex', 'iscomplexobj',
- 'isfinite', 'isfortran', 'isinf', 'isnan', 'isneginf', 'isposinf',
- 'isreal', 'isrealobj', 'isscalar', 'issctype', 'issubclass_',
- 'issubdtype', 'issubsctype', 'item', 'itemset', 'iterable', 'ix_',
- 'kaiser', 'kron', 'ldexp', 'left_shift', 'less', 'less_equal', 'lexsort',
- 'linspace', 'load', 'loads', 'loadtxt', 'log', 'log10', 'log1p', 'log2',
- 'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'logspace',
- 'lstsq', 'mat', 'matrix', 'max', 'maximum', 'maximum_sctype',
- 'may_share_memory', 'mean', 'median', 'meshgrid', 'mgrid', 'min',
- 'minimum', 'mintypecode', 'mod', 'modf', 'msort', 'multiply', 'nan',
- 'nan_to_num', 'nanargmax', 'nanargmin', 'nanmax', 'nanmin', 'nansum',
- 'ndenumerate', 'ndim', 'ndindex', 'negative', 'newaxis', 'newbuffer',
- 'newbyteorder', 'nonzero', 'not_equal', 'obj2sctype', 'ogrid', 'ones',
- 'ones_like', 'outer', 'permutation', 'piecewise', 'pinv', 'pkgload',
- 'place', 'poisson', 'poly', 'poly1d', 'polyadd', 'polyder', 'polydiv',
- 'polyfit', 'polyint', 'polymul', 'polysub', 'polyval', 'power', 'prod',
- 'product', 'ptp', 'put', 'putmask', 'r_', 'randint', 'random_integers',
- 'random_sample', 'ranf', 'rank', 'ravel', 'real', 'real_if_close',
- 'recarray', 'reciprocal', 'reduce', 'remainder', 'repeat', 'require',
- 'reshape', 'resize', 'restoredot', 'right_shift', 'rint', 'roll',
- 'rollaxis', 'roots', 'rot90', 'round', 'round_', 'row_stack', 's_',
- 'sample', 'savetxt', 'sctype2char', 'searchsorted', 'seed', 'select',
- 'set_numeric_ops', 'set_printoptions', 'set_string_function',
- 'setbufsize', 'setdiff1d', 'seterr', 'seterrcall', 'seterrobj',
- 'setfield', 'setflags', 'setmember1d', 'setxor1d', 'shape',
- 'show_config', 'shuffle', 'sign', 'signbit', 'sin', 'sinc', 'sinh',
- 'size', 'slice', 'solve', 'sometrue', 'sort', 'sort_complex', 'source',
- 'split', 'sqrt', 'square', 'squeeze', 'standard_normal', 'std',
- 'subtract', 'sum', 'svd', 'swapaxes', 'take', 'tan', 'tanh', 'tensordot',
- 'test', 'tile', 'tofile', 'tolist', 'tostring', 'trace', 'transpose',
- 'trapz', 'tri', 'tril', 'trim_zeros', 'triu', 'true_divide', 'typeDict',
- 'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index',
- 'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises',
- 'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like'
- ])
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- PythonLexer.get_tokens_unprocessed(self, text):
- if token is Name and value in self.EXTRA_KEYWORDS:
- yield index, Keyword.Pseudo, value
- else:
- yield index, token, value
-
- def analyse_text(text):
- return (shebang_matches(text, r'pythonw?(2(\.\d)?)?') or
- 'import ' in text[:1000]) \
- and ('import numpy' in text or 'from numpy import' in text)
-
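The get_tokens_unprocessed override above only re-tags plain Name tokens that appear in EXTRA_KEYWORDS; everything else passes straight through from PythonLexer. A minimal illustrative sketch (not part of the patch; the input snippet is made up) of what that looks like at the token level:

# Illustrative sketch only -- not part of the patch.
from pygments.lexers import NumPyLexer
from pygments.token import Keyword

code = "import numpy\ny = numpy.zeros(10)\n"
for tok, value in NumPyLexer().get_tokens(code):
    # 'zeros' is an ordinary Name for PythonLexer, but the override above
    # re-tags it as Keyword.Pseudo because it appears in EXTRA_KEYWORDS.
    if tok is Keyword.Pseudo:
        print(tok, repr(value))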
-
-class RConsoleLexer(Lexer):
- """
- For R console transcripts or R CMD BATCH output files.
- """
-
- name = 'RConsole'
- aliases = ['rconsole', 'rout']
- filenames = ['*.Rout']
-
- def get_tokens_unprocessed(self, text):
- slexer = SLexer(**self.options)
-
- current_code_block = ''
- insertions = []
-
- for match in line_re.finditer(text):
- line = match.group()
- if line.startswith('>') or line.startswith('+'):
- # Colorize the prompt as such,
- # then put rest of line into current_code_block
- insertions.append((len(current_code_block),
- [(0, Generic.Prompt, line[:2])]))
- current_code_block += line[2:]
- else:
- # We have reached a non-prompt line!
- # If we have stored prompt lines, need to process them first.
- if current_code_block:
- # Weave together the prompts and highlight code.
- for item in do_insertions(insertions,
- slexer.get_tokens_unprocessed(current_code_block)):
- yield item
- # Reset vars for next code block.
- current_code_block = ''
- insertions = []
- # Now process the actual line itself, this is output from R.
- yield match.start(), Generic.Output, line
-
- # If we happen to end on a code block with nothing after it, need to
- # process the last code block. This is neither elegant nor DRY so
- # should be changed.
- if current_code_block:
- for item in do_insertions(insertions,
- slexer.get_tokens_unprocessed(current_code_block)):
- yield item
-
-
-class SLexer(RegexLexer):
- """
- For S, S-plus, and R source code.
-
- *New in Pygments 0.10.*
- """
-
- name = 'S'
- aliases = ['splus', 's', 'r']
- filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile']
- mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
- 'text/x-R', 'text/x-r-history', 'text/x-r-profile']
-
- tokens = {
- 'comments': [
- (r'#.*$', Comment.Single),
- ],
- 'valid_name': [
- (r'[a-zA-Z][0-9a-zA-Z\._]*', Text),
- # can begin with ., but not if that is followed by a digit
- (r'\.[a-zA-Z_][0-9a-zA-Z\._]*', Text),
- ],
- 'punctuation': [
- (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
- ],
- 'keywords': [
- (r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
- r'(?![0-9a-zA-Z\._])',
- Keyword.Reserved)
- ],
- 'operators': [
- (r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
- (r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator)
- ],
- 'builtin_symbols': [
- (r'(NULL|NA(_(integer|real|complex|character)_)?|'
- r'Inf|TRUE|FALSE|NaN|\.\.(\.|[0-9]+))'
- r'(?![0-9a-zA-Z\._])',
- Keyword.Constant),
- (r'(T|F)\b', Keyword.Variable),
- ],
- 'numbers': [
- # hex number
- (r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
- # decimal number
- (r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+)([eE][+-]?[0-9]+)?[Li]?',
- Number),
- ],
- 'statements': [
- include('comments'),
- # whitespaces
- (r'\s+', Text),
- (r'`.*?`', String.Backtick),
- (r'\'', String, 'string_squote'),
- (r'\"', String, 'string_dquote'),
- include('builtin_symbols'),
- include('numbers'),
- include('keywords'),
- include('punctuation'),
- include('operators'),
- include('valid_name'),
- ],
- 'root': [
- include('statements'),
- # blocks:
- (r'\{|\}', Punctuation),
- #(r'\{', Punctuation, 'block'),
- (r'.', Text),
- ],
- #'block': [
- # include('statements'),
- # ('\{', Punctuation, '#push'),
- # ('\}', Punctuation, '#pop')
- #],
- 'string_squote': [
- (r'([^\'\\]|\\.)*\'', String, '#pop'),
- ],
- 'string_dquote': [
- (r'([^"\\]|\\.)*"', String, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if re.search(r'[a-z0-9_\])\s]<-(?!-)', text):
- return 0.11
-
-
-class BugsLexer(RegexLexer):
- """
- Pygments Lexer for `OpenBugs <http://www.openbugs.info/w/>`_ and WinBugs
- models.
-
- *New in Pygments 1.6.*
- """
-
- name = 'BUGS'
- aliases = ['bugs', 'winbugs', 'openbugs']
- filenames = ['*.bug']
-
- _FUNCTIONS = [
- # Scalar functions
- 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
- 'cloglog', 'cos', 'cosh', 'cumulative', 'cut', 'density', 'deviance',
- 'equals', 'expr', 'gammap', 'ilogit', 'icloglog', 'integral', 'log',
- 'logfact', 'loggam', 'logit', 'max', 'min', 'phi', 'post.p.value',
- 'pow', 'prior.p.value', 'probit', 'replicate.post', 'replicate.prior',
- 'round', 'sin', 'sinh', 'solution', 'sqrt', 'step', 'tan', 'tanh',
- 'trunc',
- # Vector functions
- 'inprod', 'interp.lin', 'inverse', 'logdet', 'mean', 'eigen.vals',
- 'ode', 'prod', 'p.valueM', 'rank', 'ranked', 'replicate.postM',
- 'sd', 'sort', 'sum',
- ## Special
- 'D', 'I', 'F', 'T', 'C']
- """ OpenBUGS built-in functions
-
- From http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAII
-
- This also includes
-
- - T, C, I : Truncation and censoring.
-      ``T`` and ``C`` are in OpenBUGS; ``I`` is in WinBUGS.
- - D : ODE
- - F : Functional http://www.openbugs.info/Examples/Functionals.html
-
- """
-
- _DISTRIBUTIONS = ['dbern', 'dbin', 'dcat', 'dnegbin', 'dpois',
- 'dhyper', 'dbeta', 'dchisqr', 'ddexp', 'dexp',
- 'dflat', 'dgamma', 'dgev', 'df', 'dggamma', 'dgpar',
- 'dloglik', 'dlnorm', 'dlogis', 'dnorm', 'dpar',
- 'dt', 'dunif', 'dweib', 'dmulti', 'ddirch', 'dmnorm',
- 'dmt', 'dwish']
- """ OpenBUGS built-in distributions
-
- Functions from
- http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAI
- """
-
-
- tokens = {
- 'whitespace' : [
- (r"\s+", Text),
- ],
- 'comments' : [
- # Comments
- (r'#.*$', Comment.Single),
- ],
- 'root': [
- # Comments
- include('comments'),
- include('whitespace'),
- # Block start
- (r'(model)(\s+)({)',
- bygroups(Keyword.Namespace, Text, Punctuation)),
- # Reserved Words
- (r'(for|in)(?![0-9a-zA-Z\._])', Keyword.Reserved),
- # Built-in Functions
- (r'(%s)(?=\s*\()'
- % r'|'.join(_FUNCTIONS + _DISTRIBUTIONS),
- Name.Builtin),
- # Regular variable names
- (r'[A-Za-z][A-Za-z0-9_.]*', Name),
- # Number Literals
- (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
- # Punctuation
- (r'\[|\]|\(|\)|:|,|;', Punctuation),
- # Assignment operators
- # SLexer makes these tokens Operators.
- (r'<-|~', Operator),
- # Infix and prefix operators
- (r'\+|-|\*|/', Operator),
- # Block
- (r'[{}]', Punctuation),
- ]
- }
-
- def analyse_text(text):
- if re.search(r"^\s*model\s*{", text, re.M):
- return 0.7
- else:
- return 0.0
-
-class JagsLexer(RegexLexer):
- """
- Pygments Lexer for JAGS.
-
- *New in Pygments 1.6.*
- """
-
- name = 'JAGS'
- aliases = ['jags']
- filenames = ['*.jag', '*.bug']
-
- ## JAGS
- _FUNCTIONS = [
- 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
- 'cos', 'cosh', 'cloglog',
- 'equals', 'exp', 'icloglog', 'ifelse', 'ilogit', 'log', 'logfact',
- 'loggam', 'logit', 'phi', 'pow', 'probit', 'round', 'sin', 'sinh',
- 'sqrt', 'step', 'tan', 'tanh', 'trunc', 'inprod', 'interp.lin',
- 'logdet', 'max', 'mean', 'min', 'prod', 'sum', 'sd', 'inverse',
- 'rank', 'sort', 't', 'acos', 'acosh', 'asin', 'asinh', 'atan',
- # Truncation/Censoring (should I include)
- 'T', 'I']
- # Distributions with density, probability and quartile functions
- _DISTRIBUTIONS = ['[dpq]%s' % x for x in
- ['bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp',
- 'df', 'gamma', 'gen.gamma', 'logis', 'lnorm',
- 'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib']]
- # Other distributions without density and probability
- _OTHER_DISTRIBUTIONS = [
- 'dt', 'dunif', 'dbetabin', 'dbern', 'dbin', 'dcat', 'dhyper',
- 'ddirch', 'dmnorm', 'dwish', 'dmt', 'dmulti', 'dbinom', 'dchisq',
- 'dnbinom', 'dweibull', 'ddirich']
-
- tokens = {
- 'whitespace' : [
- (r"\s+", Text),
- ],
- 'names' : [
- # Regular variable names
- (r'[a-zA-Z][a-zA-Z0-9_.]*\b', Name),
- ],
- 'comments' : [
- # do not use stateful comments
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- # Comments
- (r'#.*$', Comment.Single),
- ],
- 'root': [
- # Comments
- include('comments'),
- include('whitespace'),
- # Block start
- (r'(model|data)(\s+)({)',
- bygroups(Keyword.Namespace, Text, Punctuation)),
- (r'var(?![0-9a-zA-Z\._])', Keyword.Declaration),
- # Reserved Words
- (r'(for|in)(?![0-9a-zA-Z\._])', Keyword.Reserved),
- # Builtins
- # Need to use lookahead because . is a valid char
- (r'(%s)(?=\s*\()' % r'|'.join(_FUNCTIONS
- + _DISTRIBUTIONS
- + _OTHER_DISTRIBUTIONS),
- Name.Builtin),
- # Names
- include('names'),
- # Number Literals
- (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
- (r'\[|\]|\(|\)|:|,|;', Punctuation),
- # Assignment operators
- (r'<-|~', Operator),
-            # JAGS includes many more operators than OpenBUGS
- (r'\+|-|\*|\/|\|\|[&]{2}|[<>=]=?|\^|%.*?%', Operator),
- (r'[{}]', Punctuation),
- ]
- }
-
- def analyse_text(text):
- if re.search(r'^\s*model\s*\{', text, re.M):
- if re.search(r'^\s*data\s*\{', text, re.M):
- return 0.9
- elif re.search(r'^\s*var', text, re.M):
- return 0.9
- else:
- return 0.3
- else:
- return 0
-
-class StanLexer(RegexLexer):
- """Pygments Lexer for Stan models.
-
- The Stan modeling language is specified in the *Stan 1.3.0
- Modeling Language Manual* `pdf
- <http://code.google.com/p/stan/downloads/detail?name=stan-reference-1.3.0.pdf>`_.
-
- *New in Pygments 1.6.*
- """
-
- name = 'Stan'
- aliases = ['stan']
- filenames = ['*.stan']
-
- tokens = {
- 'whitespace' : [
- (r"\s+", Text),
- ],
- 'comments' : [
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- # Comments
- (r'(//|#).*$', Comment.Single),
- ],
- 'root': [
- # Stan is more restrictive on strings than this regex
- (r'"[^"]*"', String),
- # Comments
- include('comments'),
- # block start
- include('whitespace'),
- # Block start
- (r'(%s)(\s*)({)' %
- r'|'.join(('data', r'transformed\s+?data',
- 'parameters', r'transformed\s+parameters',
- 'model', r'generated\s+quantities')),
- bygroups(Keyword.Namespace, Text, Punctuation)),
- # Reserved Words
- (r'(%s)\b' % r'|'.join(_stan_builtins.KEYWORDS), Keyword),
- # Truncation
- (r'T(?=\s*\[)', Keyword),
- # Data types
- (r'(%s)\b' % r'|'.join(_stan_builtins.TYPES), Keyword.Type),
- # Punctuation
- (r"[;:,\[\]()]", Punctuation),
- # Builtin
- (r'(%s)(?=\s*\()'
- % r'|'.join(_stan_builtins.FUNCTIONS
- + _stan_builtins.DISTRIBUTIONS),
- Name.Builtin),
- # Special names ending in __, like lp__
- (r'[A-Za-z][A-Za-z0-9_]*__\b', Name.Builtin.Pseudo),
- (r'(%s)\b' % r'|'.join(_stan_builtins.RESERVED), Keyword.Reserved),
- # Regular variable names
- (r'[A-Za-z][A-Za-z0-9_]*\b', Name),
- # Real Literals
- (r'-?[0-9]+(\.[0-9]+)?[eE]-?[0-9]+', Number.Float),
- (r'-?[0-9]*\.[0-9]*', Number.Float),
- # Integer Literals
- (r'-?[0-9]+', Number.Integer),
- # Assignment operators
- # SLexer makes these tokens Operators.
- (r'<-|~', Operator),
- # Infix and prefix operators (and = )
- (r"\+|-|\.?\*|\.?/|\\|'|==?|!=?|<=?|>=?|\|\||&&", Operator),
- # Block delimiters
- (r'[{}]', Punctuation),
- ]
- }
-
- def analyse_text(text):
- if re.search(r'^\s*parameters\s*\{', text, re.M):
- return 1.0
- else:
- return 0.0
-
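The analyse_text hooks on the BUGS, JAGS and Stan lexers above are what pygments.lexers.guess_lexer consults when no filename is available: BUGS scores 0.7 on a top-level model block, JAGS up to 0.9 when a data block or a var declaration is also present, and Stan 1.0 on a parameters block. A rough illustrative sketch (not part of the patch; the model text is made up, and other registered lexers could in principle score as high):

# Illustrative sketch only -- not part of the patch.
from pygments.lexers import guess_lexer

stan_model = """
data { int N; }
parameters { real mu; }
model { mu ~ normal(0, 1); }
"""
# The 'parameters {' block gives StanLexer an analyse_text score of 1.0,
# so guess_lexer should prefer it over the BUGS and JAGS lexers here.
print(guess_lexer(stan_model).name)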
-
-class IDLLexer(RegexLexer):
- """
- Pygments Lexer for IDL (Interactive Data Language).
-
- *New in Pygments 1.6.*
- """
- name = 'IDL'
- aliases = ['idl']
- filenames = ['*.pro']
- mimetypes = ['text/idl']
-
- _RESERVED = ['and', 'begin', 'break', 'case', 'common', 'compile_opt',
-                 'continue', 'do', 'else', 'end', 'endcase', 'endelse',
- 'endfor', 'endforeach', 'endif', 'endrep', 'endswitch',
- 'endwhile', 'eq', 'for', 'foreach', 'forward_function',
- 'function', 'ge', 'goto', 'gt', 'if', 'inherits', 'le',
- 'lt', 'mod', 'ne', 'not', 'of', 'on_ioerror', 'or', 'pro',
- 'repeat', 'switch', 'then', 'until', 'while', 'xor']
- """Reserved words from: http://www.exelisvis.com/docs/reswords.html"""
-
- _BUILTIN_LIB = ['abs', 'acos', 'adapt_hist_equal', 'alog', 'alog10',
- 'amoeba', 'annotate', 'app_user_dir', 'app_user_dir_query',
- 'arg_present', 'array_equal', 'array_indices', 'arrow',
- 'ascii_template', 'asin', 'assoc', 'atan', 'axis',
- 'a_correlate', 'bandpass_filter', 'bandreject_filter',
- 'barplot', 'bar_plot', 'beseli', 'beselj', 'beselk',
- 'besely', 'beta', 'bilinear', 'binary_template', 'bindgen',
- 'binomial', 'bin_date', 'bit_ffs', 'bit_population',
- 'blas_axpy', 'blk_con', 'box_cursor', 'breakpoint',
- 'broyden', 'butterworth', 'bytarr', 'byte', 'byteorder',
- 'bytscl', 'caldat', 'calendar', 'call_external',
- 'call_function', 'call_method', 'call_procedure', 'canny',
- 'catch', 'cd', 'cdf_[0-9a-za-z_]*', 'ceil', 'chebyshev',
- 'check_math',
- 'chisqr_cvf', 'chisqr_pdf', 'choldc', 'cholsol', 'cindgen',
- 'cir_3pnt', 'close', 'cluster', 'cluster_tree', 'clust_wts',
- 'cmyk_convert', 'colorbar', 'colorize_sample',
- 'colormap_applicable', 'colormap_gradient',
- 'colormap_rotation', 'colortable', 'color_convert',
- 'color_exchange', 'color_quan', 'color_range_map', 'comfit',
- 'command_line_args', 'complex', 'complexarr', 'complexround',
- 'compute_mesh_normals', 'cond', 'congrid', 'conj',
- 'constrained_min', 'contour', 'convert_coord', 'convol',
- 'convol_fft', 'coord2to3', 'copy_lun', 'correlate', 'cos',
- 'cosh', 'cpu', 'cramer', 'create_cursor', 'create_struct',
- 'create_view', 'crossp', 'crvlength', 'cti_test',
- 'ct_luminance', 'cursor', 'curvefit', 'cvttobm', 'cv_coord',
- 'cw_animate', 'cw_animate_getp', 'cw_animate_load',
- 'cw_animate_run', 'cw_arcball', 'cw_bgroup', 'cw_clr_index',
- 'cw_colorsel', 'cw_defroi', 'cw_field', 'cw_filesel',
- 'cw_form', 'cw_fslider', 'cw_light_editor',
- 'cw_light_editor_get', 'cw_light_editor_set', 'cw_orient',
- 'cw_palette_editor', 'cw_palette_editor_get',
- 'cw_palette_editor_set', 'cw_pdmenu', 'cw_rgbslider',
- 'cw_tmpl', 'cw_zoom', 'c_correlate', 'dblarr', 'db_exists',
- 'dcindgen', 'dcomplex', 'dcomplexarr', 'define_key',
- 'define_msgblk', 'define_msgblk_from_file', 'defroi',
- 'defsysv', 'delvar', 'dendrogram', 'dendro_plot', 'deriv',
- 'derivsig', 'determ', 'device', 'dfpmin', 'diag_matrix',
- 'dialog_dbconnect', 'dialog_message', 'dialog_pickfile',
- 'dialog_printersetup', 'dialog_printjob',
- 'dialog_read_image', 'dialog_write_image', 'digital_filter',
- 'dilate', 'dindgen', 'dissolve', 'dist', 'distance_measure',
- 'dlm_load', 'dlm_register', 'doc_library', 'double',
- 'draw_roi', 'edge_dog', 'efont', 'eigenql', 'eigenvec',
- 'ellipse', 'elmhes', 'emboss', 'empty', 'enable_sysrtn',
- 'eof', 'eos_[0-9a-za-z_]*', 'erase', 'erf', 'erfc', 'erfcx',
- 'erode', 'errorplot', 'errplot', 'estimator_filter',
- 'execute', 'exit', 'exp', 'expand', 'expand_path', 'expint',
- 'extrac', 'extract_slice', 'factorial', 'fft', 'filepath',
- 'file_basename', 'file_chmod', 'file_copy', 'file_delete',
- 'file_dirname', 'file_expand_path', 'file_info',
- 'file_lines', 'file_link', 'file_mkdir', 'file_move',
- 'file_poll_input', 'file_readlink', 'file_same',
- 'file_search', 'file_test', 'file_which', 'findgen',
- 'finite', 'fix', 'flick', 'float', 'floor', 'flow3',
- 'fltarr', 'flush', 'format_axis_values', 'free_lun',
- 'fstat', 'fulstr', 'funct', 'fv_test', 'fx_root',
- 'fz_roots', 'f_cvf', 'f_pdf', 'gamma', 'gamma_ct',
- 'gauss2dfit', 'gaussfit', 'gaussian_function', 'gaussint',
- 'gauss_cvf', 'gauss_pdf', 'gauss_smooth', 'getenv',
- 'getwindows', 'get_drive_list', 'get_dxf_objects',
- 'get_kbrd', 'get_login_info', 'get_lun', 'get_screen_size',
- 'greg2jul', 'grib_[0-9a-za-z_]*', 'grid3', 'griddata',
- 'grid_input', 'grid_tps', 'gs_iter',
- 'h5[adfgirst]_[0-9a-za-z_]*', 'h5_browser', 'h5_close',
- 'h5_create', 'h5_get_libversion', 'h5_open', 'h5_parse',
- 'hanning', 'hash', 'hdf_[0-9a-za-z_]*', 'heap_free',
- 'heap_gc', 'heap_nosave', 'heap_refcount', 'heap_save',
- 'help', 'hilbert', 'histogram', 'hist_2d', 'hist_equal',
- 'hls', 'hough', 'hqr', 'hsv', 'h_eq_ct', 'h_eq_int',
- 'i18n_multibytetoutf8', 'i18n_multibytetowidechar',
- 'i18n_utf8tomultibyte', 'i18n_widechartomultibyte',
- 'ibeta', 'icontour', 'iconvertcoord', 'idelete', 'identity',
- 'idlexbr_assistant', 'idlitsys_createtool', 'idl_base64',
- 'idl_validname', 'iellipse', 'igamma', 'igetcurrent',
- 'igetdata', 'igetid', 'igetproperty', 'iimage', 'image',
- 'image_cont', 'image_statistics', 'imaginary', 'imap',
- 'indgen', 'intarr', 'interpol', 'interpolate',
- 'interval_volume', 'int_2d', 'int_3d', 'int_tabulated',
- 'invert', 'ioctl', 'iopen', 'iplot', 'ipolygon',
- 'ipolyline', 'iputdata', 'iregister', 'ireset', 'iresolve',
- 'irotate', 'ir_filter', 'isa', 'isave', 'iscale',
- 'isetcurrent', 'isetproperty', 'ishft', 'isocontour',
- 'isosurface', 'isurface', 'itext', 'itranslate', 'ivector',
- 'ivolume', 'izoom', 'i_beta', 'journal', 'json_parse',
- 'json_serialize', 'jul2greg', 'julday', 'keyword_set',
- 'krig2d', 'kurtosis', 'kw_test', 'l64indgen', 'label_date',
- 'label_region', 'ladfit', 'laguerre', 'laplacian',
- 'la_choldc', 'la_cholmprove', 'la_cholsol', 'la_determ',
- 'la_eigenproblem', 'la_eigenql', 'la_eigenvec', 'la_elmhes',
- 'la_gm_linear_model', 'la_hqr', 'la_invert',
- 'la_least_squares', 'la_least_square_equality',
- 'la_linear_equation', 'la_ludc', 'la_lumprove', 'la_lusol',
- 'la_svd', 'la_tridc', 'la_trimprove', 'la_triql',
- 'la_trired', 'la_trisol', 'least_squares_filter', 'leefilt',
- 'legend', 'legendre', 'linbcg', 'lindgen', 'linfit',
- 'linkimage', 'list', 'll_arc_distance', 'lmfit', 'lmgr',
- 'lngamma', 'lnp_test', 'loadct', 'locale_get',
- 'logical_and', 'logical_or', 'logical_true', 'lon64arr',
- 'lonarr', 'long', 'long64', 'lsode', 'ludc', 'lumprove',
- 'lusol', 'lu_complex', 'machar', 'make_array', 'make_dll',
- 'make_rt', 'map', 'mapcontinents', 'mapgrid', 'map_2points',
- 'map_continents', 'map_grid', 'map_image', 'map_patch',
- 'map_proj_forward', 'map_proj_image', 'map_proj_info',
- 'map_proj_init', 'map_proj_inverse', 'map_set',
- 'matrix_multiply', 'matrix_power', 'max', 'md_test',
- 'mean', 'meanabsdev', 'mean_filter', 'median', 'memory',
- 'mesh_clip', 'mesh_decimate', 'mesh_issolid', 'mesh_merge',
- 'mesh_numtriangles', 'mesh_obj', 'mesh_smooth',
- 'mesh_surfacearea', 'mesh_validate', 'mesh_volume',
- 'message', 'min', 'min_curve_surf', 'mk_html_help',
- 'modifyct', 'moment', 'morph_close', 'morph_distance',
- 'morph_gradient', 'morph_hitormiss', 'morph_open',
- 'morph_thin', 'morph_tophat', 'multi', 'm_correlate',
- 'ncdf_[0-9a-za-z_]*', 'newton', 'noise_hurl', 'noise_pick',
- 'noise_scatter', 'noise_slur', 'norm', 'n_elements',
- 'n_params', 'n_tags', 'objarr', 'obj_class', 'obj_destroy',
- 'obj_hasmethod', 'obj_isa', 'obj_new', 'obj_valid',
- 'online_help', 'on_error', 'open', 'oplot', 'oploterr',
- 'parse_url', 'particle_trace', 'path_cache', 'path_sep',
- 'pcomp', 'plot', 'plot3d', 'ploterr', 'plots', 'plot_3dbox',
- 'plot_field', 'pnt_line', 'point_lun', 'polarplot',
- 'polar_contour', 'polar_surface', 'poly', 'polyfill',
- 'polyfillv', 'polygon', 'polyline', 'polyshade', 'polywarp',
- 'poly_2d', 'poly_area', 'poly_fit', 'popd', 'powell',
- 'pref_commit', 'pref_get', 'pref_set', 'prewitt', 'primes',
- 'print', 'printd', 'product', 'profile', 'profiler',
- 'profiles', 'project_vol', 'psafm', 'pseudo',
- 'ps_show_fonts', 'ptrarr', 'ptr_free', 'ptr_new',
- 'ptr_valid', 'pushd', 'p_correlate', 'qgrid3', 'qhull',
- 'qromb', 'qromo', 'qsimp', 'query_ascii', 'query_bmp',
- 'query_csv', 'query_dicom', 'query_gif', 'query_image',
- 'query_jpeg', 'query_jpeg2000', 'query_mrsid', 'query_pict',
- 'query_png', 'query_ppm', 'query_srf', 'query_tiff',
- 'query_wav', 'radon', 'randomn', 'randomu', 'ranks',
- 'rdpix', 'read', 'reads', 'readu', 'read_ascii',
- 'read_binary', 'read_bmp', 'read_csv', 'read_dicom',
- 'read_gif', 'read_image', 'read_interfile', 'read_jpeg',
- 'read_jpeg2000', 'read_mrsid', 'read_pict', 'read_png',
- 'read_ppm', 'read_spr', 'read_srf', 'read_sylk',
- 'read_tiff', 'read_wav', 'read_wave', 'read_x11_bitmap',
- 'read_xwd', 'real_part', 'rebin', 'recall_commands',
- 'recon3', 'reduce_colors', 'reform', 'region_grow',
- 'register_cursor', 'regress', 'replicate',
- 'replicate_inplace', 'resolve_all', 'resolve_routine',
- 'restore', 'retall', 'return', 'reverse', 'rk4', 'roberts',
- 'rot', 'rotate', 'round', 'routine_filepath',
- 'routine_info', 'rs_test', 'r_correlate', 'r_test',
- 'save', 'savgol', 'scale3', 'scale3d', 'scope_level',
- 'scope_traceback', 'scope_varfetch', 'scope_varname',
- 'search2d', 'search3d', 'sem_create', 'sem_delete',
- 'sem_lock', 'sem_release', 'setenv', 'set_plot',
- 'set_shading', 'sfit', 'shade_surf', 'shade_surf_irr',
- 'shade_volume', 'shift', 'shift_diff', 'shmdebug', 'shmmap',
- 'shmunmap', 'shmvar', 'show3', 'showfont', 'simplex', 'sin',
- 'sindgen', 'sinh', 'size', 'skewness', 'skip_lun',
- 'slicer3', 'slide_image', 'smooth', 'sobel', 'socket',
- 'sort', 'spawn', 'spher_harm', 'sph_4pnt', 'sph_scat',
- 'spline', 'spline_p', 'spl_init', 'spl_interp', 'sprsab',
- 'sprsax', 'sprsin', 'sprstp', 'sqrt', 'standardize',
- 'stddev', 'stop', 'strarr', 'strcmp', 'strcompress',
- 'streamline', 'stregex', 'stretch', 'string', 'strjoin',
- 'strlen', 'strlowcase', 'strmatch', 'strmessage', 'strmid',
- 'strpos', 'strput', 'strsplit', 'strtrim', 'struct_assign',
- 'struct_hide', 'strupcase', 'surface', 'surfr', 'svdc',
- 'svdfit', 'svsol', 'swap_endian', 'swap_endian_inplace',
- 'symbol', 'systime', 's_test', 't3d', 'tag_names', 'tan',
- 'tanh', 'tek_color', 'temporary', 'tetra_clip',
- 'tetra_surface', 'tetra_volume', 'text', 'thin', 'threed',
- 'timegen', 'time_test2', 'tm_test', 'total', 'trace',
- 'transpose', 'triangulate', 'trigrid', 'triql', 'trired',
- 'trisol', 'tri_surf', 'truncate_lun', 'ts_coef', 'ts_diff',
- 'ts_fcast', 'ts_smooth', 'tv', 'tvcrs', 'tvlct', 'tvrd',
- 'tvscl', 'typename', 't_cvt', 't_pdf', 'uindgen', 'uint',
- 'uintarr', 'ul64indgen', 'ulindgen', 'ulon64arr', 'ulonarr',
- 'ulong', 'ulong64', 'uniq', 'unsharp_mask', 'usersym',
- 'value_locate', 'variance', 'vector', 'vector_field', 'vel',
- 'velovect', 'vert_t3d', 'voigt', 'voronoi', 'voxel_proj',
- 'wait', 'warp_tri', 'watershed', 'wdelete', 'wf_draw',
- 'where', 'widget_base', 'widget_button', 'widget_combobox',
- 'widget_control', 'widget_displaycontextmen', 'widget_draw',
- 'widget_droplist', 'widget_event', 'widget_info',
- 'widget_label', 'widget_list', 'widget_propertysheet',
- 'widget_slider', 'widget_tab', 'widget_table',
- 'widget_text', 'widget_tree', 'widget_tree_move',
- 'widget_window', 'wiener_filter', 'window', 'writeu',
- 'write_bmp', 'write_csv', 'write_gif', 'write_image',
- 'write_jpeg', 'write_jpeg2000', 'write_nrif', 'write_pict',
- 'write_png', 'write_ppm', 'write_spr', 'write_srf',
- 'write_sylk', 'write_tiff', 'write_wav', 'write_wave',
- 'wset', 'wshow', 'wtn', 'wv_applet', 'wv_cwt',
- 'wv_cw_wavelet', 'wv_denoise', 'wv_dwt', 'wv_fn_coiflet',
- 'wv_fn_daubechies', 'wv_fn_gaussian', 'wv_fn_haar',
- 'wv_fn_morlet', 'wv_fn_paul', 'wv_fn_symlet',
- 'wv_import_data', 'wv_import_wavelet', 'wv_plot3d_wps',
- 'wv_plot_multires', 'wv_pwt', 'wv_tool_denoise',
- 'xbm_edit', 'xdisplayfile', 'xdxf', 'xfont',
- 'xinteranimate', 'xloadct', 'xmanager', 'xmng_tmpl',
- 'xmtool', 'xobjview', 'xobjview_rotate',
- 'xobjview_write_image', 'xpalette', 'xpcolor', 'xplot3d',
- 'xregistered', 'xroi', 'xsq_test', 'xsurface', 'xvaredit',
- 'xvolume', 'xvolume_rotate', 'xvolume_write_image',
- 'xyouts', 'zoom', 'zoom_24']
- """Functions from: http://www.exelisvis.com/docs/routines-1.html"""
-
- tokens = {
- 'root': [
-            (r'^\s*;.*?\n', Comment.Single),
- (r'\b(' + '|'.join(_RESERVED) + r')\b', Keyword),
- (r'\b(' + '|'.join(_BUILTIN_LIB) + r')\b', Name.Builtin),
- (r'\+=|-=|\^=|\*=|/=|#=|##=|<=|>=|=', Operator),
- (r'\+\+|--|->|\+|-|##|#|\*|/|<|>|&&|\^|~|\|\|\?|:', Operator),
- (r'\b(mod=|lt=|le=|eq=|ne=|ge=|gt=|not=|and=|or=|xor=)', Operator),
- (r'\b(mod|lt|le|eq|ne|ge|gt|not|and|or|xor)\b', Operator),
- (r'\b[0-9](L|B|S|UL|ULL|LL)?\b', Number),
- (r'.', Text),
- ]
- }
-
-
-class RdLexer(RegexLexer):
- """
- Pygments Lexer for R documentation (Rd) files
-
- This is a very minimal implementation, highlighting little more
- than the macros. A description of Rd syntax is found in `Writing R
- Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
-    and `Parsing Rd files <http://developer.r-project.org/parseRd.pdf>`_.
-
- *New in Pygments 1.6.*
- """
- name = 'Rd'
- aliases = ['rd']
- filenames = ['*.Rd']
- mimetypes = ['text/x-r-doc']
-
- # To account for verbatim / LaTeX-like / and R-like areas
- # would require parsing.
- tokens = {
- 'root' : [
- # catch escaped brackets and percent sign
- (r'\\[\\{}%]', String.Escape),
- # comments
- (r'%.*$', Comment),
- # special macros with no arguments
- (r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
- # macros
- (r'\\[a-zA-Z]+\b', Keyword),
- # special preprocessor macros
- (r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
- # non-escaped brackets
- (r'[{}]', Name.Builtin),
- # everything else
- (r'[^\\%\n{}]+', Text),
- (r'.', Text),
- ]
- }
-
-
-class IgorLexer(RegexLexer):
- """
- Pygments Lexer for Igor Pro procedure files (.ipf).
- See http://www.wavemetrics.com/ and http://www.igorexchange.com/.
-
- *New in Pygments 1.7.*
- """
-
- name = 'Igor'
- aliases = ['igor', 'igorpro']
- filenames = ['*.ipf']
- mimetypes = ['text/ipf']
-
- flags = re.IGNORECASE
-
- flowControl = [
- 'if', 'else', 'elseif', 'endif', 'for', 'endfor', 'strswitch', 'switch',
- 'case', 'endswitch', 'do', 'while', 'try', 'catch', 'endtry', 'break',
- 'continue', 'return',
- ]
- types = [
- 'variable', 'string', 'constant', 'strconstant', 'NVAR', 'SVAR', 'WAVE',
- 'STRUCT', 'ThreadSafe', 'function', 'end', 'static', 'macro', 'window',
- 'graph', 'Structure', 'EndStructure', 'EndMacro', 'FuncFit', 'Proc',
- 'Picture', 'Menu', 'SubMenu', 'Prompt', 'DoPrompt',
- ]
- operations = [
- 'Abort', 'AddFIFOData', 'AddFIFOVectData', 'AddMovieAudio',
- 'AddMovieFrame', 'APMath', 'Append', 'AppendImage',
- 'AppendLayoutObject', 'AppendMatrixContour', 'AppendText',
- 'AppendToGraph', 'AppendToLayout', 'AppendToTable', 'AppendXYZContour',
- 'AutoPositionWindow', 'BackgroundInfo', 'Beep', 'BoundingBall',
- 'BrowseURL', 'BuildMenu', 'Button', 'cd', 'Chart', 'CheckBox',
- 'CheckDisplayed', 'ChooseColor', 'Close', 'CloseMovie', 'CloseProc',
- 'ColorScale', 'ColorTab2Wave', 'Concatenate', 'ControlBar',
- 'ControlInfo', 'ControlUpdate', 'ConvexHull', 'Convolve', 'CopyFile',
- 'CopyFolder', 'CopyScales', 'Correlate', 'CreateAliasShortcut', 'Cross',
- 'CtrlBackground', 'CtrlFIFO', 'CtrlNamedBackground', 'Cursor',
- 'CurveFit', 'CustomControl', 'CWT', 'Debugger', 'DebuggerOptions',
- 'DefaultFont', 'DefaultGuiControls', 'DefaultGuiFont', 'DefineGuide',
- 'DelayUpdate', 'DeleteFile', 'DeleteFolder', 'DeletePoints',
- 'Differentiate', 'dir', 'Display', 'DisplayHelpTopic',
- 'DisplayProcedure', 'DoAlert', 'DoIgorMenu', 'DoUpdate', 'DoWindow',
- 'DoXOPIdle', 'DrawAction', 'DrawArc', 'DrawBezier', 'DrawLine',
- 'DrawOval', 'DrawPICT', 'DrawPoly', 'DrawRect', 'DrawRRect', 'DrawText',
- 'DSPDetrend', 'DSPPeriodogram', 'Duplicate', 'DuplicateDataFolder',
- 'DWT', 'EdgeStats', 'Edit', 'ErrorBars', 'Execute', 'ExecuteScriptText',
- 'ExperimentModified', 'Extract', 'FastGaussTransform', 'FastOp',
- 'FBinRead', 'FBinWrite', 'FFT', 'FIFO2Wave', 'FIFOStatus', 'FilterFIR',
- 'FilterIIR', 'FindLevel', 'FindLevels', 'FindPeak', 'FindPointsInPoly',
- 'FindRoots', 'FindSequence', 'FindValue', 'FPClustering', 'fprintf',
- 'FReadLine', 'FSetPos', 'FStatus', 'FTPDelete', 'FTPDownload',
- 'FTPUpload', 'FuncFit', 'FuncFitMD', 'GetAxis', 'GetFileFolderInfo',
- 'GetLastUserMenuInfo', 'GetMarquee', 'GetSelection', 'GetWindow',
- 'GraphNormal', 'GraphWaveDraw', 'GraphWaveEdit', 'Grep', 'GroupBox',
- 'Hanning', 'HideIgorMenus', 'HideInfo', 'HideProcedures', 'HideTools',
- 'HilbertTransform', 'Histogram', 'IFFT', 'ImageAnalyzeParticles',
- 'ImageBlend', 'ImageBoundaryToMask', 'ImageEdgeDetection',
- 'ImageFileInfo', 'ImageFilter', 'ImageFocus', 'ImageGenerateROIMask',
- 'ImageHistModification', 'ImageHistogram', 'ImageInterpolate',
- 'ImageLineProfile', 'ImageLoad', 'ImageMorphology', 'ImageRegistration',
- 'ImageRemoveBackground', 'ImageRestore', 'ImageRotate', 'ImageSave',
- 'ImageSeedFill', 'ImageSnake', 'ImageStats', 'ImageThreshold',
- 'ImageTransform', 'ImageUnwrapPhase', 'ImageWindow', 'IndexSort',
- 'InsertPoints', 'Integrate', 'IntegrateODE', 'Interp3DPath',
- 'Interpolate3D', 'KillBackground', 'KillControl', 'KillDataFolder',
- 'KillFIFO', 'KillFreeAxis', 'KillPath', 'KillPICTs', 'KillStrings',
- 'KillVariables', 'KillWaves', 'KillWindow', 'KMeans', 'Label', 'Layout',
- 'Legend', 'LinearFeedbackShiftRegister', 'ListBox', 'LoadData',
- 'LoadPackagePreferences', 'LoadPICT', 'LoadWave', 'Loess',
- 'LombPeriodogram', 'Make', 'MakeIndex', 'MarkPerfTestTime',
- 'MatrixConvolve', 'MatrixCorr', 'MatrixEigenV', 'MatrixFilter',
- 'MatrixGaussJ', 'MatrixInverse', 'MatrixLinearSolve',
- 'MatrixLinearSolveTD', 'MatrixLLS', 'MatrixLUBkSub', 'MatrixLUD',
- 'MatrixMultiply', 'MatrixOP', 'MatrixSchur', 'MatrixSolve',
- 'MatrixSVBkSub', 'MatrixSVD', 'MatrixTranspose', 'MeasureStyledText',
- 'Modify', 'ModifyContour', 'ModifyControl', 'ModifyControlList',
- 'ModifyFreeAxis', 'ModifyGraph', 'ModifyImage', 'ModifyLayout',
- 'ModifyPanel', 'ModifyTable', 'ModifyWaterfall', 'MoveDataFolder',
- 'MoveFile', 'MoveFolder', 'MoveString', 'MoveSubwindow', 'MoveVariable',
- 'MoveWave', 'MoveWindow', 'NeuralNetworkRun', 'NeuralNetworkTrain',
- 'NewDataFolder', 'NewFIFO', 'NewFIFOChan', 'NewFreeAxis', 'NewImage',
- 'NewLayout', 'NewMovie', 'NewNotebook', 'NewPanel', 'NewPath',
- 'NewWaterfall', 'Note', 'Notebook', 'NotebookAction', 'Open',
- 'OpenNotebook', 'Optimize', 'ParseOperationTemplate', 'PathInfo',
- 'PauseForUser', 'PauseUpdate', 'PCA', 'PlayMovie', 'PlayMovieAction',
- 'PlaySnd', 'PlaySound', 'PopupContextualMenu', 'PopupMenu',
- 'Preferences', 'PrimeFactors', 'Print', 'printf', 'PrintGraphs',
- 'PrintLayout', 'PrintNotebook', 'PrintSettings', 'PrintTable',
- 'Project', 'PulseStats', 'PutScrapText', 'pwd', 'Quit',
- 'RatioFromNumber', 'Redimension', 'Remove', 'RemoveContour',
- 'RemoveFromGraph', 'RemoveFromLayout', 'RemoveFromTable', 'RemoveImage',
- 'RemoveLayoutObjects', 'RemovePath', 'Rename', 'RenameDataFolder',
- 'RenamePath', 'RenamePICT', 'RenameWindow', 'ReorderImages',
- 'ReorderTraces', 'ReplaceText', 'ReplaceWave', 'Resample',
- 'ResumeUpdate', 'Reverse', 'Rotate', 'Save', 'SaveData',
- 'SaveExperiment', 'SaveGraphCopy', 'SaveNotebook',
- 'SavePackagePreferences', 'SavePICT', 'SaveTableCopy',
- 'SetActiveSubwindow', 'SetAxis', 'SetBackground', 'SetDashPattern',
- 'SetDataFolder', 'SetDimLabel', 'SetDrawEnv', 'SetDrawLayer',
- 'SetFileFolderInfo', 'SetFormula', 'SetIgorHook', 'SetIgorMenuMode',
- 'SetIgorOption', 'SetMarquee', 'SetProcessSleep', 'SetRandomSeed',
- 'SetScale', 'SetVariable', 'SetWaveLock', 'SetWindow', 'ShowIgorMenus',
- 'ShowInfo', 'ShowTools', 'Silent', 'Sleep', 'Slider', 'Smooth',
- 'SmoothCustom', 'Sort', 'SoundInRecord', 'SoundInSet',
- 'SoundInStartChart', 'SoundInStatus', 'SoundInStopChart',
- 'SphericalInterpolate', 'SphericalTriangulate', 'SplitString',
- 'sprintf', 'sscanf', 'Stack', 'StackWindows',
- 'StatsAngularDistanceTest', 'StatsANOVA1Test', 'StatsANOVA2NRTest',
- 'StatsANOVA2RMTest', 'StatsANOVA2Test', 'StatsChiTest',
- 'StatsCircularCorrelationTest', 'StatsCircularMeans',
- 'StatsCircularMoments', 'StatsCircularTwoSampleTest',
- 'StatsCochranTest', 'StatsContingencyTable', 'StatsDIPTest',
- 'StatsDunnettTest', 'StatsFriedmanTest', 'StatsFTest',
- 'StatsHodgesAjneTest', 'StatsJBTest', 'StatsKendallTauTest',
- 'StatsKSTest', 'StatsKWTest', 'StatsLinearCorrelationTest',
- 'StatsLinearRegression', 'StatsMultiCorrelationTest',
- 'StatsNPMCTest', 'StatsNPNominalSRTest', 'StatsQuantiles',
- 'StatsRankCorrelationTest', 'StatsResample', 'StatsSample',
- 'StatsScheffeTest', 'StatsSignTest', 'StatsSRTest', 'StatsTTest',
- 'StatsTukeyTest', 'StatsVariancesTest', 'StatsWatsonUSquaredTest',
- 'StatsWatsonWilliamsTest', 'StatsWheelerWatsonTest',
- 'StatsWilcoxonRankTest', 'StatsWRCorrelationTest', 'String',
- 'StructGet', 'StructPut', 'TabControl', 'Tag', 'TextBox', 'Tile',
- 'TileWindows', 'TitleBox', 'ToCommandLine', 'ToolsGrid',
- 'Triangulate3d', 'Unwrap', 'ValDisplay', 'Variable', 'WaveMeanStdv',
- 'WaveStats', 'WaveTransform', 'wfprintf', 'WignerTransform',
- 'WindowFunction',
- ]
- functions = [
- 'abs', 'acos', 'acosh', 'AiryA', 'AiryAD', 'AiryB', 'AiryBD', 'alog',
- 'area', 'areaXY', 'asin', 'asinh', 'atan', 'atan2', 'atanh',
- 'AxisValFromPixel', 'Besseli', 'Besselj', 'Besselk', 'Bessely', 'bessi',
- 'bessj', 'bessk', 'bessy', 'beta', 'betai', 'BinarySearch',
- 'BinarySearchInterp', 'binomial', 'binomialln', 'binomialNoise', 'cabs',
- 'CaptureHistoryStart', 'ceil', 'cequal', 'char2num', 'chebyshev',
- 'chebyshevU', 'CheckName', 'cmplx', 'cmpstr', 'conj', 'ContourZ', 'cos',
- 'cosh', 'cot', 'CountObjects', 'CountObjectsDFR', 'cpowi',
- 'CreationDate', 'csc', 'DataFolderExists', 'DataFolderRefsEqual',
- 'DataFolderRefStatus', 'date2secs', 'datetime', 'DateToJulian',
- 'Dawson', 'DDEExecute', 'DDEInitiate', 'DDEPokeString', 'DDEPokeWave',
- 'DDERequestWave', 'DDEStatus', 'DDETerminate', 'deltax', 'digamma',
- 'DimDelta', 'DimOffset', 'DimSize', 'ei', 'enoise', 'equalWaves', 'erf',
- 'erfc', 'exists', 'exp', 'expInt', 'expNoise', 'factorial', 'fakedata',
- 'faverage', 'faverageXY', 'FindDimLabel', 'FindListItem', 'floor',
- 'FontSizeHeight', 'FontSizeStringWidth', 'FresnelCos', 'FresnelSin',
- 'gamma', 'gammaInc', 'gammaNoise', 'gammln', 'gammp', 'gammq', 'Gauss',
- 'Gauss1D', 'Gauss2D', 'gcd', 'GetDefaultFontSize',
- 'GetDefaultFontStyle', 'GetKeyState', 'GetRTError', 'gnoise',
- 'GrepString', 'hcsr', 'hermite', 'hermiteGauss', 'HyperG0F1',
- 'HyperG1F1', 'HyperG2F1', 'HyperGNoise', 'HyperGPFQ', 'IgorVersion',
- 'ilim', 'imag', 'Inf', 'Integrate1D', 'interp', 'Interp2D', 'Interp3D',
- 'inverseERF', 'inverseERFC', 'ItemsInList', 'jlim', 'Laguerre',
- 'LaguerreA', 'LaguerreGauss', 'leftx', 'LegendreA', 'limit', 'ln',
- 'log', 'logNormalNoise', 'lorentzianNoise', 'magsqr', 'MandelbrotPoint',
- 'MarcumQ', 'MatrixDet', 'MatrixDot', 'MatrixRank', 'MatrixTrace', 'max',
- 'mean', 'min', 'mod', 'ModDate', 'NaN', 'norm', 'NumberByKey',
- 'numpnts', 'numtype', 'NumVarOrDefault', 'NVAR_Exists', 'p2rect',
- 'ParamIsDefault', 'pcsr', 'Pi', 'PixelFromAxisVal', 'pnt2x',
- 'poissonNoise', 'poly', 'poly2D', 'PolygonArea', 'qcsr', 'r2polar',
- 'real', 'rightx', 'round', 'sawtooth', 'ScreenResolution', 'sec',
- 'SelectNumber', 'sign', 'sin', 'sinc', 'sinh', 'SphericalBessJ',
- 'SphericalBessJD', 'SphericalBessY', 'SphericalBessYD',
- 'SphericalHarmonics', 'sqrt', 'StartMSTimer', 'StatsBetaCDF',
- 'StatsBetaPDF', 'StatsBinomialCDF', 'StatsBinomialPDF',
- 'StatsCauchyCDF', 'StatsCauchyPDF', 'StatsChiCDF', 'StatsChiPDF',
- 'StatsCMSSDCDF', 'StatsCorrelation', 'StatsDExpCDF', 'StatsDExpPDF',
- 'StatsErlangCDF', 'StatsErlangPDF', 'StatsErrorPDF', 'StatsEValueCDF',
- 'StatsEValuePDF', 'StatsExpCDF', 'StatsExpPDF', 'StatsFCDF',
- 'StatsFPDF', 'StatsFriedmanCDF', 'StatsGammaCDF', 'StatsGammaPDF',
- 'StatsGeometricCDF', 'StatsGeometricPDF', 'StatsHyperGCDF',
- 'StatsHyperGPDF', 'StatsInvBetaCDF', 'StatsInvBinomialCDF',
- 'StatsInvCauchyCDF', 'StatsInvChiCDF', 'StatsInvCMSSDCDF',
- 'StatsInvDExpCDF', 'StatsInvEValueCDF', 'StatsInvExpCDF',
- 'StatsInvFCDF', 'StatsInvFriedmanCDF', 'StatsInvGammaCDF',
- 'StatsInvGeometricCDF', 'StatsInvKuiperCDF', 'StatsInvLogisticCDF',
- 'StatsInvLogNormalCDF', 'StatsInvMaxwellCDF', 'StatsInvMooreCDF',
- 'StatsInvNBinomialCDF', 'StatsInvNCChiCDF', 'StatsInvNCFCDF',
- 'StatsInvNormalCDF', 'StatsInvParetoCDF', 'StatsInvPoissonCDF',
- 'StatsInvPowerCDF', 'StatsInvQCDF', 'StatsInvQpCDF',
- 'StatsInvRayleighCDF', 'StatsInvRectangularCDF', 'StatsInvSpearmanCDF',
- 'StatsInvStudentCDF', 'StatsInvTopDownCDF', 'StatsInvTriangularCDF',
- 'StatsInvUsquaredCDF', 'StatsInvVonMisesCDF', 'StatsInvWeibullCDF',
- 'StatsKuiperCDF', 'StatsLogisticCDF', 'StatsLogisticPDF',
- 'StatsLogNormalCDF', 'StatsLogNormalPDF', 'StatsMaxwellCDF',
- 'StatsMaxwellPDF', 'StatsMedian', 'StatsMooreCDF', 'StatsNBinomialCDF',
- 'StatsNBinomialPDF', 'StatsNCChiCDF', 'StatsNCChiPDF', 'StatsNCFCDF',
- 'StatsNCFPDF', 'StatsNCTCDF', 'StatsNCTPDF', 'StatsNormalCDF',
- 'StatsNormalPDF', 'StatsParetoCDF', 'StatsParetoPDF', 'StatsPermute',
- 'StatsPoissonCDF', 'StatsPoissonPDF', 'StatsPowerCDF',
- 'StatsPowerNoise', 'StatsPowerPDF', 'StatsQCDF', 'StatsQpCDF',
- 'StatsRayleighCDF', 'StatsRayleighPDF', 'StatsRectangularCDF',
- 'StatsRectangularPDF', 'StatsRunsCDF', 'StatsSpearmanRhoCDF',
- 'StatsStudentCDF', 'StatsStudentPDF', 'StatsTopDownCDF',
- 'StatsTriangularCDF', 'StatsTriangularPDF', 'StatsTrimmedMean',
- 'StatsUSquaredCDF', 'StatsVonMisesCDF', 'StatsVonMisesNoise',
- 'StatsVonMisesPDF', 'StatsWaldCDF', 'StatsWaldPDF', 'StatsWeibullCDF',
- 'StatsWeibullPDF', 'StopMSTimer', 'str2num', 'stringCRC', 'stringmatch',
- 'strlen', 'strsearch', 'StudentA', 'StudentT', 'sum', 'SVAR_Exists',
- 'TagVal', 'tan', 'tanh', 'ThreadGroupCreate', 'ThreadGroupRelease',
- 'ThreadGroupWait', 'ThreadProcessorCount', 'ThreadReturnValue', 'ticks',
- 'trunc', 'Variance', 'vcsr', 'WaveCRC', 'WaveDims', 'WaveExists',
- 'WaveMax', 'WaveMin', 'WaveRefsEqual', 'WaveType', 'WhichListItem',
- 'WinType', 'WNoise', 'x', 'x2pnt', 'xcsr', 'y', 'z', 'zcsr', 'ZernikeR',
- ]
- functions += [
- 'AddListItem', 'AnnotationInfo', 'AnnotationList', 'AxisInfo',
- 'AxisList', 'CaptureHistory', 'ChildWindowList', 'CleanupName',
- 'ContourInfo', 'ContourNameList', 'ControlNameList', 'CsrInfo',
- 'CsrWave', 'CsrXWave', 'CTabList', 'DataFolderDir', 'date',
- 'DDERequestString', 'FontList', 'FuncRefInfo', 'FunctionInfo',
- 'FunctionList', 'FunctionPath', 'GetDataFolder', 'GetDefaultFont',
- 'GetDimLabel', 'GetErrMessage', 'GetFormula',
- 'GetIndependentModuleName', 'GetIndexedObjName', 'GetIndexedObjNameDFR',
- 'GetRTErrMessage', 'GetRTStackInfo', 'GetScrapText', 'GetUserData',
- 'GetWavesDataFolder', 'GrepList', 'GuideInfo', 'GuideNameList', 'Hash',
- 'IgorInfo', 'ImageInfo', 'ImageNameList', 'IndexedDir', 'IndexedFile',
- 'JulianToDate', 'LayoutInfo', 'ListMatch', 'LowerStr', 'MacroList',
- 'NameOfWave', 'note', 'num2char', 'num2istr', 'num2str',
- 'OperationList', 'PadString', 'ParseFilePath', 'PathList', 'PICTInfo',
- 'PICTList', 'PossiblyQuoteName', 'ProcedureText', 'RemoveByKey',
- 'RemoveEnding', 'RemoveFromList', 'RemoveListItem',
- 'ReplaceNumberByKey', 'ReplaceString', 'ReplaceStringByKey',
- 'Secs2Date', 'Secs2Time', 'SelectString', 'SortList',
- 'SpecialCharacterInfo', 'SpecialCharacterList', 'SpecialDirPath',
- 'StringByKey', 'StringFromList', 'StringList', 'StrVarOrDefault',
- 'TableInfo', 'TextFile', 'ThreadGroupGetDF', 'time', 'TraceFromPixel',
- 'TraceInfo', 'TraceNameList', 'UniqueName', 'UnPadString', 'UpperStr',
- 'VariableList', 'WaveInfo', 'WaveList', 'WaveName', 'WaveUnits',
- 'WinList', 'WinName', 'WinRecreation', 'XWaveName',
- 'ContourNameToWaveRef', 'CsrWaveRef', 'CsrXWaveRef',
- 'ImageNameToWaveRef', 'NewFreeWave', 'TagWaveRef', 'TraceNameToWaveRef',
- 'WaveRefIndexed', 'XWaveRefFromTrace', 'GetDataFolderDFR',
- 'GetWavesDataFolderDFR', 'NewFreeDataFolder', 'ThreadGroupGetDFR',
- ]
-
- tokens = {
- 'root': [
- (r'//.*$', Comment.Single),
- (r'"([^"\\]|\\.)*"', String),
- # Flow Control.
- (r'\b(%s)\b' % '|'.join(flowControl), Keyword),
- # Types.
- (r'\b(%s)\b' % '|'.join(types), Keyword.Type),
- # Built-in operations.
- (r'\b(%s)\b' % '|'.join(operations), Name.Class),
- # Built-in functions.
- (r'\b(%s)\b' % '|'.join(functions), Name.Function),
- # Compiler directives.
- (r'^#(include|pragma|define|ifdef|ifndef|endif)',
- Name.Decorator),
- (r'[^a-zA-Z"/]+', Text),
- (r'.', Text),
- ],
- }
+__all__ = []
diff --git a/pygments/lexers/matlab.py b/pygments/lexers/matlab.py
new file mode 100644
index 00000000..ccb11a5d
--- /dev/null
+++ b/pygments/lexers/matlab.py
@@ -0,0 +1,663 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.matlab
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Matlab and related languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Whitespace
+
+from pygments.lexers import _scilab_builtins
+
+__all__ = ['MatlabLexer', 'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer']
+
+
+class MatlabLexer(RegexLexer):
+ """
+ For Matlab source code.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Matlab'
+ aliases = ['matlab']
+ filenames = ['*.m']
+ mimetypes = ['text/matlab']
+
+ #
+ # These lists are generated automatically.
+ # Run the following in bash shell:
+ #
+ # for f in elfun specfun elmat; do
+ # echo -n "$f = "
+ # matlab -nojvm -r "help $f;exit;" | perl -ne \
+ # 'push(@c,$1) if /^ (\w+)\s+-/; END {print q{["}.join(q{","},@c).qq{"]\n};}'
+ # done
+ #
+ # elfun: Elementary math functions
+ # specfun: Special Math functions
+ # elmat: Elementary matrices and matrix manipulation
+ #
+ # taken from Matlab version 7.4.0.336 (R2007a)
+ #
+ elfun = ("sin", "sind", "sinh", "asin", "asind", "asinh", "cos", "cosd", "cosh",
+ "acos", "acosd", "acosh", "tan", "tand", "tanh", "atan", "atand", "atan2",
+ "atanh", "sec", "secd", "sech", "asec", "asecd", "asech", "csc", "cscd",
+ "csch", "acsc", "acscd", "acsch", "cot", "cotd", "coth", "acot", "acotd",
+ "acoth", "hypot", "exp", "expm1", "log", "log1p", "log10", "log2", "pow2",
+ "realpow", "reallog", "realsqrt", "sqrt", "nthroot", "nextpow2", "abs",
+ "angle", "complex", "conj", "imag", "real", "unwrap", "isreal", "cplxpair",
+ "fix", "floor", "ceil", "round", "mod", "rem", "sign")
+ specfun = ("airy", "besselj", "bessely", "besselh", "besseli", "besselk", "beta",
+ "betainc", "betaln", "ellipj", "ellipke", "erf", "erfc", "erfcx",
+ "erfinv", "expint", "gamma", "gammainc", "gammaln", "psi", "legendre",
+ "cross", "dot", "factor", "isprime", "primes", "gcd", "lcm", "rat",
+ "rats", "perms", "nchoosek", "factorial", "cart2sph", "cart2pol",
+ "pol2cart", "sph2cart", "hsv2rgb", "rgb2hsv")
+ elmat = ("zeros", "ones", "eye", "repmat", "rand", "randn", "linspace", "logspace",
+ "freqspace", "meshgrid", "accumarray", "size", "length", "ndims", "numel",
+ "disp", "isempty", "isequal", "isequalwithequalnans", "cat", "reshape",
+ "diag", "blkdiag", "tril", "triu", "fliplr", "flipud", "flipdim", "rot90",
+ "find", "end", "sub2ind", "ind2sub", "bsxfun", "ndgrid", "permute",
+ "ipermute", "shiftdim", "circshift", "squeeze", "isscalar", "isvector",
+ "ans", "eps", "realmax", "realmin", "pi", "i", "inf", "nan", "isnan",
+ "isinf", "isfinite", "j", "why", "compan", "gallery", "hadamard", "hankel",
+ "hilb", "invhilb", "magic", "pascal", "rosser", "toeplitz", "vander",
+ "wilkinson")
+
+ tokens = {
+ 'root': [
+ # line starting with '!' is sent as a system command. not sure what
+ # label to use...
+ (r'^!.*', String.Other),
+ (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
+ (r'%.*$', Comment),
+ (r'^\s*function', Keyword, 'deffunc'),
+
+ # from 'iskeyword' on version 7.11 (R2010):
+ (words((
+ 'break', 'case', 'catch', 'classdef', 'continue', 'else', 'elseif',
+ 'end', 'enumerated', 'events', 'for', 'function', 'global', 'if',
+ 'methods', 'otherwise', 'parfor', 'persistent', 'properties',
+ 'return', 'spmd', 'switch', 'try', 'while'), suffix=r'\b'),
+ Keyword),
+
+ ("(" + "|".join(elfun + specfun + elmat) + r')\b', Name.Builtin),
+
+ # line continuation with following comment:
+ (r'\.\.\..*$', Comment),
+
+ # operators:
+ (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
+ # operators requiring escape for re:
+ (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
+
+ # punctuation:
+ (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
+ (r'=|:|;', Punctuation),
+
+ # quote can be transpose, instead of string:
+ # (not great, but handles common cases...)
+ (r'(?<=[\w)\].])\'+', Operator),
+
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+ (r'\d+', Number.Integer),
+
+ (r'(?<![\w)\].])\'', String, 'string'),
+ (r'[a-zA-Z_]\w*', Name),
+ (r'.', Text),
+ ],
+ 'string': [
+ (r'[^\']*\'', String, '#pop')
+ ],
+ 'blockcomment': [
+ (r'^\s*%\}', Comment.Multiline, '#pop'),
+ (r'^.*\n', Comment.Multiline),
+ (r'.', Comment.Multiline),
+ ],
+ 'deffunc': [
+ (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
+ bygroups(Whitespace, Text, Whitespace, Punctuation,
+ Whitespace, Name.Function, Punctuation, Text,
+ Punctuation, Whitespace), '#pop'),
+ # function with no args
+ (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if re.match('^\s*%', text, re.M): # comment
+ return 0.2
+ elif re.match('^!\w+', text, re.M): # system cmd
+ return 0.2
+
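A minimal usage sketch for the new MatlabLexer (illustrative only, not part of the patch; the sample Matlab snippet is made up), assuming a standard Pygments install:

# Illustrative sketch only -- not part of the patch.
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import MatlabLexer

matlab_code = (
    "function y = square(x)\n"
    "% return the elementwise square of x\n"
    "y = x .* x;\n"
    "end\n"
)
# 'function' pushes the 'deffunc' state, '%' lines become Comment, and
# '.*' is matched by the escaped-operator rule above.
print(highlight(matlab_code, MatlabLexer(), TerminalFormatter()))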
+
+line_re = re.compile('.*?\n')
+
+
+class MatlabSessionLexer(Lexer):
+ """
+ For Matlab sessions. Modeled after PythonConsoleLexer.
+ Contributed by Ken Schutte <kschutte@csail.mit.edu>.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Matlab session'
+ aliases = ['matlabsession']
+
+ def get_tokens_unprocessed(self, text):
+ mlexer = MatlabLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+
+ for match in line_re.finditer(text):
+ line = match.group()
+
+ if line.startswith('>> '):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:3])]))
+ curcode += line[3:]
+
+ elif line.startswith('>>'):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:2])]))
+ curcode += line[2:]
+
+ elif line.startswith('???'):
+
+ idx = len(curcode)
+
+                # without this, the error shows on the same line as before...?
+ # line = "\n" + line
+ token = (0, Generic.Traceback, line)
+ insertions.append((idx, [token]))
+
+ else:
+ if curcode:
+ for item in do_insertions(
+ insertions, mlexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+
+ yield match.start(), Generic.Output, line
+
+ if curcode: # or item:
+ for item in do_insertions(
+ insertions, mlexer.get_tokens_unprocessed(curcode)):
+ yield item
+
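Like the RConsoleLexer removed above, MatlabSessionLexer uses do_insertions to weave the Generic.Prompt tokens back into the code tokens produced by MatlabLexer, and emits everything else as Generic.Output. A small illustrative sketch (not part of the patch; the transcript is made up):

# Illustrative sketch only -- not part of the patch.
from pygments.lexers import MatlabSessionLexer
from pygments.token import Generic

transcript = ">> x = 1 + 1\n\nx =\n\n     2\n\n"
for tok, value in MatlabSessionLexer().get_tokens(transcript):
    # '>> ' comes out as Generic.Prompt, the typed expression as ordinary
    # Matlab tokens, and the echoed result lines as Generic.Output.
    if tok in (Generic.Prompt, Generic.Output):
        print(tok, repr(value))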
+
+class OctaveLexer(RegexLexer):
+ """
+ For GNU Octave source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Octave'
+ aliases = ['octave']
+ filenames = ['*.m']
+ mimetypes = ['text/octave']
+
+ # These lists are generated automatically.
+ # Run the following in bash shell:
+ #
+ # First dump all of the Octave manual into a plain text file:
+ #
+ # $ info octave --subnodes -o octave-manual
+ #
+ # Now grep through it:
+
+ # for i in \
+ # "Built-in Function" "Command" "Function File" \
+ # "Loadable Function" "Mapping Function";
+ # do
+ # perl -e '@name = qw('"$i"');
+ # print lc($name[0]),"_kw = [\n"';
+ #
+ # perl -n -e 'print "\"$1\",\n" if /-- '"$i"': .* (\w*) \(/;' \
+ # octave-manual | sort | uniq ;
+ # echo "]" ;
+ # echo;
+ # done
+
+ # taken from Octave Mercurial changeset 8cc154f45e37 (30-jan-2011)
+
+ builtin_kw = (
+ "addlistener", "addpath", "addproperty", "all",
+ "and", "any", "argnames", "argv", "assignin",
+ "atexit", "autoload",
+ "available_graphics_toolkits", "beep_on_error",
+ "bitand", "bitmax", "bitor", "bitshift", "bitxor",
+ "cat", "cell", "cellstr", "char", "class", "clc",
+ "columns", "command_line_path",
+ "completion_append_char", "completion_matches",
+ "complex", "confirm_recursive_rmdir", "cputime",
+ "crash_dumps_octave_core", "ctranspose", "cumprod",
+ "cumsum", "debug_on_error", "debug_on_interrupt",
+ "debug_on_warning", "default_save_options",
+ "dellistener", "diag", "diff", "disp",
+ "doc_cache_file", "do_string_escapes", "double",
+ "drawnow", "e", "echo_executing_commands", "eps",
+ "eq", "errno", "errno_list", "error", "eval",
+ "evalin", "exec", "exist", "exit", "eye", "false",
+ "fclear", "fclose", "fcntl", "fdisp", "feof",
+ "ferror", "feval", "fflush", "fgetl", "fgets",
+ "fieldnames", "file_in_loadpath", "file_in_path",
+ "filemarker", "filesep", "find_dir_in_path",
+ "fixed_point_format", "fnmatch", "fopen", "fork",
+ "formula", "fprintf", "fputs", "fread", "freport",
+ "frewind", "fscanf", "fseek", "fskipl", "ftell",
+ "functions", "fwrite", "ge", "genpath", "get",
+ "getegid", "getenv", "geteuid", "getgid",
+ "getpgrp", "getpid", "getppid", "getuid", "glob",
+ "gt", "gui_mode", "history_control",
+ "history_file", "history_size",
+ "history_timestamp_format_string", "home",
+ "horzcat", "hypot", "ifelse",
+ "ignore_function_time_stamp", "inferiorto",
+ "info_file", "info_program", "inline", "input",
+ "intmax", "intmin", "ipermute",
+ "is_absolute_filename", "isargout", "isbool",
+ "iscell", "iscellstr", "ischar", "iscomplex",
+ "isempty", "isfield", "isfloat", "isglobal",
+ "ishandle", "isieee", "isindex", "isinteger",
+ "islogical", "ismatrix", "ismethod", "isnull",
+ "isnumeric", "isobject", "isreal",
+ "is_rooted_relative_filename", "issorted",
+ "isstruct", "isvarname", "kbhit", "keyboard",
+ "kill", "lasterr", "lasterror", "lastwarn",
+ "ldivide", "le", "length", "link", "linspace",
+ "logical", "lstat", "lt", "make_absolute_filename",
+ "makeinfo_program", "max_recursion_depth", "merge",
+ "methods", "mfilename", "minus", "mislocked",
+ "mkdir", "mkfifo", "mkstemp", "mldivide", "mlock",
+ "mouse_wheel_zoom", "mpower", "mrdivide", "mtimes",
+ "munlock", "nargin", "nargout",
+ "native_float_format", "ndims", "ne", "nfields",
+ "nnz", "norm", "not", "numel", "nzmax",
+ "octave_config_info", "octave_core_file_limit",
+ "octave_core_file_name",
+ "octave_core_file_options", "ones", "or",
+ "output_max_field_width", "output_precision",
+ "page_output_immediately", "page_screen_output",
+ "path", "pathsep", "pause", "pclose", "permute",
+ "pi", "pipe", "plus", "popen", "power",
+ "print_empty_dimensions", "printf",
+ "print_struct_array_contents", "prod",
+ "program_invocation_name", "program_name",
+ "putenv", "puts", "pwd", "quit", "rats", "rdivide",
+ "readdir", "readlink", "read_readline_init_file",
+ "realmax", "realmin", "rehash", "rename",
+ "repelems", "re_read_readline_init_file", "reset",
+ "reshape", "resize", "restoredefaultpath",
+ "rethrow", "rmdir", "rmfield", "rmpath", "rows",
+ "save_header_format_string", "save_precision",
+ "saving_history", "scanf", "set", "setenv",
+ "shell_cmd", "sighup_dumps_octave_core",
+ "sigterm_dumps_octave_core", "silent_functions",
+ "single", "size", "size_equal", "sizemax",
+ "sizeof", "sleep", "source", "sparse_auto_mutate",
+ "split_long_rows", "sprintf", "squeeze", "sscanf",
+ "stat", "stderr", "stdin", "stdout", "strcmp",
+ "strcmpi", "string_fill_char", "strncmp",
+ "strncmpi", "struct", "struct_levels_to_print",
+ "strvcat", "subsasgn", "subsref", "sum", "sumsq",
+ "superiorto", "suppress_verbose_help_message",
+ "symlink", "system", "tic", "tilde_expand",
+ "times", "tmpfile", "tmpnam", "toc", "toupper",
+ "transpose", "true", "typeinfo", "umask", "uminus",
+ "uname", "undo_string_escapes", "unlink", "uplus",
+ "upper", "usage", "usleep", "vec", "vectorize",
+ "vertcat", "waitpid", "warning", "warranty",
+ "whos_line_format", "yes_or_no", "zeros",
+ "inf", "Inf", "nan", "NaN")
+
+ command_kw = ("close", "load", "who", "whos")
+
+ function_kw = (
+ "accumarray", "accumdim", "acosd", "acotd",
+ "acscd", "addtodate", "allchild", "ancestor",
+ "anova", "arch_fit", "arch_rnd", "arch_test",
+ "area", "arma_rnd", "arrayfun", "ascii", "asctime",
+ "asecd", "asind", "assert", "atand",
+ "autoreg_matrix", "autumn", "axes", "axis", "bar",
+ "barh", "bartlett", "bartlett_test", "beep",
+ "betacdf", "betainv", "betapdf", "betarnd",
+ "bicgstab", "bicubic", "binary", "binocdf",
+ "binoinv", "binopdf", "binornd", "bitcmp",
+ "bitget", "bitset", "blackman", "blanks",
+ "blkdiag", "bone", "box", "brighten", "calendar",
+ "cast", "cauchy_cdf", "cauchy_inv", "cauchy_pdf",
+ "cauchy_rnd", "caxis", "celldisp", "center", "cgs",
+ "chisquare_test_homogeneity",
+ "chisquare_test_independence", "circshift", "cla",
+ "clabel", "clf", "clock", "cloglog", "closereq",
+ "colon", "colorbar", "colormap", "colperm",
+ "comet", "common_size", "commutation_matrix",
+ "compan", "compare_versions", "compass",
+ "computer", "cond", "condest", "contour",
+ "contourc", "contourf", "contrast", "conv",
+ "convhull", "cool", "copper", "copyfile", "cor",
+ "corrcoef", "cor_test", "cosd", "cotd", "cov",
+ "cplxpair", "cross", "cscd", "cstrcat", "csvread",
+ "csvwrite", "ctime", "cumtrapz", "curl", "cut",
+ "cylinder", "date", "datenum", "datestr",
+ "datetick", "datevec", "dblquad", "deal",
+ "deblank", "deconv", "delaunay", "delaunayn",
+ "delete", "demo", "detrend", "diffpara", "diffuse",
+ "dir", "discrete_cdf", "discrete_inv",
+ "discrete_pdf", "discrete_rnd", "display",
+ "divergence", "dlmwrite", "dos", "dsearch",
+ "dsearchn", "duplication_matrix", "durbinlevinson",
+ "ellipsoid", "empirical_cdf", "empirical_inv",
+ "empirical_pdf", "empirical_rnd", "eomday",
+ "errorbar", "etime", "etreeplot", "example",
+ "expcdf", "expinv", "expm", "exppdf", "exprnd",
+ "ezcontour", "ezcontourf", "ezmesh", "ezmeshc",
+ "ezplot", "ezpolar", "ezsurf", "ezsurfc", "factor",
+ "factorial", "fail", "fcdf", "feather", "fftconv",
+ "fftfilt", "fftshift", "figure", "fileattrib",
+ "fileparts", "fill", "findall", "findobj",
+ "findstr", "finv", "flag", "flipdim", "fliplr",
+ "flipud", "fpdf", "fplot", "fractdiff", "freqz",
+ "freqz_plot", "frnd", "fsolve",
+ "f_test_regression", "ftp", "fullfile", "fzero",
+ "gamcdf", "gaminv", "gampdf", "gamrnd", "gca",
+ "gcbf", "gcbo", "gcf", "genvarname", "geocdf",
+ "geoinv", "geopdf", "geornd", "getfield", "ginput",
+ "glpk", "gls", "gplot", "gradient",
+ "graphics_toolkit", "gray", "grid", "griddata",
+ "griddatan", "gtext", "gunzip", "gzip", "hadamard",
+ "hamming", "hankel", "hanning", "hggroup",
+ "hidden", "hilb", "hist", "histc", "hold", "hot",
+ "hotelling_test", "housh", "hsv", "hurst",
+ "hygecdf", "hygeinv", "hygepdf", "hygernd",
+ "idivide", "ifftshift", "image", "imagesc",
+ "imfinfo", "imread", "imshow", "imwrite", "index",
+ "info", "inpolygon", "inputname", "interpft",
+ "interpn", "intersect", "invhilb", "iqr", "isa",
+ "isdefinite", "isdir", "is_duplicate_entry",
+ "isequal", "isequalwithequalnans", "isfigure",
+ "ishermitian", "ishghandle", "is_leap_year",
+ "isletter", "ismac", "ismember", "ispc", "isprime",
+ "isprop", "isscalar", "issquare", "isstrprop",
+ "issymmetric", "isunix", "is_valid_file_id",
+ "isvector", "jet", "kendall",
+ "kolmogorov_smirnov_cdf",
+ "kolmogorov_smirnov_test", "kruskal_wallis_test",
+ "krylov", "kurtosis", "laplace_cdf", "laplace_inv",
+ "laplace_pdf", "laplace_rnd", "legend", "legendre",
+ "license", "line", "linkprop", "list_primes",
+ "loadaudio", "loadobj", "logistic_cdf",
+ "logistic_inv", "logistic_pdf", "logistic_rnd",
+ "logit", "loglog", "loglogerr", "logm", "logncdf",
+ "logninv", "lognpdf", "lognrnd", "logspace",
+ "lookfor", "ls_command", "lsqnonneg", "magic",
+ "mahalanobis", "manova", "matlabroot",
+ "mcnemar_test", "mean", "meansq", "median", "menu",
+ "mesh", "meshc", "meshgrid", "meshz", "mexext",
+ "mget", "mkpp", "mode", "moment", "movefile",
+ "mpoles", "mput", "namelengthmax", "nargchk",
+ "nargoutchk", "nbincdf", "nbininv", "nbinpdf",
+ "nbinrnd", "nchoosek", "ndgrid", "newplot", "news",
+ "nonzeros", "normcdf", "normest", "norminv",
+ "normpdf", "normrnd", "now", "nthroot", "null",
+ "ocean", "ols", "onenormest", "optimget",
+ "optimset", "orderfields", "orient", "orth",
+ "pack", "pareto", "parseparams", "pascal", "patch",
+ "pathdef", "pcg", "pchip", "pcolor", "pcr",
+ "peaks", "periodogram", "perl", "perms", "pie",
+ "pink", "planerot", "playaudio", "plot",
+ "plotmatrix", "plotyy", "poisscdf", "poissinv",
+ "poisspdf", "poissrnd", "polar", "poly",
+ "polyaffine", "polyarea", "polyderiv", "polyfit",
+ "polygcd", "polyint", "polyout", "polyreduce",
+ "polyval", "polyvalm", "postpad", "powerset",
+ "ppder", "ppint", "ppjumps", "ppplot", "ppval",
+ "pqpnonneg", "prepad", "primes", "print",
+ "print_usage", "prism", "probit", "qp", "qqplot",
+ "quadcc", "quadgk", "quadl", "quadv", "quiver",
+ "qzhess", "rainbow", "randi", "range", "rank",
+ "ranks", "rat", "reallog", "realpow", "realsqrt",
+ "record", "rectangle_lw", "rectangle_sw",
+ "rectint", "refresh", "refreshdata",
+ "regexptranslate", "repmat", "residue", "ribbon",
+ "rindex", "roots", "rose", "rosser", "rotdim",
+ "rref", "run", "run_count", "rundemos", "run_test",
+ "runtests", "saveas", "saveaudio", "saveobj",
+ "savepath", "scatter", "secd", "semilogx",
+ "semilogxerr", "semilogy", "semilogyerr",
+ "setaudio", "setdiff", "setfield", "setxor",
+ "shading", "shift", "shiftdim", "sign_test",
+ "sinc", "sind", "sinetone", "sinewave", "skewness",
+ "slice", "sombrero", "sortrows", "spaugment",
+ "spconvert", "spdiags", "spearman", "spectral_adf",
+ "spectral_xdf", "specular", "speed", "spencer",
+ "speye", "spfun", "sphere", "spinmap", "spline",
+ "spones", "sprand", "sprandn", "sprandsym",
+ "spring", "spstats", "spy", "sqp", "stairs",
+ "statistics", "std", "stdnormal_cdf",
+ "stdnormal_inv", "stdnormal_pdf", "stdnormal_rnd",
+ "stem", "stft", "strcat", "strchr", "strjust",
+ "strmatch", "strread", "strsplit", "strtok",
+ "strtrim", "strtrunc", "structfun", "studentize",
+ "subplot", "subsindex", "subspace", "substr",
+ "substruct", "summer", "surf", "surface", "surfc",
+ "surfl", "surfnorm", "svds", "swapbytes",
+ "sylvester_matrix", "symvar", "synthesis", "table",
+ "tand", "tar", "tcdf", "tempdir", "tempname",
+ "test", "text", "textread", "textscan", "tinv",
+ "title", "toeplitz", "tpdf", "trace", "trapz",
+ "treelayout", "treeplot", "triangle_lw",
+ "triangle_sw", "tril", "trimesh", "triplequad",
+ "triplot", "trisurf", "triu", "trnd", "tsearchn",
+ "t_test", "t_test_regression", "type", "unidcdf",
+ "unidinv", "unidpdf", "unidrnd", "unifcdf",
+ "unifinv", "unifpdf", "unifrnd", "union", "unique",
+ "unix", "unmkpp", "unpack", "untabify", "untar",
+ "unwrap", "unzip", "u_test", "validatestring",
+ "vander", "var", "var_test", "vech", "ver",
+ "version", "view", "voronoi", "voronoin",
+ "waitforbuttonpress", "wavread", "wavwrite",
+ "wblcdf", "wblinv", "wblpdf", "wblrnd", "weekday",
+ "welch_test", "what", "white", "whitebg",
+ "wienrnd", "wilcoxon_test", "wilkinson", "winter",
+ "xlabel", "xlim", "ylabel", "yulewalker", "zip",
+ "zlabel", "z_test")
+
+ loadable_kw = (
+ "airy", "amd", "balance", "besselh", "besseli",
+ "besselj", "besselk", "bessely", "bitpack",
+ "bsxfun", "builtin", "ccolamd", "cellfun",
+ "cellslices", "chol", "choldelete", "cholinsert",
+ "cholinv", "cholshift", "cholupdate", "colamd",
+ "colloc", "convhulln", "convn", "csymamd",
+ "cummax", "cummin", "daspk", "daspk_options",
+ "dasrt", "dasrt_options", "dassl", "dassl_options",
+ "dbclear", "dbdown", "dbstack", "dbstatus",
+ "dbstop", "dbtype", "dbup", "dbwhere", "det",
+ "dlmread", "dmperm", "dot", "eig", "eigs",
+ "endgrent", "endpwent", "etree", "fft", "fftn",
+ "fftw", "filter", "find", "full", "gcd",
+ "getgrent", "getgrgid", "getgrnam", "getpwent",
+ "getpwnam", "getpwuid", "getrusage", "givens",
+ "gmtime", "gnuplot_binary", "hess", "ifft",
+ "ifftn", "inv", "isdebugmode", "issparse", "kron",
+ "localtime", "lookup", "lsode", "lsode_options",
+ "lu", "luinc", "luupdate", "matrix_type", "max",
+ "min", "mktime", "pinv", "qr", "qrdelete",
+ "qrinsert", "qrshift", "qrupdate", "quad",
+ "quad_options", "qz", "rand", "rande", "randg",
+ "randn", "randp", "randperm", "rcond", "regexp",
+ "regexpi", "regexprep", "schur", "setgrent",
+ "setpwent", "sort", "spalloc", "sparse", "spparms",
+ "sprank", "sqrtm", "strfind", "strftime",
+ "strptime", "strrep", "svd", "svd_driver", "syl",
+ "symamd", "symbfact", "symrcm", "time", "tsearch",
+ "typecast", "urlread", "urlwrite")
+
+ mapping_kw = (
+ "abs", "acos", "acosh", "acot", "acoth", "acsc",
+ "acsch", "angle", "arg", "asec", "asech", "asin",
+ "asinh", "atan", "atanh", "beta", "betainc",
+ "betaln", "bincoeff", "cbrt", "ceil", "conj", "cos",
+ "cosh", "cot", "coth", "csc", "csch", "erf", "erfc",
+ "erfcx", "erfinv", "exp", "finite", "fix", "floor",
+ "fmod", "gamma", "gammainc", "gammaln", "imag",
+ "isalnum", "isalpha", "isascii", "iscntrl",
+ "isdigit", "isfinite", "isgraph", "isinf",
+ "islower", "isna", "isnan", "isprint", "ispunct",
+ "isspace", "isupper", "isxdigit", "lcm", "lgamma",
+ "log", "lower", "mod", "real", "rem", "round",
+ "roundb", "sec", "sech", "sign", "sin", "sinh",
+ "sqrt", "tan", "tanh", "toascii", "tolower", "xor")
+
+ builtin_consts = (
+ "EDITOR", "EXEC_PATH", "I", "IMAGE_PATH", "NA",
+ "OCTAVE_HOME", "OCTAVE_VERSION", "PAGER",
+ "PAGER_FLAGS", "SEEK_CUR", "SEEK_END", "SEEK_SET",
+ "SIG", "S_ISBLK", "S_ISCHR", "S_ISDIR", "S_ISFIFO",
+ "S_ISLNK", "S_ISREG", "S_ISSOCK", "WCONTINUE",
+ "WCOREDUMP", "WEXITSTATUS", "WIFCONTINUED",
+ "WIFEXITED", "WIFSIGNALED", "WIFSTOPPED", "WNOHANG",
+ "WSTOPSIG", "WTERMSIG", "WUNTRACED")
+
+ tokens = {
+ 'root': [
+ # We should look into multiline comments
+ (r'[%#].*$', Comment),
+ (r'^\s*function', Keyword, 'deffunc'),
+
+ # from 'iskeyword' on hg changeset 8cc154f45e37
+ (words((
+ '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
+ 'elseif', 'end', 'end_try_catch', 'end_unwind_protect', 'endclassdef',
+ 'endevents', 'endfor', 'endfunction', 'endif', 'endmethods', 'endproperties',
+ 'endswitch', 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if', 'methods',
+ 'otherwise', 'persistent', 'properties', 'return', 'set', 'static', 'switch', 'try',
+ 'until', 'unwind_protect', 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
+ Keyword),
+
+ (words(builtin_kw + command_kw + function_kw + loadable_kw + mapping_kw,
+ suffix=r'\b'), Name.Builtin),
+
+ (words(builtin_consts, suffix=r'\b'), Name.Constant),
+
+ # operators in Octave but not Matlab:
+ (r'-=|!=|!|/=|--', Operator),
+ # operators:
+ (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
+ # operators in Octave but not Matlab requiring escape for re:
+ (r'\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*', Operator),
+ # operators requiring escape for re:
+ (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
+
+
+ # punctuation:
+ (r'[\[\](){}:@.,]', Punctuation),
+ (r'=|:|;', Punctuation),
+
+ (r'"[^"]*"', String),
+
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+ (r'\d+', Number.Integer),
+
+ # quote can be transpose, instead of string:
+ # (not great, but handles common cases...)
+ (r'(?<=[\w)\].])\'+', Operator),
+ (r'(?<![\w)\].])\'', String, 'string'),
+
+ (r'[a-zA-Z_]\w*', Name),
+ (r'.', Text),
+ ],
+ 'string': [
+ (r"[^']*'", String, '#pop'),
+ ],
+ 'deffunc': [
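+            # "name(args)" or "retval = name(args)" on the definition line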
+ (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
+ bygroups(Whitespace, Text, Whitespace, Punctuation,
+ Whitespace, Name.Function, Punctuation, Text,
+ Punctuation, Whitespace), '#pop'),
+ # function with no args
+ (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
+ ],
+ }
+
+
+class ScilabLexer(RegexLexer):
+ """
+ For Scilab source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Scilab'
+ aliases = ['scilab']
+ filenames = ['*.sci', '*.sce', '*.tst']
+ mimetypes = ['text/scilab']
+
+ tokens = {
+ 'root': [
+ (r'//.*?$', Comment.Single),
+ (r'^\s*function', Keyword, 'deffunc'),
+
+ (words((
+ '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
+ 'elseif', 'end', 'end_try_catch', 'end_unwind_protect', 'endclassdef',
+ 'endevents', 'endfor', 'endfunction', 'endif', 'endmethods', 'endproperties',
+ 'endswitch', 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if', 'methods',
+ 'otherwise', 'persistent', 'properties', 'return', 'set', 'static', 'switch', 'try',
+ 'until', 'unwind_protect', 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
+ Keyword),
+
+ (words(_scilab_builtins.functions_kw +
+ _scilab_builtins.commands_kw +
+ _scilab_builtins.macros_kw, suffix=r'\b'), Name.Builtin),
+
+ (words(_scilab_builtins.variables_kw, suffix=r'\b'), Name.Constant),
+
+ # operators:
+ (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
+ # operators requiring escape for re:
+ (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
+
+ # punctuation:
+ (r'[\[\](){}@.,=:;]', Punctuation),
+
+ (r'"[^"]*"', String),
+
+ # quote can be transpose, instead of string:
+ # (not great, but handles common cases...)
+ (r'(?<=[\w)\].])\'+', Operator),
+ (r'(?<![\w)\].])\'', String, 'string'),
+
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+ (r'\d+', Number.Integer),
+
+ (r'[a-zA-Z_]\w*', Name),
+ (r'.', Text),
+ ],
+ 'string': [
+ (r"[^']*'", String, '#pop'),
+ (r'.', String, '#pop'),
+ ],
+ 'deffunc': [
+ (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
+ bygroups(Whitespace, Text, Whitespace, Punctuation,
+ Whitespace, Name.Function, Punctuation, Text,
+ Punctuation, Whitespace), '#pop'),
+ # function with no args
+ (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
+ ],
+ }
diff --git a/pygments/lexers/ml.py b/pygments/lexers/ml.py
new file mode 100644
index 00000000..4f10edd0
--- /dev/null
+++ b/pygments/lexers/ml.py
@@ -0,0 +1,769 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.ml
+ ~~~~~~~~~~~~~~~~~~
+
+ Lexers for ML family languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['SMLLexer', 'OcamlLexer', 'OpaLexer']
+
+
+class SMLLexer(RegexLexer):
+ """
+ For the Standard ML language.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Standard ML'
+ aliases = ['sml']
+ filenames = ['*.sml', '*.sig', '*.fun']
+ mimetypes = ['text/x-standardml', 'application/x-standardml']
+
+ alphanumid_reserved = set((
+ # Core
+ 'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else',
+ 'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix',
+ 'infixr', 'let', 'local', 'nonfix', 'of', 'op', 'open', 'orelse',
+ 'raise', 'rec', 'then', 'type', 'val', 'with', 'withtype', 'while',
+ # Modules
+ 'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature',
+ 'struct', 'structure', 'where',
+ ))
+
+ symbolicid_reserved = set((
+ # Core
+        ':', '|', '=', '=>', '->', '#',
+ # Modules
+ ':>',
+ ))
+
+ nonid_reserved = set(('(', ')', '[', ']', '{', '}', ',', ';', '...', '_'))
+
+ alphanumid_re = r"[a-zA-Z][\w']*"
+ symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+"
+
+ # A character constant is a sequence of the form #s, where s is a string
+ # constant denoting a string of size one character. This setup just parses
+ # the entire string as either a String.Double or a String.Char (depending
+    # on the argument), even if the String.Char is an erroneous
+ # multiple-character string.
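+    # (For example, both #"a" and the invalid #"ab" end up highlighted as
+    # String.Char.)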
+ def stringy(whatkind):
+ return [
+ (r'[^"\\]', whatkind),
+ (r'\\[\\"abtnvfr]', String.Escape),
+ # Control-character notation is used for codes < 32,
+ # where \^@ == \000
+ (r'\\\^[\x40-\x5e]', String.Escape),
+ # Docs say 'decimal digits'
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\u[0-9a-fA-F]{4}', String.Escape),
+ (r'\\\s+\\', String.Interpol),
+ (r'"', whatkind, '#pop'),
+ ]
+
+ # Callbacks for distinguishing tokens and reserved words
+ def long_id_callback(self, match):
+ if match.group(1) in self.alphanumid_reserved:
+ token = Error
+ else:
+ token = Name.Namespace
+ yield match.start(1), token, match.group(1)
+ yield match.start(2), Punctuation, match.group(2)
+
+ def end_id_callback(self, match):
+ if match.group(1) in self.alphanumid_reserved:
+ token = Error
+ elif match.group(1) in self.symbolicid_reserved:
+ token = Error
+ else:
+ token = Name
+ yield match.start(1), token, match.group(1)
+
+ def id_callback(self, match):
+ str = match.group(1)
+ if str in self.alphanumid_reserved:
+ token = Keyword.Reserved
+ elif str in self.symbolicid_reserved:
+ token = Punctuation
+ else:
+ token = Name
+ yield match.start(1), token, str
+
+ tokens = {
+ # Whitespace and comments are (almost) everywhere
+ 'whitespace': [
+ (r'\s+', Text),
+ (r'\(\*', Comment.Multiline, 'comment'),
+ ],
+
+ 'delimiters': [
+ # This lexer treats these delimiters specially:
+ # Delimiters define scopes, and the scope is how the meaning of
+ # the `|' is resolved - is it a case/handle expression, or function
+ # definition by cases? (This is not how the Definition works, but
+ # it's how MLton behaves, see http://mlton.org/SMLNJDeviations)
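+            # For example, after "fun" a '|' introduces another clause of the
+            # function being defined, while inside "case ... of" it separates
+            # match rules; the 'main' and 'main-fun' states below track this.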
+ (r'\(|\[|\{', Punctuation, 'main'),
+ (r'\)|\]|\}', Punctuation, '#pop'),
+ (r'\b(let|if|local)\b(?!\')', Keyword.Reserved, ('main', 'main')),
+ (r'\b(struct|sig|while)\b(?!\')', Keyword.Reserved, 'main'),
+ (r'\b(do|else|end|in|then)\b(?!\')', Keyword.Reserved, '#pop'),
+ ],
+
+ 'core': [
+ # Punctuation that doesn't overlap symbolic identifiers
+ (r'(%s)' % '|'.join(re.escape(z) for z in nonid_reserved),
+ Punctuation),
+
+ # Special constants: strings, floats, numbers in decimal and hex
+ (r'#"', String.Char, 'char'),
+ (r'"', String.Double, 'string'),
+ (r'~?0x[0-9a-fA-F]+', Number.Hex),
+ (r'0wx[0-9a-fA-F]+', Number.Hex),
+ (r'0w\d+', Number.Integer),
+ (r'~?\d+\.\d+[eE]~?\d+', Number.Float),
+ (r'~?\d+\.\d+', Number.Float),
+ (r'~?\d+[eE]~?\d+', Number.Float),
+ (r'~?\d+', Number.Integer),
+
+ # Labels
+ (r'#\s*[1-9][0-9]*', Name.Label),
+ (r'#\s*(%s)' % alphanumid_re, Name.Label),
+ (r'#\s+(%s)' % symbolicid_re, Name.Label),
+ # Some reserved words trigger a special, local lexer state change
+ (r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'),
+ (r'(?=\b(exception)\b(?!\'))', Text, ('ename')),
+ (r'\b(functor|include|open|signature|structure)\b(?!\')',
+ Keyword.Reserved, 'sname'),
+ (r'\b(type|eqtype)\b(?!\')', Keyword.Reserved, 'tname'),
+
+ # Regular identifiers, long and otherwise
+ (r'\'[\w\']*', Name.Decorator),
+ (r'(%s)(\.)' % alphanumid_re, long_id_callback, "dotted"),
+ (r'(%s)' % alphanumid_re, id_callback),
+ (r'(%s)' % symbolicid_re, id_callback),
+ ],
+ 'dotted': [
+ (r'(%s)(\.)' % alphanumid_re, long_id_callback),
+ (r'(%s)' % alphanumid_re, end_id_callback, "#pop"),
+ (r'(%s)' % symbolicid_re, end_id_callback, "#pop"),
+ (r'\s+', Error),
+ (r'\S+', Error),
+ ],
+
+
+ # Main parser (prevents errors in files that have scoping errors)
+ 'root': [
+ default('main')
+ ],
+
+ # In this scope, I expect '|' to not be followed by a function name,
+ # and I expect 'and' to be followed by a binding site
+ 'main': [
+ include('whitespace'),
+
+ # Special behavior of val/and/fun
+ (r'\b(val|and)\b(?!\')', Keyword.Reserved, 'vname'),
+ (r'\b(fun)\b(?!\')', Keyword.Reserved,
+ ('#pop', 'main-fun', 'fname')),
+
+ include('delimiters'),
+ include('core'),
+ (r'\S+', Error),
+ ],
+
+ # In this scope, I expect '|' and 'and' to be followed by a function
+ 'main-fun': [
+ include('whitespace'),
+
+ (r'\s', Text),
+ (r'\(\*', Comment.Multiline, 'comment'),
+
+ # Special behavior of val/and/fun
+ (r'\b(fun|and)\b(?!\')', Keyword.Reserved, 'fname'),
+ (r'\b(val)\b(?!\')', Keyword.Reserved,
+ ('#pop', 'main', 'vname')),
+
+ # Special behavior of '|' and '|'-manipulating keywords
+ (r'\|', Punctuation, 'fname'),
+ (r'\b(case|handle)\b(?!\')', Keyword.Reserved,
+ ('#pop', 'main')),
+
+ include('delimiters'),
+ include('core'),
+ (r'\S+', Error),
+ ],
+
+ # Character and string parsers
+ 'char': stringy(String.Char),
+ 'string': stringy(String.Double),
+
+ 'breakout': [
+ (r'(?=\b(%s)\b(?!\'))' % '|'.join(alphanumid_reserved), Text, '#pop'),
+ ],
+
+ # Dealing with what comes after module system keywords
+ 'sname': [
+ include('whitespace'),
+ include('breakout'),
+
+ (r'(%s)' % alphanumid_re, Name.Namespace),
+ default('#pop'),
+ ],
+
+ # Dealing with what comes after the 'fun' (or 'and' or '|') keyword
+ 'fname': [
+ include('whitespace'),
+ (r'\'[\w\']*', Name.Decorator),
+ (r'\(', Punctuation, 'tyvarseq'),
+
+ (r'(%s)' % alphanumid_re, Name.Function, '#pop'),
+ (r'(%s)' % symbolicid_re, Name.Function, '#pop'),
+
+ # Ignore interesting function declarations like "fun (x + y) = ..."
+ default('#pop'),
+ ],
+
+ # Dealing with what comes after the 'val' (or 'and') keyword
+ 'vname': [
+ include('whitespace'),
+ (r'\'[\w\']*', Name.Decorator),
+ (r'\(', Punctuation, 'tyvarseq'),
+
+ (r'(%s)(\s*)(=(?!%s))' % (alphanumid_re, symbolicid_re),
+ bygroups(Name.Variable, Text, Punctuation), '#pop'),
+ (r'(%s)(\s*)(=(?!%s))' % (symbolicid_re, symbolicid_re),
+ bygroups(Name.Variable, Text, Punctuation), '#pop'),
+ (r'(%s)' % alphanumid_re, Name.Variable, '#pop'),
+ (r'(%s)' % symbolicid_re, Name.Variable, '#pop'),
+
+ # Ignore interesting patterns like 'val (x, y)'
+ default('#pop'),
+ ],
+
+ # Dealing with what comes after the 'type' (or 'and') keyword
+ 'tname': [
+ include('whitespace'),
+ include('breakout'),
+
+ (r'\'[\w\']*', Name.Decorator),
+ (r'\(', Punctuation, 'tyvarseq'),
+ (r'=(?!%s)' % symbolicid_re, Punctuation, ('#pop', 'typbind')),
+
+ (r'(%s)' % alphanumid_re, Keyword.Type),
+ (r'(%s)' % symbolicid_re, Keyword.Type),
+ (r'\S+', Error, '#pop'),
+ ],
+
+ # A type binding includes most identifiers
+ 'typbind': [
+ include('whitespace'),
+
+ (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
+
+ include('breakout'),
+ include('core'),
+ (r'\S+', Error, '#pop'),
+ ],
+
+ # Dealing with what comes after the 'datatype' (or 'and') keyword
+ 'dname': [
+ include('whitespace'),
+ include('breakout'),
+
+ (r'\'[\w\']*', Name.Decorator),
+ (r'\(', Punctuation, 'tyvarseq'),
+ (r'(=)(\s*)(datatype)',
+ bygroups(Punctuation, Text, Keyword.Reserved), '#pop'),
+ (r'=(?!%s)' % symbolicid_re, Punctuation,
+ ('#pop', 'datbind', 'datcon')),
+
+ (r'(%s)' % alphanumid_re, Keyword.Type),
+ (r'(%s)' % symbolicid_re, Keyword.Type),
+ (r'\S+', Error, '#pop'),
+ ],
+
+ # common case - A | B | C of int
+ 'datbind': [
+ include('whitespace'),
+
+ (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'dname')),
+ (r'\b(withtype)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
+ (r'\b(of)\b(?!\')', Keyword.Reserved),
+
+ (r'(\|)(\s*)(%s)' % alphanumid_re,
+ bygroups(Punctuation, Text, Name.Class)),
+ (r'(\|)(\s+)(%s)' % symbolicid_re,
+ bygroups(Punctuation, Text, Name.Class)),
+
+ include('breakout'),
+ include('core'),
+ (r'\S+', Error),
+ ],
+
+ # Dealing with what comes after an exception
+ 'ename': [
+ include('whitespace'),
+
+ (r'(exception|and)\b(\s+)(%s)' % alphanumid_re,
+ bygroups(Keyword.Reserved, Text, Name.Class)),
+ (r'(exception|and)\b(\s*)(%s)' % symbolicid_re,
+ bygroups(Keyword.Reserved, Text, Name.Class)),
+ (r'\b(of)\b(?!\')', Keyword.Reserved),
+
+ include('breakout'),
+ include('core'),
+ (r'\S+', Error),
+ ],
+
+ 'datcon': [
+ include('whitespace'),
+ (r'(%s)' % alphanumid_re, Name.Class, '#pop'),
+ (r'(%s)' % symbolicid_re, Name.Class, '#pop'),
+ (r'\S+', Error, '#pop'),
+ ],
+
+ # Series of type variables
+ 'tyvarseq': [
+ (r'\s', Text),
+ (r'\(\*', Comment.Multiline, 'comment'),
+
+ (r'\'[\w\']*', Name.Decorator),
+ (alphanumid_re, Name),
+ (r',', Punctuation),
+ (r'\)', Punctuation, '#pop'),
+ (symbolicid_re, Name),
+ ],
+
+ 'comment': [
+ (r'[^(*)]', Comment.Multiline),
+ (r'\(\*', Comment.Multiline, '#push'),
+ (r'\*\)', Comment.Multiline, '#pop'),
+ (r'[(*)]', Comment.Multiline),
+ ],
+ }
+
+
+class OcamlLexer(RegexLexer):
+ """
+ For the OCaml language.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'OCaml'
+ aliases = ['ocaml']
+ filenames = ['*.ml', '*.mli', '*.mll', '*.mly']
+ mimetypes = ['text/x-ocaml']
+
+ keywords = (
+ 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
+ 'downto', 'else', 'end', 'exception', 'external', 'false',
+ 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
+ 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
+ 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
+ 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
+ 'type', 'value', 'val', 'virtual', 'when', 'while', 'with',
+ )
+ keyopts = (
+ '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
+ r'-\.', '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
+ '<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
+ r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~'
+ )
+
+ operators = r'[!$%&*+\./:<=>?@^|~-]'
+ word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or')
+ prefix_syms = r'[!?~]'
+ infix_syms = r'[=<>@^|&+\*/$%-]'
+ primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
+
+ tokens = {
+ 'escape-sequence': [
+ (r'\\[\\"\'ntbr]', String.Escape),
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ ],
+ 'root': [
+ (r'\s+', Text),
+ (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
+ (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
+ (r'\b([A-Z][\w\']*)', Name.Class),
+ (r'\(\*(?![)])', Comment, 'comment'),
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
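+            # keyopts is presumably reversed so that longer operators such as
+            # '&&' and '::' are tried before their one-character prefixes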
+ (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
+ (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
+ (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
+ (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+
+ (r"[^\W\d][\w']*", Name),
+
+            (r'-?\d[\d_]*(\.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+ (r'0[oO][0-7][0-7_]*', Number.Oct),
+ (r'0[bB][01][01_]*', Number.Bin),
+ (r'\d[\d_]*', Number.Integer),
+
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
+ String.Char),
+ (r"'.'", String.Char),
+ (r"'", Keyword), # a stray quote is another syntax element
+
+ (r'"', String.Double, 'string'),
+
+ (r'[~?][a-z][\w\']*:', Name.Variable),
+ ],
+ 'comment': [
+ (r'[^(*)]+', Comment),
+ (r'\(\*', Comment, '#push'),
+ (r'\*\)', Comment, '#pop'),
+ (r'[(*)]', Comment),
+ ],
+ 'string': [
+ (r'[^\\"]+', String.Double),
+ include('escape-sequence'),
+ (r'\\\n', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'dotted': [
+ (r'\s+', Text),
+ (r'\.', Punctuation),
+ (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+ (r'[A-Z][\w\']*', Name.Class, '#pop'),
+ (r'[a-z_][\w\']*', Name, '#pop'),
+ default('#pop'),
+ ],
+ }
+
+
+class OpaLexer(RegexLexer):
+ """
+ Lexer for the Opa language (http://opalang.org).
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Opa'
+ aliases = ['opa']
+ filenames = ['*.opa']
+ mimetypes = ['text/x-opa']
+
+ # most of these aren't strictly keywords
+ # but if you color only real keywords, you might just
+ # as well not color anything
+ keywords = (
+ 'and', 'as', 'begin', 'case', 'client', 'css', 'database', 'db', 'do',
+ 'else', 'end', 'external', 'forall', 'function', 'if', 'import',
+ 'match', 'module', 'or', 'package', 'parser', 'rec', 'server', 'then',
+ 'type', 'val', 'with', 'xml_parser',
+ )
+
+ # matches both stuff and `stuff`
+ ident_re = r'(([a-zA-Z_]\w*)|(`[^`]*`))'
+
+ op_re = r'[.=\-<>,@~%/+?*&^!]'
+ punc_re = r'[()\[\],;|]' # '{' and '}' are treated elsewhere
+ # because they are also used for inserts
+
+ tokens = {
+ # copied from the caml lexer, should be adapted
+ 'escape-sequence': [
+ (r'\\[\\"\'ntr}]', String.Escape),
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ ],
+
+ # factorizing these rules, because they are inserted many times
+ 'comments': [
+ (r'/\*', Comment, 'nested-comment'),
+ (r'//.*?$', Comment),
+ ],
+ 'comments-and-spaces': [
+ include('comments'),
+ (r'\s+', Text),
+ ],
+
+ 'root': [
+ include('comments-and-spaces'),
+ # keywords
+ (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
+ # directives
+ # we could parse the actual set of directives instead of anything
+ # starting with @, but this is troublesome
+ # because it needs to be adjusted all the time
+ # and assuming we parse only sources that compile, it is useless
+ (r'@' + ident_re + r'\b', Name.Builtin.Pseudo),
+
+ # number literals
+            (r'-?\.[\d]+([eE][+\-]?\d+)', Number.Float),
+            (r'-?\d+\.\d*([eE][+\-]?\d+)', Number.Float),
+ (r'-?\d+[eE][+\-]?\d+', Number.Float),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'0[oO][0-7]+', Number.Oct),
+ (r'0[bB][01]+', Number.Bin),
+ (r'\d+', Number.Integer),
+ # color literals
+ (r'#[\da-fA-F]{3,6}', Number.Integer),
+
+ # string literals
+ (r'"', String.Double, 'string'),
+ # char literal, should be checked because this is the regexp from
+ # the caml lexer
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2})|.)'",
+ String.Char),
+
+ # this is meant to deal with embedded exprs in strings
+ # every time we find a '}' we pop a state so that if we were
+ # inside a string, we are back in the string state
+ # as a consequence, we must also push a state every time we find a
+ # '{' or else we will have errors when parsing {} for instance
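+            # (e.g. in "hello {name}" the 'string' state pushes 'root' on '{',
+            # and this '}' rule pops back into the string)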
+ (r'\{', Operator, '#push'),
+ (r'\}', Operator, '#pop'),
+
+ # html literals
+            # this is much stricter than the actual parser,
+ # since a<b would not be parsed as html
+ # but then again, the parser is way too lax, and we can't hope
+ # to have something as tolerant
+ (r'<(?=[a-zA-Z>])', String.Single, 'html-open-tag'),
+
+ # db path
+ # matching the '[_]' in '/a[_]' because it is a part
+ # of the syntax of the db path definition
+ # unfortunately, i don't know how to match the ']' in
+ # /a[1], so this is somewhat inconsistent
+ (r'[@?!]?(/\w+)+(\[_\])?', Name.Variable),
+ # putting the same color on <- as on db path, since
+ # it can be used only to mean Db.write
+ (r'<-(?!'+op_re+r')', Name.Variable),
+
+ # 'modules'
+ # although modules are not distinguished by their names as in caml
+            # the standard library seems to follow the convention that
+            # only modules are capitalized
+ (r'\b([A-Z]\w*)(?=\.)', Name.Namespace),
+
+ # operators
+ # = has a special role because this is the only
+            # way to syntactically distinguish binding constructions
+ # unfortunately, this colors the equal in {x=2} too
+ (r'=(?!'+op_re+r')', Keyword),
+ (r'(%s)+' % op_re, Operator),
+ (r'(%s)+' % punc_re, Operator),
+
+ # coercions
+ (r':', Operator, 'type'),
+ # type variables
+ # we need this rule because we don't parse specially type
+ # definitions so in "type t('a) = ...", "'a" is parsed by 'root'
+ ("'"+ident_re, Keyword.Type),
+
+ # id literal, #something, or #{expr}
+ (r'#'+ident_re, String.Single),
+ (r'#(?=\{)', String.Single),
+
+ # identifiers
+            # this avoids coloring the '2' in 'a2' as an integer
+ (ident_re, Text),
+
+ # default, not sure if that is needed or not
+ # (r'.', Text),
+ ],
+
+ # it is quite painful to have to parse types to know where they end
+ # this is the general rule for a type
+ # a type is either:
+ # * -> ty
+ # * type-with-slash
+ # * type-with-slash -> ty
+ # * type-with-slash (, type-with-slash)+ -> ty
+ #
+ # the code is pretty funky in here, but this code would roughly
+ # translate in caml to:
+ # let rec type stream =
+ # match stream with
+ # | [< "->"; stream >] -> type stream
+ # | [< ""; stream >] ->
+ # type_with_slash stream
+ # type_lhs_1 stream;
+ # and type_1 stream = ...
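+        # e.g. "int", "int -> string" and "int, string -> bool" all fit the
+        # grammar sketched above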
+ 'type': [
+ include('comments-and-spaces'),
+ (r'->', Keyword.Type),
+ default(('#pop', 'type-lhs-1', 'type-with-slash')),
+ ],
+
+ # parses all the atomic or closed constructions in the syntax of type
+ # expressions: record types, tuple types, type constructors, basic type
+ # and type variables
+ 'type-1': [
+ include('comments-and-spaces'),
+ (r'\(', Keyword.Type, ('#pop', 'type-tuple')),
+ (r'~?\{', Keyword.Type, ('#pop', 'type-record')),
+ (ident_re+r'\(', Keyword.Type, ('#pop', 'type-tuple')),
+ (ident_re, Keyword.Type, '#pop'),
+ ("'"+ident_re, Keyword.Type),
+ # this case is not in the syntax but sometimes
+ # we think we are parsing types when in fact we are parsing
+ # some css, so we just pop the states until we get back into
+ # the root state
+ default('#pop'),
+ ],
+
+ # type-with-slash is either:
+ # * type-1
+ # * type-1 (/ type-1)+
+ 'type-with-slash': [
+ include('comments-and-spaces'),
+ default(('#pop', 'slash-type-1', 'type-1')),
+ ],
+ 'slash-type-1': [
+ include('comments-and-spaces'),
+ ('/', Keyword.Type, ('#pop', 'type-1')),
+ # same remark as above
+ default('#pop'),
+ ],
+
+ # we go in this state after having parsed a type-with-slash
+ # while trying to parse a type
+ # and at this point we must determine if we are parsing an arrow
+ # type (in which case we must continue parsing) or not (in which
+ # case we stop)
+ 'type-lhs-1': [
+ include('comments-and-spaces'),
+ (r'->', Keyword.Type, ('#pop', 'type')),
+ (r'(?=,)', Keyword.Type, ('#pop', 'type-arrow')),
+ default('#pop'),
+ ],
+ 'type-arrow': [
+ include('comments-and-spaces'),
+            # the lookahead here allows parsing f(x : int, y : float -> truc)
+ # correctly
+ (r',(?=[^:]*?->)', Keyword.Type, 'type-with-slash'),
+ (r'->', Keyword.Type, ('#pop', 'type')),
+ # same remark as above
+ default('#pop'),
+ ],
+
+ # no need to do precise parsing for tuples and records
+ # because they are closed constructions, so we can simply
+ # find the closing delimiter
+        # note that this would not work if the source
+        # contained identifiers like `{)` (although it could be patched
+ # to support it)
+ 'type-tuple': [
+ include('comments-and-spaces'),
+ (r'[^()/*]+', Keyword.Type),
+ (r'[/*]', Keyword.Type),
+ (r'\(', Keyword.Type, '#push'),
+ (r'\)', Keyword.Type, '#pop'),
+ ],
+ 'type-record': [
+ include('comments-and-spaces'),
+ (r'[^{}/*]+', Keyword.Type),
+ (r'[/*]', Keyword.Type),
+ (r'\{', Keyword.Type, '#push'),
+ (r'\}', Keyword.Type, '#pop'),
+ ],
+
+ # 'type-tuple': [
+ # include('comments-and-spaces'),
+ # (r'\)', Keyword.Type, '#pop'),
+ # default(('#pop', 'type-tuple-1', 'type-1')),
+ # ],
+ # 'type-tuple-1': [
+ # include('comments-and-spaces'),
+ # (r',?\s*\)', Keyword.Type, '#pop'), # ,) is a valid end of tuple, in (1,)
+ # (r',', Keyword.Type, 'type-1'),
+ # ],
+ # 'type-record':[
+ # include('comments-and-spaces'),
+ # (r'\}', Keyword.Type, '#pop'),
+ # (r'~?(?:\w+|`[^`]*`)', Keyword.Type, 'type-record-field-expr'),
+ # ],
+ # 'type-record-field-expr': [
+ #
+ # ],
+
+ 'nested-comment': [
+ (r'[^/*]+', Comment),
+ (r'/\*', Comment, '#push'),
+ (r'\*/', Comment, '#pop'),
+ (r'[/*]', Comment),
+ ],
+
+ # the copy pasting between string and single-string
+ # is kinda sad. Is there a way to avoid that??
+ 'string': [
+ (r'[^\\"{]+', String.Double),
+ (r'"', String.Double, '#pop'),
+ (r'\{', Operator, 'root'),
+ include('escape-sequence'),
+ ],
+ 'single-string': [
+ (r'[^\\\'{]+', String.Double),
+ (r'\'', String.Double, '#pop'),
+ (r'\{', Operator, 'root'),
+ include('escape-sequence'),
+ ],
+
+ # all the html stuff
+ # can't really reuse some existing html parser
+ # because we must be able to parse embedded expressions
+
+ # we are in this state after someone parsed the '<' that
+ # started the html literal
+ 'html-open-tag': [
+ (r'[\w\-:]+', String.Single, ('#pop', 'html-attr')),
+ (r'>', String.Single, ('#pop', 'html-content')),
+ ],
+
+ # we are in this state after someone parsed the '</' that
+ # started the end of the closing tag
+ 'html-end-tag': [
+ # this is a star, because </> is allowed
+ (r'[\w\-:]*>', String.Single, '#pop'),
+ ],
+
+ # we are in this state after having parsed '<ident(:ident)?'
+ # we thus parse a possibly empty list of attributes
+ 'html-attr': [
+ (r'\s+', Text),
+ (r'[\w\-:]+=', String.Single, 'html-attr-value'),
+ (r'/>', String.Single, '#pop'),
+ (r'>', String.Single, ('#pop', 'html-content')),
+ ],
+
+ 'html-attr-value': [
+ (r"'", String.Single, ('#pop', 'single-string')),
+ (r'"', String.Single, ('#pop', 'string')),
+ (r'#'+ident_re, String.Single, '#pop'),
+ (r'#(?=\{)', String.Single, ('#pop', 'root')),
+ (r'[^"\'{`=<>]+', String.Single, '#pop'),
+ (r'\{', Operator, ('#pop', 'root')), # this is a tail call!
+ ],
+
+ # we should probably deal with '\' escapes here
+ 'html-content': [
+ (r'<!--', Comment, 'html-comment'),
+ (r'</', String.Single, ('#pop', 'html-end-tag')),
+ (r'<', String.Single, 'html-open-tag'),
+ (r'\{', Operator, 'root'),
+ (r'[^<{]+', String.Single),
+ ],
+
+ 'html-comment': [
+ (r'-->', Comment, '#pop'),
+ (r'[^\-]+|-', Comment),
+ ],
+ }
diff --git a/pygments/lexers/modeling.py b/pygments/lexers/modeling.py
new file mode 100644
index 00000000..43194436
--- /dev/null
+++ b/pygments/lexers/modeling.py
@@ -0,0 +1,356 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.modeling
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for modeling languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+from pygments.lexers.html import HtmlLexer
+from pygments.lexers import _stan_builtins
+
+__all__ = ['ModelicaLexer', 'BugsLexer', 'JagsLexer', 'StanLexer']
+
+
+class ModelicaLexer(RegexLexer):
+ """
+ For `Modelica <http://www.modelica.org/>`_ source code.
+
+ .. versionadded:: 1.1
+ """
+ name = 'Modelica'
+ aliases = ['modelica']
+ filenames = ['*.mo']
+ mimetypes = ['text/x-modelica']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ _name = r"(?:'(?:[^\\']|\\.)+'|[a-zA-Z_]\w*)"
+
+ tokens = {
+ 'whitespace': [
+ (u'[\\s\ufeff]+', Text),
+ (r'//[^\n]*\n?', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'root': [
+ include('whitespace'),
+ (r'"', String.Double, 'string'),
+ (r'[()\[\]{},;]+', Punctuation),
+ (r'\.?[*^/+-]|\.|<>|[<>:=]=?', Operator),
+ (r'\d+(\.?\d*[eE][-+]?\d+|\.\d*)', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'(abs|acos|actualStream|array|asin|assert|AssertionLevel|atan|'
+ r'atan2|backSample|Boolean|cardinality|cat|ceil|change|Clock|'
+ r'Connections|cos|cosh|cross|delay|diagonal|div|edge|exp|'
+ r'ExternalObject|fill|floor|getInstanceName|hold|homotopy|'
+ r'identity|inStream|integer|Integer|interval|inverse|isPresent|'
+ r'linspace|log|log10|matrix|max|min|mod|ndims|noClock|noEvent|'
+ r'ones|outerProduct|pre|previous|product|Real|reinit|rem|rooted|'
+ r'sample|scalar|semiLinear|shiftSample|sign|sin|sinh|size|skew|'
+ r'smooth|spatialDistribution|sqrt|StateSelect|String|subSample|'
+ r'sum|superSample|symmetric|tan|tanh|terminal|terminate|time|'
+ r'transpose|vector|zeros)\b', Name.Builtin),
+ (r'(algorithm|annotation|break|connect|constant|constrainedby|der|'
+ r'discrete|each|else|elseif|elsewhen|encapsulated|enumeration|'
+ r'equation|exit|expandable|extends|external|final|flow|for|if|'
+ r'import|impure|in|initial|inner|input|loop|nondiscrete|outer|'
+ r'output|parameter|partial|protected|public|pure|redeclare|'
+ r'replaceable|return|stream|then|when|while)\b',
+ Keyword.Reserved),
+ (r'(and|not|or)\b', Operator.Word),
+ (r'(block|class|connector|end|function|model|operator|package|'
+ r'record|type)\b', Keyword.Reserved, 'class'),
+ (r'(false|true)\b', Keyword.Constant),
+ (r'within\b', Keyword.Reserved, 'package-prefix'),
+ (_name, Name)
+ ],
+ 'class': [
+ include('whitespace'),
+ (r'(function|record)\b', Keyword.Reserved),
+ (r'(if|for|when|while)\b', Keyword.Reserved, '#pop'),
+ (_name, Name.Class, '#pop'),
+ default('#pop')
+ ],
+ 'package-prefix': [
+ include('whitespace'),
+ (_name, Name.Namespace, '#pop'),
+ default('#pop')
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'\\[\'"?\\abfnrtv]', String.Escape),
+ (r'(?i)<\s*html\s*>([^\\"]|\\.)+?(<\s*/\s*html\s*>|(?="))',
+ using(HtmlLexer)),
+ (r'<|\\?[^"\\<]+', String.Double)
+ ]
+ }
+
+
+class BugsLexer(RegexLexer):
+ """
+ Pygments Lexer for `OpenBugs <http://www.openbugs.net/>`_ and WinBugs
+ models.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'BUGS'
+ aliases = ['bugs', 'winbugs', 'openbugs']
+ filenames = ['*.bug']
+
+ _FUNCTIONS = (
+ # Scalar functions
+ 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
+ 'cloglog', 'cos', 'cosh', 'cumulative', 'cut', 'density', 'deviance',
+ 'equals', 'expr', 'gammap', 'ilogit', 'icloglog', 'integral', 'log',
+ 'logfact', 'loggam', 'logit', 'max', 'min', 'phi', 'post.p.value',
+ 'pow', 'prior.p.value', 'probit', 'replicate.post', 'replicate.prior',
+ 'round', 'sin', 'sinh', 'solution', 'sqrt', 'step', 'tan', 'tanh',
+ 'trunc',
+ # Vector functions
+ 'inprod', 'interp.lin', 'inverse', 'logdet', 'mean', 'eigen.vals',
+ 'ode', 'prod', 'p.valueM', 'rank', 'ranked', 'replicate.postM',
+ 'sd', 'sort', 'sum',
+ # Special
+ 'D', 'I', 'F', 'T', 'C')
+ """ OpenBUGS built-in functions
+
+ From http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAII
+
+ This also includes
+
+ - T, C, I : Truncation and censoring.
+ ``T`` and ``C`` are in OpenBUGS. ``I`` in WinBUGS.
+ - D : ODE
+ - F : Functional http://www.openbugs.info/Examples/Functionals.html
+
+ """
+
+ _DISTRIBUTIONS = ('dbern', 'dbin', 'dcat', 'dnegbin', 'dpois',
+ 'dhyper', 'dbeta', 'dchisqr', 'ddexp', 'dexp',
+ 'dflat', 'dgamma', 'dgev', 'df', 'dggamma', 'dgpar',
+ 'dloglik', 'dlnorm', 'dlogis', 'dnorm', 'dpar',
+ 'dt', 'dunif', 'dweib', 'dmulti', 'ddirch', 'dmnorm',
+ 'dmt', 'dwish')
+ """ OpenBUGS built-in distributions
+
+ Functions from
+ http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAI
+ """
+
+ tokens = {
+ 'whitespace': [
+ (r"\s+", Text),
+ ],
+ 'comments': [
+ # Comments
+ (r'#.*$', Comment.Single),
+ ],
+ 'root': [
+ # Comments
+ include('comments'),
+ include('whitespace'),
+ # Block start
+ (r'(model)(\s+)(\{)',
+ bygroups(Keyword.Namespace, Text, Punctuation)),
+ # Reserved Words
+ (r'(for|in)(?![\w.])', Keyword.Reserved),
+ # Built-in Functions
+ (r'(%s)(?=\s*\()'
+ % r'|'.join(_FUNCTIONS + _DISTRIBUTIONS),
+ Name.Builtin),
+ # Regular variable names
+ (r'[A-Za-z][\w.]*', Name),
+ # Number Literals
+ (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
+ # Punctuation
+ (r'\[|\]|\(|\)|:|,|;', Punctuation),
+ # Assignment operators
+ # SLexer makes these tokens Operators.
+ (r'<-|~', Operator),
+ # Infix and prefix operators
+ (r'\+|-|\*|/', Operator),
+ # Block
+ (r'[{}]', Punctuation),
+ ]
+ }
+
+ def analyse_text(text):
+ if re.search(r"^\s*model\s*{", text, re.M):
+ return 0.7
+ else:
+ return 0.0
+
+
+class JagsLexer(RegexLexer):
+ """
+ Pygments Lexer for JAGS.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'JAGS'
+ aliases = ['jags']
+ filenames = ['*.jag', '*.bug']
+
+ # JAGS
+ _FUNCTIONS = (
+ 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
+ 'cos', 'cosh', 'cloglog',
+ 'equals', 'exp', 'icloglog', 'ifelse', 'ilogit', 'log', 'logfact',
+ 'loggam', 'logit', 'phi', 'pow', 'probit', 'round', 'sin', 'sinh',
+ 'sqrt', 'step', 'tan', 'tanh', 'trunc', 'inprod', 'interp.lin',
+ 'logdet', 'max', 'mean', 'min', 'prod', 'sum', 'sd', 'inverse',
+ 'rank', 'sort', 't', 'acos', 'acosh', 'asin', 'asinh', 'atan',
+        # Truncation/Censoring (should these be included?)
+ 'T', 'I')
+ # Distributions with density, probability and quartile functions
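+    # (each entry matches three names, e.g. 'norm' covers dnorm, pnorm
+    # and qnorm)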
+ _DISTRIBUTIONS = tuple('[dpq]%s' % x for x in
+ ('bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp',
+ 'df', 'gamma', 'gen.gamma', 'logis', 'lnorm',
+ 'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib'))
+ # Other distributions without density and probability
+ _OTHER_DISTRIBUTIONS = (
+ 'dt', 'dunif', 'dbetabin', 'dbern', 'dbin', 'dcat', 'dhyper',
+ 'ddirch', 'dmnorm', 'dwish', 'dmt', 'dmulti', 'dbinom', 'dchisq',
+ 'dnbinom', 'dweibull', 'ddirich')
+
+ tokens = {
+ 'whitespace': [
+ (r"\s+", Text),
+ ],
+ 'names': [
+ # Regular variable names
+ (r'[a-zA-Z][\w.]*\b', Name),
+ ],
+ 'comments': [
+ # do not use stateful comments
+ (r'(?s)/\*.*?\*/', Comment.Multiline),
+ # Comments
+ (r'#.*$', Comment.Single),
+ ],
+ 'root': [
+ # Comments
+ include('comments'),
+ include('whitespace'),
+ # Block start
+ (r'(model|data)(\s+)(\{)',
+ bygroups(Keyword.Namespace, Text, Punctuation)),
+ (r'var(?![\w.])', Keyword.Declaration),
+ # Reserved Words
+ (r'(for|in)(?![\w.])', Keyword.Reserved),
+ # Builtins
+ # Need to use lookahead because . is a valid char
+ (r'(%s)(?=\s*\()' % r'|'.join(_FUNCTIONS
+ + _DISTRIBUTIONS
+ + _OTHER_DISTRIBUTIONS),
+ Name.Builtin),
+ # Names
+ include('names'),
+ # Number Literals
+ (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
+ (r'\[|\]|\(|\)|:|,|;', Punctuation),
+ # Assignment operators
+ (r'<-|~', Operator),
+            # JAGS includes many more operators than OpenBUGS
+            (r'\+|-|\*|\/|\|\||&&|[<>=]=?|\^|%.*?%', Operator),
+ (r'[{}]', Punctuation),
+ ]
+ }
+
+ def analyse_text(text):
+ if re.search(r'^\s*model\s*\{', text, re.M):
+ if re.search(r'^\s*data\s*\{', text, re.M):
+ return 0.9
+ elif re.search(r'^\s*var', text, re.M):
+ return 0.9
+ else:
+ return 0.3
+ else:
+ return 0
+
+
+class StanLexer(RegexLexer):
+ """Pygments Lexer for Stan models.
+
+ The Stan modeling language is specified in the *Stan Modeling Language
+ User's Guide and Reference Manual, v2.5.0*,
+ `pdf <https://github.com/stan-dev/stan/releases/download/v2.5.0/stan-reference-2.5.0.pdf>`__.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Stan'
+ aliases = ['stan']
+ filenames = ['*.stan']
+
+ tokens = {
+ 'whitespace': [
+ (r"\s+", Text),
+ ],
+ 'comments': [
+ (r'(?s)/\*.*?\*/', Comment.Multiline),
+ # Comments
+ (r'(//|#).*$', Comment.Single),
+ ],
+ 'root': [
+ # Stan is more restrictive on strings than this regex
+ (r'"[^"]*"', String),
+ # Comments
+ include('comments'),
+            # Whitespace
+            include('whitespace'),
+ # Block start
+ (r'(%s)(\s*)(\{)' %
+ r'|'.join(('functions', 'data', r'transformed\s+?data',
+ 'parameters', r'transformed\s+parameters',
+ 'model', r'generated\s+quantities')),
+ bygroups(Keyword.Namespace, Text, Punctuation)),
+ # Reserved Words
+ (r'(%s)\b' % r'|'.join(_stan_builtins.KEYWORDS), Keyword),
+ # Truncation
+ (r'T(?=\s*\[)', Keyword),
+ # Data types
+ (r'(%s)\b' % r'|'.join(_stan_builtins.TYPES), Keyword.Type),
+ # Punctuation
+ (r"[;:,\[\]()]", Punctuation),
+ # Builtin
+ (r'(%s)(?=\s*\()'
+ % r'|'.join(_stan_builtins.FUNCTIONS
+ + _stan_builtins.DISTRIBUTIONS),
+ Name.Builtin),
+ # Special names ending in __, like lp__
+ (r'[A-Za-z]\w*__\b', Name.Builtin.Pseudo),
+ (r'(%s)\b' % r'|'.join(_stan_builtins.RESERVED), Keyword.Reserved),
+ # Regular variable names
+ (r'[A-Za-z]\w*\b', Name),
+ # Real Literals
+ (r'-?[0-9]+(\.[0-9]+)?[eE]-?[0-9]+', Number.Float),
+ (r'-?[0-9]*\.[0-9]*', Number.Float),
+ # Integer Literals
+ (r'-?[0-9]+', Number.Integer),
+ # Assignment operators
+ # SLexer makes these tokens Operators.
+ (r'<-|~', Operator),
+ # Infix, prefix and postfix operators (and = )
+ (r"\+|-|\.?\*|\.?/|\\|'|\^|==?|!=?|<=?|>=?|\|\||&&", Operator),
+ # Block delimiters
+ (r'[{}]', Punctuation),
+ ]
+ }
+
+ def analyse_text(text):
+ if re.search(r'^\s*parameters\s*\{', text, re.M):
+ return 1.0
+ else:
+ return 0.0
diff --git a/pygments/lexers/modula2.py b/pygments/lexers/modula2.py
new file mode 100644
index 00000000..d32bb5bb
--- /dev/null
+++ b/pygments/lexers/modula2.py
@@ -0,0 +1,1566 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.modula2
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Multi-Dialect Lexer for Modula-2.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.util import get_bool_opt, get_list_opt
+from pygments.token import Text, Comment, Operator, Keyword, Name, \
+ String, Number, Punctuation, Error
+
+__all__ = ['Modula2Lexer']
+
+
+# Multi-Dialect Modula-2 Lexer
+class Modula2Lexer(RegexLexer):
+ """
+ For `Modula-2 <http://www.modula2.org/>`_ source code.
+
+ The Modula-2 lexer supports several dialects. By default, it operates in
+ fallback mode, recognising the *combined* literals, punctuation symbols
+ and operators of all supported dialects, and the *combined* reserved words
+ and builtins of PIM Modula-2, ISO Modula-2 and Modula-2 R10, while not
+ differentiating between library defined identifiers.
+
+ To select a specific dialect, a dialect option may be passed
+ or a dialect tag may be embedded into a source file.
+
+ Dialect Options:
+
+ `m2pim`
+ Select PIM Modula-2 dialect.
+ `m2iso`
+ Select ISO Modula-2 dialect.
+ `m2r10`
+ Select Modula-2 R10 dialect.
+ `objm2`
+ Select Objective Modula-2 dialect.
+
+ The PIM and ISO dialect options may be qualified with a language extension.
+
+ Language Extensions:
+
+ `+aglet`
+ Select Aglet Modula-2 extensions, available with m2iso.
+ `+gm2`
+ Select GNU Modula-2 extensions, available with m2pim.
+ `+p1`
+ Select p1 Modula-2 extensions, available with m2iso.
+ `+xds`
+ Select XDS Modula-2 extensions, available with m2iso.
+
+
+ Passing a Dialect Option via Unix Commandline Interface
+
+ Dialect options may be passed to the lexer using the `dialect` key.
+ Only one such option should be passed. If multiple dialect options are
+    passed, the first valid option is used; any subsequent options are ignored.
+
+ Examples:
+
+ `$ pygmentize -O full,dialect=m2iso -f html -o /path/to/output /path/to/input`
+ Use ISO dialect to render input to HTML output
+ `$ pygmentize -O full,dialect=m2iso+p1 -f rtf -o /path/to/output /path/to/input`
+ Use ISO dialect with p1 extensions to render input to RTF output
+
+
+ Embedding a Dialect Option within a source file
+
+    A dialect option may be embedded in a source file in the form of a dialect
+ tag, a specially formatted comment that specifies a dialect option.
+
+ Dialect Tag EBNF:
+
+ dialectTag :
+ OpeningCommentDelim Prefix dialectOption ClosingCommentDelim ;
+
+ dialectOption :
+ 'm2pim' | 'm2iso' | 'm2r10' | 'objm2' |
+ 'm2iso+aglet' | 'm2pim+gm2' | 'm2iso+p1' | 'm2iso+xds' ;
+
+ Prefix : '!' ;
+
+ OpeningCommentDelim : '(*' ;
+
+ ClosingCommentDelim : '*)' ;
+
+ No whitespace is permitted between the tokens of a dialect tag.
+
+ In the event that a source file contains multiple dialect tags, the first
+ tag that contains a valid dialect option will be used and any subsequent
+ dialect tags will be ignored. Ideally, a dialect tag should be placed
+ at the beginning of a source file.
+
+ An embedded dialect tag overrides a dialect option set via command line.
+
+ Examples:
+
+ `(*!m2r10*) DEFINITION MODULE Foobar; ...`
+ Use Modula2 R10 dialect to render this source file.
+ `(*!m2pim+gm2*) DEFINITION MODULE Bazbam; ...`
+ Use PIM dialect with GNU extensions to render this source file.
+
+
+ Algol Publication Mode:
+
+ In Algol publication mode, source text is rendered for publication of
+ algorithms in scientific papers and academic texts, following the format
+ of the Revised Algol-60 Language Report. It is activated by passing
+ one of two corresponding styles as an option:
+
+ `algol`
+ render reserved words lowercase underline boldface
+ and builtins lowercase boldface italic
+ `algol_nu`
+ render reserved words lowercase boldface (no underlining)
+ and builtins lowercase boldface italic
+
+ The lexer automatically performs the required lowercase conversion when
+ this mode is activated.
+
+ Example:
+
+ `$ pygmentize -O full,style=algol -f latex -o /path/to/output /path/to/input`
+ Render input file in Algol publication mode to LaTeX output.
+
+
+ Rendering Mode of First Class ADT Identifiers:
+
+ The rendering of standard library first class ADT identifiers is controlled
+ by option flag "treat_stdlib_adts_as_builtins".
+
+ When this option is turned on, standard library ADT identifiers are rendered
+ as builtins. When it is turned off, they are rendered as ordinary library
+ identifiers.
+
+ `treat_stdlib_adts_as_builtins` (default: On)
+
+ The option is useful for dialects that support ADTs as first class objects
+ and provide ADTs in the standard library that would otherwise be built-in.
+
+ At present, only Modula-2 R10 supports library ADTs as first class objects
+ and therefore, no ADT identifiers are defined for any other dialects.
+
+ Example:
+
+ `$ pygmentize -O full,dialect=m2r10,treat_stdlib_adts_as_builtins=Off ...`
+ Render standard library ADTs as ordinary library types.
+
+ .. versionadded:: 1.3
+
+ .. versionchanged:: 2.1
+ Added multi-dialect support.
+ """
+ name = 'Modula-2'
+ aliases = ['modula2', 'm2']
+ filenames = ['*.def', '*.mod']
+ mimetypes = ['text/x-modula2']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'whitespace': [
+ (r'\n+', Text), # blank lines
+ (r'\s+', Text), # whitespace
+ ],
+ 'dialecttags': [
+ # PIM Dialect Tag
+ (r'\(\*!m2pim\*\)', Comment.Special),
+ # ISO Dialect Tag
+ (r'\(\*!m2iso\*\)', Comment.Special),
+ # M2R10 Dialect Tag
+ (r'\(\*!m2r10\*\)', Comment.Special),
+ # ObjM2 Dialect Tag
+ (r'\(\*!objm2\*\)', Comment.Special),
+ # Aglet Extensions Dialect Tag
+ (r'\(\*!m2iso\+aglet\*\)', Comment.Special),
+ # GNU Extensions Dialect Tag
+ (r'\(\*!m2pim\+gm2\*\)', Comment.Special),
+ # p1 Extensions Dialect Tag
+ (r'\(\*!m2iso\+p1\*\)', Comment.Special),
+ # XDS Extensions Dialect Tag
+ (r'\(\*!m2iso\+xds\*\)', Comment.Special),
+ ],
+ 'identifiers': [
+ (r'([a-zA-Z_$][\w$]*)', Name),
+ ],
+ 'prefixed_number_literals': [
+ #
+ # Base-2, whole number
+ (r'0b[01]+(\'[01]+)*', Number.Bin),
+ #
+ # Base-16, whole number
+ (r'0[ux][0-9A-F]+(\'[0-9A-F]+)*', Number.Hex),
+ ],
+ 'plain_number_literals': [
+ #
+ # Base-10, real number with exponent
+ (r'[0-9]+(\'[0-9]+)*' # integral part \
+ r'\.[0-9]+(\'[0-9]+)*' # fractional part \
+ r'[eE][+-]?[0-9]+(\'[0-9]+)*', # exponent \
+ Number.Float),
+ #
+ # Base-10, real number without exponent
+ (r'[0-9]+(\'[0-9]+)*' # integral part \
+ r'\.[0-9]+(\'[0-9]+)*', # fractional part \
+ Number.Float),
+ #
+ # Base-10, whole number
+ (r'[0-9]+(\'[0-9]+)*', Number.Integer),
+ ],
+ 'suffixed_number_literals': [
+ #
+ # Base-8, whole number
+ (r'[0-7]+B', Number.Oct),
+ #
+ # Base-8, character code
+ (r'[0-7]+C', Number.Oct),
+ #
+ # Base-16, number
+ (r'[0-9A-F]+H', Number.Hex),
+ ],
+ 'string_literals': [
+ (r"'(\\\\|\\'|[^'])*'", String), # single quoted string
+ (r'"(\\\\|\\"|[^"])*"', String), # double quoted string
+ ],
+ 'digraph_operators': [
+ # Dot Product Operator
+ (r'\*\.', Operator),
+ # Array Concatenation Operator
+ (r'\+>', Operator), # M2R10 + ObjM2
+ # Inequality Operator
+ (r'<>', Operator), # ISO + PIM
+ # Less-Or-Equal, Subset
+ (r'<=', Operator),
+ # Greater-Or-Equal, Superset
+ (r'>=', Operator),
+ # Identity Operator
+ (r'==', Operator), # M2R10 + ObjM2
+ # Type Conversion Operator
+ (r'::', Operator), # M2R10 + ObjM2
+ # Assignment Symbol
+ (r':=', Operator),
+ # Postfix Increment Mutator
+ (r'\+\+', Operator), # M2R10 + ObjM2
+ # Postfix Decrement Mutator
+ (r'--', Operator), # M2R10 + ObjM2
+ ],
+ 'unigraph_operators': [
+ # Arithmetic Operators
+ (r'[+-]', Operator),
+ (r'[*/]', Operator),
+ # ISO 80000-2 compliant Set Difference Operator
+ (r'\\', Operator), # M2R10 + ObjM2
+ # Relational Operators
+ (r'[=#<>]', Operator),
+ # Dereferencing Operator
+ (r'\^', Operator),
+ # Dereferencing Operator Synonym
+ (r'@', Operator), # ISO
+ # Logical AND Operator Synonym
+ (r'&', Operator), # PIM + ISO
+ # Logical NOT Operator Synonym
+ (r'~', Operator), # PIM + ISO
+ # Smalltalk Message Prefix
+ (r'`', Operator), # ObjM2
+ ],
+ 'digraph_punctuation': [
+ # Range Constructor
+ (r'\.\.', Punctuation),
+ # Opening Chevron Bracket
+ (r'<<', Punctuation), # M2R10 + ISO
+ # Closing Chevron Bracket
+ (r'>>', Punctuation), # M2R10 + ISO
+ # Blueprint Punctuation
+ (r'->', Punctuation), # M2R10 + ISO
+ # Distinguish |# and # in M2 R10
+ (r'\|#', Punctuation),
+ # Distinguish ## and # in M2 R10
+ (r'##', Punctuation),
+ # Distinguish |* and * in M2 R10
+ (r'\|\*', Punctuation),
+ ],
+ 'unigraph_punctuation': [
+ # Common Punctuation
+ (r'[\(\)\[\]{},.:;\|]', Punctuation),
+ # Case Label Separator Synonym
+ (r'!', Punctuation), # ISO
+ # Blueprint Punctuation
+ (r'\?', Punctuation), # M2R10 + ObjM2
+ ],
+ 'comments': [
+ # Single Line Comment
+ (r'^//.*?\n', Comment.Single), # M2R10 + ObjM2
+ # Block Comment
+ (r'\(\*([^$].*?)\*\)', Comment.Multiline),
+ # Template Block Comment
+ (r'/\*(.*?)\*/', Comment.Multiline), # M2R10 + ObjM2
+ ],
+ 'pragmas': [
+ # ISO Style Pragmas
+ (r'<\*.*?\*>', Comment.Preproc), # ISO, M2R10 + ObjM2
+ # Pascal Style Pragmas
+ (r'\(\*\$.*?\*\)', Comment.Preproc), # PIM
+ ],
+ 'root': [
+ include('whitespace'),
+ include('dialecttags'),
+ include('pragmas'),
+ include('comments'),
+ include('identifiers'),
+ include('suffixed_number_literals'), # PIM + ISO
+ include('prefixed_number_literals'), # M2R10 + ObjM2
+ include('plain_number_literals'),
+ include('string_literals'),
+ include('digraph_punctuation'),
+ include('digraph_operators'),
+ include('unigraph_punctuation'),
+ include('unigraph_operators'),
+ ]
+ }
+
+# C o m m o n D a t a s e t s
+
+ # Common Reserved Words Dataset
+ common_reserved_words = (
+ # 37 common reserved words
+ 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
+ 'DO', 'ELSE', 'ELSIF', 'END', 'EXIT', 'FOR', 'FROM', 'IF',
+ 'IMPLEMENTATION', 'IMPORT', 'IN', 'LOOP', 'MOD', 'MODULE', 'NOT',
+ 'OF', 'OR', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN',
+ 'SET', 'THEN', 'TO', 'TYPE', 'UNTIL', 'VAR', 'WHILE',
+ )
+
+ # Common Builtins Dataset
+ common_builtins = (
+ # 16 common builtins
+ 'ABS', 'BOOLEAN', 'CARDINAL', 'CHAR', 'CHR', 'FALSE', 'INTEGER',
+ 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NIL', 'ODD', 'ORD', 'REAL',
+ 'TRUE',
+ )
+
+ # Common Pseudo-Module Builtins Dataset
+ common_pseudo_builtins = (
+ # 4 common pseudo builtins
+ 'ADDRESS', 'BYTE', 'WORD', 'ADR'
+ )
+
+# P I M M o d u l a - 2 D a t a s e t s
+
+ # Lexemes to Mark as Error Tokens for PIM Modula-2
+ pim_lexemes_to_reject = (
+ '!', '`', '@', '$', '%', '?', '\\', '==', '++', '--', '::', '*.',
+ '+>', '->', '<<', '>>', '|#', '##',
+ )
+
+ # PIM Modula-2 Additional Reserved Words Dataset
+ pim_additional_reserved_words = (
+ # 3 additional reserved words
+ 'EXPORT', 'QUALIFIED', 'WITH',
+ )
+
+ # PIM Modula-2 Additional Builtins Dataset
+ pim_additional_builtins = (
+ # 16 additional builtins
+ 'BITSET', 'CAP', 'DEC', 'DISPOSE', 'EXCL', 'FLOAT', 'HALT', 'HIGH',
+ 'INC', 'INCL', 'NEW', 'NIL', 'PROC', 'SIZE', 'TRUNC', 'VAL',
+ )
+
+ # PIM Modula-2 Additional Pseudo-Module Builtins Dataset
+ pim_additional_pseudo_builtins = (
+ # 5 additional pseudo builtins
+ 'SYSTEM', 'PROCESS', 'TSIZE', 'NEWPROCESS', 'TRANSFER',
+ )
+
+# I S O M o d u l a - 2 D a t a s e t s
+
+ # Lexemes to Mark as Error Tokens for ISO Modula-2
+ iso_lexemes_to_reject = (
+ '`', '$', '%', '?', '\\', '==', '++', '--', '::', '*.', '+>', '->',
+ '<<', '>>', '|#', '##',
+ )
+
+ # ISO Modula-2 Additional Reserved Words Dataset
+ iso_additional_reserved_words = (
+ # 9 additional reserved words (ISO 10514-1)
+ 'EXCEPT', 'EXPORT', 'FINALLY', 'FORWARD', 'PACKEDSET', 'QUALIFIED',
+ 'REM', 'RETRY', 'WITH',
+ # 10 additional reserved words (ISO 10514-2 & ISO 10514-3)
+ 'ABSTRACT', 'AS', 'CLASS', 'GUARD', 'INHERIT', 'OVERRIDE', 'READONLY',
+ 'REVEAL', 'TRACED', 'UNSAFEGUARDED',
+ )
+
+ # ISO Modula-2 Additional Builtins Dataset
+ iso_additional_builtins = (
+ # 26 additional builtins (ISO 10514-1)
+ 'BITSET', 'CAP', 'CMPLX', 'COMPLEX', 'DEC', 'DISPOSE', 'EXCL', 'FLOAT',
+ 'HALT', 'HIGH', 'IM', 'INC', 'INCL', 'INT', 'INTERRUPTIBLE', 'LENGTH',
+ 'LFLOAT', 'LONGCOMPLEX', 'NEW', 'PROC', 'PROTECTION', 'RE', 'SIZE',
+ 'TRUNC', 'UNINTERRUPTIBLE', 'VAL',
+ # 5 additional builtins (ISO 10514-2 & ISO 10514-3)
+ 'CREATE', 'DESTROY', 'EMPTY', 'ISMEMBER', 'SELF',
+ )
+
+ # ISO Modula-2 Additional Pseudo-Module Builtins Dataset
+ iso_additional_pseudo_builtins = (
+ # 14 additional builtins (SYSTEM)
+ 'SYSTEM', 'BITSPERLOC', 'LOCSPERBYTE', 'LOCSPERWORD', 'LOC',
+ 'ADDADR', 'SUBADR', 'DIFADR', 'MAKEADR', 'ADR',
+ 'ROTATE', 'SHIFT', 'CAST', 'TSIZE',
+ # 13 additional builtins (COROUTINES)
+ 'COROUTINES', 'ATTACH', 'COROUTINE', 'CURRENT', 'DETACH', 'HANDLER',
+ 'INTERRUPTSOURCE', 'IOTRANSFER', 'IsATTACHED', 'LISTEN',
+ 'NEWCOROUTINE', 'PROT', 'TRANSFER',
+ # 9 additional builtins (EXCEPTIONS)
+ 'EXCEPTIONS', 'AllocateSource', 'CurrentNumber', 'ExceptionNumber',
+ 'ExceptionSource', 'GetMessage', 'IsCurrentSource',
+ 'IsExceptionalExecution', 'RAISE',
+ # 3 additional builtins (TERMINATION)
+ 'TERMINATION', 'IsTerminating', 'HasHalted',
+ # 19 additional builtins (M2EXCEPTION)
+ 'M2EXCEPTION', 'M2Exceptions', 'M2Exception', 'IsM2Exception',
+ 'indexException', 'rangeException', 'caseSelectException',
+ 'invalidLocation', 'functionException', 'wholeValueException',
+ 'wholeDivException', 'realValueException', 'realDivException',
+ 'complexValueException', 'complexDivException', 'protException',
+ 'sysException', 'coException', 'exException',
+ )
+
+# M o d u l a - 2 R 1 0 D a t a s e t s
+
+ # Lexemes to Mark as Error Tokens for Modula-2 R10
+ m2r10_lexemes_to_reject = (
+ '!', '`', '@', '$', '%', '&', '<>',
+ )
+
+ # Modula-2 R10 reserved words in addition to the common set
+ m2r10_additional_reserved_words = (
+ # 12 additional reserved words
+ 'ALIAS', 'ARGLIST', 'BLUEPRINT', 'COPY', 'GENLIB', 'INDETERMINATE',
+ 'NEW', 'NONE', 'OPAQUE', 'REFERENTIAL', 'RELEASE', 'RETAIN',
+ # 2 additional reserved words with symbolic assembly option
+ 'ASM', 'REG',
+ )
+
+ # Modula-2 R10 builtins in addition to the common set
+ m2r10_additional_builtins = (
+ # 26 additional builtins
+ 'CARDINAL', 'COUNT', 'EMPTY', 'EXISTS', 'INSERT', 'LENGTH', 'LONGCARD',
+ 'OCTET', 'PTR', 'PRED', 'READ', 'READNEW', 'REMOVE', 'RETRIEVE', 'SORT',
+ 'STORE', 'SUBSET', 'SUCC', 'TLIMIT', 'TMAX', 'TMIN', 'TRUE', 'TSIZE',
+ 'UNICHAR', 'WRITE', 'WRITEF',
+ )
+
+ # Modula-2 R10 Additional Pseudo-Module Builtins Dataset
+ m2r10_additional_pseudo_builtins = (
+ # 13 additional builtins (TPROPERTIES)
+ 'TPROPERTIES', 'PROPERTY', 'LITERAL', 'TPROPERTY', 'TLITERAL',
+ 'TBUILTIN', 'TDYN', 'TREFC', 'TNIL', 'TBASE', 'TPRECISION',
+ 'TMAXEXP', 'TMINEXP',
+ # 4 additional builtins (CONVERSION)
+ 'CONVERSION', 'TSXFSIZE', 'SXF', 'VAL',
+ # 35 additional builtins (UNSAFE)
+ 'UNSAFE', 'CAST', 'INTRINSIC', 'AVAIL', 'ADD', 'SUB', 'ADDC', 'SUBC',
+ 'FETCHADD', 'FETCHSUB', 'SHL', 'SHR', 'ASHR', 'ROTL', 'ROTR', 'ROTLC',
+ 'ROTRC', 'BWNOT', 'BWAND', 'BWOR', 'BWXOR', 'BWNAND', 'BWNOR',
+ 'SETBIT', 'TESTBIT', 'LSBIT', 'MSBIT', 'CSBITS', 'BAIL', 'HALT',
+ 'TODO', 'FFI', 'ADDR', 'VARGLIST', 'VARGC',
+ # 11 additional builtins (ATOMIC)
+ 'ATOMIC', 'INTRINSIC', 'AVAIL', 'SWAP', 'CAS', 'INC', 'DEC', 'BWAND',
+ 'BWNAND', 'BWOR', 'BWXOR',
+ # 7 additional builtins (COMPILER)
+ 'COMPILER', 'DEBUG', 'MODNAME', 'PROCNAME', 'LINENUM', 'DEFAULT',
+ 'HASH',
+ # 5 additional builtins (ASSEMBLER)
+ 'ASSEMBLER', 'REGISTER', 'SETREG', 'GETREG', 'CODE',
+ )
+
+# O b j e c t i v e M o d u l a - 2 D a t a s e t s
+
+ # Lexemes to Mark as Error Tokens for Objective Modula-2
+ objm2_lexemes_to_reject = (
+ '!', '$', '%', '&', '<>',
+ )
+
+ # Objective Modula-2 Extensions
+ # reserved words in addition to Modula-2 R10
+ objm2_additional_reserved_words = (
+ # 16 additional reserved words
+ 'BYCOPY', 'BYREF', 'CLASS', 'CONTINUE', 'CRITICAL', 'INOUT', 'METHOD',
+ 'ON', 'OPTIONAL', 'OUT', 'PRIVATE', 'PROTECTED', 'PROTOCOL', 'PUBLIC',
+ 'SUPER', 'TRY',
+ )
+
+ # Objective Modula-2 Extensions
+ # builtins in addition to Modula-2 R10
+ objm2_additional_builtins = (
+ # 3 additional builtins
+ 'OBJECT', 'NO', 'YES',
+ )
+
+ # Objective Modula-2 Extensions
+ # pseudo-module builtins in addition to Modula-2 R10
+ objm2_additional_pseudo_builtins = (
+ # None
+ )
+
+# A g l e t M o d u l a - 2 D a t a s e t s
+
+ # Aglet Extensions
+ # reserved words in addition to ISO Modula-2
+ aglet_additional_reserved_words = (
+ # None
+ )
+
+ # Aglet Extensions
+ # builtins in addition to ISO Modula-2
+ aglet_additional_builtins = (
+ # 9 additional builtins
+ 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
+ 'CARDINAL32', 'INTEGER8', 'INTEGER16', 'INTEGER32',
+ )
+
+ # Aglet Modula-2 Extensions
+ # pseudo-module builtins in addition to ISO Modula-2
+ aglet_additional_pseudo_builtins = (
+ # None
+ )
+
+# G N U M o d u l a - 2 D a t a s e t s
+
+ # GNU Extensions
+ # reserved words in addition to PIM Modula-2
+ gm2_additional_reserved_words = (
+ # 10 additional reserved words
+ 'ASM', '__ATTRIBUTE__', '__BUILTIN__', '__COLUMN__', '__DATE__',
+ '__FILE__', '__FUNCTION__', '__LINE__', '__MODULE__', 'VOLATILE',
+ )
+
+ # GNU Extensions
+ # builtins in addition to PIM Modula-2
+ gm2_additional_builtins = (
+ # 21 additional builtins
+ 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
+ 'CARDINAL32', 'CARDINAL64', 'COMPLEX32', 'COMPLEX64', 'COMPLEX96',
+ 'COMPLEX128', 'INTEGER8', 'INTEGER16', 'INTEGER32', 'INTEGER64',
+ 'REAL8', 'REAL16', 'REAL32', 'REAL96', 'REAL128', 'THROW',
+ )
+
+ # GNU Extensions
+ # pseudo-module builtins in addition to PIM Modula-2
+ gm2_additional_pseudo_builtins = (
+ # None
+ )
+
+# p 1 M o d u l a - 2 D a t a s e t s
+
+ # p1 Extensions
+ # reserved words in addition to ISO Modula-2
+ p1_additional_reserved_words = (
+ # None
+ )
+
+ # p1 Extensions
+ # builtins in addition to ISO Modula-2
+ p1_additional_builtins = (
+ # None
+ )
+
+ # p1 Modula-2 Extensions
+ # pseudo-module builtins in addition to ISO Modula-2
+ p1_additional_pseudo_builtins = (
+ # 1 additional builtin
+ 'BCD',
+ )
+
+# X D S M o d u l a - 2 D a t a s e t s
+
+ # XDS Extensions
+ # reserved words in addition to ISO Modula-2
+ xds_additional_reserved_words = (
+ # 1 additional reserved word
+ 'SEQ',
+ )
+
+ # XDS Extensions
+ # builtins in addition to ISO Modula-2
+ xds_additional_builtins = (
+ # 9 additional builtins
+ 'ASH', 'ASSERT', 'DIFFADR_TYPE', 'ENTIER', 'INDEX', 'LEN',
+ 'LONGCARD', 'SHORTCARD', 'SHORTINT',
+ )
+
+ # XDS Modula-2 Extensions
+ # pseudo-module builtins in addition to ISO Modula-2
+ xds_additional_pseudo_builtins = (
+ # 21 additional builtins (SYSTEM)
+ 'PROCESS', 'NEWPROCESS', 'BOOL8', 'BOOL16', 'BOOL32', 'CARD8',
+ 'CARD16', 'CARD32', 'INT8', 'INT16', 'INT32', 'REF', 'MOVE',
+ 'FILL', 'GET', 'PUT', 'CC', 'int', 'unsigned', 'size_t', 'void',
+ # 3 additional builtins (COMPILER)
+ 'COMPILER', 'OPTION', 'EQUATION',
+ )
+
+# P I M S t a n d a r d L i b r a r y D a t a s e t s
+
+ # PIM Modula-2 Standard Library Modules Dataset
+ pim_stdlib_module_identifiers = (
+ 'Terminal', 'FileSystem', 'InOut', 'RealInOut', 'MathLib0', 'Storage',
+ )
+
+ # PIM Modula-2 Standard Library Types Dataset
+ pim_stdlib_type_identifiers = (
+ 'Flag', 'FlagSet', 'Response', 'Command', 'Lock', 'Permission',
+ 'MediumType', 'File', 'FileProc', 'DirectoryProc', 'FileCommand',
+ 'DirectoryCommand',
+ )
+
+ # PIM Modula-2 Standard Library Procedures Dataset
+ pim_stdlib_proc_identifiers = (
+ 'Read', 'BusyRead', 'ReadAgain', 'Write', 'WriteString', 'WriteLn',
+ 'Create', 'Lookup', 'Close', 'Delete', 'Rename', 'SetRead', 'SetWrite',
+ 'SetModify', 'SetOpen', 'Doio', 'SetPos', 'GetPos', 'Length', 'Reset',
+ 'Again', 'ReadWord', 'WriteWord', 'ReadChar', 'WriteChar',
+ 'CreateMedium', 'DeleteMedium', 'AssignName', 'DeassignName',
+ 'ReadMedium', 'LookupMedium', 'OpenInput', 'OpenOutput', 'CloseInput',
+ 'CloseOutput', 'ReadString', 'ReadInt', 'ReadCard', 'ReadWrd',
+ 'WriteInt', 'WriteCard', 'WriteOct', 'WriteHex', 'WriteWrd',
+ 'ReadReal', 'WriteReal', 'WriteFixPt', 'WriteRealOct', 'sqrt', 'exp',
+ 'ln', 'sin', 'cos', 'arctan', 'entier','ALLOCATE', 'DEALLOCATE',
+ )
+
+ # PIM Modula-2 Standard Library Variables Dataset
+ pim_stdlib_var_identifiers = (
+ 'Done', 'termCH', 'in', 'out'
+ )
+
+ # PIM Modula-2 Standard Library Constants Dataset
+ pim_stdlib_const_identifiers = (
+ 'EOL',
+ )
+
+# I S O S t a n d a r d L i b r a r y D a t a s e t s
+
+ # ISO Modula-2 Standard Library Modules Dataset
+ iso_stdlib_module_identifiers = (
+ # TO DO
+ )
+
+ # ISO Modula-2 Standard Library Types Dataset
+ iso_stdlib_type_identifiers = (
+ # TO DO
+ )
+
+ # ISO Modula-2 Standard Library Procedures Dataset
+ iso_stdlib_proc_identifiers = (
+ # TO DO
+ )
+
+ # ISO Modula-2 Standard Library Variables Dataset
+ iso_stdlib_var_identifiers = (
+ # TO DO
+ )
+
+ # ISO Modula-2 Standard Library Constants Dataset
+ iso_stdlib_const_identifiers = (
+ # TO DO
+ )
+
+# M 2 R 1 0 S t a n d a r d L i b r a r y D a t a s e t s
+
+ # Modula-2 R10 Standard Library ADTs Dataset
+ m2r10_stdlib_adt_identifiers = (
+ 'BCD', 'LONGBCD', 'BITSET', 'SHORTBITSET', 'LONGBITSET',
+ 'LONGLONGBITSET', 'COMPLEX', 'LONGCOMPLEX', 'SHORTCARD', 'LONGLONGCARD',
+ 'SHORTINT', 'LONGLONGINT', 'POSINT', 'SHORTPOSINT', 'LONGPOSINT',
+ 'LONGLONGPOSINT', 'BITSET8', 'BITSET16', 'BITSET32', 'BITSET64',
+ 'BITSET128', 'BS8', 'BS16', 'BS32', 'BS64', 'BS128', 'CARDINAL8',
+ 'CARDINAL16', 'CARDINAL32', 'CARDINAL64', 'CARDINAL128', 'CARD8',
+ 'CARD16', 'CARD32', 'CARD64', 'CARD128', 'INTEGER8', 'INTEGER16',
+ 'INTEGER32', 'INTEGER64', 'INTEGER128', 'INT8', 'INT16', 'INT32',
+ 'INT64', 'INT128', 'STRING', 'UNISTRING',
+ )
+
+ # Modula-2 R10 Standard Library Blueprints Dataset
+ m2r10_stdlib_blueprint_identifiers = (
+ 'ProtoRoot', 'ProtoComputational', 'ProtoNumeric', 'ProtoScalar',
+ 'ProtoNonScalar', 'ProtoCardinal', 'ProtoInteger', 'ProtoReal',
+ 'ProtoComplex', 'ProtoVector', 'ProtoTuple', 'ProtoCompArray',
+ 'ProtoCollection', 'ProtoStaticArray', 'ProtoStaticSet',
+ 'ProtoStaticString', 'ProtoArray', 'ProtoString', 'ProtoSet',
+ 'ProtoMultiSet', 'ProtoDictionary', 'ProtoMultiDict', 'ProtoExtension',
+ 'ProtoIO', 'ProtoCardMath', 'ProtoIntMath', 'ProtoRealMath',
+ )
+
+ # Modula-2 R10 Standard Library Modules Dataset
+ m2r10_stdlib_module_identifiers = (
+ 'ASCII', 'BooleanIO', 'CharIO', 'UnicharIO', 'OctetIO',
+ 'CardinalIO', 'LongCardIO', 'IntegerIO', 'LongIntIO', 'RealIO',
+ 'LongRealIO', 'BCDIO', 'LongBCDIO', 'CardMath', 'LongCardMath',
+ 'IntMath', 'LongIntMath', 'RealMath', 'LongRealMath', 'BCDMath',
+ 'LongBCDMath', 'FileIO', 'FileSystem', 'Storage', 'IOSupport',
+ )
+
+ # Modula-2 R10 Standard Library Types Dataset
+ m2r10_stdlib_type_identifiers = (
+ 'File', 'Status',
+ # TO BE COMPLETED
+ )
+
+ # Modula-2 R10 Standard Library Procedures Dataset
+ m2r10_stdlib_proc_identifiers = (
+ 'ALLOCATE', 'DEALLOCATE', 'SIZE',
+ # TO BE COMPLETED
+ )
+
+ # Modula-2 R10 Standard Library Variables Dataset
+ m2r10_stdlib_var_identifiers = (
+ 'stdIn', 'stdOut', 'stdErr',
+ )
+
+ # Modula-2 R10 Standard Library Constants Dataset
+ m2r10_stdlib_const_identifiers = (
+ 'pi', 'tau',
+ )
+
+# D i a l e c t s
+
+
+ # Dialect modes
+ dialects = (
+ 'unknown',
+ 'm2pim', 'm2iso', 'm2r10', 'objm2',
+ 'm2iso+aglet', 'm2pim+gm2', 'm2iso+p1', 'm2iso+xds',
+ )
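+ # Note: composite ids pair a base standard with a vendor extension,
+ # e.g. 'm2iso+p1' combines the ISO datasets with the p1 additions
+ # in the databases below.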
+
+# D a t a b a s e s
+
+ # Lexemes to Mark as Errors Database
+ lexemes_to_reject_db = {
+ # Lexemes to reject for unknown dialect
+ 'unknown' : (
+ # LEAVE THIS EMPTY
+ ),
+ # Lexemes to reject for PIM Modula-2
+ 'm2pim' : (
+ pim_lexemes_to_reject,
+ ),
+ # Lexemes to reject for ISO Modula-2
+ 'm2iso' : (
+ iso_lexemes_to_reject,
+ ),
+ # Lexemes to reject for Modula-2 R10
+ 'm2r10' : (
+ m2r10_lexemes_to_reject,
+ ),
+ # Lexemes to reject for Objective Modula-2
+ 'objm2' : (
+ objm2_lexemes_to_reject,
+ ),
+ # Lexemes to reject for Aglet Modula-2
+ 'm2iso+aglet' : (
+ iso_lexemes_to_reject,
+ ),
+ # Lexemes to reject for GNU Modula-2
+ 'm2pim+gm2' : (
+ pim_lexemes_to_reject,
+ ),
+ # Lexemes to reject for p1 Modula-2
+ 'm2iso+p1' : (
+ iso_lexemes_to_reject,
+ ),
+ # Lexemes to reject for XDS Modula-2
+ 'm2iso+xds' : (
+ iso_lexemes_to_reject,
+ ),
+ }
+
+ # Reserved Words Database
+ reserved_words_db = {
+ # Reserved words for unknown dialect
+ 'unknown' : (
+ common_reserved_words,
+ pim_additional_reserved_words,
+ iso_additional_reserved_words,
+ m2r10_additional_reserved_words,
+ ),
+
+ # Reserved words for PIM Modula-2
+ 'm2pim' : (
+ common_reserved_words,
+ pim_additional_reserved_words,
+ ),
+
+ # Reserved words for ISO Modula-2
+ 'm2iso' : (
+ common_reserved_words,
+ iso_additional_reserved_words,
+ ),
+
+ # Reserved words for Modula-2 R10
+ 'm2r10' : (
+ common_reserved_words,
+ m2r10_additional_reserved_words,
+ ),
+
+ # Reserved words for Objective Modula-2
+ 'objm2' : (
+ common_reserved_words,
+ m2r10_additional_reserved_words,
+ objm2_additional_reserved_words,
+ ),
+
+ # Reserved words for Aglet Modula-2 Extensions
+ 'm2iso+aglet' : (
+ common_reserved_words,
+ iso_additional_reserved_words,
+ aglet_additional_reserved_words,
+ ),
+
+ # Reserved words for GNU Modula-2 Extensions
+ 'm2pim+gm2' : (
+ common_reserved_words,
+ pim_additional_reserved_words,
+ gm2_additional_reserved_words,
+ ),
+
+ # Reserved words for p1 Modula-2 Extensions
+ 'm2iso+p1' : (
+ common_reserved_words,
+ iso_additional_reserved_words,
+ p1_additional_reserved_words,
+ ),
+
+ # Reserved words for XDS Modula-2 Extensions
+ 'm2iso+xds' : (
+ common_reserved_words,
+ iso_additional_reserved_words,
+ xds_additional_reserved_words,
+ ),
+ }
+
+ # Builtins Database
+ builtins_db = {
+ # Builtins for unknown dialect
+ 'unknown' : (
+ common_builtins,
+ pim_additional_builtins,
+ iso_additional_builtins,
+ m2r10_additional_builtins,
+ ),
+
+ # Builtins for PIM Modula-2
+ 'm2pim' : (
+ common_builtins,
+ pim_additional_builtins,
+ ),
+
+ # Builtins for ISO Modula-2
+ 'm2iso' : (
+ common_builtins,
+ iso_additional_builtins,
+ ),
+
+ # Builtins for Modula-2 R10
+ 'm2r10' : (
+ common_builtins,
+ m2r10_additional_builtins,
+ ),
+
+ # Builtins for Objective Modula-2
+ 'objm2' : (
+ common_builtins,
+ m2r10_additional_builtins,
+ objm2_additional_builtins,
+ ),
+
+ # Builtins for Aglet Modula-2 Extensions
+ 'm2iso+aglet' : (
+ common_builtins,
+ iso_additional_builtins,
+ aglet_additional_builtins,
+ ),
+
+ # Builtins for GNU Modula-2 Extensions
+ 'm2pim+gm2' : (
+ common_builtins,
+ pim_additional_builtins,
+ gm2_additional_builtins,
+ ),
+
+ # Builtins for p1 Modula-2 Extensions
+ 'm2iso+p1' : (
+ common_builtins,
+ iso_additional_builtins,
+ p1_additional_builtins,
+ ),
+
+ # Builtins for XDS Modula-2 Extensions
+ 'm2iso+xds' : (
+ common_builtins,
+ iso_additional_builtins,
+ xds_additional_builtins,
+ ),
+ }
+
+ # Pseudo-Module Builtins Database
+ pseudo_builtins_db = {
+ # Builtins for unknown dialect
+ 'unknown' : (
+ common_pseudo_builtins,
+ pim_additional_pseudo_builtins,
+ iso_additional_pseudo_builtins,
+ m2r10_additional_pseudo_builtins,
+ ),
+
+ # Builtins for PIM Modula-2
+ 'm2pim' : (
+ common_pseudo_builtins,
+ pim_additional_pseudo_builtins,
+ ),
+
+ # Builtins for ISO Modula-2
+ 'm2iso' : (
+ common_pseudo_builtins,
+ iso_additional_pseudo_builtins,
+ ),
+
+ # Builtins for Modula-2 R10
+ 'm2r10' : (
+ common_pseudo_builtins,
+ m2r10_additional_pseudo_builtins,
+ ),
+
+ # Builtins for Objective Modula-2
+ 'objm2' : (
+ common_pseudo_builtins,
+ m2r10_additional_pseudo_builtins,
+ objm2_additional_pseudo_builtins,
+ ),
+
+ # Builtins for Aglet Modula-2 Extensions
+ 'm2iso+aglet' : (
+ common_pseudo_builtins,
+ iso_additional_pseudo_builtins,
+ aglet_additional_pseudo_builtins,
+ ),
+
+ # Builtins for GNU Modula-2 Extensions
+ 'm2pim+gm2' : (
+ common_pseudo_builtins,
+ pim_additional_pseudo_builtins,
+ gm2_additional_pseudo_builtins,
+ ),
+
+ # Builtins for p1 Modula-2 Extensions
+ 'm2iso+p1' : (
+ common_pseudo_builtins,
+ iso_additional_pseudo_builtins,
+ p1_additional_pseudo_builtins,
+ ),
+
+ # Builtins for XDS Modula-2 Extensions
+ 'm2iso+xds' : (
+ common_pseudo_builtins,
+ iso_additional_pseudo_builtins,
+ xds_additional_pseudo_builtins,
+ ),
+ }
+
+ # Standard Library ADTs Database
+ stdlib_adts_db = {
+ # Empty entry for unknown dialect
+ 'unknown' : (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library ADTs for PIM Modula-2
+ 'm2pim' : (
+ # No first class library types
+ ),
+
+ # Standard Library ADTs for ISO Modula-2
+ 'm2iso' : (
+ # No first class library types
+ ),
+
+ # Standard Library ADTs for Modula-2 R10
+ 'm2r10' : (
+ m2r10_stdlib_adt_identifiers,
+ ),
+
+ # Standard Library ADTs for Objective Modula-2
+ 'objm2' : (
+ m2r10_stdlib_adt_identifiers,
+ ),
+
+ # Standard Library ADTs for Aglet Modula-2
+ 'm2iso+aglet' : (
+ # No first class library types
+ ),
+
+ # Standard Library ADTs for GNU Modula-2
+ 'm2pim+gm2' : (
+ # No first class library types
+ ),
+
+ # Standard Library ADTs for p1 Modula-2
+ 'm2iso+p1' : (
+ # No first class library types
+ ),
+
+ # Standard Library ADTs for XDS Modula-2
+ 'm2iso+xds' : (
+ # No first class library types
+ ),
+ }
+
+ # Standard Library Modules Database
+ stdlib_modules_db = {
+ # Empty entry for unknown dialect
+ 'unknown' : (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library Modules for PIM Modula-2
+ 'm2pim' : (
+ pim_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for ISO Modula-2
+ 'm2iso' : (
+ iso_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for Modula-2 R10
+ 'm2r10' : (
+ m2r10_stdlib_blueprint_identifiers,
+ m2r10_stdlib_module_identifiers,
+ m2r10_stdlib_adt_identifiers,
+ ),
+
+ # Standard Library Modules for Objective Modula-2
+ 'objm2' : (
+ m2r10_stdlib_blueprint_identifiers,
+ m2r10_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for Aglet Modula-2
+ 'm2iso+aglet' : (
+ iso_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for GNU Modula-2
+ 'm2pim+gm2' : (
+ pim_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for p1 Modula-2
+ 'm2iso+p1' : (
+ iso_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for XDS Modula-2
+ 'm2iso+xds' : (
+ iso_stdlib_module_identifiers,
+ ),
+ }
+
+ # Standard Library Types Database
+ stdlib_types_db = {
+ # Empty entry for unknown dialect
+ 'unknown' : (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library Types for PIM Modula-2
+ 'm2pim' : (
+ pim_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for ISO Modula-2
+ 'm2iso' : (
+ iso_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for Modula-2 R10
+ 'm2r10' : (
+ m2r10_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for Objective Modula-2
+ 'objm2' : (
+ m2r10_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for Aglet Modula-2
+ 'm2iso+aglet' : (
+ iso_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for GNU Modula-2
+ 'm2pim+gm2' : (
+ pim_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for p1 Modula-2
+ 'm2iso+p1' : (
+ iso_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for XDS Modula-2
+ 'm2iso+xds' : (
+ iso_stdlib_type_identifiers,
+ ),
+ }
+
+ # Standard Library Procedures Database
+ stdlib_procedures_db = {
+ # Empty entry for unknown dialect
+ 'unknown' : (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library Procedures for PIM Modula-2
+ 'm2pim' : (
+ pim_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for ISO Modula-2
+ 'm2iso' : (
+ iso_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for Modula-2 R10
+ 'm2r10' : (
+ m2r10_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for Objective Modula-2
+ 'objm2' : (
+ m2r10_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for Aglet Modula-2
+ 'm2iso+aglet' : (
+ iso_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for GNU Modula-2
+ 'm2pim+gm2' : (
+ pim_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for p1 Modula-2
+ 'm2iso+p1' : (
+ iso_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for XDS Modula-2
+ 'm2iso+xds' : (
+ iso_stdlib_proc_identifiers,
+ ),
+ }
+
+ # Standard Library Variables Database
+ stdlib_variables_db = {
+ # Empty entry for unknown dialect
+ 'unknown' : (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library Variables for PIM Modula-2
+ 'm2pim' : (
+ pim_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for ISO Modula-2
+ 'm2iso' : (
+ iso_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for Modula-2 R10
+ 'm2r10' : (
+ m2r10_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for Objective Modula-2
+ 'objm2' : (
+ m2r10_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for Aglet Modula-2
+ 'm2iso+aglet' : (
+ iso_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for GNU Modula-2
+ 'm2pim+gm2' : (
+ pim_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for p1 Modula-2
+ 'm2iso+p1' : (
+ iso_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for XDS Modula-2
+ 'm2iso+xds' : (
+ iso_stdlib_var_identifiers,
+ ),
+ }
+
+ # Standard Library Constants Database
+ stdlib_constants_db = {
+ # Empty entry for unknown dialect
+ 'unknown' : (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library Constants for PIM Modula-2
+ 'm2pim' : (
+ pim_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for ISO Modula-2
+ 'm2iso' : (
+ iso_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for Modula-2 R10
+ 'm2r10' : (
+ m2r10_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for Objective Modula-2
+ 'objm2' : (
+ m2r10_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for Aglet Modula-2
+ 'm2iso+aglet' : (
+ iso_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for GNU Modula-2
+ 'm2pim+gm2' : (
+ pim_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for p1 Modula-2
+ 'm2iso+p1' : (
+ iso_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for XDS Modula-2
+ 'm2iso+xds' : (
+ iso_stdlib_const_identifiers,
+ ),
+ }
+
+# M e t h o d s
+
+ # initialise a lexer instance
+ def __init__(self, **options):
+ #
+ # Alias for unknown dialect
+ global UNKNOWN
+ UNKNOWN = self.dialects[0]
+ #
+ # check dialect options
+ #
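+ # (illustrative) passing dialect='m2r10' as a lexer option selects
+ # the Modula-2 R10 datasets; unrecognised values fall back to 'unknown'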
+ dialects = get_list_opt(options, 'dialect', [])
+ #
+ for dialect_option in dialects:
+ if dialect_option in self.dialects[1:]: # any dialect except 'unknown'
+ # valid dialect option found
+ self.set_dialect(dialect_option)
+ break
+ #
+ # Fallback Mode (DEFAULT)
+ else:
+ # no valid dialect option
+ self.set_dialect(UNKNOWN)
+ #
+ self.dialect_set_by_tag = False
+ #
+ # check style options
+ #
+ styles = get_list_opt(options, 'style', [])
+ #
+ # use lowercase mode for Algol style
+ if 'algol' in styles or 'algol_nu' in styles:
+ self.algol_publication_mode = True
+ else:
+ self.algol_publication_mode = False
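+ # (in Algol publication mode, reserved words are lowercased and some
+ # operators are replaced by Unicode glyphs, e.g. '#' becomes '≠';
+ # see get_tokens_unprocessed below)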
+ #
+ # Check option flags
+ #
+ self.treat_stdlib_adts_as_builtins = \
+ get_bool_opt(options, 'treat_stdlib_adts_as_builtins', True)
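+ # (illustrative) with treat_stdlib_adts_as_builtins=False, library ADTs
+ # such as BITSET16 are emitted as Name.Namespace rather than
+ # Name.Builtin.Pseudo; see get_tokens_unprocessed below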
+ #
+ # call superclass initialiser
+ RegexLexer.__init__(self, **options)
+
+ # Set lexer to a specified dialect
+ def set_dialect(self, dialect_id):
+ #
+ #if __debug__:
+ # print 'entered set_dialect with arg: ', dialect_id
+ #
+ # check dialect name against known dialects
+ if dialect_id not in self.dialects:
+ dialect = UNKNOWN # default
+ else:
+ dialect = dialect_id
+ #
+ # compose lexemes to reject set
+ lexemes_to_reject_set = set()
+ # add each list of reject lexemes for this dialect
+ for list in self.lexemes_to_reject_db[dialect]:
+ lexemes_to_reject_set.update(set(list))
+ #
+ # compose reserved words set
+ reswords_set = set()
+ # add each list of reserved words for this dialect
+ for list in self.reserved_words_db[dialect]:
+ reswords_set.update(set(list))
+ #
+ # compose builtins set
+ builtins_set = set()
+ # add each list of builtins for this dialect excluding reserved words
+ for list in self.builtins_db[dialect]:
+ builtins_set.update(set(list).difference(reswords_set))
+ #
+ # compose pseudo-builtins set
+ pseudo_builtins_set = set()
+ # add each list of pseudo-builtins for this dialect excluding reserved words
+ for list in self.pseudo_builtins_db[dialect]:
+ pseudo_builtins_set.update(set(list).difference(reswords_set))
+ #
+ # compose ADTs set
+ adts_set = set()
+ # add each list of ADTs for this dialect excluding reserved words
+ for list in self.stdlib_adts_db[dialect]:
+ adts_set.update(set(list).difference(reswords_set))
+ #
+ # compose modules set
+ modules_set = set()
+ # add each list of modules for this dialect excluding builtins
+ for list in self.stdlib_modules_db[dialect]:
+ modules_set.update(set(list).difference(builtins_set))
+ #
+ # compose types set
+ types_set = set()
+ # add each list of types for this dialect excluding builtins
+ for list in self.stdlib_types_db[dialect]:
+ types_set.update(set(list).difference(builtins_set))
+ #
+ # compose procedures set
+ procedures_set = set()
+ # add each list of procedures for this dialect excluding builtins
+ for list in self.stdlib_procedures_db[dialect]:
+ procedures_set.update(set(list).difference(builtins_set))
+ #
+ # compose variables set
+ variables_set = set()
+ # add each list of variables for this dialect excluding builtins
+ for list in self.stdlib_variables_db[dialect]:
+ variables_set.update(set(list).difference(builtins_set))
+ #
+ # compose constants set
+ constants_set = set()
+ # add each list of constants for this dialect excluding builtins
+ for list in self.stdlib_constants_db[dialect]:
+ constants_set.update(set(list).difference(builtins_set))
+ #
+ # update lexer state
+ self.dialect = dialect
+ self.lexemes_to_reject = lexemes_to_reject_set
+ self.reserved_words = reswords_set
+ self.builtins = builtins_set
+ self.pseudo_builtins = pseudo_builtins_set
+ self.adts = adts_set
+ self.modules = modules_set
+ self.types = types_set
+ self.procedures = procedures_set
+ self.variables = variables_set
+ self.constants = constants_set
+ #
+ #if __debug__:
+ # print 'exiting set_dialect'
+ # print ' self.dialect: ', self.dialect
+ # print ' self.lexemes_to_reject: ', self.lexemes_to_reject
+ # print ' self.reserved_words: ', self.reserved_words
+ # print ' self.builtins: ', self.builtins
+ # print ' self.pseudo_builtins: ', self.pseudo_builtins
+ # print ' self.adts: ', self.adts
+ # print ' self.modules: ', self.modules
+ # print ' self.types: ', self.types
+ # print ' self.procedures: ', self.procedures
+ # print ' self.variables: ', self.variables
+ # print ' self.constants: ', self.constants
+
+ # Extracts a dialect name from a dialect tag comment string and checks
+ # the extracted name against known dialects. If a match is found, the
+ # matching name is returned, otherwise dialect id 'unknown' is returned
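+ # (e.g. a leading comment of the form (*!m2r10*) selects Modula-2 R10)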
+ def get_dialect_from_dialect_tag(self, dialect_tag):
+ #
+ #if __debug__:
+ # print 'entered get_dialect_from_dialect_tag with arg: ', dialect_tag
+ #
+ # constants
+ left_tag_delim = '(*!'
+ right_tag_delim = '*)'
+ left_tag_delim_len = len(left_tag_delim)
+ right_tag_delim_len = len(right_tag_delim)
+ indicator_start = left_tag_delim_len
+ indicator_end = -(right_tag_delim_len)
+ #
+ # check comment string for dialect indicator
+ if len(dialect_tag) > (left_tag_delim_len + right_tag_delim_len) \
+ and dialect_tag.startswith(left_tag_delim) \
+ and dialect_tag.endswith(right_tag_delim):
+ #
+ #if __debug__:
+ # print 'dialect tag found'
+ #
+ # extract dialect indicator
+ indicator = dialect_tag[indicator_start:indicator_end]
+ #
+ #if __debug__:
+ # print 'extracted: ', indicator
+ #
+ # check against known dialects
+ for index in range(1, len(self.dialects)):
+ #
+ #if __debug__:
+ # print 'dialects[', index, ']: ', self.dialects[index]
+ #
+ if indicator == self.dialects[index]:
+ #
+ #if __debug__:
+ # print 'matching dialect found'
+ #
+ # indicator matches known dialect
+ return indicator
+ else:
+ # indicator does not match any dialect
+ return UNKNOWN # default
+ else:
+ # invalid indicator string
+ return UNKNOWN # default
+
+ # intercept the token stream, modify token attributes and return them
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
+ #
+ # check for dialect tag if dialect has not been set by tag
+ if not self.dialect_set_by_tag and token == Comment.Special:
+ indicated_dialect = self.get_dialect_from_dialect_tag(value)
+ if indicated_dialect != UNKNOWN:
+ # token is a dialect indicator
+ # reset reserved words and builtins
+ self.set_dialect(indicated_dialect)
+ self.dialect_set_by_tag = True
+ #
+ # check for reserved words, predefined and stdlib identifiers
+ if token is Name:
+ if value in self.reserved_words:
+ token = Keyword.Reserved
+ if self.algol_publication_mode:
+ value = value.lower()
+ #
+ elif value in self.builtins:
+ token = Name.Builtin
+ if self.algol_publication_mode:
+ value = value.lower()
+ #
+ elif value in self.pseudo_builtins:
+ token = Name.Builtin.Pseudo
+ if self.algol_publication_mode:
+ value = value.lower()
+ #
+ elif value in self.adts:
+ if not self.treat_stdlib_adts_as_builtins:
+ token = Name.Namespace
+ else:
+ token = Name.Builtin.Pseudo
+ if self.algol_publication_mode:
+ value = value.lower()
+ #
+ elif value in self.modules:
+ token = Name.Namespace
+ #
+ elif value in self.types:
+ token = Name.Class
+ #
+ elif value in self.procedures:
+ token = Name.Function
+ #
+ elif value in self.variables:
+ token = Name.Variable
+ #
+ elif value in self.constants:
+ token = Name.Constant
+ #
+ elif token in Number:
+ #
+ # mark prefix number literals as error for PIM and ISO dialects
+ if self.dialect not in (UNKNOWN, 'm2r10', 'objm2'):
+ if "'" in value or value[0:2] in ('0b', '0x', '0u'):
+ token = Error
+ #
+ elif self.dialect in ('m2r10', 'objm2'):
+ # mark base-8 number literals as errors for M2 R10 and ObjM2
+ if token is Number.Oct:
+ token = Error
+ # mark suffix base-16 literals as errors for M2 R10 and ObjM2
+ elif token is Number.Hex and 'H' in value:
+ token = Error
+ # mark real numbers with E as errors for M2 R10 and ObjM2
+ elif token is Number.Float and 'E' in value:
+ token = Error
+ #
+ elif token in Comment:
+ #
+ # mark single line comment as error for PIM and ISO dialects
+ if token is Comment.Single:
+ if self.dialect not in (UNKNOWN, 'm2r10', 'objm2'):
+ token = Error
+ #
+ if token is Comment.Preproc:
+ # mark ISO pragma as error for PIM dialects
+ if value.startswith('<*') and \
+ self.dialect.startswith('m2pim'):
+ token = Error
+ # mark PIM pragma as comment for other dialects
+ elif value.startswith('(*$') and \
+ self.dialect != UNKNOWN and \
+ not self.dialect.startswith('m2pim'):
+ token = Comment.Multiline
+ #
+ else: # token is neither Name nor Comment
+ #
+ # mark lexemes matching the dialect's error token set as errors
+ if value in self.lexemes_to_reject:
+ token = Error
+ #
+ # substitute lexemes when in Algol mode
+ if self.algol_publication_mode:
+ if value == '#':
+ value = u'≠'
+ elif value == '<=':
+ value = u'≤'
+ elif value == '>=':
+ value = u'≥'
+ elif value == '==':
+ value = u'≡'
+ elif value == '*.':
+ value = u'•'
+
+ # return result
+ yield index, token, value
diff --git a/pygments/lexers/nimrod.py b/pygments/lexers/nimrod.py
new file mode 100644
index 00000000..00b849a6
--- /dev/null
+++ b/pygments/lexers/nimrod.py
@@ -0,0 +1,159 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.nimrod
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Nimrod language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['NimrodLexer']
+
+
+class NimrodLexer(RegexLexer):
+ """
+ For `Nimrod <http://nimrod-code.org/>`_ source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Nimrod'
+ aliases = ['nimrod', 'nim']
+ filenames = ['*.nim', '*.nimrod']
+ mimetypes = ['text/x-nimrod']
+
+ flags = re.MULTILINE | re.IGNORECASE | re.UNICODE
+
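+ # Builds a regex alternation that tolerates optional underscores inside
+ # each word, e.g. underscorize(['proc']) yields 'p_?r_?o_?c_?'
+ # (Nimrod treats underscores in identifiers as insignificant).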
+ def underscorize(words):
+ newWords = []
+ new = ""
+ for word in words:
+ for ch in word:
+ new += (ch + "_?")
+ newWords.append(new)
+ new = ""
+ return "|".join(newWords)
+
+ keywords = [
+ 'addr', 'and', 'as', 'asm', 'atomic', 'bind', 'block', 'break',
+ 'case', 'cast', 'const', 'continue', 'converter', 'discard',
+ 'distinct', 'div', 'elif', 'else', 'end', 'enum', 'except', 'finally',
+ 'for', 'generic', 'if', 'implies', 'in', 'yield',
+ 'is', 'isnot', 'iterator', 'lambda', 'let', 'macro', 'method',
+ 'mod', 'not', 'notin', 'object', 'of', 'or', 'out', 'proc',
+ 'ptr', 'raise', 'ref', 'return', 'shl', 'shr', 'template', 'try',
+ 'tuple', 'type', 'when', 'while', 'with', 'without', 'xor'
+ ]
+
+ keywordsPseudo = [
+ 'nil', 'true', 'false'
+ ]
+
+ opWords = [
+ 'and', 'or', 'not', 'xor', 'shl', 'shr', 'div', 'mod', 'in',
+ 'notin', 'is', 'isnot'
+ ]
+
+ types = [
+ 'int', 'int8', 'int16', 'int32', 'int64', 'float', 'float32', 'float64',
+ 'bool', 'char', 'range', 'array', 'seq', 'set', 'string'
+ ]
+
+ tokens = {
+ 'root': [
+ (r'##.*$', String.Doc),
+ (r'#.*$', Comment),
+ (r'[*=><+\-/@$~&%!?|\\\[\]]', Operator),
+ (r'\.\.|\.|,|\[\.|\.\]|\{\.|\.\}|\(\.|\.\)|\{|\}|\(|\)|:|\^|`|;',
+ Punctuation),
+
+ # Strings
+ (r'(?:[\w]+)"', String, 'rdqs'),
+ (r'"""', String, 'tdqs'),
+ ('"', String, 'dqs'),
+
+ # Char
+ ("'", String.Char, 'chars'),
+
+ # Keywords
+ (r'(%s)\b' % underscorize(opWords), Operator.Word),
+ (r'(p_?r_?o_?c_?\s)(?![(\[\]])', Keyword, 'funcname'),
+ (r'(%s)\b' % underscorize(keywords), Keyword),
+ (r'(%s)\b' % underscorize(['from', 'import', 'include']),
+ Keyword.Namespace),
+ (r'(v_?a_?r)\b', Keyword.Declaration),
+ (r'(%s)\b' % underscorize(types), Keyword.Type),
+ (r'(%s)\b' % underscorize(keywordsPseudo), Keyword.Pseudo),
+ # Identifiers
+ (r'\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*', Name),
+ # Numbers
+ (r'[0-9][0-9_]*(?=([e.]|\'f(32|64)))',
+ Number.Float, ('float-suffix', 'float-number')),
+ (r'0x[a-f0-9][a-f0-9_]*', Number.Hex, 'int-suffix'),
+ (r'0b[01][01_]*', Number.Bin, 'int-suffix'),
+ (r'0o[0-7][0-7_]*', Number.Oct, 'int-suffix'),
+ (r'[0-9][0-9_]*', Number.Integer, 'int-suffix'),
+ # Whitespace
+ (r'\s+', Text),
+ (r'.+$', Error),
+ ],
+ 'chars': [
+ (r'\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})', String.Escape),
+ (r"'", String.Char, '#pop'),
+ (r".", String.Char)
+ ],
+ 'strings': [
+ (r'(?<!\$)\$(\d+|#|\w+)+', String.Interpol),
+ (r'[^\\\'"$\n]+', String),
+ # quotes, dollars and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'\$', String)
+ # newlines are an error (use "nl" state)
+ ],
+ 'dqs': [
+ (r'\\([\\abcefnrtvl"\']|\n|x[a-f0-9]{2}|[0-9]{1,3})',
+ String.Escape),
+ (r'"', String, '#pop'),
+ include('strings')
+ ],
+ 'rdqs': [
+ (r'"(?!")', String, '#pop'),
+ (r'""', String.Escape),
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""(?!")', String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ 'funcname': [
+ (r'((?![\d_])\w)(((?!_)\w)|(_(?!_)\w))*', Name.Function, '#pop'),
+ (r'`.+`', Name.Function, '#pop')
+ ],
+ 'nl': [
+ (r'\n', String)
+ ],
+ 'float-number': [
+ (r'\.(?!\.)[0-9_]*', Number.Float),
+ (r'e[+-]?[0-9][0-9_]*', Number.Float),
+ default('#pop')
+ ],
+ 'float-suffix': [
+ (r'\'f(32|64)', Number.Float),
+ default('#pop')
+ ],
+ 'int-suffix': [
+ (r'\'i(32|64)', Number.Integer.Long),
+ (r'\'i(8|16)', Number.Integer),
+ default('#pop')
+ ],
+ }
diff --git a/pygments/lexers/nit.py b/pygments/lexers/nit.py
new file mode 100644
index 00000000..ab59c4e5
--- /dev/null
+++ b/pygments/lexers/nit.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.nit
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Nit language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['NitLexer']
+
+
+class NitLexer(RegexLexer):
+ """
+ For `nit <http://nitlanguage.org>`_ source.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Nit'
+ aliases = ['nit']
+ filenames = ['*.nit']
+ tokens = {
+ 'root': [
+ (r'#.*?$', Comment.Single),
+ (words((
+ 'package', 'module', 'import', 'class', 'abstract', 'interface',
+ 'universal', 'enum', 'end', 'fun', 'type', 'init', 'redef',
+ 'isa', 'do', 'readable', 'writable', 'var', 'intern', 'extern',
+ 'public', 'protected', 'private', 'intrude', 'if', 'then',
+ 'else', 'while', 'loop', 'for', 'in', 'and', 'or', 'not',
+ 'implies', 'return', 'continue', 'break', 'abort', 'assert',
+ 'new', 'is', 'once', 'super', 'self', 'true', 'false', 'nullable',
+ 'null', 'as', 'isset', 'label', '__debug__'), suffix=r'(?=[\r\n\t( ])'),
+ Keyword),
+ (r'[A-Z]\w*', Name.Class),
+ (r'"""(([^\'\\]|\\.)|\\r|\\n)*((\{\{?)?(""?\{\{?)*""""*)', String), # Simple long string
+ (r'\'\'\'(((\\.|[^\'\\])|\\r|\\n)|\'((\\.|[^\'\\])|\\r|\\n)|'
+ r'\'\'((\\.|[^\'\\])|\\r|\\n))*\'\'\'', String), # Simple long string alt
+ (r'"""(([^\'\\]|\\.)|\\r|\\n)*((""?)?(\{\{?""?)*\{\{\{\{*)', String), # Start long string
+ (r'\}\}\}(((\\.|[^\'\\])|\\r|\\n))*(""?)?(\{\{?""?)*\{\{\{\{*', String), # Mid long string
+ (r'\}\}\}(((\\.|[^\'\\])|\\r|\\n))*(\{\{?)?(""?\{\{?)*""""*', String), # End long string
+ (r'"(\\.|([^"}{\\]))*"', String), # Simple String
+ (r'"(\\.|([^"}{\\]))*\{', String), # Start string
+ (r'\}(\\.|([^"}{\\]))*\{', String), # Mid String
+ (r'\}(\\.|([^"}{\\]))*"', String), # End String
+ (r'(\'[^\'\\]\')|(\'\\.\')', String.Char),
+ (r'[0-9]*\.[0-9]+', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'0(x|X)[0-9A-Fa-f]+', Number.Hex),
+ (r'[a-z]\w*', Name),
+ (r'_\w+', Name.Variable.Instance),
+ (r'==|!=|<==>|>=|>>|>|<=|<<|<|\+|-|=|/|\*|%|\+=|-=|!|@', Operator),
+ (r'\(|\)|\[|\]|,|\.\.\.|\.\.|\.|::|:', Punctuation),
+ (r'`\{[^`]*`\}', Text), # Extern blocks won't be Lexed by Nit
+ (r'[\r\n\t ]+', Text),
+ ],
+ }
diff --git a/pygments/lexers/nix.py b/pygments/lexers/nix.py
new file mode 100644
index 00000000..57f08623
--- /dev/null
+++ b/pygments/lexers/nix.py
@@ -0,0 +1,136 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.nix
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the NixOS Nix language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal
+
+__all__ = ['NixLexer']
+
+
+class NixLexer(RegexLexer):
+ """
+ For the `Nix language <http://nixos.org/nix/>`_.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Nix'
+ aliases = ['nixos', 'nix']
+ filenames = ['*.nix']
+ mimetypes = ['text/x-nix']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ keywords = ['rec', 'with', 'let', 'in', 'inherit', 'assert', 'if',
+ 'else', 'then', '...']
+ builtins = ['import', 'abort', 'baseNameOf', 'dirOf', 'isNull', 'builtins',
+ 'map', 'removeAttrs', 'throw', 'toString', 'derivation']
+ operators = ['++', '+', '?', '.', '!', '//', '==',
+ '!=', '&&', '||', '->', '=']
+
+ punctuations = ["(", ")", "[", "]", ";", "{", "}", ":", ",", "@"]
+
+ tokens = {
+ 'root': [
+ # comments starting with #
+ (r'#.*$', Comment.Single),
+
+ # multiline comments
+ (r'/\*', Comment.Multiline, 'comment'),
+
+ # whitespace
+ (r'\s+', Text),
+
+ # keywords
+ ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in keywords), Keyword),
+
+ # highlight the builtins
+ ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
+ Name.Builtin),
+
+ (r'\b(true|false|null)\b', Name.Constant),
+
+ # operators
+ ('(%s)' % '|'.join(re.escape(entry) for entry in operators),
+ Operator),
+
+ # word operators
+ (r'\b(or|and)\b', Operator.Word),
+
+ # punctuations
+ ('(%s)' % '|'.join(re.escape(entry) for entry in punctuations), Punctuation),
+
+ # integers
+ (r'[0-9]+', Number.Integer),
+
+ # strings
+ (r'"', String.Double, 'doublequote'),
+ (r"''", String.Single, 'singlequote'),
+
+ # paths
+ (r'[\w.+-]*(\/[\w.+-]+)+', Literal),
+ (r'\<[\w.+-]+(\/[\w.+-]+)*\>', Literal),
+
+ # urls
+ (r'[a-zA-Z][a-zA-Z0-9\+\-\.]*\:[\w%/?:@&=+$,\\.!~*\'-]+', Literal),
+
+ # names of variables
+ (r'[\w-]+\s*=', String.Symbol),
+ (r'[a-zA-Z_][\w\'-]*', Text),
+
+ ],
+ 'comment': [
+ (r'[^/*]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ 'singlequote': [
+ (r"'''", String.Escape),
+ (r"''\$\{", String.Escape),
+ (r"''\n", String.Escape),
+ (r"''\r", String.Escape),
+ (r"''\t", String.Escape),
+ (r"''", String.Single, '#pop'),
+ (r'\$\{', String.Interpol, 'antiquote'),
+ (r"[^']", String.Single),
+ ],
+ 'doublequote': [
+ # escape sequences, longest first so \" and \${ are not split
+ (r'\\\$\{', String.Escape),
+ (r'\\"', String.Escape),
+ (r'\\', String.Escape),
+ (r'"', String.Double, '#pop'),
+ (r'\$\{', String.Interpol, 'antiquote'),
+ (r'[^"]', String.Double),
+ ],
+ 'antiquote': [
+ (r"\}", String.Interpol, '#pop'),
+ # TODO: we should probably escape also here ''${ \${
+ (r"\$\{", String.Interpol, '#push'),
+ include('root'),
+ ],
+ }
+
+ def analyse_text(text):
+ rv = 0.0
+ # TODO: let/in
+ if re.search(r'import.+?<[^>]+>', text):
+ rv += 0.4
+ if re.search(r'mkDerivation\s+(\(|\{|rec)', text):
+ rv += 0.4
+ if re.search(r'=\s+mkIf\s+', text):
+ rv += 0.4
+ if re.search(r'\{[a-zA-Z,\s]+\}:', text):
+ rv += 0.1
+ return rv
diff --git a/pygments/lexers/objective.py b/pygments/lexers/objective.py
new file mode 100644
index 00000000..fc8e5d17
--- /dev/null
+++ b/pygments/lexers/objective.py
@@ -0,0 +1,501 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.objective
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Objective-C family languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this, words, \
+ inherit, default
+from pygments.token import Text, Keyword, Name, String, Operator, \
+ Number, Punctuation, Literal, Comment
+
+from pygments.lexers.c_cpp import CLexer, CppLexer
+
+__all__ = ['ObjectiveCLexer', 'ObjectiveCppLexer', 'LogosLexer', 'SwiftLexer']
+
+
+def objective(baselexer):
+ """
+ Generate a subclass of baselexer that accepts the Objective-C syntax
+ extensions.
+ """
+
+ # Have to be careful not to accidentally match JavaDoc/Doxygen syntax here,
+ # since that's quite common in ordinary C/C++ files. It's OK to match
+ # JavaDoc/Doxygen keywords that only apply to Objective-C, mind.
+ #
+ # The upshot of this is that we CANNOT match @class or @interface
+ _oc_keywords = re.compile(r'@(?:end|implementation|protocol)')
+
+ # Matches [ <ws>? identifier <ws> ( identifier <ws>? ] | identifier? : )
+ # (note the identifier is *optional* when there is a ':'!)
+ _oc_message = re.compile(r'\[\s*[a-zA-Z_]\w*\s+'
+ r'(?:[a-zA-Z_]\w*\s*\]|'
+ r'(?:[a-zA-Z_]\w*)?:)')
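+ # e.g. this matches message sends such as '[NSObject alloc]' or
+ # '[self performSelector:' but not C array indexing such as 'a[i]'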
+
+ class GeneratedObjectiveCVariant(baselexer):
+ """
+ Implements Objective-C syntax on top of an existing C family lexer.
+ """
+
+ tokens = {
+ 'statements': [
+ (r'@"', String, 'string'),
+ (r'@(YES|NO)', Number),
+ (r"@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'@(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'@0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'@0[0-7]+[Ll]?', Number.Oct),
+ (r'@\d+[Ll]?', Number.Integer),
+ (r'@\(', Literal, 'literal_number'),
+ (r'@\[', Literal, 'literal_array'),
+ (r'@\{', Literal, 'literal_dictionary'),
+ (words((
+ '@selector', '@private', '@protected', '@public', '@encode',
+ '@synchronized', '@try', '@throw', '@catch', '@finally',
+ '@end', '@property', '@synthesize', '__bridge', '__bridge_transfer',
+ '__autoreleasing', '__block', '__weak', '__strong', 'weak', 'strong',
+ 'copy', 'retain', 'assign', 'unsafe_unretained', 'atomic', 'nonatomic',
+ 'readonly', 'readwrite', 'setter', 'getter', 'typeof', 'in',
+ 'out', 'inout', 'release', 'class', '@dynamic', '@optional',
+ '@required', '@autoreleasepool'), suffix=r'\b'),
+ Keyword),
+ (words(('id', 'instancetype', 'Class', 'IMP', 'SEL', 'BOOL',
+ 'IBOutlet', 'IBAction', 'unichar'), suffix=r'\b'),
+ Keyword.Type),
+ (r'@(true|false|YES|NO)\n', Name.Builtin),
+ (r'(YES|NO|nil|self|super)\b', Name.Builtin),
+ # Carbon types
+ (r'(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b', Keyword.Type),
+ # Carbon built-ins
+ (r'(TRUE|FALSE)\b', Name.Builtin),
+ (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
+ ('#pop', 'oc_classname')),
+ (r'(@class|@protocol)(\s+)', bygroups(Keyword, Text),
+ ('#pop', 'oc_forward_classname')),
+ # @ can also prefix other expressions like @{...} or @(...)
+ (r'@', Punctuation),
+ inherit,
+ ],
+ 'oc_classname': [
+ # interface definition that inherits
+ ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)',
+ bygroups(Name.Class, Text, Name.Class, Text, Punctuation),
+ ('#pop', 'oc_ivars')),
+ ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
+ bygroups(Name.Class, Text, Name.Class), '#pop'),
+ # interface definition for a category
+ ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)',
+ bygroups(Name.Class, Text, Name.Label, Text, Punctuation),
+ ('#pop', 'oc_ivars')),
+ ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))',
+ bygroups(Name.Class, Text, Name.Label), '#pop'),
+ # simple interface / implementation
+ ('([a-zA-Z$_][\w$]*)(\s*)(\{)',
+ bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')),
+ ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
+ ],
+ 'oc_forward_classname': [
+ ('([a-zA-Z$_][\w$]*)(\s*,\s*)',
+ bygroups(Name.Class, Text), 'oc_forward_classname'),
+ ('([a-zA-Z$_][\w$]*)(\s*;?)',
+ bygroups(Name.Class, Text), '#pop')
+ ],
+ 'oc_ivars': [
+ include('whitespace'),
+ include('statements'),
+ (';', Punctuation),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'root': [
+ # methods
+ (r'^([-+])(\s*)' # method marker
+ r'(\(.*?\))?(\s*)' # return type
+ r'([a-zA-Z$_][\w$]*:?)', # begin of method name
+ bygroups(Punctuation, Text, using(this),
+ Text, Name.Function),
+ 'method'),
+ inherit,
+ ],
+ 'method': [
+ include('whitespace'),
+ # TODO unsure if ellipses are allowed elsewhere, see
+ # discussion in Issue 789
+ (r',', Punctuation),
+ (r'\.\.\.', Punctuation),
+ (r'(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)',
+ bygroups(using(this), Text, Name.Variable)),
+ (r'[a-zA-Z$_][\w$]*:', Name.Function),
+ (';', Punctuation, '#pop'),
+ (r'\{', Punctuation, 'function'),
+ default('#pop'),
+ ],
+ 'literal_number': [
+ (r'\(', Punctuation, 'literal_number_inner'),
+ (r'\)', Literal, '#pop'),
+ include('statement'),
+ ],
+ 'literal_number_inner': [
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop'),
+ include('statement'),
+ ],
+ 'literal_array': [
+ (r'\[', Punctuation, 'literal_array_inner'),
+ (r'\]', Literal, '#pop'),
+ include('statement'),
+ ],
+ 'literal_array_inner': [
+ (r'\[', Punctuation, '#push'),
+ (r'\]', Punctuation, '#pop'),
+ include('statement'),
+ ],
+ 'literal_dictionary': [
+ (r'\}', Literal, '#pop'),
+ include('statement'),
+ ],
+ }
+
+ def analyse_text(text):
+ if _oc_keywords.search(text):
+ return 1.0
+ elif '@"' in text: # strings
+ return 0.8
+ elif re.search('@[0-9]+', text):
+ return 0.7
+ elif _oc_message.search(text):
+ return 0.8
+ return 0
+
+ def get_tokens_unprocessed(self, text):
+ from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
+ COCOA_PROTOCOLS, COCOA_PRIMITIVES
+
+ for index, token, value in \
+ baselexer.get_tokens_unprocessed(self, text):
+ if token is Name or token is Name.Class:
+ if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
+ or value in COCOA_PRIMITIVES:
+ token = Name.Builtin.Pseudo
+
+ yield index, token, value
+
+ return GeneratedObjectiveCVariant
+
+
+class ObjectiveCLexer(objective(CLexer)):
+ """
+ For Objective-C source code with preprocessor directives.
+ """
+
+ name = 'Objective-C'
+ aliases = ['objective-c', 'objectivec', 'obj-c', 'objc']
+ filenames = ['*.m', '*.h']
+ mimetypes = ['text/x-objective-c']
+ priority = 0.05 # Lower than C
+
+
+class ObjectiveCppLexer(objective(CppLexer)):
+ """
+ For Objective-C++ source code with preprocessor directives.
+ """
+
+ name = 'Objective-C++'
+ aliases = ['objective-c++', 'objectivec++', 'obj-c++', 'objc++']
+ filenames = ['*.mm', '*.hh']
+ mimetypes = ['text/x-objective-c++']
+ priority = 0.05 # Lower than C++
+
+
+class LogosLexer(ObjectiveCppLexer):
+ """
+ For Logos + Objective-C source code with preprocessor directives.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Logos'
+ aliases = ['logos']
+ filenames = ['*.x', '*.xi', '*.xm', '*.xmi']
+ mimetypes = ['text/x-logos']
+ priority = 0.25
+
+ tokens = {
+ 'statements': [
+ (r'(%orig|%log)\b', Keyword),
+ (r'(%c)\b(\()(\s*)([a-zA-Z$_][\w$]*)(\s*)(\))',
+ bygroups(Keyword, Punctuation, Text, Name.Class, Text, Punctuation)),
+ (r'(%init)\b(\()',
+ bygroups(Keyword, Punctuation), 'logos_init_directive'),
+ (r'(%init)(?=\s*;)', bygroups(Keyword)),
+ (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
+ bygroups(Keyword, Text, Name.Class), '#pop'),
+ (r'(%subclass)(\s+)', bygroups(Keyword, Text),
+ ('#pop', 'logos_classname')),
+ inherit,
+ ],
+ 'logos_init_directive': [
+ ('\s+', Text),
+ (',', Punctuation, ('logos_init_directive', '#pop')),
+ ('([a-zA-Z$_][\w$]*)(\s*)(=)(\s*)([^);]*)',
+ bygroups(Name.Class, Text, Punctuation, Text, Text)),
+ ('([a-zA-Z$_][\w$]*)', Name.Class),
+ ('\)', Punctuation, '#pop'),
+ ],
+ 'logos_classname': [
+ ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
+ bygroups(Name.Class, Text, Name.Class), '#pop'),
+ ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
+ ],
+ 'root': [
+ (r'(%subclass)(\s+)', bygroups(Keyword, Text),
+ 'logos_classname'),
+ (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
+ bygroups(Keyword, Text, Name.Class)),
+ (r'(%config)(\s*\(\s*)(\w+)(\s*=\s*)(.*?)(\s*\)\s*)',
+ bygroups(Keyword, Text, Name.Variable, Text, String, Text)),
+ (r'(%ctor)(\s*)(\{)', bygroups(Keyword, Text, Punctuation),
+ 'function'),
+ (r'(%new)(\s*)(\()(\s*.*?\s*)(\))',
+ bygroups(Keyword, Text, Keyword, String, Keyword)),
+ (r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)),
+ inherit,
+ ],
+ }
+
+ _logos_keywords = re.compile(r'%(?:hook|ctor|init|c\()')
+
+ def analyse_text(text):
+ if LogosLexer._logos_keywords.search(text):
+ return 1.0
+ return 0
+
+
+class SwiftLexer(RegexLexer):
+ """
+ For `Swift <https://developer.apple.com/swift/>`_ source.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Swift'
+ filenames = ['*.swift']
+ aliases = ['swift']
+ mimetypes = ['text/x-swift']
+
+ tokens = {
+ 'root': [
+ # Whitespace and Comments
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'//', Comment.Single, 'comment-single'),
+ (r'/\*', Comment.Multiline, 'comment-multi'),
+ (r'#(if|elseif|else|endif)\b', Comment.Preproc, 'preproc'),
+
+ # Keywords
+ include('keywords'),
+
+ # Global Types
+ (words((
+ 'Array', 'AutoreleasingUnsafeMutablePointer', 'BidirectionalReverseView',
+ 'Bit', 'Bool', 'CFunctionPointer', 'COpaquePointer', 'CVaListPointer',
+ 'Character', 'ClosedInterval', 'CollectionOfOne', 'ContiguousArray',
+ 'Dictionary', 'DictionaryGenerator', 'DictionaryIndex', 'Double',
+ 'EmptyCollection', 'EmptyGenerator', 'EnumerateGenerator',
+ 'EnumerateSequence', 'FilterCollectionView',
+ 'FilterCollectionViewIndex', 'FilterGenerator', 'FilterSequenceView',
+ 'Float', 'Float80', 'FloatingPointClassification', 'GeneratorOf',
+ 'GeneratorOfOne', 'GeneratorSequence', 'HalfOpenInterval', 'HeapBuffer',
+ 'HeapBufferStorage', 'ImplicitlyUnwrappedOptional', 'IndexingGenerator',
+ 'Int', 'Int16', 'Int32', 'Int64', 'Int8', 'LazyBidirectionalCollection',
+ 'LazyForwardCollection', 'LazyRandomAccessCollection',
+ 'LazySequence', 'MapCollectionView', 'MapSequenceGenerator',
+ 'MapSequenceView', 'MirrorDisposition', 'ObjectIdentifier', 'OnHeap',
+ 'Optional', 'PermutationGenerator', 'QuickLookObject',
+ 'RandomAccessReverseView', 'Range', 'RangeGenerator', 'RawByte', 'Repeat',
+ 'ReverseBidirectionalIndex', 'ReverseRandomAccessIndex', 'SequenceOf',
+ 'SinkOf', 'Slice', 'StaticString', 'StrideThrough', 'StrideThroughGenerator',
+ 'StrideTo', 'StrideToGenerator', 'String', 'UInt', 'UInt16', 'UInt32',
+ 'UInt64', 'UInt8', 'UTF16', 'UTF32', 'UTF8', 'UnicodeDecodingResult',
+ 'UnicodeScalar', 'Unmanaged', 'UnsafeBufferPointer',
+ 'UnsafeBufferPointerGenerator', 'UnsafeMutableBufferPointer',
+ 'UnsafeMutablePointer', 'UnsafePointer', 'Zip2', 'ZipGenerator2',
+ # Protocols
+ 'AbsoluteValuable', 'AnyObject', 'ArrayLiteralConvertible',
+ 'BidirectionalIndexType', 'BitwiseOperationsType',
+ 'BooleanLiteralConvertible', 'BooleanType', 'CVarArgType',
+ 'CollectionType', 'Comparable', 'DebugPrintable',
+ 'DictionaryLiteralConvertible', 'Equatable',
+ 'ExtendedGraphemeClusterLiteralConvertible',
+ 'ExtensibleCollectionType', 'FloatLiteralConvertible',
+ 'FloatingPointType', 'ForwardIndexType', 'GeneratorType', 'Hashable',
+ 'IntegerArithmeticType', 'IntegerLiteralConvertible', 'IntegerType',
+ 'IntervalType', 'MirrorType', 'MutableCollectionType', 'MutableSliceable',
+ 'NilLiteralConvertible', 'OutputStreamType', 'Printable',
+ 'RandomAccessIndexType', 'RangeReplaceableCollectionType',
+ 'RawOptionSetType', 'RawRepresentable', 'Reflectable', 'SequenceType',
+ 'SignedIntegerType', 'SignedNumberType', 'SinkType', 'Sliceable',
+ 'Streamable', 'Strideable', 'StringInterpolationConvertible',
+ 'StringLiteralConvertible', 'UnicodeCodecType',
+ 'UnicodeScalarLiteralConvertible', 'UnsignedIntegerType',
+ '_ArrayBufferType', '_BidirectionalIndexType', '_CocoaStringType',
+ '_CollectionType', '_Comparable', '_ExtensibleCollectionType',
+ '_ForwardIndexType', '_Incrementable', '_IntegerArithmeticType',
+ '_IntegerType', '_ObjectiveCBridgeable', '_RandomAccessIndexType',
+ '_RawOptionSetType', '_SequenceType', '_Sequence_Type',
+ '_SignedIntegerType', '_SignedNumberType', '_Sliceable', '_Strideable',
+ '_SwiftNSArrayRequiredOverridesType', '_SwiftNSArrayType',
+ '_SwiftNSCopyingType', '_SwiftNSDictionaryRequiredOverridesType',
+ '_SwiftNSDictionaryType', '_SwiftNSEnumeratorType',
+ '_SwiftNSFastEnumerationType', '_SwiftNSStringRequiredOverridesType',
+ '_SwiftNSStringType', '_UnsignedIntegerType',
+ # Variables
+ 'C_ARGC', 'C_ARGV', 'Process',
+ # Typealiases
+ 'Any', 'AnyClass', 'BooleanLiteralType', 'CBool', 'CChar', 'CChar16',
+ 'CChar32', 'CDouble', 'CFloat', 'CInt', 'CLong', 'CLongLong', 'CShort',
+ 'CSignedChar', 'CUnsignedInt', 'CUnsignedLong', 'CUnsignedShort',
+ 'CWideChar', 'ExtendedGraphemeClusterType', 'Float32', 'Float64',
+ 'FloatLiteralType', 'IntMax', 'IntegerLiteralType', 'StringLiteralType',
+ 'UIntMax', 'UWord', 'UnicodeScalarType', 'Void', 'Word',
+ # Foundation/Cocoa
+ 'NSErrorPointer', 'NSObjectProtocol', 'Selector'), suffix=r'\b'),
+ Name.Builtin),
+ # Functions
+ (words((
+ 'abs', 'advance', 'alignof', 'alignofValue', 'assert', 'assertionFailure',
+ 'contains', 'count', 'countElements', 'debugPrint', 'debugPrintln',
+ 'distance', 'dropFirst', 'dropLast', 'dump', 'enumerate', 'equal',
+ 'extend', 'fatalError', 'filter', 'find', 'first', 'getVaList', 'indices',
+ 'insert', 'isEmpty', 'join', 'last', 'lazy', 'lexicographicalCompare',
+ 'map', 'max', 'maxElement', 'min', 'minElement', 'numericCast', 'overlaps',
+ 'partition', 'precondition', 'preconditionFailure', 'prefix', 'print',
+ 'println', 'reduce', 'reflect', 'removeAll', 'removeAtIndex', 'removeLast',
+ 'removeRange', 'reverse', 'sizeof', 'sizeofValue', 'sort', 'sorted',
+ 'splice', 'split', 'startsWith', 'stride', 'strideof', 'strideofValue',
+ 'suffix', 'swap', 'toDebugString', 'toString', 'transcode',
+ 'underestimateCount', 'unsafeAddressOf', 'unsafeBitCast', 'unsafeDowncast',
+ 'withExtendedLifetime', 'withUnsafeMutablePointer',
+ 'withUnsafeMutablePointers', 'withUnsafePointer', 'withUnsafePointers',
+ 'withVaList'), suffix=r'\b'),
+ Name.Builtin.Pseudo),
+
+ # Implicit Block Variables
+ (r'\$\d+', Name.Variable),
+
+ # Binary Literal
+ (r'0b[01_]+', Number.Bin),
+ # Octal Literal
+ (r'0o[0-7_]+', Number.Oct),
+ # Hexadecimal Literal
+ (r'0x[0-9a-fA-F_]+', Number.Hex),
+ # Decimal Literal
+ (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+ r'\.[0-9_]*|[eE][+\-]?[0-9_]+)', Number.Float),
+ (r'[0-9][0-9_]*', Number.Integer),
+ # String Literal
+ (r'"', String, 'string'),
+
+ # Operators and Punctuation
+ (r'[(){}\[\].,:;=@#`?]|->|[<&?](?=\w)|(?<=\w)[>!?]', Punctuation),
+ (r'[/=\-+!*%<>&|^?~]+', Operator),
+
+ # Identifier
+ (r'[a-zA-Z_]\w*', Name)
+ ],
+ 'keywords': [
+ (words((
+ 'break', 'case', 'continue', 'default', 'do', 'else',
+ 'fallthrough', 'for', 'if', 'in', 'return', 'switch', 'where',
+ 'while'), suffix=r'\b'),
+ Keyword),
+ (r'@availability\([^)]+\)', Keyword.Reserved),
+ (words((
+ 'associativity', 'convenience', 'dynamic', 'didSet', 'final',
+ 'get', 'infix', 'inout', 'lazy', 'left', 'mutating', 'none',
+ 'nonmutating', 'optional', 'override', 'postfix', 'precedence',
+ 'prefix', 'Protocol', 'required', 'right', 'set', 'Type',
+ 'unowned', 'weak', 'willSet', '@availability', '@autoclosure',
+ '@noreturn', '@NSApplicationMain', '@NSCopying', '@NSManaged',
+ '@objc', '@UIApplicationMain', '@IBAction', '@IBDesignable',
+ '@IBInspectable', '@IBOutlet'), suffix=r'\b'),
+ Keyword.Reserved),
+ (r'(as|dynamicType|false|is|nil|self|Self|super|true|__COLUMN__'
+ r'|__FILE__|__FUNCTION__|__LINE__|_)\b', Keyword.Constant),
+ (r'import\b', Keyword.Declaration, 'module'),
+ (r'(class|enum|extension|struct|protocol)(\s+)([a-zA-Z_]\w*)',
+ bygroups(Keyword.Declaration, Text, Name.Class)),
+ (r'(func)(\s+)([a-zA-Z_]\w*)',
+ bygroups(Keyword.Declaration, Text, Name.Function)),
+ (r'(var|let)(\s+)([a-zA-Z_]\w*)', bygroups(Keyword.Declaration,
+ Text, Name.Variable)),
+ (words((
+ 'class', 'deinit', 'enum', 'extension', 'func', 'import', 'init',
+ 'internal', 'let', 'operator', 'private', 'protocol', 'public',
+ 'static', 'struct', 'subscript', 'typealias', 'var'), suffix=r'\b'),
+ Keyword.Declaration)
+ ],
+ 'comment': [
+ (r':param: [a-zA-Z_]\w*|:returns?:|(FIXME|MARK|TODO):',
+ Comment.Special)
+ ],
+
+ # Nested
+ 'comment-single': [
+ (r'\n', Text, '#pop'),
+ include('comment'),
+ (r'[^\n]', Comment.Single)
+ ],
+ 'comment-multi': [
+ include('comment'),
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'module': [
+ (r'\n', Text, '#pop'),
+ (r'[a-zA-Z_]\w*', Name.Class),
+ include('root')
+ ],
+ 'preproc': [
+ (r'\n', Text, '#pop'),
+ include('keywords'),
+ (r'[A-Za-z]\w*', Comment.Preproc),
+ include('root')
+ ],
+ 'string': [
+ (r'\\\(', String.Interpol, 'string-intp'),
+ (r'"', String, '#pop'),
+ (r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape),
+ (r'[^\\"]+', String),
+ (r'\\', String)
+ ],
+ 'string-intp': [
+ (r'\(', String.Interpol, '#push'),
+ (r'\)', String.Interpol, '#pop'),
+ include('root')
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text):
+ from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
+ COCOA_PROTOCOLS, COCOA_PRIMITIVES
+
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name or token is Name.Class:
+ if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
+ or value in COCOA_PRIMITIVES:
+ token = Name.Builtin.Pseudo
+
+ yield index, token, value
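
The new Swift lexer above wires its Cocoa awareness into get_tokens_unprocessed(): every Name or Name.Class token that matches an entry in the _cocoa_builtins tables is re-tagged as Name.Builtin.Pseudo. A minimal sketch of driving the lexer through the public Pygments API follows; it is not part of the patch, and the 'swift' alias is an assumption based on context, since the class header and registration are outside this excerpt.

# Sketch only -- not part of the diff above.  Assumes the lexer added here is
# registered under the alias 'swift'.
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

code = 'let greeting: String = "Hello, \\(name)"\n'

lexer = get_lexer_by_name('swift')  # assumed alias

# The "\(name)" interpolation is handled by the 'string'/'string-intp' states
# defined above, and Cocoa names such as NSString would come back as
# Name.Builtin.Pseudo via the get_tokens_unprocessed() override.
print(highlight(code, lexer, TerminalFormatter()))
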
diff --git a/pygments/lexers/ooc.py b/pygments/lexers/ooc.py
new file mode 100644
index 00000000..b4e8c6db
--- /dev/null
+++ b/pygments/lexers/ooc.py
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.ooc
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Ooc language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['OocLexer']
+
+
+class OocLexer(RegexLexer):
+ """
+ For `Ooc <http://ooc-lang.org/>`_ source code
+
+ .. versionadded:: 1.2
+ """
+ name = 'Ooc'
+ aliases = ['ooc']
+ filenames = ['*.ooc']
+ mimetypes = ['text/x-ooc']
+
+ tokens = {
+ 'root': [
+ (words((
+ 'class', 'interface', 'implement', 'abstract', 'extends', 'from',
+ 'this', 'super', 'new', 'const', 'final', 'static', 'import',
+ 'use', 'extern', 'inline', 'proto', 'break', 'continue',
+ 'fallthrough', 'operator', 'if', 'else', 'for', 'while', 'do',
+ 'switch', 'case', 'as', 'in', 'version', 'return', 'true',
+ 'false', 'null'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (r'include\b', Keyword, 'include'),
+ (r'(cover)([ \t]+)(from)([ \t]+)(\w+[*@]?)',
+ bygroups(Keyword, Text, Keyword, Text, Name.Class)),
+ (r'(func)((?:[ \t]|\\\n)+)(~[a-z_]\w*)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'\bfunc\b', Keyword),
+ # Note: %= and ^= not listed on http://ooc-lang.org/syntax
+ (r'//.*', Comment),
+ (r'(?s)/\*.*?\*/', Comment.Multiline),
+ (r'(==?|\+=?|-[=>]?|\*=?|/=?|:=|!=?|%=?|\?|>{1,3}=?|<{1,3}=?|\.\.|'
+ r'&&?|\|\|?|\^=?)', Operator),
+ (r'(\.)([ \t]*)([a-z]\w*)', bygroups(Operator, Text,
+ Name.Function)),
+ (r'[A-Z][A-Z0-9_]+', Name.Constant),
+ (r'[A-Z]\w*([@*]|\[[ \t]*\])?', Name.Class),
+
+ (r'([a-z]\w*(?:~[a-z]\w*)?)((?:[ \t]|\\\n)*)(?=\()',
+ bygroups(Name.Function, Text)),
+ (r'[a-z]\w*', Name.Variable),
+
+ # : introduces types
+ (r'[:(){}\[\];,]', Punctuation),
+
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'0c[0-9]+', Number.Oct),
+ (r'0b[01]+', Number.Bin),
+ (r'[0-9_]\.[0-9_]*(?!\.)', Number.Float),
+ (r'[0-9_]+', Number.Decimal),
+
+ (r'"(?:\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\"])*"',
+ String.Double),
+ (r"'(?:\\.|\\[0-9]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
+ String.Char),
+ (r'@', Punctuation), # pointer dereference
+ (r'\.', Punctuation), # imports or chain operator
+
+ (r'\\[ \t\n]', Text),
+ (r'[ \t]+', Text),
+ ],
+ 'include': [
+ (r'[\w/]+', Name),
+ (r',', Punctuation),
+ (r'[ \t]', Text),
+ (r'[;\n]', Text, '#pop'),
+ ],
+ }
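
The OocLexer is essentially one flat RegexLexer state plus a small 'include' state, so its behaviour is easy to probe by iterating the tokens it produces. The sketch below is not part of the patch; it only uses the documented RegexLexer.get_tokens() API, and the comments about which rules fire are an informal reading of the token table above rather than captured output.

# Sketch only -- not part of the diff above.
from pygments.lexers.ooc import OocLexer

snippet = (
    'Greeter: class {\n'
    '    sayHi: func (name: String) {\n'
    '        "Hi, " + name println()\n'
    '    }\n'
    '}\n'
)

# 'class' and 'func' hit the keyword rules, 'Greeter' and 'String' the
# capitalized Name.Class rule, and 'println' the lookahead-for-'(' rule
# that yields Name.Function.
for tokentype, value in OocLexer().get_tokens(snippet):
    if value.strip():
        print(tokentype, repr(value))
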
diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py
index 10598fb4..afd0fda5 100644
--- a/pygments/lexers/other.py
+++ b/pygments/lexers/other.py
@@ -3,3776 +3,38 @@
pygments.lexers.other
~~~~~~~~~~~~~~~~~~~~~
- Lexers for other languages.
+ Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, \
- this, combined, ExtendedRegexLexer
-from pygments.token import Error, Punctuation, Literal, Token, \
- Text, Comment, Operator, Keyword, Name, String, Number, Generic, \
- Whitespace
-from pygments.util import get_bool_opt
-from pygments.lexers.web import HtmlLexer
-
-from pygments.lexers._openedgebuiltins import OPENEDGEKEYWORDS
-from pygments.lexers._robotframeworklexer import RobotFrameworkLexer
-
-# backwards compatibility
from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer
from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \
- TcshLexer
-
-__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer',
- 'SmalltalkLexer', 'LogtalkLexer', 'GnuplotLexer', 'PovrayLexer',
- 'AppleScriptLexer', 'ModelicaLexer', 'RebolLexer', 'ABAPLexer',
- 'NewspeakLexer', 'GherkinLexer', 'AsymptoteLexer', 'PostScriptLexer',
- 'AutohotkeyLexer', 'GoodDataCLLexer', 'MaqlLexer', 'ProtoBufLexer',
- 'HybrisLexer', 'AwkLexer', 'Cfengine3Lexer', 'SnobolLexer',
- 'ECLLexer', 'UrbiscriptLexer', 'OpenEdgeLexer', 'BroLexer',
- 'MscgenLexer', 'KconfigLexer', 'VGLLexer', 'SourcePawnLexer',
- 'RobotFrameworkLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer',
- 'CbmBasicV2Lexer', 'AutoItLexer', 'RexxLexer']
-
-
-class ECLLexer(RegexLexer):
- """
- Lexer for the declarative big-data `ECL
- <http://hpccsystems.com/community/docs/ecl-language-reference/html>`_
- language.
-
- *New in Pygments 1.5.*
- """
-
- name = 'ECL'
- aliases = ['ecl']
- filenames = ['*.ecl']
- mimetypes = ['application/x-ecl']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('statements'),
- ],
- 'whitespace': [
- (r'\s+', Text),
- (r'\/\/.*', Comment.Single),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
- ],
- 'statements': [
- include('types'),
- include('keywords'),
- include('functions'),
- include('hash'),
- (r'"', String, 'string'),
- (r'\'', String, 'string'),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'\*/', Error),
- (r'[~!%^&*+=|?:<>/-]+', Operator),
- (r'[{}()\[\],.;]', Punctuation),
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
- ],
- 'hash': [
- (r'^#.*$', Comment.Preproc),
- ],
- 'types': [
- (r'(RECORD|END)\D', Keyword.Declaration),
- (r'((?:ASCII|BIG_ENDIAN|BOOLEAN|DATA|DECIMAL|EBCDIC|INTEGER|PATTERN|'
- r'QSTRING|REAL|RECORD|RULE|SET OF|STRING|TOKEN|UDECIMAL|UNICODE|'
- r'UNSIGNED|VARSTRING|VARUNICODE)\d*)(\s+)',
- bygroups(Keyword.Type, Text)),
- ],
- 'keywords': [
- (r'(APPLY|ASSERT|BUILD|BUILDINDEX|EVALUATE|FAIL|KEYDIFF|KEYPATCH|'
- r'LOADXML|NOTHOR|NOTIFY|OUTPUT|PARALLEL|SEQUENTIAL|SOAPCALL|WAIT'
- r'CHECKPOINT|DEPRECATED|FAILCODE|FAILMESSAGE|FAILURE|GLOBAL|'
- r'INDEPENDENT|ONWARNING|PERSIST|PRIORITY|RECOVERY|STORED|SUCCESS|'
- r'WAIT|WHEN)\b', Keyword.Reserved),
- # These are classed differently, check later
- (r'(ALL|AND|ANY|AS|ATMOST|BEFORE|BEGINC\+\+|BEST|BETWEEN|CASE|CONST|'
- r'COUNTER|CSV|DESCEND|ENCRYPT|ENDC\+\+|ENDMACRO|EXCEPT|EXCLUSIVE|'
- r'EXPIRE|EXPORT|EXTEND|FALSE|FEW|FIRST|FLAT|FULL|FUNCTION|GROUP|'
- r'HEADER|HEADING|HOLE|IFBLOCK|IMPORT|IN|JOINED|KEEP|KEYED|LAST|'
- r'LEFT|LIMIT|LOAD|LOCAL|LOCALE|LOOKUP|MACRO|MANY|MAXCOUNT|'
- r'MAXLENGTH|MIN SKEW|MODULE|INTERFACE|NAMED|NOCASE|NOROOT|NOSCAN|'
- r'NOSORT|NOT|OF|ONLY|OPT|OR|OUTER|OVERWRITE|PACKED|PARTITION|'
- r'PENALTY|PHYSICALLENGTH|PIPE|QUOTE|RELATIONSHIP|REPEAT|RETURN|'
- r'RIGHT|SCAN|SELF|SEPARATOR|SERVICE|SHARED|SKEW|SKIP|SQL|STORE|'
- r'TERMINATOR|THOR|THRESHOLD|TOKEN|TRANSFORM|TRIM|TRUE|TYPE|'
- r'UNICODEORDER|UNSORTED|VALIDATE|VIRTUAL|WHOLE|WILD|WITHIN|XML|'
- r'XPATH|__COMPRESSED__)\b', Keyword.Reserved),
- ],
- 'functions': [
- (r'(ABS|ACOS|ALLNODES|ASCII|ASIN|ASSTRING|ATAN|ATAN2|AVE|CASE|'
- r'CHOOSE|CHOOSEN|CHOOSESETS|CLUSTERSIZE|COMBINE|CORRELATION|COS|'
- r'COSH|COUNT|COVARIANCE|CRON|DATASET|DEDUP|DEFINE|DENORMALIZE|'
- r'DISTRIBUTE|DISTRIBUTED|DISTRIBUTION|EBCDIC|ENTH|ERROR|EVALUATE|'
- r'EVENT|EVENTEXTRA|EVENTNAME|EXISTS|EXP|FAILCODE|FAILMESSAGE|'
- r'FETCH|FROMUNICODE|GETISVALID|GLOBAL|GRAPH|GROUP|HASH|HASH32|'
- r'HASH64|HASHCRC|HASHMD5|HAVING|IF|INDEX|INTFORMAT|ISVALID|'
- r'ITERATE|JOIN|KEYUNICODE|LENGTH|LIBRARY|LIMIT|LN|LOCAL|LOG|LOOP|'
- r'MAP|MATCHED|MATCHLENGTH|MATCHPOSITION|MATCHTEXT|MATCHUNICODE|'
- r'MAX|MERGE|MERGEJOIN|MIN|NOLOCAL|NONEMPTY|NORMALIZE|PARSE|PIPE|'
- r'POWER|PRELOAD|PROCESS|PROJECT|PULL|RANDOM|RANGE|RANK|RANKED|'
- r'REALFORMAT|RECORDOF|REGEXFIND|REGEXREPLACE|REGROUP|REJECTED|'
- r'ROLLUP|ROUND|ROUNDUP|ROW|ROWDIFF|SAMPLE|SET|SIN|SINH|SIZEOF|'
- r'SOAPCALL|SORT|SORTED|SQRT|STEPPED|STORED|SUM|TABLE|TAN|TANH|'
- r'THISNODE|TOPN|TOUNICODE|TRANSFER|TRIM|TRUNCATE|TYPEOF|UNGROUP|'
- r'UNICODEORDER|VARIANCE|WHICH|WORKUNIT|XMLDECODE|XMLENCODE|'
- r'XMLTEXT|XMLUNICODE)\b', Name.Function),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\'', String, '#pop'),
- (r'[^"\']+', String),
- ],
- }
-
-
-class BrainfuckLexer(RegexLexer):
- """
-    Lexer for the esoteric `Brainfuck <http://www.muppetlabs.com/~breadbox/bf/>`_
- language.
- """
-
- name = 'Brainfuck'
- aliases = ['brainfuck', 'bf']
- filenames = ['*.bf', '*.b']
- mimetypes = ['application/x-brainfuck']
-
- tokens = {
- 'common': [
- # use different colors for different instruction types
- (r'[.,]+', Name.Tag),
- (r'[+-]+', Name.Builtin),
- (r'[<>]+', Name.Variable),
- (r'[^.,+\-<>\[\]]+', Comment),
- ],
- 'root': [
- (r'\[', Keyword, 'loop'),
- (r'\]', Error),
- include('common'),
- ],
- 'loop': [
- (r'\[', Keyword, '#push'),
- (r'\]', Keyword, '#pop'),
- include('common'),
- ]
- }
-
-
-class BefungeLexer(RegexLexer):
- """
- Lexer for the esoteric `Befunge <http://en.wikipedia.org/wiki/Befunge>`_
- language.
-
- *New in Pygments 0.7.*
- """
- name = 'Befunge'
- aliases = ['befunge']
- filenames = ['*.befunge']
- mimetypes = ['application/x-befunge']
-
- tokens = {
- 'root': [
- (r'[0-9a-f]', Number),
- (r'[\+\*/%!`-]', Operator), # Traditional math
- (r'[<>^v?\[\]rxjk]', Name.Variable), # Move, imperatives
- (r'[:\\$.,n]', Name.Builtin), # Stack ops, imperatives
- (r'[|_mw]', Keyword),
- (r'[{}]', Name.Tag), # Befunge-98 stack ops
- (r'".*?"', String.Double), # Strings don't appear to allow escapes
- (r'\'.', String.Single), # Single character
- (r'[#;]', Comment), # Trampoline... depends on direction hit
- (r'[pg&~=@iotsy]', Keyword), # Misc
- (r'[()A-Z]', Comment), # Fingerprints
- (r'\s+', Text), # Whitespace doesn't matter
- ],
- }
-
-
-class RedcodeLexer(RegexLexer):
- """
- A simple Redcode lexer based on ICWS'94.
- Contributed by Adam Blinkinsop <blinks@acm.org>.
-
- *New in Pygments 0.8.*
- """
- name = 'Redcode'
- aliases = ['redcode']
- filenames = ['*.cw']
-
- opcodes = ['DAT','MOV','ADD','SUB','MUL','DIV','MOD',
- 'JMP','JMZ','JMN','DJN','CMP','SLT','SPL',
- 'ORG','EQU','END']
- modifiers = ['A','B','AB','BA','F','X','I']
-
- tokens = {
- 'root': [
- # Whitespace:
- (r'\s+', Text),
- (r';.*$', Comment.Single),
- # Lexemes:
- # Identifiers
- (r'\b(%s)\b' % '|'.join(opcodes), Name.Function),
- (r'\b(%s)\b' % '|'.join(modifiers), Name.Decorator),
- (r'[A-Za-z_][A-Za-z_0-9]+', Name),
- # Operators
- (r'[-+*/%]', Operator),
- (r'[#$@<>]', Operator), # mode
- (r'[.,]', Punctuation), # mode
- # Numbers
- (r'[-+]?\d+', Number.Integer),
- ],
- }
-
-
-class MOOCodeLexer(RegexLexer):
- """
- For `MOOCode <http://www.moo.mud.org/>`_ (the MOO scripting
- language).
-
- *New in Pygments 0.9.*
- """
- name = 'MOOCode'
- filenames = ['*.moo']
- aliases = ['moocode', 'moo']
- mimetypes = ['text/x-moocode']
-
- tokens = {
- 'root' : [
- # Numbers
- (r'(0|[1-9][0-9_]*)', Number.Integer),
- # Strings
- (r'"(\\\\|\\"|[^"])*"', String),
- # exceptions
- (r'(E_PERM|E_DIV)', Name.Exception),
- # db-refs
- (r'((#[-0-9]+)|(\$[a-z_A-Z0-9]+))', Name.Entity),
- # Keywords
- (r'\b(if|else|elseif|endif|for|endfor|fork|endfork|while'
- r'|endwhile|break|continue|return|try'
- r'|except|endtry|finally|in)\b', Keyword),
- # builtins
- (r'(random|length)', Name.Builtin),
- # special variables
- (r'(player|caller|this|args)', Name.Variable.Instance),
- # skip whitespace
- (r'\s+', Text),
- (r'\n', Text),
- # other operators
- (r'([!;=,{}&\|:\.\[\]@\(\)\<\>\?]+)', Operator),
- # function call
- (r'([a-z_A-Z0-9]+)(\()', bygroups(Name.Function, Operator)),
- # variables
- (r'([a-zA-Z_0-9]+)', Text),
- ]
- }
-
-
-class SmalltalkLexer(RegexLexer):
- """
- For `Smalltalk <http://www.smalltalk.org/>`_ syntax.
- Contributed by Stefan Matthias Aust.
- Rewritten by Nils Winter.
-
- *New in Pygments 0.10.*
- """
- name = 'Smalltalk'
- filenames = ['*.st']
- aliases = ['smalltalk', 'squeak', 'st']
- mimetypes = ['text/x-smalltalk']
-
- tokens = {
- 'root' : [
- (r'(<)(\w+:)(.*?)(>)', bygroups(Text, Keyword, Text, Text)),
- include('squeak fileout'),
- include('whitespaces'),
- include('method definition'),
- (r'(\|)([\w\s]*)(\|)', bygroups(Operator, Name.Variable, Operator)),
- include('objects'),
- (r'\^|\:=|\_', Operator),
- # temporaries
- (r'[\]({}.;!]', Text),
- ],
- 'method definition' : [
-            # Not perfect: can't allow whitespace at the beginning and the end
-            # without breaking everything
- (r'([a-zA-Z]+\w*:)(\s*)(\w+)',
- bygroups(Name.Function, Text, Name.Variable)),
- (r'^(\b[a-zA-Z]+\w*\b)(\s*)$', bygroups(Name.Function, Text)),
- (r'^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$',
- bygroups(Name.Function, Text, Name.Variable, Text)),
- ],
- 'blockvariables' : [
- include('whitespaces'),
- (r'(:)(\s*)(\w+)',
- bygroups(Operator, Text, Name.Variable)),
- (r'\|', Operator, '#pop'),
- (r'', Text, '#pop'), # else pop
- ],
- 'literals' : [
- (r"'(''|[^'])*'", String, 'afterobject'),
- (r'\$.', String.Char, 'afterobject'),
- (r'#\(', String.Symbol, 'parenth'),
- (r'\)', Text, 'afterobject'),
- (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'),
- ],
- '_parenth_helper' : [
- include('whitespaces'),
- (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
- (r'[-+*/\\~<>=|&#!?,@%\w:]+', String.Symbol),
- # literals
- (r"'(''|[^'])*'", String),
- (r'\$.', String.Char),
- (r'#*\(', String.Symbol, 'inner_parenth'),
- ],
- 'parenth' : [
- # This state is a bit tricky since
- # we can't just pop this state
- (r'\)', String.Symbol, ('root', 'afterobject')),
- include('_parenth_helper'),
- ],
- 'inner_parenth': [
- (r'\)', String.Symbol, '#pop'),
- include('_parenth_helper'),
- ],
- 'whitespaces' : [
- # skip whitespace and comments
- (r'\s+', Text),
- (r'"(""|[^"])*"', Comment),
- ],
- 'objects' : [
- (r'\[', Text, 'blockvariables'),
- (r'\]', Text, 'afterobject'),
- (r'\b(self|super|true|false|nil|thisContext)\b',
- Name.Builtin.Pseudo, 'afterobject'),
- (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'),
- (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'),
- (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)',
- String.Symbol, 'afterobject'),
- include('literals'),
- ],
- 'afterobject' : [
- (r'! !$', Keyword , '#pop'), # squeak chunk delimiter
- include('whitespaces'),
- (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
- Name.Builtin, '#pop'),
- (r'\b(new\b(?!:))', Name.Builtin),
- (r'\:=|\_', Operator, '#pop'),
- (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'),
- (r'\b[a-zA-Z]+\w*', Name.Function),
- (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'),
- (r'\.', Punctuation, '#pop'),
- (r';', Punctuation),
- (r'[\])}]', Text),
- (r'[\[({]', Text, '#pop'),
- ],
- 'squeak fileout' : [
- # Squeak fileout format (optional)
- (r'^"(""|[^"])*"!', Keyword),
- (r"^'(''|[^'])*'!", Keyword),
- (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)',
- bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)),
- (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)",
- bygroups(Keyword, Name.Class, Keyword, String, Keyword)),
- (r'^(\w+)( subclass: )(#\w+)'
- r'(\s+instanceVariableNames: )(.*?)'
- r'(\s+classVariableNames: )(.*?)'
- r'(\s+poolDictionaries: )(.*?)'
- r'(\s+category: )(.*?)(!)',
- bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword,
- String, Keyword, String, Keyword, String, Keyword)),
- (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)',
- bygroups(Name.Class, Keyword, String, Keyword)),
- (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)),
- (r'! !$', Keyword),
- ],
- }
-
-
-class LogtalkLexer(RegexLexer):
- """
- For `Logtalk <http://logtalk.org/>`_ source code.
-
- *New in Pygments 0.10.*
- """
-
- name = 'Logtalk'
- aliases = ['logtalk']
- filenames = ['*.lgt']
- mimetypes = ['text/x-logtalk']
-
- tokens = {
- 'root': [
- # Directives
- (r'^\s*:-\s',Punctuation,'directive'),
- # Comments
- (r'%.*?\n', Comment),
- (r'/\*(.|\n)*?\*/',Comment),
- # Whitespace
- (r'\n', Text),
- (r'\s+', Text),
- # Numbers
- (r"0'.", Number),
- (r'0b[01]+', Number),
- (r'0o[0-7]+', Number),
- (r'0x[0-9a-fA-F]+', Number),
- (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
- # Variables
- (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
- # Event handlers
- (r'(after|before)(?=[(])', Keyword),
- # Execution-context methods
- (r'(parameter|this|se(lf|nder))(?=[(])', Keyword),
- # Reflection
- (r'(current_predicate|predicate_property)(?=[(])', Keyword),
- # DCGs and term expansion
- (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])',
- Keyword),
- # Entity
- (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])',
- Keyword),
- (r'(object|protocol|category)_property(?=[(])', Keyword),
- # Entity relations
- (r'co(mplements_object|nforms_to_protocol)(?=[(])', Keyword),
- (r'extends_(object|protocol|category)(?=[(])', Keyword),
- (r'imp(lements_protocol|orts_category)(?=[(])', Keyword),
- (r'(instantiat|specializ)es_class(?=[(])', Keyword),
- # Events
- (r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
- # Flags
- (r'(current|set)_logtalk_flag(?=[(])', Keyword),
- # Compiling, loading, and library paths
- (r'logtalk_(compile|l(ibrary_path|oad_context|oad))(?=[(])',
- Keyword),
- # Database
- (r'(clause|retract(all)?)(?=[(])', Keyword),
- (r'a(bolish|ssert(a|z))(?=[(])', Keyword),
- # Control constructs
- (r'(ca(ll|tch)|throw)(?=[(])', Keyword),
- (r'(fail|true)\b', Keyword),
- # All solutions
- (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
- # Multi-threading meta-predicates
- (r'threaded(_(call|once|ignore|exit|peek|wait|notify))?(?=[(])',
- Keyword),
- # Term unification
- (r'unify_with_occurs_check(?=[(])', Keyword),
- # Term creation and decomposition
- (r'(functor|arg|copy_term|numbervars)(?=[(])', Keyword),
- # Evaluable functors
- (r'(rem|mod|abs|sign)(?=[(])', Keyword),
- (r'float(_(integer|fractional)_part)?(?=[(])', Keyword),
- (r'(floor|truncate|round|ceiling)(?=[(])', Keyword),
- # Other arithmetic functors
- (r'(cos|atan|exp|log|s(in|qrt))(?=[(])', Keyword),
- # Term testing
- (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|'
- r'ground)(?=[(])', Keyword),
- # Term comparison
- (r'compare(?=[(])', Keyword),
- # Stream selection and control
- (r'(curren|se)t_(in|out)put(?=[(])', Keyword),
- (r'(open|close)(?=[(])', Keyword),
- (r'flush_output(?=[(])', Keyword),
- (r'(at_end_of_stream|flush_output)\b', Keyword),
- (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])',
- Keyword),
- # Character and byte input/output
- (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
- (r'\bnl\b', Keyword),
- # Term input/output
- (r'read(_term)?(?=[(])', Keyword),
- (r'write(q|_(canonical|term))?(?=[(])', Keyword),
- (r'(current_)?op(?=[(])', Keyword),
- (r'(current_)?char_conversion(?=[(])', Keyword),
- # Atomic term processing
- (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword),
- (r'(char_code|sub_atom)(?=[(])', Keyword),
- (r'number_c(har|ode)s(?=[(])', Keyword),
-            # Implementation-defined hook functions
- (r'(se|curren)t_prolog_flag(?=[(])', Keyword),
- (r'\bhalt\b', Keyword),
- (r'halt(?=[(])', Keyword),
- # Message sending operators
- (r'(::|:|\^\^)', Operator),
- # External call
- (r'[{}]', Keyword),
- # Logic and control
- (r'\b(ignore|once)(?=[(])', Keyword),
- (r'\brepeat\b', Keyword),
- # Sorting
- (r'(key)?sort(?=[(])', Keyword),
- # Bitwise functors
- (r'(>>|<<|/\\|\\\\|\\)', Operator),
-            # Arithmetic evaluation
- (r'\bis\b', Keyword),
-            # Arithmetic comparison
- (r'(=:=|=\\=|<|=<|>=|>)', Operator),
- # Term creation and decomposition
- (r'=\.\.', Operator),
- # Term unification
- (r'(=|\\=)', Operator),
- # Term comparison
- (r'(==|\\==|@=<|@<|@>=|@>)', Operator),
- # Evaluable functors
- (r'(//|[-+*/])', Operator),
- (r'\b(e|pi|mod|rem)\b', Operator),
-            # Other arithmetic functors
- (r'\b\*\*\b', Operator),
- # DCG rules
- (r'-->', Operator),
- # Control constructs
- (r'([!;]|->)', Operator),
- # Logic and control
- (r'\\+', Operator),
- # Mode operators
- (r'[?@]', Operator),
- # Existential quantifier
- (r'\^', Operator),
- # Strings
- (r'"(\\\\|\\"|[^"])*"', String),
-            # Punctuation
- (r'[()\[\],.|]', Text),
- # Atoms
- (r"[a-z][a-zA-Z0-9_]*", Text),
- (r"'", String, 'quoted_atom'),
- ],
-
- 'quoted_atom': [
- (r"''", String),
- (r"'", String, '#pop'),
- (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
- (r"[^\\'\n]+", String),
- (r'\\', String),
- ],
-
- 'directive': [
- # Conditional compilation directives
- (r'(el)?if(?=[(])', Keyword, 'root'),
- (r'(e(lse|ndif))[.]', Keyword, 'root'),
- # Entity directives
- (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
- (r'(end_(category|object|protocol))[.]',Keyword, 'root'),
- # Predicate scope directives
- (r'(public|protected|private)(?=[(])', Keyword, 'root'),
- # Other directives
- (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
- (r'in(fo|itialization)(?=[(])', Keyword, 'root'),
- (r'(dynamic|synchronized|threaded)[.]', Keyword, 'root'),
- (r'(alias|d(ynamic|iscontiguous)|m(eta_predicate|ode|ultifile)|'
- r's(et_(logtalk|prolog)_flag|ynchronized))(?=[(])',
- Keyword, 'root'),
- (r'op(?=[(])', Keyword, 'root'),
- (r'(c(alls|oinductive)|reexport|use(s|_module))(?=[(])',
- Keyword, 'root'),
- (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
- (r'[a-z][a-zA-Z0-9_]*[.]', Text, 'root'),
- ],
-
- 'entityrelations': [
- (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)'
- r'(?=[(])', Keyword),
- # Numbers
- (r"0'.", Number),
- (r'0b[01]+', Number),
- (r'0o[0-7]+', Number),
- (r'0x[0-9a-fA-F]+', Number),
- (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
- # Variables
- (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
- # Atoms
- (r"[a-z][a-zA-Z0-9_]*", Text),
- (r"'", String, 'quoted_atom'),
- # Strings
- (r'"(\\\\|\\"|[^"])*"', String),
- # End of entity-opening directive
- (r'([)]\.)', Text, 'root'),
- # Scope operator
- (r'(::)', Operator),
-            # Punctuation
- (r'[()\[\],.|]', Text),
- # Comments
- (r'%.*?\n', Comment),
- (r'/\*(.|\n)*?\*/',Comment),
- # Whitespace
- (r'\n', Text),
- (r'\s+', Text),
- ]
- }
-
- def analyse_text(text):
- if ':- object(' in text:
- return True
- if ':- protocol(' in text:
- return True
- if ':- category(' in text:
- return True
- return False
-
-
-def _shortened(word):
- dpos = word.find('$')
- return '|'.join([word[:dpos] + word[dpos+1:i] + r'\b'
- for i in range(len(word), dpos, -1)])
-def _shortened_many(*words):
- return '|'.join(map(_shortened, words))
-
-class GnuplotLexer(RegexLexer):
- """
- For `Gnuplot <http://gnuplot.info/>`_ plotting scripts.
-
- *New in Pygments 0.11.*
- """
-
- name = 'Gnuplot'
- aliases = ['gnuplot']
- filenames = ['*.plot', '*.plt']
- mimetypes = ['text/x-gnuplot']
-
- tokens = {
- 'root': [
- include('whitespace'),
- (_shortened('bi$nd'), Keyword, 'bind'),
- (_shortened_many('ex$it', 'q$uit'), Keyword, 'quit'),
- (_shortened('f$it'), Keyword, 'fit'),
- (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation), 'if'),
- (r'else\b', Keyword),
- (_shortened('pa$use'), Keyword, 'pause'),
- (_shortened_many('p$lot', 'rep$lot', 'sp$lot'), Keyword, 'plot'),
- (_shortened('sa$ve'), Keyword, 'save'),
- (_shortened('se$t'), Keyword, ('genericargs', 'optionarg')),
- (_shortened_many('sh$ow', 'uns$et'),
- Keyword, ('noargs', 'optionarg')),
- (_shortened_many('low$er', 'ra$ise', 'ca$ll', 'cd$', 'cl$ear',
- 'h$elp', '\\?$', 'hi$story', 'l$oad', 'pr$int',
- 'pwd$', 're$read', 'res$et', 'scr$eendump',
- 'she$ll', 'sy$stem', 'up$date'),
- Keyword, 'genericargs'),
- (_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump',
- 'she$ll', 'test$'),
- Keyword, 'noargs'),
- ('([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(=)',
- bygroups(Name.Variable, Text, Operator), 'genericargs'),
- ('([a-zA-Z_][a-zA-Z0-9_]*)(\s*\(.*?\)\s*)(=)',
- bygroups(Name.Function, Text, Operator), 'genericargs'),
- (r'@[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant), # macros
- (r';', Keyword),
- ],
- 'comment': [
- (r'[^\\\n]', Comment),
- (r'\\\n', Comment),
- (r'\\', Comment),
- # don't add the newline to the Comment token
- ('', Comment, '#pop'),
- ],
- 'whitespace': [
- ('#', Comment, 'comment'),
- (r'[ \t\v\f]+', Text),
- ],
- 'noargs': [
- include('whitespace'),
- # semicolon and newline end the argument list
- (r';', Punctuation, '#pop'),
- (r'\n', Text, '#pop'),
- ],
- 'dqstring': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- (r'\n', String, '#pop'), # newline ends the string too
- ],
- 'sqstring': [
- (r"''", String), # escaped single quote
- (r"'", String, '#pop'),
- (r"[^\\'\n]+", String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # normal backslash
- (r'\n', String, '#pop'), # newline ends the string too
- ],
- 'genericargs': [
- include('noargs'),
- (r'"', String, 'dqstring'),
- (r"'", String, 'sqstring'),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
- (r'(\d+\.\d*|\.\d+)', Number.Float),
- (r'-?\d+', Number.Integer),
- ('[,.~!%^&*+=|?:<>/-]', Operator),
- ('[{}()\[\]]', Punctuation),
- (r'(eq|ne)\b', Operator.Word),
- (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
- (r'@[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant), # macros
- (r'\\\n', Text),
- ],
- 'optionarg': [
- include('whitespace'),
- (_shortened_many(
- "a$ll","an$gles","ar$row","au$toscale","b$ars","bor$der",
- "box$width","cl$abel","c$lip","cn$trparam","co$ntour","da$ta",
- "data$file","dg$rid3d","du$mmy","enc$oding","dec$imalsign",
- "fit$","font$path","fo$rmat","fu$nction","fu$nctions","g$rid",
- "hid$den3d","his$torysize","is$osamples","k$ey","keyt$itle",
- "la$bel","li$nestyle","ls$","loa$dpath","loc$ale","log$scale",
- "mac$ros","map$ping","map$ping3d","mar$gin","lmar$gin",
- "rmar$gin","tmar$gin","bmar$gin","mo$use","multi$plot",
- "mxt$ics","nomxt$ics","mx2t$ics","nomx2t$ics","myt$ics",
- "nomyt$ics","my2t$ics","nomy2t$ics","mzt$ics","nomzt$ics",
- "mcbt$ics","nomcbt$ics","of$fsets","or$igin","o$utput",
- "pa$rametric","pm$3d","pal$ette","colorb$ox","p$lot",
- "poi$ntsize","pol$ar","pr$int","obj$ect","sa$mples","si$ze",
- "st$yle","su$rface","table$","t$erminal","termo$ptions","ti$cs",
- "ticsc$ale","ticsl$evel","timef$mt","tim$estamp","tit$le",
- "v$ariables","ve$rsion","vi$ew","xyp$lane","xda$ta","x2da$ta",
- "yda$ta","y2da$ta","zda$ta","cbda$ta","xl$abel","x2l$abel",
- "yl$abel","y2l$abel","zl$abel","cbl$abel","xti$cs","noxti$cs",
- "x2ti$cs","nox2ti$cs","yti$cs","noyti$cs","y2ti$cs","noy2ti$cs",
- "zti$cs","nozti$cs","cbti$cs","nocbti$cs","xdti$cs","noxdti$cs",
- "x2dti$cs","nox2dti$cs","ydti$cs","noydti$cs","y2dti$cs",
- "noy2dti$cs","zdti$cs","nozdti$cs","cbdti$cs","nocbdti$cs",
- "xmti$cs","noxmti$cs","x2mti$cs","nox2mti$cs","ymti$cs",
- "noymti$cs","y2mti$cs","noy2mti$cs","zmti$cs","nozmti$cs",
- "cbmti$cs","nocbmti$cs","xr$ange","x2r$ange","yr$ange",
- "y2r$ange","zr$ange","cbr$ange","rr$ange","tr$ange","ur$ange",
- "vr$ange","xzeroa$xis","x2zeroa$xis","yzeroa$xis","y2zeroa$xis",
- "zzeroa$xis","zeroa$xis","z$ero"), Name.Builtin, '#pop'),
- ],
- 'bind': [
- ('!', Keyword, '#pop'),
- (_shortened('all$windows'), Name.Builtin),
- include('genericargs'),
- ],
- 'quit': [
- (r'gnuplot\b', Keyword),
- include('noargs'),
- ],
- 'fit': [
- (r'via\b', Name.Builtin),
- include('plot'),
- ],
- 'if': [
- (r'\)', Punctuation, '#pop'),
- include('genericargs'),
- ],
- 'pause': [
- (r'(mouse|any|button1|button2|button3)\b', Name.Builtin),
- (_shortened('key$press'), Name.Builtin),
- include('genericargs'),
- ],
- 'plot': [
- (_shortened_many('ax$es', 'axi$s', 'bin$ary', 'ev$ery', 'i$ndex',
- 'mat$rix', 's$mooth', 'thru$', 't$itle',
- 'not$itle', 'u$sing', 'w$ith'),
- Name.Builtin),
- include('genericargs'),
- ],
- 'save': [
- (_shortened_many('f$unctions', 's$et', 't$erminal', 'v$ariables'),
- Name.Builtin),
- include('genericargs'),
- ],
- }
-
-
-class PovrayLexer(RegexLexer):
- """
- For `Persistence of Vision Raytracer <http://www.povray.org/>`_ files.
-
- *New in Pygments 0.11.*
- """
- name = 'POVRay'
- aliases = ['pov']
- filenames = ['*.pov', '*.inc']
- mimetypes = ['text/x-povray']
-
- tokens = {
- 'root': [
- (r'/\*[\w\W]*?\*/', Comment.Multiline),
- (r'//.*\n', Comment.Single),
- (r'(?s)"(?:\\.|[^"\\])+"', String.Double),
- (r'#(debug|default|else|end|error|fclose|fopen|ifdef|ifndef|'
- r'include|range|read|render|statistics|switch|undef|version|'
- r'warning|while|write|define|macro|local|declare)\b',
- Comment.Preproc),
- (r'\b(aa_level|aa_threshold|abs|acos|acosh|adaptive|adc_bailout|'
- r'agate|agate_turb|all|alpha|ambient|ambient_light|angle|'
- r'aperture|arc_angle|area_light|asc|asin|asinh|assumed_gamma|'
- r'atan|atan2|atanh|atmosphere|atmospheric_attenuation|'
- r'attenuating|average|background|black_hole|blue|blur_samples|'
- r'bounded_by|box_mapping|bozo|break|brick|brick_size|'
- r'brightness|brilliance|bumps|bumpy1|bumpy2|bumpy3|bump_map|'
- r'bump_size|case|caustics|ceil|checker|chr|clipped_by|clock|'
- r'color|color_map|colour|colour_map|component|composite|concat|'
- r'confidence|conic_sweep|constant|control0|control1|cos|cosh|'
- r'count|crackle|crand|cube|cubic_spline|cylindrical_mapping|'
- r'debug|declare|default|degrees|dents|diffuse|direction|'
- r'distance|distance_maximum|div|dust|dust_type|eccentricity|'
- r'else|emitting|end|error|error_bound|exp|exponent|'
- r'fade_distance|fade_power|falloff|falloff_angle|false|'
- r'file_exists|filter|finish|fisheye|flatness|flip|floor|'
- r'focal_point|fog|fog_alt|fog_offset|fog_type|frequency|gif|'
- r'global_settings|glowing|gradient|granite|gray_threshold|'
- r'green|halo|hexagon|hf_gray_16|hierarchy|hollow|hypercomplex|'
- r'if|ifdef|iff|image_map|incidence|include|int|interpolate|'
- r'inverse|ior|irid|irid_wavelength|jitter|lambda|leopard|'
- r'linear|linear_spline|linear_sweep|location|log|looks_like|'
- r'look_at|low_error_factor|mandel|map_type|marble|material_map|'
- r'matrix|max|max_intersections|max_iteration|max_trace_level|'
- r'max_value|metallic|min|minimum_reuse|mod|mortar|'
- r'nearest_count|no|normal|normal_map|no_shadow|number_of_waves|'
- r'octaves|off|offset|omega|omnimax|on|once|onion|open|'
- r'orthographic|panoramic|pattern1|pattern2|pattern3|'
- r'perspective|pgm|phase|phong|phong_size|pi|pigment|'
- r'pigment_map|planar_mapping|png|point_at|pot|pow|ppm|'
- r'precision|pwr|quadratic_spline|quaternion|quick_color|'
- r'quick_colour|quilted|radial|radians|radiosity|radius|rainbow|'
- r'ramp_wave|rand|range|reciprocal|recursion_limit|red|'
- r'reflection|refraction|render|repeat|rgb|rgbf|rgbft|rgbt|'
- r'right|ripples|rotate|roughness|samples|scale|scallop_wave|'
- r'scattering|seed|shadowless|sin|sine_wave|sinh|sky|sky_sphere|'
- r'slice|slope_map|smooth|specular|spherical_mapping|spiral|'
- r'spiral1|spiral2|spotlight|spotted|sqr|sqrt|statistics|str|'
- r'strcmp|strength|strlen|strlwr|strupr|sturm|substr|switch|sys|'
- r't|tan|tanh|test_camera_1|test_camera_2|test_camera_3|'
- r'test_camera_4|texture|texture_map|tga|thickness|threshold|'
- r'tightness|tile2|tiles|track|transform|translate|transmit|'
- r'triangle_wave|true|ttf|turbulence|turb_depth|type|'
- r'ultra_wide_angle|up|use_color|use_colour|use_index|u_steps|'
- r'val|variance|vaxis_rotate|vcross|vdot|version|vlength|'
- r'vnormalize|volume_object|volume_rendered|vol_with_light|'
- r'vrotate|v_steps|warning|warp|water_level|waves|while|width|'
- r'wood|wrinkles|yes)\b', Keyword),
- (r'(bicubic_patch|blob|box|camera|cone|cubic|cylinder|difference|'
- r'disc|height_field|intersection|julia_fractal|lathe|'
- r'light_source|merge|mesh|object|plane|poly|polygon|prism|'
- r'quadric|quartic|smooth_triangle|sor|sphere|superellipsoid|'
- r'text|torus|triangle|union)\b', Name.Builtin),
- # TODO: <=, etc
- (r'[\[\](){}<>;,]', Punctuation),
- (r'[-+*/=]', Operator),
- (r'\b(x|y|z|u|v)\b', Name.Builtin.Pseudo),
- (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
- (r'[0-9]+\.[0-9]*', Number.Float),
- (r'\.[0-9]+', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\s+', Text),
- ]
- }
-
-
-class AppleScriptLexer(RegexLexer):
- """
- For `AppleScript source code
- <http://developer.apple.com/documentation/AppleScript/
- Conceptual/AppleScriptLangGuide>`_,
- including `AppleScript Studio
- <http://developer.apple.com/documentation/AppleScript/
- Reference/StudioReference>`_.
- Contributed by Andreas Amann <aamann@mac.com>.
- """
-
- name = 'AppleScript'
- aliases = ['applescript']
- filenames = ['*.applescript']
-
- flags = re.MULTILINE | re.DOTALL
-
- Identifiers = r'[a-zA-Z]\w*'
- Literals = ['AppleScript', 'current application', 'false', 'linefeed',
- 'missing value', 'pi','quote', 'result', 'return', 'space',
- 'tab', 'text item delimiters', 'true', 'version']
- Classes = ['alias ', 'application ', 'boolean ', 'class ', 'constant ',
- 'date ', 'file ', 'integer ', 'list ', 'number ', 'POSIX file ',
- 'real ', 'record ', 'reference ', 'RGB color ', 'script ',
- 'text ', 'unit types', '(?:Unicode )?text', 'string']
- BuiltIn = ['attachment', 'attribute run', 'character', 'day', 'month',
- 'paragraph', 'word', 'year']
- HandlerParams = ['about', 'above', 'against', 'apart from', 'around',
- 'aside from', 'at', 'below', 'beneath', 'beside',
- 'between', 'for', 'given', 'instead of', 'on', 'onto',
- 'out of', 'over', 'since']
- Commands = ['ASCII (character|number)', 'activate', 'beep', 'choose URL',
- 'choose application', 'choose color', 'choose file( name)?',
- 'choose folder', 'choose from list',
- 'choose remote application', 'clipboard info',
- 'close( access)?', 'copy', 'count', 'current date', 'delay',
- 'delete', 'display (alert|dialog)', 'do shell script',
- 'duplicate', 'exists', 'get eof', 'get volume settings',
- 'info for', 'launch', 'list (disks|folder)', 'load script',
- 'log', 'make', 'mount volume', 'new', 'offset',
- 'open( (for access|location))?', 'path to', 'print', 'quit',
- 'random number', 'read', 'round', 'run( script)?',
- 'say', 'scripting components',
- 'set (eof|the clipboard to|volume)', 'store script',
- 'summarize', 'system attribute', 'system info',
- 'the clipboard', 'time to GMT', 'write', 'quoted form']
- References = ['(in )?back of', '(in )?front of', '[0-9]+(st|nd|rd|th)',
- 'first', 'second', 'third', 'fourth', 'fifth', 'sixth',
- 'seventh', 'eighth', 'ninth', 'tenth', 'after', 'back',
- 'before', 'behind', 'every', 'front', 'index', 'last',
- 'middle', 'some', 'that', 'through', 'thru', 'where', 'whose']
- Operators = ["and", "or", "is equal", "equals", "(is )?equal to", "is not",
- "isn't", "isn't equal( to)?", "is not equal( to)?",
- "doesn't equal", "does not equal", "(is )?greater than",
- "comes after", "is not less than or equal( to)?",
- "isn't less than or equal( to)?", "(is )?less than",
- "comes before", "is not greater than or equal( to)?",
- "isn't greater than or equal( to)?",
- "(is )?greater than or equal( to)?", "is not less than",
- "isn't less than", "does not come before",
- "doesn't come before", "(is )?less than or equal( to)?",
- "is not greater than", "isn't greater than",
- "does not come after", "doesn't come after", "starts? with",
- "begins? with", "ends? with", "contains?", "does not contain",
- "doesn't contain", "is in", "is contained by", "is not in",
- "is not contained by", "isn't contained by", "div", "mod",
- "not", "(a )?(ref( to)?|reference to)", "is", "does"]
- Control = ['considering', 'else', 'error', 'exit', 'from', 'if',
- 'ignoring', 'in', 'repeat', 'tell', 'then', 'times', 'to',
- 'try', 'until', 'using terms from', 'while', 'whith',
- 'with timeout( of)?', 'with transaction', 'by', 'continue',
- 'end', 'its?', 'me', 'my', 'return', 'of' , 'as']
- Declarations = ['global', 'local', 'prop(erty)?', 'set', 'get']
- Reserved = ['but', 'put', 'returning', 'the']
- StudioClasses = ['action cell', 'alert reply', 'application', 'box',
- 'browser( cell)?', 'bundle', 'button( cell)?', 'cell',
- 'clip view', 'color well', 'color-panel',
- 'combo box( item)?', 'control',
- 'data( (cell|column|item|row|source))?', 'default entry',
- 'dialog reply', 'document', 'drag info', 'drawer',
- 'event', 'font(-panel)?', 'formatter',
- 'image( (cell|view))?', 'matrix', 'menu( item)?', 'item',
- 'movie( view)?', 'open-panel', 'outline view', 'panel',
- 'pasteboard', 'plugin', 'popup button',
- 'progress indicator', 'responder', 'save-panel',
- 'scroll view', 'secure text field( cell)?', 'slider',
- 'sound', 'split view', 'stepper', 'tab view( item)?',
- 'table( (column|header cell|header view|view))',
- 'text( (field( cell)?|view))?', 'toolbar( item)?',
- 'user-defaults', 'view', 'window']
- StudioEvents = ['accept outline drop', 'accept table drop', 'action',
- 'activated', 'alert ended', 'awake from nib', 'became key',
- 'became main', 'begin editing', 'bounds changed',
- 'cell value', 'cell value changed', 'change cell value',
- 'change item value', 'changed', 'child of item',
- 'choose menu item', 'clicked', 'clicked toolbar item',
- 'closed', 'column clicked', 'column moved',
- 'column resized', 'conclude drop', 'data representation',
- 'deminiaturized', 'dialog ended', 'document nib name',
- 'double clicked', 'drag( (entered|exited|updated))?',
- 'drop', 'end editing', 'exposed', 'idle', 'item expandable',
- 'item value', 'item value changed', 'items changed',
- 'keyboard down', 'keyboard up', 'launched',
- 'load data representation', 'miniaturized', 'mouse down',
- 'mouse dragged', 'mouse entered', 'mouse exited',
- 'mouse moved', 'mouse up', 'moved',
- 'number of browser rows', 'number of items',
- 'number of rows', 'open untitled', 'opened', 'panel ended',
- 'parameters updated', 'plugin loaded', 'prepare drop',
- 'prepare outline drag', 'prepare outline drop',
- 'prepare table drag', 'prepare table drop',
- 'read from file', 'resigned active', 'resigned key',
- 'resigned main', 'resized( sub views)?',
- 'right mouse down', 'right mouse dragged',
- 'right mouse up', 'rows changed', 'scroll wheel',
- 'selected tab view item', 'selection changed',
- 'selection changing', 'should begin editing',
- 'should close', 'should collapse item',
- 'should end editing', 'should expand item',
- 'should open( untitled)?',
- 'should quit( after last window closed)?',
- 'should select column', 'should select item',
- 'should select row', 'should select tab view item',
- 'should selection change', 'should zoom', 'shown',
- 'update menu item', 'update parameters',
- 'update toolbar item', 'was hidden', 'was miniaturized',
- 'will become active', 'will close', 'will dismiss',
- 'will display browser cell', 'will display cell',
- 'will display item cell', 'will display outline cell',
- 'will finish launching', 'will hide', 'will miniaturize',
- 'will move', 'will open', 'will pop up', 'will quit',
- 'will resign active', 'will resize( sub views)?',
- 'will select tab view item', 'will show', 'will zoom',
- 'write to file', 'zoomed']
- StudioCommands = ['animate', 'append', 'call method', 'center',
- 'close drawer', 'close panel', 'display',
- 'display alert', 'display dialog', 'display panel', 'go',
- 'hide', 'highlight', 'increment', 'item for',
- 'load image', 'load movie', 'load nib', 'load panel',
- 'load sound', 'localized string', 'lock focus', 'log',
- 'open drawer', 'path for', 'pause', 'perform action',
- 'play', 'register', 'resume', 'scroll', 'select( all)?',
- 'show', 'size to fit', 'start', 'step back',
- 'step forward', 'stop', 'synchronize', 'unlock focus',
- 'update']
- StudioProperties = ['accepts arrow key', 'action method', 'active',
- 'alignment', 'allowed identifiers',
- 'allows branch selection', 'allows column reordering',
- 'allows column resizing', 'allows column selection',
- 'allows customization',
- 'allows editing text attributes',
- 'allows empty selection', 'allows mixed state',
- 'allows multiple selection', 'allows reordering',
- 'allows undo', 'alpha( value)?', 'alternate image',
- 'alternate increment value', 'alternate title',
- 'animation delay', 'associated file name',
- 'associated object', 'auto completes', 'auto display',
- 'auto enables items', 'auto repeat',
- 'auto resizes( outline column)?',
- 'auto save expanded items', 'auto save name',
- 'auto save table columns', 'auto saves configuration',
- 'auto scroll', 'auto sizes all columns to fit',
- 'auto sizes cells', 'background color', 'bezel state',
- 'bezel style', 'bezeled', 'border rect', 'border type',
- 'bordered', 'bounds( rotation)?', 'box type',
- 'button returned', 'button type',
- 'can choose directories', 'can choose files',
- 'can draw', 'can hide',
- 'cell( (background color|size|type))?', 'characters',
- 'class', 'click count', 'clicked( data)? column',
- 'clicked data item', 'clicked( data)? row',
- 'closeable', 'collating', 'color( (mode|panel))',
- 'command key down', 'configuration',
- 'content(s| (size|view( margins)?))?', 'context',
- 'continuous', 'control key down', 'control size',
- 'control tint', 'control view',
- 'controller visible', 'coordinate system',
- 'copies( on scroll)?', 'corner view', 'current cell',
- 'current column', 'current( field)? editor',
- 'current( menu)? item', 'current row',
- 'current tab view item', 'data source',
- 'default identifiers', 'delta (x|y|z)',
- 'destination window', 'directory', 'display mode',
- 'displayed cell', 'document( (edited|rect|view))?',
- 'double value', 'dragged column', 'dragged distance',
- 'dragged items', 'draws( cell)? background',
- 'draws grid', 'dynamically scrolls', 'echos bullets',
- 'edge', 'editable', 'edited( data)? column',
- 'edited data item', 'edited( data)? row', 'enabled',
- 'enclosing scroll view', 'ending page',
- 'error handling', 'event number', 'event type',
- 'excluded from windows menu', 'executable path',
- 'expanded', 'fax number', 'field editor', 'file kind',
- 'file name', 'file type', 'first responder',
- 'first visible column', 'flipped', 'floating',
- 'font( panel)?', 'formatter', 'frameworks path',
- 'frontmost', 'gave up', 'grid color', 'has data items',
- 'has horizontal ruler', 'has horizontal scroller',
- 'has parent data item', 'has resize indicator',
- 'has shadow', 'has sub menu', 'has vertical ruler',
- 'has vertical scroller', 'header cell', 'header view',
- 'hidden', 'hides when deactivated', 'highlights by',
- 'horizontal line scroll', 'horizontal page scroll',
- 'horizontal ruler view', 'horizontally resizable',
- 'icon image', 'id', 'identifier',
- 'ignores multiple clicks',
- 'image( (alignment|dims when disabled|frame style|'
- 'scaling))?',
- 'imports graphics', 'increment value',
- 'indentation per level', 'indeterminate', 'index',
- 'integer value', 'intercell spacing', 'item height',
- 'key( (code|equivalent( modifier)?|window))?',
- 'knob thickness', 'label', 'last( visible)? column',
- 'leading offset', 'leaf', 'level', 'line scroll',
- 'loaded', 'localized sort', 'location', 'loop mode',
- 'main( (bunde|menu|window))?', 'marker follows cell',
- 'matrix mode', 'maximum( content)? size',
- 'maximum visible columns',
- 'menu( form representation)?', 'miniaturizable',
- 'miniaturized', 'minimized image', 'minimized title',
- 'minimum column width', 'minimum( content)? size',
- 'modal', 'modified', 'mouse down state',
- 'movie( (controller|file|rect))?', 'muted', 'name',
- 'needs display', 'next state', 'next text',
- 'number of tick marks', 'only tick mark values',
- 'opaque', 'open panel', 'option key down',
- 'outline table column', 'page scroll', 'pages across',
- 'pages down', 'palette label', 'pane splitter',
- 'parent data item', 'parent window', 'pasteboard',
- 'path( (names|separator))?', 'playing',
- 'plays every frame', 'plays selection only', 'position',
- 'preferred edge', 'preferred type', 'pressure',
- 'previous text', 'prompt', 'properties',
- 'prototype cell', 'pulls down', 'rate',
- 'released when closed', 'repeated',
- 'requested print time', 'required file type',
- 'resizable', 'resized column', 'resource path',
- 'returns records', 'reuses columns', 'rich text',
- 'roll over', 'row height', 'rulers visible',
- 'save panel', 'scripts path', 'scrollable',
- 'selectable( identifiers)?', 'selected cell',
- 'selected( data)? columns?', 'selected data items?',
- 'selected( data)? rows?', 'selected item identifier',
- 'selection by rect', 'send action on arrow key',
- 'sends action when done editing', 'separates columns',
- 'separator item', 'sequence number', 'services menu',
- 'shared frameworks path', 'shared support path',
- 'sheet', 'shift key down', 'shows alpha',
- 'shows state by', 'size( mode)?',
- 'smart insert delete enabled', 'sort case sensitivity',
- 'sort column', 'sort order', 'sort type',
- 'sorted( data rows)?', 'sound', 'source( mask)?',
- 'spell checking enabled', 'starting page', 'state',
- 'string value', 'sub menu', 'super menu', 'super view',
- 'tab key traverses cells', 'tab state', 'tab type',
- 'tab view', 'table view', 'tag', 'target( printer)?',
- 'text color', 'text container insert',
- 'text container origin', 'text returned',
- 'tick mark position', 'time stamp',
- 'title(d| (cell|font|height|position|rect))?',
- 'tool tip', 'toolbar', 'trailing offset', 'transparent',
- 'treat packages as directories', 'truncated labels',
- 'types', 'unmodified characters', 'update views',
- 'use sort indicator', 'user defaults',
- 'uses data source', 'uses ruler',
- 'uses threaded animation',
- 'uses title from previous column', 'value wraps',
- 'version',
- 'vertical( (line scroll|page scroll|ruler view))?',
- 'vertically resizable', 'view',
- 'visible( document rect)?', 'volume', 'width', 'window',
- 'windows menu', 'wraps', 'zoomable', 'zoomed']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (ur'¬\n', String.Escape),
- (r"'s\s+", Text), # This is a possessive, consider moving
- (r'(--|#).*?$', Comment),
- (r'\(\*', Comment.Multiline, 'comment'),
- (r'[\(\){}!,.:]', Punctuation),
- (ur'(«)([^»]+)(»)',
- bygroups(Text, Name.Builtin, Text)),
- (r'\b((?:considering|ignoring)\s*)'
- r'(application responses|case|diacriticals|hyphens|'
- r'numeric strings|punctuation|white space)',
- bygroups(Keyword, Name.Builtin)),
- (ur'(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)', Operator),
- (r"\b(%s)\b" % '|'.join(Operators), Operator.Word),
- (r'^(\s*(?:on|end)\s+)'
- r'(%s)' % '|'.join(StudioEvents[::-1]),
- bygroups(Keyword, Name.Function)),
- (r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)),
- (r'\b(as )(%s)\b' % '|'.join(Classes),
- bygroups(Keyword, Name.Class)),
- (r'\b(%s)\b' % '|'.join(Literals), Name.Constant),
- (r'\b(%s)\b' % '|'.join(Commands), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(Control), Keyword),
- (r'\b(%s)\b' % '|'.join(Declarations), Keyword),
- (r'\b(%s)\b' % '|'.join(Reserved), Name.Builtin),
- (r'\b(%s)s?\b' % '|'.join(BuiltIn), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(HandlerParams), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(StudioProperties), Name.Attribute),
- (r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(References), Name.Builtin),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r'\b(%s)\b' % Identifiers, Name.Variable),
- (r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float),
- (r'[-+]?\d+', Number.Integer),
- ],
- 'comment': [
- ('\(\*', Comment.Multiline, '#push'),
- ('\*\)', Comment.Multiline, '#pop'),
- ('[^*(]+', Comment.Multiline),
- ('[*(]', Comment.Multiline),
- ],
- }
-
-
-class ModelicaLexer(RegexLexer):
- """
- For `Modelica <http://www.modelica.org/>`_ source code.
-
- *New in Pygments 1.1.*
- """
- name = 'Modelica'
- aliases = ['modelica']
- filenames = ['*.mo']
- mimetypes = ['text/x-modelica']
-
- flags = re.IGNORECASE | re.DOTALL
-
- tokens = {
- 'whitespace': [
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment),
- ],
- 'statements': [
- (r'"', String, 'string'),
- (r'(\d+\.\d*|\.\d+|\d+|\d.)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+)', Number.Float),
- (r'\d+[Ll]?', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\]{},.;]', Punctuation),
- (r'(true|false|NULL|Real|Integer|Boolean)\b', Name.Builtin),
- (r"([a-zA-Z_][\w]*|'[a-zA-Z_\+\-\*\/\^][\w]*')"
- r"(\.([a-zA-Z_][\w]*|'[a-zA-Z_\+\-\*\/\^][\w]*'))+", Name.Class),
- (r"('[\w\+\-\*\/\^]+'|\w+)", Name),
- ],
- 'root': [
- include('whitespace'),
- include('keywords'),
- include('functions'),
- include('operators'),
- include('classes'),
- (r'("<html>|<html>)', Name.Tag, 'html-content'),
- include('statements'),
- ],
- 'keywords': [
- (r'(algorithm|annotation|break|connect|constant|constrainedby|'
- r'discrete|each|else|elseif|elsewhen|encapsulated|enumeration|'
- r'end|equation|exit|expandable|extends|'
- r'external|false|final|flow|for|if|import|impure|in|initial\sequation|'
- r'inner|input|loop|nondiscrete|outer|output|parameter|partial|'
- r'protected|public|pure|redeclare|replaceable|stream|time|then|true|'
- r'when|while|within)\b', Keyword),
- ],
- 'functions': [
- (r'(abs|acos|acosh|asin|asinh|atan|atan2|atan3|ceil|cos|cosh|'
- r'cross|div|exp|floor|getInstanceName|log|log10|mod|rem|'
- r'semiLinear|sign|sin|sinh|size|spatialDistribution|sqrt|tan|'
- r'tanh|zeros)\b', Name.Function),
- ],
- 'operators': [
- (r'(actualStream|and|assert|cardinality|change|Clock|delay|der|edge|'
- r'hold|homotopy|initial|inStream|noEvent|not|or|pre|previous|reinit|'
- r'return|sample|smooth|spatialDistribution|subSample|terminal|'
- r'terminate)\b', Name.Builtin),
- ],
- 'classes': [
- (r'(block|class|connector|function|model|package|'
- r'record|type)(\s+)([A-Za-z_]+)',
- bygroups(Keyword, Text, Name.Class))
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})',
- String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'html-content': [
- (r'<\s*/\s*html\s*>', Name.Tag, '#pop'),
- (r'.+?(?=<\s*/\s*html\s*>)', using(HtmlLexer)),
- ]
- }
-
-
-class RebolLexer(RegexLexer):
- """
- A `REBOL <http://www.rebol.com/>`_ lexer.
-
- *New in Pygments 1.1.*
- """
- name = 'REBOL'
- aliases = ['rebol']
- filenames = ['*.r', '*.r3']
- mimetypes = ['text/x-rebol']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- re.IGNORECASE
-
- escape_re = r'(?:\^\([0-9a-fA-F]{1,4}\)*)'
-
- def word_callback(lexer, match):
- word = match.group()
-
- if re.match(".*:$", word):
- yield match.start(), Generic.Subheading, word
- elif re.match(
- r'(native|alias|all|any|as-string|as-binary|bind|bound\?|case|'
- r'catch|checksum|comment|debase|dehex|exclude|difference|disarm|'
- r'either|else|enbase|foreach|remove-each|form|free|get|get-env|if|'
- r'in|intersect|loop|minimum-of|maximum-of|mold|new-line|'
- r'new-line\?|not|now|prin|print|reduce|compose|construct|repeat|'
- r'reverse|save|script\?|set|shift|switch|throw|to-hex|trace|try|'
- r'type\?|union|unique|unless|unprotect|unset|until|use|value\?|'
- r'while|compress|decompress|secure|open|close|read|read-io|'
- r'write-io|write|update|query|wait|input\?|exp|log-10|log-2|'
- r'log-e|square-root|cosine|sine|tangent|arccosine|arcsine|'
- r'arctangent|protect|lowercase|uppercase|entab|detab|connected\?|'
- r'browse|launch|stats|get-modes|set-modes|to-local-file|'
- r'to-rebol-file|encloak|decloak|create-link|do-browser|bind\?|'
- r'hide|draw|show|size-text|textinfo|offset-to-caret|'
- r'caret-to-offset|local-request-file|rgb-to-hsv|hsv-to-rgb|'
- r'crypt-strength\?|dh-make-key|dh-generate-key|dh-compute-key|'
- r'dsa-make-key|dsa-generate-key|dsa-make-signature|'
- r'dsa-verify-signature|rsa-make-key|rsa-generate-key|'
- r'rsa-encrypt)$', word):
- yield match.start(), Name.Builtin, word
- elif re.match(
- r'(add|subtract|multiply|divide|remainder|power|and~|or~|xor~|'
- r'minimum|maximum|negate|complement|absolute|random|head|tail|'
- r'next|back|skip|at|pick|first|second|third|fourth|fifth|sixth|'
- r'seventh|eighth|ninth|tenth|last|path|find|select|make|to|copy\*|'
- r'insert|remove|change|poke|clear|trim|sort|min|max|abs|cp|'
- r'copy)$', word):
- yield match.start(), Name.Function, word
- elif re.match(
- r'(error|source|input|license|help|install|echo|Usage|with|func|'
- r'throw-on-error|function|does|has|context|probe|\?\?|as-pair|'
- r'mod|modulo|round|repend|about|set-net|append|join|rejoin|reform|'
- r'remold|charset|array|replace|move|extract|forskip|forall|alter|'
- r'first+|also|take|for|forever|dispatch|attempt|what-dir|'
- r'change-dir|clean-path|list-dir|dirize|rename|split-path|delete|'
- r'make-dir|delete-dir|in-dir|confirm|dump-obj|upgrade|what|'
- r'build-tag|process-source|build-markup|decode-cgi|read-cgi|'
- r'write-user|save-user|set-user-name|protect-system|parse-xml|'
- r'cvs-date|cvs-version|do-boot|get-net-info|desktop|layout|'
- r'scroll-para|get-face|alert|set-face|uninstall|unfocus|'
- r'request-dir|center-face|do-events|net-error|decode-url|'
- r'parse-header|parse-header-date|parse-email-addrs|import-email|'
- r'send|build-attach-body|resend|show-popup|hide-popup|open-events|'
- r'find-key-face|do-face|viewtop|confine|find-window|'
- r'insert-event-func|remove-event-func|inform|dump-pane|dump-face|'
- r'flag-face|deflag-face|clear-fields|read-net|vbug|path-thru|'
- r'read-thru|load-thru|do-thru|launch-thru|load-image|'
- r'request-download|do-face-alt|set-font|set-para|get-style|'
- r'set-style|make-face|stylize|choose|hilight-text|hilight-all|'
- r'unlight-text|focus|scroll-drag|clear-face|reset-face|scroll-face|'
- r'resize-face|load-stock|load-stock-block|notify|request|flash|'
- r'request-color|request-pass|request-text|request-list|'
- r'request-date|request-file|dbug|editor|link-relative-path|'
- r'emailer|parse-error)$', word):
- yield match.start(), Keyword.Namespace, word
- elif re.match(
- r'(halt|quit|do|load|q|recycle|call|run|ask|parse|view|unview|'
- r'return|exit|break)$', word):
- yield match.start(), Name.Exception, word
- elif re.match('REBOL$', word):
- yield match.start(), Generic.Heading, word
- elif re.match("to-.*", word):
- yield match.start(), Keyword, word
- elif re.match('(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$',
- word):
- yield match.start(), Operator, word
- elif re.match(".*\?$", word):
- yield match.start(), Keyword, word
- elif re.match(".*\!$", word):
- yield match.start(), Keyword.Type, word
- elif re.match("'.*", word):
- yield match.start(), Name.Variable.Instance, word # lit-word
- elif re.match("#.*", word):
- yield match.start(), Name.Label, word # issue
- elif re.match("%.*", word):
- yield match.start(), Name.Decorator, word # file
- else:
- yield match.start(), Name.Variable, word
-
- tokens = {
- 'root': [
- (r'REBOL', Generic.Strong, 'script'),
- (r'R', Comment),
- (r'[^R]+', Comment),
- ],
- 'script': [
- (r'\s+', Text),
- (r'#"', String.Char, 'char'),
- (r'#{[0-9a-fA-F]*}', Number.Hex),
- (r'2#{', Number.Hex, 'bin2'),
- (r'64#{[0-9a-zA-Z+/=\s]*}', Number.Hex),
- (r'"', String, 'string'),
- (r'{', String, 'string2'),
- (r';#+.*\n', Comment.Special),
- (r';\*+.*\n', Comment.Preproc),
- (r';.*\n', Comment),
- (r'%"', Name.Decorator, 'stringFile'),
- (r'%[^(\^{^")\s\[\]]+', Name.Decorator),
- (r'[+-]?([a-zA-Z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
- (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
- (r'\d+\-[0-9a-zA-Z]+\-\d+(\/\d+\:\d+(\:\d+)?'
- r'([\.\d+]?([+-]?\d+:\d+)?)?)?', String.Other), # date
- (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
- (r'\d+[xX]\d+', Keyword.Constant), # pair
- (r'[+-]?\d+(\'\d+)?([\.,]\d*)?[eE][+-]?\d+', Number.Float),
- (r'[+-]?\d+(\'\d+)?[\.,]\d*', Number.Float),
- (r'[+-]?\d+(\'\d+)?', Number),
- (r'[\[\]\(\)]', Generic.Strong),
- (r'[a-zA-Z]+[^(\^{"\s:)]*://[^(\^{"\s)]*', Name.Decorator), # url
- (r'mailto:[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # url
- (r'[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # email
- (r'comment\s', Comment, 'comment'),
- (r'/[^(\^{^")\s/[\]]*', Name.Attribute),
- (r'([^(\^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
- (r'<[a-zA-Z0-9:._-]*>', Name.Tag),
- (r'<[^(<>\s")]+', Name.Tag, 'tag'),
- (r'([^(\^{^")\s]+)', Text),
- ],
- 'string': [
- (r'[^(\^")]+', String),
- (escape_re, String.Escape),
- (r'[\(|\)]+', String),
- (r'\^.', String.Escape),
- (r'"', String, '#pop'),
- ],
- 'string2': [
- (r'[^(\^{^})]+', String),
- (escape_re, String.Escape),
- (r'[\(|\)]+', String),
- (r'\^.', String.Escape),
- (r'{', String, '#push'),
- (r'}', String, '#pop'),
- ],
- 'stringFile': [
- (r'[^(\^")]+', Name.Decorator),
- (escape_re, Name.Decorator),
- (r'\^.', Name.Decorator),
- (r'"', Name.Decorator, '#pop'),
- ],
- 'char': [
- (escape_re + '"', String.Char, '#pop'),
- (r'\^."', String.Char, '#pop'),
- (r'."', String.Char, '#pop'),
- ],
- 'tag': [
- (escape_re, Name.Tag),
- (r'"', Name.Tag, 'tagString'),
- (r'[^(<>\r\n")]+', Name.Tag),
- (r'>', Name.Tag, '#pop'),
- ],
- 'tagString': [
- (r'[^(\^")]+', Name.Tag),
- (escape_re, Name.Tag),
- (r'[\(|\)]+', Name.Tag),
- (r'\^.', Name.Tag),
- (r'"', Name.Tag, '#pop'),
- ],
- 'tuple': [
- (r'(\d+\.)+', Keyword.Constant),
- (r'\d+', Keyword.Constant, '#pop'),
- ],
- 'bin2': [
- (r'\s+', Number.Hex),
- (r'([0-1]\s*){8}', Number.Hex),
- (r'}', Number.Hex, '#pop'),
- ],
- 'comment': [
- (r'"', Comment, 'commentString1'),
- (r'{', Comment, 'commentString2'),
- (r'\[', Comment, 'commentBlock'),
- (r'[^(\s{\"\[]+', Comment, '#pop'),
- ],
- 'commentString1': [
- (r'[^(\^")]+', Comment),
- (escape_re, Comment),
- (r'[\(|\)]+', Comment),
- (r'\^.', Comment),
- (r'"', Comment, '#pop'),
- ],
- 'commentString2': [
- (r'[^(\^{^})]+', Comment),
- (escape_re, Comment),
- (r'[\(|\)]+', Comment),
- (r'\^.', Comment),
- (r'{', Comment, '#push'),
- (r'}', Comment, '#pop'),
- ],
- 'commentBlock': [
- (r'\[', Comment, '#push'),
- (r'\]', Comment, '#pop'),
- (r'[^(\[\])]+', Comment),
- ],
- }
-
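The word_callback above shows Pygments' token-callback mechanism: a rule may name a function instead of a token type, and that function yields (index, tokentype, value) triples itself, which is how REBOL words get classified by their shape. A minimal sketch of the same pattern, using a hypothetical two-rule lexer rather than REBOL itself:

    from pygments.lexer import RegexLexer
    from pygments.token import Name, Text

    def word_callback(lexer, match):
        # classify the matched word by shape, as the REBOL callback above does
        word = match.group()
        if word.endswith(':'):                       # set-word, e.g. "size:"
            yield match.start(), Name.Label, word
        else:
            yield match.start(), Name.Variable, word

    class CallbackDemoLexer(RegexLexer):
        """Hypothetical mini-lexer; only the callback mechanism is the point."""
        tokens = {
            'root': [
                (r'[a-z]+:?', word_callback),
                (r'\s+', Text),
            ]
        }

    print(list(CallbackDemoLexer().get_tokens('size: compute')))
    # [(Token.Name.Label, 'size:'), (Token.Text, ' '),
    #  (Token.Name.Variable, 'compute'), (Token.Text, '\n')]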
-
-class ABAPLexer(RegexLexer):
- """
- Lexer for ABAP, SAP's integrated language.
-
- *New in Pygments 1.1.*
- """
- name = 'ABAP'
- aliases = ['abap']
- filenames = ['*.abap']
- mimetypes = ['text/x-abap']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'common': [
- (r'\s+', Text),
- (r'^\*.*$', Comment.Single),
- (r'\".*?\n', Comment.Single),
- ],
- 'variable-names': [
- (r'<[\S_]+>', Name.Variable),
- (r'\w[\w~]*(?:(\[\])|->\*)?', Name.Variable),
- ],
- 'root': [
- include('common'),
- #function calls
- (r'(CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION))(\s+)(\'?\S+\'?)',
- bygroups(Keyword, Text, Name.Function)),
- (r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|'
- r'TRANSACTION|TRANSFORMATION))\b',
- Keyword),
- (r'(FORM|PERFORM)(\s+)(\w+)',
- bygroups(Keyword, Text, Name.Function)),
- (r'(PERFORM)(\s+)(\()(\w+)(\))',
- bygroups(Keyword, Text, Punctuation, Name.Variable, Punctuation )),
- (r'(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)',
- bygroups(Keyword, Text, Name.Function, Text, Keyword)),
-
- # method implementation
- (r'(METHOD)(\s+)([\w~]+)',
- bygroups(Keyword, Text, Name.Function)),
- # method calls
- (r'(\s+)([\w\-]+)([=\-]>)([\w\-~]+)',
- bygroups(Text, Name.Variable, Operator, Name.Function)),
- # call methodnames returning style
- (r'(?<=(=|-)>)([\w\-~]+)(?=\()', Name.Function),
-
- # Keywords with dashes in them.
- # These need to come first: the -ID part of MESSAGE-ID, for
- # instance, would not be highlighted if plain MESSAGE appeared
- # earlier in the list (see the ordering sketch after this class).
- (r'(ADD-CORRESPONDING|AUTHORITY-CHECK|'
- r'CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|'
- r'DELETE-ADJACENT|DIVIDE-CORRESPONDING|'
- r'EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|'
- r'FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|'
- r'INTERFACE-POOL|INVERTED-DATE|'
- r'LOAD-OF-PROGRAM|LOG-POINT|'
- r'MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|'
- r'NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|'
- r'OUTPUT-LENGTH|PRINT-CONTROL|'
- r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|'
- r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|'
- r'TYPE-POOL|TYPE-POOLS'
- r')\b', Keyword),
-
- # keyword combinations
- (r'CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|'
- r'((PUBLIC|PRIVATE|PROTECTED)\s+SECTION|'
- r'(TYPE|LIKE)(\s+(LINE\s+OF|REF\s+TO|'
- r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|'
- r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|'
- r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|'
- r'GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|'
- r'PF-STATUS|(PROPERTY|REFERENCE)\s+OF|'
- r'RUN\s+TIME|TIME\s+(STAMP)?)?|'
- r'SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|'
- r'HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|'
- r'LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|'
- r'RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|'
- r'TITLEBAR|UPDATE\s+TASK\s+LOCAL|USER-COMMAND)|'
- r'CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|'
- r'(CLOSE|OPEN)\s+(DATASET|CURSOR)|'
- r'(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|'
- r'DATABASE|SHARED\s+(MEMORY|BUFFER))|'
- r'DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|'
- r'FREE\s(MEMORY|OBJECT)?|'
- r'PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|'
- r'ON\s+(VALUE-REQUEST|HELP-REQUEST))|'
- r'AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|'
- r'AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|'
- r'END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|'
- r'SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|'
- r'SCREEN)|COMMENT|FUNCTION\s+KEY|'
- r'INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|'
- r'SKIP|ULINE)|'
- r'LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|'
- r'TO LIST-PROCESSING|TO TRANSACTION)'
- r'(ENDING|STARTING)\s+AT|'
- r'FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|'
- r'AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|'
- r'WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|'
- r'(BEGIN|END)\s+OF|'
- r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|'
- r'COMPARING(\s+ALL\s+FIELDS)?|'
- r'INSERT(\s+INITIAL\s+LINE\s+INTO|\s+LINES\s+OF)?|'
- r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|'
- r'END-OF-(DEFINITION|PAGE|SELECTION)|'
- r'WITH\s+FRAME(\s+TITLE)|'
-
- # simple combinations
- r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|'
- r'IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|'
- r'LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|'
- r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|'
- r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|'
- r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|'
- r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE)\b', Keyword),
-
- # single word keywords.
- (r'(^|(?<=(\s|\.)))(ABBREVIATED|ADD|ALIASES|APPEND|ASSERT|'
- r'ASSIGN(ING)?|AT(\s+FIRST)?|'
- r'BACK|BLOCK|BREAK-POINT|'
- r'CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|'
- r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|'
- r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|'
- r'DATA|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|'
- r'DETAIL|DIRECTORY|DIVIDE|DO|'
- r'ELSE(IF)?|ENDAT|ENDCASE|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|'
- r'ENDIF|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|'
- r'ENHANCEMENT|EVENTS|EXCEPTIONS|EXIT|EXPORT|EXPORTING|EXTRACT|'
- r'FETCH|FIELDS?|FIND|FOR|FORM|FORMAT|FREE|FROM|'
- r'HIDE|'
- r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|'
- r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|'
- r'LENGTH|LINES|LOAD|LOCAL|'
- r'JOIN|'
- r'KEY|'
- r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFY|MOVE|MULTIPLY|'
- r'NODES|'
- r'OBLIGATORY|OF|OFF|ON|OVERLAY|'
- r'PACK|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|'
- r'RAISE|RAISING|RANGES|READ|RECEIVE|REFRESH|REJECT|REPORT|RESERVE|'
- r'RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|'
- r'SCROLL|SEARCH|SELECT|SHIFT|SINGLE|SKIP|SORT|SPLIT|STATICS|STOP|'
- r'SUBMIT|SUBTRACT|SUM|SUMMARY|SUMMING|SUPPLY|'
- r'TABLE|TABLES|TIMES|TITLE|TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|'
- r'ULINE|UNDER|UNPACK|UPDATE|USING|'
- r'VALUE|VALUES|VIA|'
- r'WAIT|WHEN|WHERE|WHILE|WITH|WINDOW|WRITE)\b', Keyword),
-
- # builtins
- (r'(abs|acos|asin|atan|'
- r'boolc|boolx|bit_set|'
- r'char_off|charlen|ceil|cmax|cmin|condense|contains|'
- r'contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|'
- r'count|count_any_of|count_any_not_of|'
- r'dbmaxlen|distance|'
- r'escape|exp|'
- r'find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|'
- r'insert|'
- r'lines|log|log10|'
- r'match|matches|'
- r'nmax|nmin|numofchar|'
- r'repeat|replace|rescale|reverse|round|'
- r'segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|'
- r'substring|substring_after|substring_from|substring_before|substring_to|'
- r'tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|'
- r'xstrlen)(\()\b', bygroups(Name.Builtin, Punctuation)),
-
- (r'&[0-9]', Name),
- (r'[0-9]+', Number.Integer),
-
- # operators which look like variable names before
- # parsing variable names.
- (r'(?<=(\s|.))(AND|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|'
- r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|'
- r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator),
-
- include('variable-names'),
-
- # standard operators after variable names,
- # because < and > are part of field symbols.
- (r'[?*<>=\-+]', Operator),
- (r"'(''|[^'])*'", String.Single),
- (r'[/;:()\[\],\.]', Punctuation)
- ],
- }
-
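As the ordering comment in the root state notes, RegexLexer tries rules in order, so the dashed keywords have to be listed before their plain prefixes. A minimal, hypothetical illustration (OrderDemoLexer is not part of the diff):

    from pygments.lexer import RegexLexer
    from pygments.token import Keyword, Name, Text

    class OrderDemoLexer(RegexLexer):
        """Hypothetical two-keyword lexer; only the rule order matters here."""
        tokens = {
            'root': [
                (r'MESSAGE-ID\b', Keyword),   # dashed keyword first ...
                (r'MESSAGE\b', Keyword),      # ... then the plain keyword
                (r'[\w-]+', Name),
                (r'\s+', Text),
            ]
        }

    print(list(OrderDemoLexer().get_tokens('MESSAGE-ID zabc')))
    # [(Token.Keyword, 'MESSAGE-ID'), (Token.Text, ' '),
    #  (Token.Name, 'zabc'), (Token.Text, '\n')]
    # With the two keyword rules swapped, MESSAGE would match first and the
    # trailing -ID would fall through to the Name rule.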
-
-class NewspeakLexer(RegexLexer):
- """
- For `Newspeak <http://newspeaklanguage.org/>`_ syntax.
- """
- name = 'Newspeak'
- filenames = ['*.ns2']
- aliases = ['newspeak', ]
- mimetypes = ['text/x-newspeak']
-
- tokens = {
- 'root': [
- (r'\b(Newsqueak2)\b', Keyword.Declaration),
- (r"'[^']*'", String),
- (r'\b(class)(\s+)([a-zA-Z0-9_]+)(\s*)',
- bygroups(Keyword.Declaration, Text, Name.Class, Text)),
- (r'\b(mixin|self|super|private|public|protected|nil|true|false)\b',
- Keyword),
- (r'([a-zA-Z0-9_]+\:)(\s*)([a-zA-Z_]\w+)',
- bygroups(Name.Function, Text, Name.Variable)),
- (r'([a-zA-Z0-9_]+)(\s*)(=)',
- bygroups(Name.Attribute, Text, Operator)),
- (r'<[a-zA-Z0-9_]+>', Comment.Special),
- include('expressionstat'),
- include('whitespace')
- ],
-
- 'expressionstat': [
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'\d+', Number.Integer),
- (r':\w+', Name.Variable),
- (r'(\w+)(::)', bygroups(Name.Variable, Operator)),
- (r'\w+:', Name.Function),
- (r'\w+', Name.Variable),
- (r'\(|\)', Punctuation),
- (r'\[|\]', Punctuation),
- (r'\{|\}', Punctuation),
-
- (r'(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)', Operator),
- (r'\.|;', Punctuation),
- include('whitespace'),
- include('literals'),
- ],
- 'literals': [
- (r'\$.', String),
- (r"'[^']*'", String),
- (r"#'[^']*'", String.Symbol),
- (r"#\w+:?", String.Symbol),
- (r"#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+", String.Symbol)
-
- ],
- 'whitespace': [
- (r'\s+', Text),
- (r'"[^"]*"', Comment)
- ]
- }
-
-
-class GherkinLexer(RegexLexer):
- """
- For `Gherkin <http://github.com/aslakhellesoy/gherkin/>`_ syntax.
-
- *New in Pygments 1.2.*
- """
- name = 'Gherkin'
- aliases = ['Cucumber', 'cucumber', 'Gherkin', 'gherkin']
- filenames = ['*.feature']
- mimetypes = ['text/x-gherkin']
-
- feature_keywords = ur'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
- feature_element_keywords = ur'^(\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
- examples_keywords = ur'^(\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
- step_keywords = ur'^(\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假如|但是|但し|並且|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )'
-
- tokens = {
- 'comments': [
- (r'#.*$', Comment),
- ],
- 'feature_elements' : [
- (step_keywords, Keyword, "step_content_stack"),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'feature_elements_on_stack' : [
- (step_keywords, Keyword, "#pop:2"),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'examples_table': [
- (r"\s+\|", Keyword, 'examples_table_header'),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'examples_table_header': [
- (r"\s+\|\s*$", Keyword, "#pop:2"),
- include('comments'),
- (r"\s*\|", Keyword),
- (r"[^\|]", Name.Variable),
- ],
- 'scenario_sections_on_stack': [
- (feature_element_keywords, bygroups(Name.Function, Keyword, Keyword, Name.Function), "feature_elements_on_stack"),
- ],
- 'narrative': [
- include('scenario_sections_on_stack'),
- (r"(\s|.)", Name.Function),
- ],
- 'table_vars': [
- (r'(<[^>]+>)', Name.Variable),
- ],
- 'numbers': [
- (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String),
- ],
- 'string': [
- include('table_vars'),
- (r'(\s|.)', String),
- ],
- 'py_string': [
- (r'"""', Keyword, "#pop"),
- include('string'),
- ],
- 'step_content_root':[
- (r"$", Keyword, "#pop"),
- include('step_content'),
- ],
- 'step_content_stack':[
- (r"$", Keyword, "#pop:2"),
- include('step_content'),
- ],
- 'step_content':[
- (r'"', Name.Function, "double_string"),
- include('table_vars'),
- include('numbers'),
- include('comments'),
- (r'(\s|.)', Name.Function),
- ],
- 'table_content': [
- (r"\s+\|\s*$", Keyword, "#pop"),
- include('comments'),
- (r"\s*\|", Keyword),
- include('string'),
- ],
- 'double_string': [
- (r'"', Name.Function, "#pop"),
- include('string'),
- ],
- 'root': [
- (r'\n', Name.Function),
- include('comments'),
- (r'"""', Keyword, "py_string"),
- (r'\s+\|', Keyword, 'table_content'),
- (r'"', Name.Function, "double_string"),
- include('table_vars'),
- include('numbers'),
- (r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)),
- (step_keywords, bygroups(Name.Function, Keyword),
- 'step_content_root'),
- (feature_keywords, bygroups(Keyword, Keyword, Name.Function),
- 'narrative'),
- (feature_element_keywords,
- bygroups(Name.Function, Keyword, Keyword, Name.Function),
- 'feature_elements'),
- (examples_keywords,
- bygroups(Name.Function, Keyword, Keyword, Name.Function),
- 'examples_table'),
- (r'(\s|.)', Name.Function),
- ]
- }
-
-
-class AsymptoteLexer(RegexLexer):
- """
- For `Asymptote <http://asymptote.sf.net/>`_ source code.
-
- *New in Pygments 1.2.*
- """
- name = 'Asymptote'
- aliases = ['asy', 'asymptote']
- filenames = ['*.asy']
- mimetypes = ['text/x-asymptote']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
-
- tokens = {
- 'whitespace': [
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment),
- ],
- 'statements': [
- # simple string (TeX friendly)
- (r'"(\\\\|\\"|[^"])*"', String),
- # C style string (with character escapes)
- (r"'", String, 'string'),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'0[0-7]+[Ll]?', Number.Oct),
- (r'\d+[Ll]?', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.]', Punctuation),
- (r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)),
- (r'(and|controls|tension|atleast|curl|if|else|while|for|do|'
- r'return|break|continue|struct|typedef|new|access|import|'
- r'unravel|from|include|quote|static|public|private|restricted|'
- r'this|explicit|true|false|null|cycle|newframe|operator)\b', Keyword),
- # Since an asy-type-name can also be an asy-function-name,
- # the following tests whether the string " [a-zA-Z]" follows
- # the Keyword.Type. Of course this is not perfect
- # (see the lookahead sketch after this class).
- (r'(Braid|FitResult|Label|Legend|TreeNode|abscissa|arc|arrowhead|'
- r'binarytree|binarytreeNode|block|bool|bool3|bounds|bqe|circle|'
- r'conic|coord|coordsys|cputime|ellipse|file|filltype|frame|grid3|'
- r'guide|horner|hsv|hyperbola|indexedTransform|int|inversion|key|'
- r'light|line|linefit|marginT|marker|mass|object|pair|parabola|path|'
- r'path3|pen|picture|point|position|projection|real|revolution|'
- r'scaleT|scientific|segment|side|slice|splitface|string|surface|'
- r'tensionSpecifier|ticklocate|ticksgridT|tickvalues|transform|'
- r'transformation|tree|triangle|trilinear|triple|vector|'
- r'vertex|void)(?=([ ]{1,}[a-zA-Z]))', Keyword.Type),
- # Now the asy-type-names which are not also asy-function-names
- # (except user-defined ones); perhaps redundant.
- (r'(Braid|FitResult|TreeNode|abscissa|arrowhead|block|bool|bool3|'
- r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|'
- r'picture|position|real|revolution|slice|splitface|ticksgridT|'
- r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type),
- ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
- ],
- 'root': [
- include('whitespace'),
- # functions
- (r'((?:[a-zA-Z0-9_*\s])+?(?:\s|\*))' # return arguments
- r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
- r'(\s*\([^;]*?\))' # signature
- r'(' + _ws + r')({)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation),
- 'function'),
- # function declarations
- (r'((?:[a-zA-Z0-9_*\s])+?(?:\s|\*))' # return arguments
- r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
- r'(\s*\([^;]*?\))' # signature
- r'(' + _ws + r')(;)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation)),
- ('', Text, 'statement'),
- ],
- 'statement' : [
- include('whitespace'),
- include('statements'),
- ('[{}]', Punctuation),
- (';', Punctuation, '#pop'),
- ],
- 'function': [
- include('whitespace'),
- include('statements'),
- (';', Punctuation),
- ('{', Punctuation, '#push'),
- ('}', Punctuation, '#pop'),
- ],
- 'string': [
- (r"'", String, '#pop'),
- (r'\\([\\abfnrtv"\'?]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'\n', String),
- (r"[^\\'\n]+", String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\n', String),
- (r'\\', String), # stray backslash
- ]
- }
-
- def get_tokens_unprocessed(self, text):
- from pygments.lexers._asybuiltins import ASYFUNCNAME, ASYVARNAME
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name and value in ASYFUNCNAME:
- token = Name.Function
- elif token is Name and value in ASYVARNAME:
- token = Name.Variable
- yield index, token, value
-
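A small sketch of the lookahead trick described above, using plain re outside the lexer: a type name such as pair is tagged Keyword.Type only when a space and an identifier follow, so a constructor-style use falls through to the Name rule ('pair' stands in for the full alternation of Asymptote type names):

    import re

    type_re = re.compile(r'pair(?=([ ]{1,}[a-zA-Z]))')

    print(bool(type_re.match('pair p = (0,0);')))   # True  -> Keyword.Type
    print(bool(type_re.match('pair(1,2);')))        # False -> handled as Name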
-
-class PostScriptLexer(RegexLexer):
- """
- Lexer for PostScript files.
-
- The PostScript Language Reference published by Adobe at
- <http://partners.adobe.com/public/developer/en/ps/PLRM.pdf>
- is the authority for this.
-
- *New in Pygments 1.4.*
- """
- name = 'PostScript'
- aliases = ['postscript', 'postscr']
- filenames = ['*.ps', '*.eps']
- mimetypes = ['application/postscript']
-
- delimiter = r'\(\)\<\>\[\]\{\}\/\%\s'
- delimiter_end = r'(?=[%s])' % delimiter
-
- valid_name_chars = r'[^%s]' % delimiter
- valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
-
- tokens = {
- 'root': [
- # All comment types
- (r'^%!.+\n', Comment.Preproc),
- (r'%%.*\n', Comment.Special),
- (r'(^%.*\n){2,}', Comment.Multiline),
- (r'%.*\n', Comment.Single),
-
- # String literals are awkward; enter separate state.
- (r'\(', String, 'stringliteral'),
-
- (r'[\{\}(\<\<)(\>\>)\[\]]', Punctuation),
-
- # Numbers
- (r'<[0-9A-Fa-f]+>' + delimiter_end, Number.Hex),
- # Slight abuse: use Oct to signify any explicit base system
- (r'[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)'
- r'((e|E)[0-9]+)?' + delimiter_end, Number.Oct),
- (r'(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?'
- + delimiter_end, Number.Float),
- (r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer),
-
- # References
- (r'\/%s' % valid_name, Name.Variable),
-
- # Names
- (valid_name, Name.Function), # Anything else is executed
-
- # These keywords taken from
- # <http://www.math.ubc.ca/~cass/graphics/manual/pdf/a1.pdf>
- # Is there an authoritative list anywhere that doesn't involve
- # trawling documentation?
-
- (r'(false|true)' + delimiter_end, Keyword.Constant),
-
- # Conditionals / flow control
- (r'(eq|ne|ge|gt|le|lt|and|or|not|if|ifelse|for|forall)'
- + delimiter_end, Keyword.Reserved),
-
- ('(abs|add|aload|arc|arcn|array|atan|begin|bind|ceiling|charpath|'
- 'clip|closepath|concat|concatmatrix|copy|cos|currentlinewidth|'
- 'currentmatrix|currentpoint|curveto|cvi|cvs|def|defaultmatrix|'
- 'dict|dictstackoverflow|div|dtransform|dup|end|exch|exec|exit|exp|'
- 'fill|findfont|floor|get|getinterval|grestore|gsave|gt|'
- 'identmatrix|idiv|idtransform|index|invertmatrix|itransform|'
- 'length|lineto|ln|load|log|loop|matrix|mod|moveto|mul|neg|newpath|'
- 'pathforall|pathbbox|pop|print|pstack|put|quit|rand|rangecheck|'
- 'rcurveto|repeat|restore|rlineto|rmoveto|roll|rotate|round|run|'
- 'save|scale|scalefont|setdash|setfont|setgray|setlinecap|'
- 'setlinejoin|setlinewidth|setmatrix|setrgbcolor|shfill|show|'
- 'showpage|sin|sqrt|stack|stringwidth|stroke|strokepath|sub|'
- 'syntaxerror|transform|translate|truncate|typecheck|undefined|'
- 'undefinedfilename|undefinedresult)' + delimiter_end,
- Name.Builtin),
-
- (r'\s+', Text),
- ],
-
- 'stringliteral': [
- (r'[^\(\)\\]+', String),
- (r'\\', String.Escape, 'escape'),
- (r'\(', String, '#push'),
- (r'\)', String, '#pop'),
- ],
-
- 'escape': [
- (r'([0-8]{3}|n|r|t|b|f|\\|\(|\))?', String.Escape, '#pop'),
- ],
- }
-
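The delimiter/valid_name patterns of PostScriptLexer are built by string interpolation; a standalone sketch of what they expand to and how the trailing lookahead behaves (reproduced outside the class, so nothing here is part of the diff):

    import re

    delimiter = r'\(\)\<\>\[\]\{\}\/\%\s'
    delimiter_end = r'(?=[%s])' % delimiter
    valid_name = r'[^%s]+%s' % (delimiter, delimiter_end)

    print(valid_name)
    # [^\(\)\<\>\[\]\{\}\/\%\s]+(?=[\(\)\<\>\[\]\{\}\/\%\s])

    print(re.match(valid_name, 'moveto ').group())  # 'moveto' (space satisfies the lookahead)
    print(re.match(valid_name, 'moveto'))           # None: no trailing delimiter, no match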
-
-class AutohotkeyLexer(RegexLexer):
- """
- For `autohotkey <http://www.autohotkey.com/>`_ source code.
-
- *New in Pygments 1.4.*
- """
- name = 'autohotkey'
- aliases = ['ahk', 'autohotkey']
- filenames = ['*.ahk', '*.ahkl']
- mimetypes = ['text/x-autohotkey']
-
- tokens = {
- 'root': [
- (r'^(\s*)(/\*)', bygroups(Text, Comment.Multiline),
- 'incomment'),
- (r'^(\s*)(\()', bygroups(Text, Generic), 'incontinuation'),
- (r'\s+;.*?$', Comment.Single),
- (r'^;.*?$', Comment.Single),
- (r'[]{}(),;[]', Punctuation),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'\%[a-zA-Z_#@$][a-zA-Z0-9_#@$]*\%', Name.Variable),
- (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
- include('commands'),
- include('labels'),
- include('builtInFunctions'),
- include('builtInVariables'),
- (r'"', String, combined('stringescape', 'dqs')),
- include('numbers'),
- (r'[a-zA-Z_#@$][a-zA-Z0-9_#@$]*', Name),
- (r'\\|\'', Text),
- (r'\`([\,\%\`abfnrtv\-\+;])', String.Escape),
- include('garbage'),
- ],
- 'incomment': [
- (r'^\s*\*/', Comment.Multiline, '#pop'),
- (r'[^*/]', Comment.Multiline),
- (r'[*/]', Comment.Multiline)
- ],
- 'incontinuation': [
- (r'^\s*\)', Generic, '#pop'),
- (r'[^)]', Generic),
- (r'[)]', Generic),
- ],
- 'commands': [
- (r'(?i)^(\s*)(global|local|static|'
- r'#AllowSameLineComments|#ClipboardTimeout|#CommentFlag|'
- r'#ErrorStdOut|#EscapeChar|#HotkeyInterval|#HotkeyModifierTimeout|'
- r'#Hotstring|#IfWinActive|#IfWinExist|#IfWinNotActive|'
- r'#IfWinNotExist|#IncludeAgain|#Include|#InstallKeybdHook|'
- r'#InstallMouseHook|#KeyHistory|#LTrim|#MaxHotkeysPerInterval|'
- r'#MaxMem|#MaxThreads|#MaxThreadsBuffer|#MaxThreadsPerHotkey|'
- r'#NoEnv|#NoTrayIcon|#Persistent|#SingleInstance|#UseHook|'
- r'#WinActivateForce|AutoTrim|BlockInput|Break|Click|ClipWait|'
- r'Continue|Control|ControlClick|ControlFocus|ControlGetFocus|'
- r'ControlGetPos|ControlGetText|ControlGet|ControlMove|ControlSend|'
- r'ControlSendRaw|ControlSetText|CoordMode|Critical|'
- r'DetectHiddenText|DetectHiddenWindows|Drive|DriveGet|'
- r'DriveSpaceFree|Edit|Else|EnvAdd|EnvDiv|EnvGet|EnvMult|EnvSet|'
- r'EnvSub|EnvUpdate|Exit|ExitApp|FileAppend|'
- r'FileCopy|FileCopyDir|FileCreateDir|FileCreateShortcut|'
- r'FileDelete|FileGetAttrib|FileGetShortcut|FileGetSize|'
- r'FileGetTime|FileGetVersion|FileInstall|FileMove|FileMoveDir|'
- r'FileRead|FileReadLine|FileRecycle|FileRecycleEmpty|'
- r'FileRemoveDir|FileSelectFile|FileSelectFolder|FileSetAttrib|'
- r'FileSetTime|FormatTime|GetKeyState|Gosub|Goto|GroupActivate|'
- r'GroupAdd|GroupClose|GroupDeactivate|Gui|GuiControl|'
- r'GuiControlGet|Hotkey|IfEqual|IfExist|IfGreaterOrEqual|IfGreater|'
- r'IfInString|IfLess|IfLessOrEqual|IfMsgBox|IfNotEqual|IfNotExist|'
- r'IfNotInString|IfWinActive|IfWinExist|IfWinNotActive|'
- r'IfWinNotExist|If |ImageSearch|IniDelete|IniRead|IniWrite|'
- r'InputBox|Input|KeyHistory|KeyWait|ListHotkeys|ListLines|'
- r'ListVars|Loop|Menu|MouseClickDrag|MouseClick|MouseGetPos|'
- r'MouseMove|MsgBox|OnExit|OutputDebug|Pause|PixelGetColor|'
- r'PixelSearch|PostMessage|Process|Progress|Random|RegDelete|'
- r'RegRead|RegWrite|Reload|Repeat|Return|RunAs|RunWait|Run|'
- r'SendEvent|SendInput|SendMessage|SendMode|SendPlay|SendRaw|Send|'
- r'SetBatchLines|SetCapslockState|SetControlDelay|'
- r'SetDefaultMouseSpeed|SetEnv|SetFormat|SetKeyDelay|'
- r'SetMouseDelay|SetNumlockState|SetScrollLockState|'
- r'SetStoreCapslockMode|SetTimer|SetTitleMatchMode|'
- r'SetWinDelay|SetWorkingDir|Shutdown|Sleep|Sort|SoundBeep|'
- r'SoundGet|SoundGetWaveVolume|SoundPlay|SoundSet|'
- r'SoundSetWaveVolume|SplashImage|SplashTextOff|SplashTextOn|'
- r'SplitPath|StatusBarGetText|StatusBarWait|StringCaseSense|'
- r'StringGetPos|StringLeft|StringLen|StringLower|StringMid|'
- r'StringReplace|StringRight|StringSplit|StringTrimLeft|'
- r'StringTrimRight|StringUpper|Suspend|SysGet|Thread|ToolTip|'
- r'Transform|TrayTip|URLDownloadToFile|While|WinActivate|'
- r'WinActivateBottom|WinClose|WinGetActiveStats|WinGetActiveTitle|'
- r'WinGetClass|WinGetPos|WinGetText|WinGetTitle|WinGet|WinHide|'
- r'WinKill|WinMaximize|WinMenuSelectItem|WinMinimizeAllUndo|'
- r'WinMinimizeAll|WinMinimize|WinMove|WinRestore|WinSetTitle|'
- r'WinSet|WinShow|WinWaitActive|WinWaitClose|WinWaitNotActive|'
- r'WinWait)\b', bygroups(Text, Name.Builtin)),
- ],
- 'builtInFunctions': [
- (r'(?i)(Abs|ACos|Asc|ASin|ATan|Ceil|Chr|Cos|DllCall|Exp|FileExist|'
- r'Floor|GetKeyState|IL_Add|IL_Create|IL_Destroy|InStr|IsFunc|'
- r'IsLabel|Ln|Log|LV_Add|LV_Delete|LV_DeleteCol|LV_GetCount|'
- r'LV_GetNext|LV_GetText|LV_Insert|LV_InsertCol|LV_Modify|'
- r'LV_ModifyCol|LV_SetImageList|Mod|NumGet|NumPut|OnMessage|'
- r'RegExMatch|RegExReplace|RegisterCallback|Round|SB_SetIcon|'
- r'SB_SetParts|SB_SetText|Sin|Sqrt|StrLen|SubStr|Tan|TV_Add|'
- r'TV_Delete|TV_GetChild|TV_GetCount|TV_GetNext|TV_Get|'
- r'TV_GetParent|TV_GetPrev|TV_GetSelection|TV_GetText|TV_Modify|'
- r'VarSetCapacity|WinActive|WinExist|Object|ComObjActive|'
- r'ComObjArray|ComObjEnwrap|ComObjUnwrap|ComObjParameter|'
- r'ComObjType|ComObjConnect|ComObjCreate|ComObjGet|ComObjError|'
- r'ComObjValue|Insert|MinIndex|MaxIndex|Remove|SetCapacity|'
- r'GetCapacity|GetAddress|_NewEnum|FileOpen|Read|Write|ReadLine|'
- r'WriteLine|ReadNumType|WriteNumType|RawRead|RawWrite|Seek|Tell|'
- r'Close|Next|IsObject|StrPut|StrGet|Trim|LTrim|RTrim)\b',
- Name.Function),
- ],
- 'builtInVariables': [
- (r'(?i)(A_AhkPath|A_AhkVersion|A_AppData|A_AppDataCommon|'
- r'A_AutoTrim|A_BatchLines|A_CaretX|A_CaretY|A_ComputerName|'
- r'A_ControlDelay|A_Cursor|A_DDDD|A_DDD|A_DD|A_DefaultMouseSpeed|'
- r'A_Desktop|A_DesktopCommon|A_DetectHiddenText|'
- r'A_DetectHiddenWindows|A_EndChar|A_EventInfo|A_ExitReason|'
- r'A_FormatFloat|A_FormatInteger|A_Gui|A_GuiEvent|A_GuiControl|'
- r'A_GuiControlEvent|A_GuiHeight|A_GuiWidth|A_GuiX|A_GuiY|A_Hour|'
- r'A_IconFile|A_IconHidden|A_IconNumber|A_IconTip|A_Index|'
- r'A_IPAddress1|A_IPAddress2|A_IPAddress3|A_IPAddress4|A_ISAdmin|'
- r'A_IsCompiled|A_IsCritical|A_IsPaused|A_IsSuspended|A_KeyDelay|'
- r'A_Language|A_LastError|A_LineFile|A_LineNumber|A_LoopField|'
- r'A_LoopFileAttrib|A_LoopFileDir|A_LoopFileExt|A_LoopFileFullPath|'
- r'A_LoopFileLongPath|A_LoopFileName|A_LoopFileShortName|'
- r'A_LoopFileShortPath|A_LoopFileSize|A_LoopFileSizeKB|'
- r'A_LoopFileSizeMB|A_LoopFileTimeAccessed|A_LoopFileTimeCreated|'
- r'A_LoopFileTimeModified|A_LoopReadLine|A_LoopRegKey|'
- r'A_LoopRegName|A_LoopRegSubkey|A_LoopRegTimeModified|'
- r'A_LoopRegType|A_MDAY|A_Min|A_MM|A_MMM|A_MMMM|A_Mon|A_MouseDelay|'
- r'A_MSec|A_MyDocuments|A_Now|A_NowUTC|A_NumBatchLines|A_OSType|'
- r'A_OSVersion|A_PriorHotkey|A_ProgramFiles|A_Programs|'
- r'A_ProgramsCommon|A_ScreenHeight|A_ScreenWidth|A_ScriptDir|'
- r'A_ScriptFullPath|A_ScriptName|A_Sec|A_Space|A_StartMenu|'
- r'A_StartMenuCommon|A_Startup|A_StartupCommon|A_StringCaseSense|'
- r'A_Tab|A_Temp|A_ThisFunc|A_ThisHotkey|A_ThisLabel|A_ThisMenu|'
- r'A_ThisMenuItem|A_ThisMenuItemPos|A_TickCount|A_TimeIdle|'
- r'A_TimeIdlePhysical|A_TimeSincePriorHotkey|A_TimeSinceThisHotkey|'
- r'A_TitleMatchMode|A_TitleMatchModeSpeed|A_UserName|A_WDay|'
- r'A_WinDelay|A_WinDir|A_WorkingDir|A_YDay|A_YEAR|A_YWeek|A_YYYY|'
- r'Clipboard|ClipboardAll|ComSpec|ErrorLevel|ProgramFiles|True|'
- r'False|A_IsUnicode|A_FileEncoding|A_OSVersion|A_PtrSize)\b',
- Name.Variable),
- ],
- 'labels': [
- # hotkeys and labels
- # technically, hotkey names are limited to named keys and buttons
- (r'(^\s*)([^:\s\(\"]+?:{1,2})', bygroups(Text, Name.Label)),
- (r'(^\s*)(::[^:\s]+?::)', bygroups(Text, Name.Label)),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'0\d+', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer)
- ],
- 'stringescape': [
- (r'\"\"|\`([\,\%\`abfnrtv])', String.Escape),
- ],
- 'strings': [
- (r'[^"\n]+', String),
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- include('strings')
- ],
- 'garbage': [
- (r'[^\S\n]', Text),
- # (r'.', Text), # no cheating
- ],
- }
-
-
-class MaqlLexer(RegexLexer):
- """
- Lexer for `GoodData MAQL
- <https://secure.gooddata.com/docs/html/advanced.metric.tutorial.html>`_
- scripts.
-
- *New in Pygments 1.4.*
- """
-
- name = 'MAQL'
- aliases = ['maql']
- filenames = ['*.maql']
- mimetypes = ['text/x-gooddata-maql','application/x-gooddata-maql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- # IDENTITY
- (r'IDENTIFIER\b', Name.Builtin),
- # IDENTIFIER
- (r'\{[^}]+\}', Name.Variable),
- # NUMBER
- (r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Literal.Number),
- # STRING
- (r'"', Literal.String, 'string-literal'),
- # RELATION
- (r'\<\>|\!\=', Operator),
- (r'\=|\>\=|\>|\<\=|\<', Operator),
- # :=
- (r'\:\=', Operator),
- # OBJECT
- (r'\[[^]]+\]', Name.Variable.Class),
- # keywords
- (r'(DIMENSIONS?|BOTTOM|METRIC|COUNT|OTHER|FACT|WITH|TOP|OR|'
- r'ATTRIBUTE|CREATE|PARENT|FALSE|ROWS?|FROM|ALL|AS|PF|'
- r'COLUMNS?|DEFINE|REPORT|LIMIT|TABLE|LIKE|AND|BY|'
- r'BETWEEN|EXCEPT|SELECT|MATCH|WHERE|TRUE|FOR|IN|'
- r'WITHOUT|FILTER|ALIAS|ORDER|FACT|WHEN|NOT|ON|'
- r'KEYS|KEY|FULLSET|PRIMARY|LABELS|LABEL|VISUAL|'
- r'TITLE|DESCRIPTION|FOLDER|ALTER|DROP|ADD|DATASET|'
- r'DATATYPE|INT|BIGINT|DOUBLE|DATE|VARCHAR|DECIMAL|'
- r'SYNCHRONIZE|TYPE|DEFAULT|ORDER|ASC|DESC|HYPERLINK|'
- r'INCLUDE|TEMPLATE|MODIFY)\b', Keyword),
- # FUNCNAME
- (r'[a-zA-Z]\w*\b', Name.Function),
- # Comments
- (r'#.*', Comment.Single),
- # Punctuation
- (r'[,;\(\)]', Token.Punctuation),
- # Space is not significant
- (r'\s+', Text)
- ],
- 'string-literal': [
- (r'\\[tnrfbae"\\]', String.Escape),
- (r'"', Literal.String, '#pop'),
- (r'[^\\"]+', Literal.String)
- ],
- }
-
-
-class GoodDataCLLexer(RegexLexer):
- """
- Lexer for `GoodData-CL <http://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/com/gooddata/processor/COMMANDS.txt>`_
- script files.
-
- *New in Pygments 1.4.*
- """
-
- name = 'GoodData-CL'
- aliases = ['gooddata-cl']
- filenames = ['*.gdc']
- mimetypes = ['text/x-gooddata-cl']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- # Comments
- (r'#.*', Comment.Single),
- # Function call
- (r'[a-zA-Z]\w*', Name.Function),
- # Argument list
- (r'\(', Token.Punctuation, 'args-list'),
- # Punctuation
- (r';', Token.Punctuation),
- # Space is not significant
- (r'\s+', Text)
- ],
- 'args-list': [
- (r'\)', Token.Punctuation, '#pop'),
- (r',', Token.Punctuation),
- (r'[a-zA-Z]\w*', Name.Variable),
- (r'=', Operator),
- (r'"', Literal.String, 'string-literal'),
- (r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Literal.Number),
- # Space is not significant
- (r'\s', Text)
- ],
- 'string-literal': [
- (r'\\[tnrfbae"\\]', String.Escape),
- (r'"', Literal.String, '#pop'),
- (r'[^\\"]+', Literal.String)
- ]
- }
-
-
-class ProtoBufLexer(RegexLexer):
- """
- Lexer for `Protocol Buffer <http://code.google.com/p/protobuf/>`_
- definition files.
-
- *New in Pygments 1.4.*
- """
-
- name = 'Protocol Buffer'
- aliases = ['protobuf', 'proto']
- filenames = ['*.proto']
-
- tokens = {
- 'root': [
- (r'[ \t]+', Text),
- (r'[,;{}\[\]\(\)]', Punctuation),
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
- (r'\b(import|option|optional|required|repeated|default|packed|'
- r'ctype|extensions|to|max|rpc|returns)\b', Keyword),
- (r'(int32|int64|uint32|uint64|sint32|sint64|'
- r'fixed32|fixed64|sfixed32|sfixed64|'
- r'float|double|bool|string|bytes)\b', Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'package'),
- (r'(message|extend)(\s+)',
- bygroups(Keyword.Declaration, Text), 'message'),
- (r'(enum|group|service)(\s+)',
- bygroups(Keyword.Declaration, Text), 'type'),
- (r'\".*\"', String),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'(\-?(inf|nan))', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'[+-=]', Operator),
- (r'([a-zA-Z_][a-zA-Z0-9_\.]*)([ \t]*)(=)',
- bygroups(Name.Attribute, Text, Operator)),
- ('[a-zA-Z_][a-zA-Z0-9_\.]*', Name),
- ],
- 'package': [
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Namespace, '#pop')
- ],
- 'message': [
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
- ],
- 'type': [
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name, '#pop')
- ],
- }
-
-
-class HybrisLexer(RegexLexer):
- """
- For `Hybris <http://www.hybris-lang.org>`_ source code.
-
- *New in Pygments 1.4.*
- """
-
- name = 'Hybris'
- aliases = ['hybris', 'hy']
- filenames = ['*.hy', '*.hyb']
- mimetypes = ['text/x-hybris', 'application/x-hybris']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:function|method|operator\s+)+?)'
- r'([a-zA-Z_][a-zA-Z0-9_]*)'
- r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
- (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|'
- r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword),
- (r'(extends|private|protected|public|static|throws|function|method|'
- r'operator)\b', Keyword.Declaration),
- (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|'
- r'__INC_PATH__)\b', Keyword.Constant),
- (r'(class|struct)(\s+)',
- bygroups(Keyword.Declaration, Text), 'class'),
- (r'(import|include)(\s+)',
- bygroups(Keyword.Namespace, Text), 'import'),
- (r'(gc_collect|gc_mm_items|gc_mm_usage|gc_collect_threshold|'
- r'urlencode|urldecode|base64encode|base64decode|sha1|crc32|sha2|'
- r'md5|md5_file|acos|asin|atan|atan2|ceil|cos|cosh|exp|fabs|floor|'
- r'fmod|log|log10|pow|sin|sinh|sqrt|tan|tanh|isint|isfloat|ischar|'
- r'isstring|isarray|ismap|isalias|typeof|sizeof|toint|tostring|'
- r'fromxml|toxml|binary|pack|load|eval|var_names|var_values|'
- r'user_functions|dyn_functions|methods|call|call_method|mknod|'
- r'mkfifo|mount|umount2|umount|ticks|usleep|sleep|time|strtime|'
- r'strdate|dllopen|dlllink|dllcall|dllcall_argv|dllclose|env|exec|'
- r'fork|getpid|wait|popen|pclose|exit|kill|pthread_create|'
- r'pthread_create_argv|pthread_exit|pthread_join|pthread_kill|'
- r'smtp_send|http_get|http_post|http_download|socket|bind|listen|'
- r'accept|getsockname|getpeername|settimeout|connect|server|recv|'
- r'send|close|print|println|printf|input|readline|serial_open|'
- r'serial_fcntl|serial_get_attr|serial_get_ispeed|serial_get_ospeed|'
- r'serial_set_attr|serial_set_ispeed|serial_set_ospeed|serial_write|'
- r'serial_read|serial_close|xml_load|xml_parse|fopen|fseek|ftell|'
- r'fsize|fread|fwrite|fgets|fclose|file|readdir|pcre_replace|size|'
- r'pop|unmap|has|keys|values|length|find|substr|replace|split|trim|'
- r'remove|contains|join)\b', Name.Builtin),
- (r'(MethodReference|Runner|Dll|Thread|Pipe|Process|Runnable|'
- r'CGI|ClientSocket|Socket|ServerSocket|File|Console|Directory|'
- r'Exception)\b', Keyword.Type),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
- (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)',
- bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
- (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
- (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?\-@]+', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text),
- ],
- 'class': [
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
- ],
- }
-
-
-class AwkLexer(RegexLexer):
- """
- For Awk scripts.
-
- *New in Pygments 1.5.*
- """
-
- name = 'Awk'
- aliases = ['awk', 'gawk', 'mawk', 'nawk']
- filenames = ['*.awk']
- mimetypes = ['application/x-awk']
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'#.*$', Comment.Single)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'\B', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- (r'', Text, '#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|\|\||&&|in|\$|!?~|'
- r'(\*\*|[-<>+*%\^/!=])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(break|continue|do|while|exit|for|if|'
- r'return)\b', Keyword, 'slashstartsregex'),
- (r'function\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|'
- r'length|match|split|sprintf|sub|substr|tolower|toupper|close|'
- r'fflush|getline|next|nextfile|print|printf|strftime|systime|'
- r'delete|system)\b', Keyword.Reserved),
- (r'(ARGC|ARGIND|ARGV|CONVFMT|ENVIRON|ERRNO|FIELDWIDTHS|FILENAME|FNR|FS|'
- r'IGNORECASE|NF|NR|OFMT|OFS|ORS|RLENGTH|RS|RSTART|RT|'
- r'SUBSEP)\b', Name.Builtin),
- (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ]
- }
-
-
-class Cfengine3Lexer(RegexLexer):
- """
- Lexer for `CFEngine3 <http://cfengine.org>`_ policy files.
-
- *New in Pygments 1.5.*
- """
-
- name = 'CFEngine3'
- aliases = ['cfengine3', 'cf3']
- filenames = ['*.cf']
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'#.*?\n', Comment),
- (r'(body)(\s+)(\S+)(\s+)(control)',
- bygroups(Keyword, Text, Keyword, Text, Keyword)),
- (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()',
- bygroups(Keyword, Text, Keyword, Text, Name.Function, Punctuation),
- 'arglist'),
- (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)',
- bygroups(Keyword, Text, Keyword, Text, Name.Function)),
- (r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)',
- bygroups(Punctuation,Name.Variable,Punctuation,
- Text,Keyword.Type,Text,Operator,Text)),
- (r'(\S+)(\s*)(=>)(\s*)',
- bygroups(Keyword.Reserved,Text,Operator,Text)),
- (r'"', String, 'string'),
- (r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
- (r'([\w.!&|\(\)]+)(::)', bygroups(Name.Class, Punctuation)),
- (r'(\w+)(:)', bygroups(Keyword.Declaration,Punctuation)),
- (r'@[\{\(][^\)\}]+[\}\)]', Name.Variable),
- (r'[(){},;]', Punctuation),
- (r'=>', Operator),
- (r'->', Operator),
- (r'\d+\.\d+', Number.Float),
- (r'\d+', Number.Integer),
- (r'\w+', Name.Function),
- (r'\s+', Text),
- ],
- 'string': [
- (r'\$[\{\(]', String.Interpol, 'interpol'),
- (r'\\.', String.Escape),
- (r'"', String, '#pop'),
- (r'\n', String),
- (r'.', String),
- ],
- 'interpol': [
- (r'\$[\{\(]', String.Interpol, '#push'),
- (r'[\}\)]', String.Interpol, '#pop'),
- (r'[^\$\{\(\)\}]+', String.Interpol),
- ],
- 'arglist': [
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'\w+', Name.Variable),
- (r'\s+', Text),
- ],
- }
-
-
-class SnobolLexer(RegexLexer):
- """
- Lexer for the SNOBOL4 programming language.
-
- Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
- Does not require spaces around binary operators.
-
- *New in Pygments 1.5.*
- """
-
- name = "Snobol"
- aliases = ["snobol"]
- filenames = ['*.snobol']
- mimetypes = ['text/x-snobol']
-
- tokens = {
- # root state, start of line
- # comments, continuation lines, and directives start in column 1
- # as do labels
- 'root': [
- (r'\*.*\n', Comment),
- (r'[\+\.] ', Punctuation, 'statement'),
- (r'-.*\n', Comment),
- (r'END\s*\n', Name.Label, 'heredoc'),
- (r'[A-Za-z\$][\w$]*', Name.Label, 'statement'),
- (r'\s+', Text, 'statement'),
- ],
- # statement state, line after continuation or label
- 'statement': [
- (r'\s*\n', Text, '#pop'),
- (r'\s+', Text),
- (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|'
- r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|'
- r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|'
- r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])',
- Name.Builtin),
- (r'[A-Za-z][\w\.]*', Name),
- # ASCII equivalents of original operators
- # | for the EBCDIC equivalent, ! likewise
- # \ for EBCDIC negation
- (r'\*\*|[\?\$\.!%\*/#+\-@\|&\\=]', Operator),
- (r'"[^"]*"', String),
- (r"'[^']*'", String),
- # Accept SPITBOL syntax for real numbers
- # as well as Macro SNOBOL4
- (r'[0-9]+(?=[^\.EeDd])', Number.Integer),
- (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float),
- # Goto
- (r':', Punctuation, 'goto'),
- (r'[\(\)<>,;]', Punctuation),
- ],
- # Goto block
- 'goto': [
- (r'\s*\n', Text, "#pop:2"),
- (r'\s+', Text),
- (r'F|S', Keyword),
- (r'(\()([A-Za-z][\w.]*)(\))',
- bygroups(Punctuation, Name.Label, Punctuation))
- ],
- # everything after the END statement is basically one
- # big heredoc.
- 'heredoc': [
- (r'.*\n', String.Heredoc)
- ]
- }
-
-
-class UrbiscriptLexer(ExtendedRegexLexer):
- """
- For UrbiScript source code.
-
- *New in Pygments 1.5.*
- """
-
- name = 'UrbiScript'
- aliases = ['urbiscript']
- filenames = ['*.u']
- mimetypes = ['application/x-urbiscript']
-
- flags = re.DOTALL
-
- ## TODO
- # - handle Experimental and deprecated tags with specific tokens
- # - handle Angles and Durations with specific tokens
-
- def blob_callback(lexer, match, ctx):
- text_before_blob = match.group(1)
- blob_start = match.group(2)
- blob_size_str = match.group(3)
- blob_size = int(blob_size_str)
- yield match.start(), String, text_before_blob
- ctx.pos += len(text_before_blob)
-
- # if the blob size doesn't match the blob format (example: "\B(2)(aaa)"),
- # yield the blob as a plain string
- if ctx.text[match.end() + blob_size] != ")":
- result = "\\B(" + blob_size_str + ")("
- yield match.start(), String, result
- ctx.pos += len(result)
- return
-
- # if the blob is well formatted, yield it as String.Escape
- # (see the usage sketch after this class)
- blob_text = blob_start + ctx.text[match.end():match.end()+blob_size] + ")"
- yield match.start(), String.Escape, blob_text
- ctx.pos = match.end() + blob_size + 1 # +1 is the ending ")"
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- # comments
- (r'//.*?\n', Comment),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'(?:every|for|loop|while)(?:;|&|\||,)',Keyword),
- (r'(?:assert|at|break|case|catch|closure|compl|continue|'
- r'default|else|enum|every|external|finally|for|freezeif|if|new|'
- r'onleave|return|stopif|switch|this|throw|timeout|try|'
- r'waituntil|whenever|while)\b', Keyword),
- (r'(?:asm|auto|bool|char|const_cast|delete|double|dynamic_cast|'
- r'explicit|export|extern|float|friend|goto|inline|int|'
- r'long|mutable|namespace|register|reinterpret_cast|short|'
- r'signed|sizeof|static_cast|struct|template|typedef|typeid|'
- r'typename|union|unsigned|using|virtual|volatile|'
- r'wchar_t)\b', Keyword.Reserved),
- # deprecated keywords, use a meaningful token when available
- (r'(?:emit|foreach|internal|loopn|static)\b', Keyword),
- # ignored keywords, use a meaningful token when available
- (r'(?:private|protected|public)\b', Keyword),
- (r'(?:var|do|const|function|class)\b', Keyword.Declaration),
- (r'(?:true|false|nil|void)\b', Keyword.Constant),
- (r'(?:Barrier|Binary|Boolean|CallMessage|Channel|Code|'
- r'Comparable|Container|Control|Date|Dictionary|Directory|'
- r'Duration|Enumeration|Event|Exception|Executable|File|Finalizable|'
- r'Float|FormatInfo|Formatter|Global|Group|Hash|InputStream|'
- r'IoService|Job|Kernel|Lazy|List|Loadable|Lobby|Location|Logger|Math|'
- r'Mutex|nil|Object|Orderable|OutputStream|Pair|Path|Pattern|Position|'
- r'Primitive|Process|Profile|PseudoLazy|PubSub|RangeIterable|Regexp|'
- r'Semaphore|Server|Singleton|Socket|StackFrame|Stream|String|System|'
- r'Tag|Timeout|Traceable|TrajectoryGenerator|Triplet|Tuple'
- r'|UObject|UValue|UVar)\b', Name.Builtin),
- (r'(?:this)\b', Name.Builtin.Pseudo),
- # don't match single | and &
- (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator),
- (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b',
- Operator.Word),
- (r'[{}\[\]()]+', Punctuation),
- (r'(?:;|\||,|&|\?|!)+', Punctuation),
- (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- # Float, Integer, Angle and Duration
- (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?'
- r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float),
- # handle binary blob in strings
- (r'"', String.Double, "string.double"),
- (r"'", String.Single, "string.single"),
- ],
- 'string.double': [
- (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback),
- (r'(\\\\|\\"|[^"])*?"', String.Double, '#pop'),
- ],
- 'string.single': [
- (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback),
- (r"(\\\\|\\'|[^'])*?'", String.Single, '#pop'),
- ],
- # from http://pygments.org/docs/lexerdevelopment/#changing-states
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ]
- }
-
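A quick usage sketch for the blob handling above, assuming the 'urbiscript' alias stays registered after the module split: a size-consistent \B(n)(...) blob comes back as String.Escape, while the rest of the literal stays ordinary String tokens.

    from pygments.lexers import get_lexer_by_name
    from pygments.token import String

    lexer = get_lexer_by_name('urbiscript')
    code = 'var data = "head \\B(3)(abc) tail";'

    for tok, val in lexer.get_tokens(code):
        if tok in String:   # the String.Double parts and the String.Escape blob
            print(tok, repr(val))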
-
-class OpenEdgeLexer(RegexLexer):
- """
- Lexer for `OpenEdge ABL (formerly Progress)
- <http://web.progress.com/en/openedge/abl.html>`_ source code.
-
- *New in Pygments 1.5.*
- """
- name = 'OpenEdge ABL'
- aliases = ['openedge', 'abl', 'progress']
- filenames = ['*.p', '*.cls']
- mimetypes = ['text/x-openedge', 'application/x-openedge']
-
- types = (r'(?i)(^|(?<=[^0-9a-z_\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|'
- r'COM-HANDLE|DATE|DATETIME|DATETIME-TZ|'
- r'DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|'
- r'INT64|INTEGER|INT|INTE|INTEG|INTEGE|'
- r'LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^0-9a-z_\-]))')
-
- keywords = (r'(?i)(^|(?<=[^0-9a-z_\-]))(' +
- r'|'.join(OPENEDGEKEYWORDS) +
- r')\s*($|(?=[^0-9a-z_\-]))')
- tokens = {
- 'root': [
- (r'/\*', Comment.Multiline, 'comment'),
- (r'\{', Comment.Preproc, 'preprocessor'),
- (r'\s*&.*', Comment.Preproc),
- (r'0[xX][0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'(?i)(DEFINE|DEF|DEFI|DEFIN)\b', Keyword.Declaration),
- (types, Keyword.Type),
- (keywords, Name.Builtin),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\s+', Text),
- (r'[+*/=-]', Operator),
- (r'[.:()]', Punctuation),
- (r'.', Name.Variable), # Lazy catch-all
- ],
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'preprocessor': [
- (r'[^{}]', Comment.Preproc),
- (r'{', Comment.Preproc, '#push'),
- (r'}', Comment.Preproc, '#pop'),
- ],
- }
-
-
-class BroLexer(RegexLexer):
- """
- For `Bro <http://bro-ids.org/>`_ scripts.
-
- *New in Pygments 1.5.*
- """
- name = 'Bro'
- aliases = ['bro']
- filenames = ['*.bro']
-
- _hex = r'[0-9a-fA-F_]+'
- _float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
- _h = r'[A-Za-z0-9][-A-Za-z0-9]*'
-
- tokens = {
- 'root': [
- # Whitespace
- (r'^@.*?\n', Comment.Preproc),
- (r'#.*?\n', Comment.Single),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text),
- # Keywords
- (r'(add|alarm|break|case|const|continue|delete|do|else|enum|event'
- r'|export|for|function|if|global|hook|local|module|next'
- r'|of|print|redef|return|schedule|switch|type|when|while)\b', Keyword),
- (r'(addr|any|bool|count|counter|double|file|int|interval|net'
- r'|pattern|port|record|set|string|subnet|table|time|timer'
- r'|vector)\b', Keyword.Type),
- (r'(T|F)\b', Keyword.Constant),
- (r'(&)((?:add|delete|expire)_func|attr|(?:create|read|write)_expire'
- r'|default|disable_print_hook|raw_output|encrypt|group|log'
- r'|mergeable|optional|persistent|priority|redef'
- r'|rotate_(?:interval|size)|synchronized)\b', bygroups(Punctuation,
- Keyword)),
- (r'\s+module\b', Keyword.Namespace),
- # Addresses, ports and networks
- (r'\d+/(tcp|udp|icmp|unknown)\b', Number),
- (r'(\d+\.){3}\d+', Number),
- (r'(' + _hex + r'){7}' + _hex, Number),
- (r'0x' + _hex + r'(' + _hex + r'|:)*::(' + _hex + r'|:)*', Number),
- (r'((\d+|:)(' + _hex + r'|:)*)?::(' + _hex + r'|:)*', Number),
- (r'(\d+\.\d+\.|(\d+\.){2}\d+)', Number),
- # Hostnames
- (_h + r'(\.' + _h + r')+', String),
- # Numeric
- (_float + r'\s+(day|hr|min|sec|msec|usec)s?\b', Literal.Date),
- (r'0[xX]' + _hex, Number.Hex),
- (_float, Number.Float),
- (r'\d+', Number.Integer),
- (r'/', String.Regex, 'regex'),
- (r'"', String, 'string'),
- # Operators
- (r'[!%*/+:<=>?~|-]', Operator),
- (r'([-+=&|]{2}|[+=!><-]=)', Operator),
- (r'(in|match)\b', Operator.Word),
- (r'[{}()\[\]$.,;]', Punctuation),
- # Identifier
- (r'([_a-zA-Z]\w*)(::)', bygroups(Name, Name.Namespace)),
- (r'[a-zA-Z_][a-zA-Z_0-9]*', Name)
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String),
- (r'\\\n', String),
- (r'\\', String)
- ],
- 'regex': [
- (r'/', String.Regex, '#pop'),
- (r'\\[\\nt/]', String.Regex), # String.Escape is too intense here.
- (r'[^\\/\n]+', String.Regex),
- (r'\\\n', String.Regex),
- (r'\\', String.Regex)
- ]
- }
-
-
-class CbmBasicV2Lexer(RegexLexer):
- """
- For CBM BASIC V2 sources.
-
- *New in Pygments 1.6.*
- """
- name = 'CBM BASIC V2'
- aliases = ['cbmbas']
- filenames = ['*.bas']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'rem.*\n', Comment.Single),
- (r'\s+', Text),
- (r'new|run|end|for|to|next|step|go(to|sub)?|on|return|stop|cont'
- r'|if|then|input#?|read|wait|load|save|verify|poke|sys|print#?'
- r'|list|clr|cmd|open|close|get#?', Keyword.Reserved),
- (r'data|restore|dim|let|def|fn', Keyword.Declaration),
- (r'tab|spc|sgn|int|abs|usr|fre|pos|sqr|rnd|log|exp|cos|sin|tan|atn'
- r'|peek|len|val|asc|(str|chr|left|right|mid)\$', Name.Builtin),
- (r'[-+*/^<>=]', Operator),
- (r'not|and|or', Operator.Word),
- (r'"[^"\n]*.', String),
- (r'\d+|[-+]?\d*\.\d*(e[-+]?\d+)?', Number.Float),
- (r'[\(\),:;]', Punctuation),
- (r'\w+[$%]?', Name),
- ]
- }
-
- def analyse_text(self, text):
- # if it starts with a line number, it shouldn't be a "modern" Basic
- # like VB.net
- if re.match(r'\d+', text):
- return True
-
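The heuristic above keys purely on a leading line number. A minimal stand-alone sketch of the same check, with an invented helper name and made-up inputs (not part of the patch)::

    import re

    def starts_with_line_number(text):
        # Mirrors CbmBasicV2Lexer.analyse_text: a leading line number hints at
        # CBM BASIC V2 rather than a "modern" BASIC such as VB.NET.
        return bool(re.match(r'\d+', text))

    print(starts_with_line_number('10 PRINT "READY."'))  # True
    print(starts_with_line_number('Module Program'))     # False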
-
-class MscgenLexer(RegexLexer):
- """
- For `Mscgen <http://www.mcternan.me.uk/mscgen/>`_ files.
-
- *New in Pygments 1.6.*
- """
- name = 'Mscgen'
- aliases = ['mscgen', 'msc']
- filenames = ['*.msc']
-
- _var = r'([a-zA-Z0-9_]+|"(?:\\"|[^"])*")'
-
- tokens = {
- 'root': [
- (r'msc\b', Keyword.Type),
- # Options
- (r'(hscale|HSCALE|width|WIDTH|wordwraparcs|WORDWRAPARCS'
- r'|arcgradient|ARCGRADIENT)\b', Name.Property),
- # Operators
- (r'(abox|ABOX|rbox|RBOX|box|BOX|note|NOTE)\b', Operator.Word),
- (r'(\.|-|\|){3}', Keyword),
- (r'(?:-|=|\.|:){2}'
- r'|<<=>>|<->|<=>|<<>>|<:>'
- r'|->|=>>|>>|=>|:>|-x|-X'
- r'|<-|<<=|<<|<=|<:|x-|X-|=', Operator),
- # Names
- (r'\*', Name.Builtin),
- (_var, Name.Variable),
- # Other
- (r'\[', Punctuation, 'attrs'),
- (r'\{|\}|,|;', Punctuation),
- include('comments')
- ],
- 'attrs': [
- (r'\]', Punctuation, '#pop'),
- (_var + r'(\s*)(=)(\s*)' + _var,
- bygroups(Name.Attribute, Text.Whitespace, Operator, Text.Whitespace,
- String)),
- (r',', Punctuation),
- include('comments')
- ],
- 'comments': [
- (r'(?://|#).*?\n', Comment.Single),
- (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
- (r'[ \t\r\n]+', Text.Whitespace)
- ]
- }
-
-
-def _rx_indent(level):
- # Kconfig *always* interprets a tab as 8 spaces, so this is the default.
- # Edit this if you are in an environment where KconfigLexer gets expanded
- # input (tabs expanded to spaces) and the expansion tab width is != 8,
- # e.g. in connection with Trac (trac.ini, [mimeviewer], tab_width).
- # Value range here is 2 <= {tab_width} <= 8.
- tab_width = 8
- # Regex matching a given indentation {level}, assuming that indentation is
- # a multiple of {tab_width}. In other cases there might be problems.
- return r'(?:\t| {1,%s}\t| {%s}){%s}.*\n' % (tab_width-1, tab_width, level)
-
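The indentation regex built by ``_rx_indent`` is easiest to understand by probing it directly. The sketch below re-creates the helper under the default ``tab_width`` of 8; the helper name and sample strings are invented for illustration::

    import re

    def rx_indent(level, tab_width=8):
        # Same construction as _rx_indent above: one indentation "step" is a
        # tab, spaces followed by a tab, or exactly tab_width spaces.
        return r'(?:\t| {1,%s}\t| {%s}){%s}.*\n' % (tab_width - 1, tab_width, level)

    print(bool(re.match(rx_indent(2), '\t\thelp text at level 2\n')))                 # True
    print(bool(re.match(rx_indent(2), '        \t8 spaces + tab, still level 2\n')))  # True
    print(bool(re.match(rx_indent(2), '\tonly level 1\n')))                           # False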
-
-class KconfigLexer(RegexLexer):
- """
- For Linux-style Kconfig files.
-
- *New in Pygments 1.6.*
- """
-
- name = 'Kconfig'
- aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config']
- # Adjust this if new kconfig file names appear in your environment
- filenames = ['Kconfig', '*Config.in*', 'external.in*',
- 'standard-modules.in']
- mimetypes = ['text/x-kconfig']
- # No re.MULTILINE, indentation-aware help text needs line-by-line handling
- flags = 0
-
- def call_indent(level):
- # If indentation >= {level} is detected, enter state 'indent{level}'
- return (_rx_indent(level), String.Doc, 'indent%s' % level)
-
- def do_indent(level):
- # Print paragraphs of indentation level >= {level} as String.Doc,
- # ignoring blank lines. Then return to 'root' state.
- return [
- (_rx_indent(level), String.Doc),
- (r'\s*\n', Text),
- (r'', Generic, '#pop:2')
- ]
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#.*?\n', Comment.Single),
- (r'(mainmenu|config|menuconfig|choice|endchoice|comment|menu|'
- r'endmenu|visible if|if|endif|source|prompt|select|depends on|'
- r'default|range|option)\b', Keyword),
- (r'(---help---|help)[\t ]*\n', Keyword, 'help'),
- (r'(bool|tristate|string|hex|int|defconfig_list|modules|env)\b',
- Name.Builtin),
- (r'[!=&|]', Operator),
- (r'[()]', Punctuation),
- (r'[0-9]+', Number.Integer),
- (r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Double),
- (r'\S+', Text),
- ],
- # Help text is indented, multi-line and ends when a lower indentation
- # level is detected.
- 'help': [
- # Skip blank lines after help token, if any
- (r'\s*\n', Text),
- # Determine the first help line's indentation level heuristically(!).
- # Attention: this is not perfect, but works for 99% of "normal"
- # indentation schemes up to a max. indentation level of 7.
- call_indent(7),
- call_indent(6),
- call_indent(5),
- call_indent(4),
- call_indent(3),
- call_indent(2),
- call_indent(1),
- ('', Text, '#pop'), # for incomplete help sections without text
- ],
- # Handle text for indentation levels 7 to 1
- 'indent7': do_indent(7),
- 'indent6': do_indent(6),
- 'indent5': do_indent(5),
- 'indent4': do_indent(4),
- 'indent3': do_indent(3),
- 'indent2': do_indent(2),
- 'indent1': do_indent(1),
- }
-
-
-class VGLLexer(RegexLexer):
- """
- For `SampleManager VGL <http://www.thermoscientific.com/samplemanager>`_
- source code.
-
- *New in Pygments 1.6.*
- """
- name = 'VGL'
- aliases = ['vgl']
- filenames = ['*.rpf']
-
- flags = re.MULTILINE | re.DOTALL | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\{[^\}]*\}', Comment.Multiline),
- (r'declare', Keyword.Constant),
- (r'(if|then|else|endif|while|do|endwhile|and|or|prompt|object'
- r'|create|on|line|with|global|routine|value|endroutine|constant'
- r'|global|set|join|library|compile_option|file|exists|create|copy'
- r'|delete|enable|windows|name|notprotected)(?! *[=<>.,()])',
- Keyword),
- (r'(true|false|null|empty|error|locked)', Keyword.Constant),
- (r'[~\^\*\#!%&\[\]\(\)<>\|+=:;,./?-]', Operator),
- (r'"[^"]*"', String),
- (r'(\.)([a-z_\$][a-z0-9_\$]*)', bygroups(Operator, Name.Attribute)),
- (r'[0-9][0-9]*(\.[0-9]+(e[+\-]?[0-9]+)?)?', Number),
- (r'[a-z_\$][a-z0-9_\$]*', Name),
- (r'[\r\n]+', Text),
- (r'\s+', Text)
- ]
- }
-
-
-class SourcePawnLexer(RegexLexer):
- """
- For SourcePawn source code with preprocessor directives.
-
- *New in Pygments 1.6.*
- """
- name = 'SourcePawn'
- aliases = ['sp']
- filenames = ['*.sp']
- mimetypes = ['text/x-sourcepawn']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
-
- tokens = {
- 'root': [
- # preprocessor directives: without whitespace
- ('^#if\s+0', Comment.Preproc, 'if0'),
- ('^#', Comment.Preproc, 'macro'),
- # or with whitespace
- ('^' + _ws + r'#if\s+0', Comment.Preproc, 'if0'),
- ('^' + _ws + '#', Comment.Preproc, 'macro'),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
- (r'[{}]', Punctuation),
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'\*/', Error),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.;]', Punctuation),
- (r'(case|const|continue|native|'
- r'default|else|enum|for|if|new|operator|'
- r'public|return|sizeof|static|decl|struct|switch)\b', Keyword),
- (r'(bool|Float)\b', Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/\*(.|\n)*?\*/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ]
- }
-
- SM_TYPES = ['Action', 'bool', 'Float', 'Plugin', 'String', 'any',
- 'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType',
- 'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart',
- 'CookieAccess', 'CookieMenu', 'CookieMenuAction', 'NetFlow',
- 'ConVarBounds', 'QueryCookie', 'ReplySource',
- 'ConVarQueryResult', 'ConVarQueryFinished', 'Function',
- 'Action', 'Identity', 'PluginStatus', 'PluginInfo', 'DBResult',
- 'DBBindType', 'DBPriority', 'PropType', 'PropFieldType',
- 'MoveType', 'RenderMode', 'RenderFx', 'EventHookMode',
- 'EventHook', 'FileType', 'FileTimeMode', 'PathType',
- 'ParamType', 'ExecType', 'DialogType', 'Handle', 'KvDataTypes',
- 'NominateResult', 'MapChange', 'MenuStyle', 'MenuAction',
- 'MenuSource', 'RegexError', 'SDKCallType', 'SDKLibrary',
- 'SDKFuncConfSource', 'SDKType', 'SDKPassMethod', 'RayType',
- 'TraceEntityFilter', 'ListenOverride', 'SortOrder', 'SortType',
- 'SortFunc2D', 'APLRes', 'FeatureType', 'FeatureStatus',
- 'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond',
- 'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType',
- 'TopMenuPosition', 'TopMenuObject', 'UserMsg']
-
- def __init__(self, **options):
- self.smhighlighting = get_bool_opt(options,
- 'sourcemod', True)
-
- self._functions = []
- if self.smhighlighting:
- from pygments.lexers._sourcemodbuiltins import FUNCTIONS
- self._functions.extend(FUNCTIONS)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if self.smhighlighting:
- if value in self.SM_TYPES:
- token = Keyword.Type
- elif value in self._functions:
- token = Name.Builtin
- yield index, token, value
-
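The ``sourcemod`` option read in ``__init__`` above switches the post-processing in ``get_tokens_unprocessed`` on or off per lexer instance. A hedged usage sketch follows; the SourcePawn snippet is made up::

    from pygments.lexers import SourcePawnLexer

    code = 'public Action MyHook(Handle timer) { return Plugin_Continue; }'

    default_lexer = SourcePawnLexer()               # sourcemod=True: SM types/builtins highlighted
    plain_lexer = SourcePawnLexer(sourcemod=False)  # the same names stay plain Name tokens

    for token_type, value in default_lexer.get_tokens(code):
        if value.strip():
            print(token_type, repr(value))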
-
-class PuppetLexer(RegexLexer):
- """
- For `Puppet <http://puppetlabs.com/>`__ configuration DSL.
-
- *New in Pygments 1.6.*
- """
- name = 'Puppet'
- aliases = ['puppet']
- filenames = ['*.pp']
-
- tokens = {
- 'root': [
- include('comments'),
- include('keywords'),
- include('names'),
- include('numbers'),
- include('operators'),
- include('strings'),
-
- (r'[]{}:(),;[]', Punctuation),
- (r'[^\S\n]+', Text),
- ],
-
- 'comments': [
- (r'\s*#.*$', Comment),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- ],
-
- 'operators': [
- (r'(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)', Operator),
- (r'(in|and|or|not)\b', Operator.Word),
- ],
-
- 'names': [
- ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Attribute),
- (r'(\$\S+)(\[)(\S+)(\])', bygroups(Name.Variable, Punctuation,
- String, Punctuation)),
- (r'\$\S+', Name.Variable),
- ],
-
- 'numbers': [
- # Copypasta from the Python lexer
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+j?', Number.Integer)
- ],
-
- 'keywords': [
- # Left out 'group' and 'require'
- # Since they're often used as attributes
- (r'(?i)(absent|alert|alias|audit|augeas|before|case|check|class|'
- r'computer|configured|contained|create_resources|crit|cron|debug|'
- r'default|define|defined|directory|else|elsif|emerg|err|exec|'
- r'extlookup|fail|false|file|filebucket|fqdn_rand|generate|host|if|'
- r'import|include|info|inherits|inline_template|installed|'
- r'interface|k5login|latest|link|loglevel|macauthorization|'
- r'mailalias|maillist|mcx|md5|mount|mounted|nagios_command|'
- r'nagios_contact|nagios_contactgroup|nagios_host|'
- r'nagios_hostdependency|nagios_hostescalation|nagios_hostextinfo|'
- r'nagios_hostgroup|nagios_service|nagios_servicedependency|'
- r'nagios_serviceescalation|nagios_serviceextinfo|'
- r'nagios_servicegroup|nagios_timeperiod|node|noop|notice|notify|'
- r'package|present|purged|realize|regsubst|resources|role|router|'
- r'running|schedule|scheduled_task|search|selboolean|selmodule|'
- r'service|sha1|shellquote|split|sprintf|ssh_authorized_key|sshkey|'
- r'stage|stopped|subscribe|tag|tagged|template|tidy|true|undef|'
- r'unmounted|user|versioncmp|vlan|warning|yumrepo|zfs|zone|'
- r'zpool)\b', Keyword),
- ],
-
- 'strings': [
- (r'"([^"])*"', String),
- (r'\'([^\'])*\'', String),
- ],
-
- }
-
-
-class NSISLexer(RegexLexer):
- """
- For `NSIS <http://nsis.sourceforge.net/>`_ scripts.
-
- *New in Pygments 1.6.*
- """
- name = 'NSIS'
- aliases = ['nsis', 'nsi', 'nsh']
- filenames = ['*.nsi', '*.nsh']
- mimetypes = ['text/x-nsis']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'[;\#].*\n', Comment),
- (r"'.*?'", String.Single),
- (r'"', String.Double, 'str_double'),
- (r'`', String.Backtick, 'str_backtick'),
- include('macro'),
- include('interpol'),
- include('basic'),
- (r'\$\{[a-z_|][\w|]*\}', Keyword.Pseudo),
- (r'/[a-z_]\w*', Name.Attribute),
- ('.', Text),
- ],
- 'basic': [
- (r'(\n)(Function)(\s+)([._a-z][.\w]*)\b',
- bygroups(Text, Keyword, Text, Name.Function)),
- (r'\b([_a-z]\w*)(::)([a-z][a-z0-9]*)\b',
- bygroups(Keyword.Namespace, Punctuation, Name.Function)),
- (r'\b([_a-z]\w*)(:)', bygroups(Name.Label, Punctuation)),
- (r'(\b[ULS]|\B)([\!\<\>=]?=|\<\>?|\>)\B', Operator),
- (r'[|+-]', Operator),
- (r'\\', Punctuation),
- (r'\b(Abort|Add(?:BrandingImage|Size)|'
- r'Allow(?:RootDirInstall|SkipFiles)|AutoCloseWindow|'
- r'BG(?:Font|Gradient)|BrandingText|BringToFront|Call(?:InstDLL)?|'
- r'(?:Sub)?Caption|ChangeUI|CheckBitmap|ClearErrors|CompletedText|'
- r'ComponentText|CopyFiles|CRCCheck|'
- r'Create(?:Directory|Font|Shortcut)|Delete(?:INI(?:Sec|Str)|'
- r'Reg(?:Key|Value))?|DetailPrint|DetailsButtonText|'
- r'Dir(?:Show|Text|Var|Verify)|(?:Disabled|Enabled)Bitmap|'
- r'EnableWindow|EnumReg(?:Key|Value)|Exch|Exec(?:Shell|Wait)?|'
- r'ExpandEnvStrings|File(?:BufSize|Close|ErrorText|Open|'
- r'Read(?:Byte)?|Seek|Write(?:Byte)?)?|'
- r'Find(?:Close|First|Next|Window)|FlushINI|Function(?:End)?|'
- r'Get(?:CurInstType|CurrentAddress|DlgItem|DLLVersion(?:Local)?|'
- r'ErrorLevel|FileTime(?:Local)?|FullPathName|FunctionAddress|'
- r'InstDirError|LabelAddress|TempFileName)|'
- r'Goto|HideWindow|Icon|'
- r'If(?:Abort|Errors|FileExists|RebootFlag|Silent)|'
- r'InitPluginsDir|Install(?:ButtonText|Colors|Dir(?:RegKey)?)|'
- r'Inst(?:ProgressFlags|Type(?:[GS]etText)?)|Int(?:CmpU?|Fmt|Op)|'
- r'IsWindow|LangString(?:UP)?|'
- r'License(?:BkColor|Data|ForceSelection|LangString|Text)|'
- r'LoadLanguageFile|LockWindow|Log(?:Set|Text)|MessageBox|'
- r'MiscButtonText|Name|Nop|OutFile|(?:Uninst)?Page(?:Ex(?:End)?)?|'
- r'PluginDir|Pop|Push|Quit|Read(?:(?:Env|INI|Reg)Str|RegDWORD)|'
- r'Reboot|(?:Un)?RegDLL|Rename|RequestExecutionLevel|ReserveFile|'
- r'Return|RMDir|SearchPath|Section(?:Divider|End|'
- r'(?:(?:Get|Set)(?:Flags|InstTypes|Size|Text))|Group(?:End)?|In)?|'
- r'SendMessage|Set(?:AutoClose|BrandingImage|Compress(?:ionLevel|'
- r'or(?:DictSize)?)?|CtlColors|CurInstType|DatablockOptimize|'
- r'DateSave|Details(?:Print|View)|Error(?:s|Level)|FileAttributes|'
- r'Font|OutPath|Overwrite|PluginUnload|RebootFlag|ShellVarContext|'
- r'Silent|StaticBkColor)|'
- r'Show(?:(?:I|Uni)nstDetails|Window)|Silent(?:Un)?Install|Sleep|'
- r'SpaceTexts|Str(?:CmpS?|Cpy|Len)|SubSection(?:End)?|'
- r'Uninstall(?:ButtonText|(?:Sub)?Caption|EXEName|Icon|Text)|'
- r'UninstPage|Var|VI(?:AddVersionKey|ProductVersion)|WindowIcon|'
- r'Write(?:INIStr|Reg(:?Bin|DWORD|(?:Expand)?Str)|Uninstaller)|'
- r'XPStyle)\b', Keyword),
- (r'\b(CUR|END|(?:FILE_ATTRIBUTE_)?'
- r'(?:ARCHIVE|HIDDEN|NORMAL|OFFLINE|READONLY|SYSTEM|TEMPORARY)|'
- r'HK(CC|CR|CU|DD|LM|PD|U)|'
- r'HKEY_(?:CLASSES_ROOT|CURRENT_(?:CONFIG|USER)|DYN_DATA|'
- r'LOCAL_MACHINE|PERFORMANCE_DATA|USERS)|'
- r'ID(?:ABORT|CANCEL|IGNORE|NO|OK|RETRY|YES)|'
- r'MB_(?:ABORTRETRYIGNORE|DEFBUTTON[1-4]|'
- r'ICON(?:EXCLAMATION|INFORMATION|QUESTION|STOP)|'
- r'OK(?:CANCEL)?|RETRYCANCEL|RIGHT|SETFOREGROUND|TOPMOST|USERICON|'
- r'YESNO(?:CANCEL)?)|SET|SHCTX|'
- r'SW_(?:HIDE|SHOW(?:MAXIMIZED|MINIMIZED|NORMAL))|'
- r'admin|all|auto|both|bottom|bzip2|checkbox|colored|current|false|'
- r'force|hide|highest|if(?:diff|newer)|lastused|leave|left|'
- r'listonly|lzma|nevershow|none|normal|off|on|pop|push|'
- r'radiobuttons|right|show|silent|silentlog|smooth|textonly|top|'
- r'true|try|user|zlib)\b', Name.Constant),
- ],
- 'macro': [
- (r'\!(addincludedir(?:dir)?|addplugindir|appendfile|cd|define|'
- r'delfilefile|echo(?:message)?|else|endif|error|execute|'
- r'if(?:macro)?n?(?:def)?|include|insertmacro|macro(?:end)?|packhdr|'
- r'search(?:parse|replace)|system|tempfilesymbol|undef|verbose|'
- r'warning)\b', Comment.Preproc),
- ],
- 'interpol': [
- (r'\$(R?[0-9])', Name.Builtin.Pseudo), # registers
- (r'\$(ADMINTOOLS|APPDATA|CDBURN_AREA|COOKIES|COMMONFILES(?:32|64)|'
- r'DESKTOP|DOCUMENTS|EXE(?:DIR|FILE|PATH)|FAVORITES|FONTS|HISTORY|'
- r'HWNDPARENT|INTERNET_CACHE|LOCALAPPDATA|MUSIC|NETHOOD|PICTURES|'
- r'PLUGINSDIR|PRINTHOOD|PROFILE|PROGRAMFILES(?:32|64)|QUICKLAUNCH|'
- r'RECENT|RESOURCES(?:_LOCALIZED)?|SENDTO|SM(?:PROGRAMS|STARTUP)|'
- r'STARTMENU|SYSDIR|TEMP(?:LATES)?|VIDEOS|WINDIR|\{NSISDIR\})',
- Name.Builtin),
- (r'\$(CMDLINE|INSTDIR|OUTDIR|LANGUAGE)', Name.Variable.Global),
- (r'\$[a-z_]\w*', Name.Variable),
- ],
- 'str_double': [
- (r'"', String, '#pop'),
- (r'\$(\\[nrt"]|\$)', String.Escape),
- include('interpol'),
- (r'.', String.Double),
- ],
- 'str_backtick': [
- (r'`', String, '#pop'),
- (r'\$(\\[nrt"]|\$)', String.Escape),
- include('interpol'),
- (r'.', String.Double),
- ],
- }
-
-
-class RPMSpecLexer(RegexLexer):
- """
- For RPM *.spec files.
-
- *New in Pygments 1.6.*
- """
-
- name = 'RPMSpec'
- aliases = ['spec']
- filenames = ['*.spec']
- mimetypes = ['text/x-rpm-spec']
-
- _directives = ('(?:package|prep|build|install|clean|check|pre[a-z]*|'
- 'post[a-z]*|trigger[a-z]*|files)')
-
- tokens = {
- 'root': [
- (r'#.*\n', Comment),
- include('basic'),
- ],
- 'description': [
- (r'^(%' + _directives + ')(.*)$',
- bygroups(Name.Decorator, Text), '#pop'),
- (r'\n', Text),
- (r'.', Text),
- ],
- 'changelog': [
- (r'\*.*\n', Generic.Subheading),
- (r'^(%' + _directives + ')(.*)$',
- bygroups(Name.Decorator, Text), '#pop'),
- (r'\n', Text),
- (r'.', Text),
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- include('interpol'),
- (r'.', String.Double),
- ],
- 'basic': [
- include('macro'),
- (r'(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|'
- r'Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|'
- r'Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|'
- r'Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$',
- bygroups(Generic.Heading, Punctuation, using(this))),
- (r'^%description', Name.Decorator, 'description'),
- (r'^%changelog', Name.Decorator, 'changelog'),
- (r'^(%' + _directives + ')(.*)$', bygroups(Name.Decorator, Text)),
- (r'%(attr|defattr|dir|doc(?:dir)?|setup|config(?:ure)?|'
- r'make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)',
- Keyword),
- include('interpol'),
- (r"'.*?'", String.Single),
- (r'"', String.Double, 'string'),
- (r'.', Text),
- ],
- 'macro': [
- (r'%define.*\n', Comment.Preproc),
- (r'%\{\!\?.*%define.*\}', Comment.Preproc),
- (r'(%(?:if(?:n?arch)?|else(?:if)?|endif))(.*)$',
- bygroups(Comment.Preproc, Text)),
- ],
- 'interpol': [
- (r'%\{?__[a-z_]+\}?', Name.Function),
- (r'%\{?_([a-z_]+dir|[a-z_]+path|prefix)\}?', Keyword.Pseudo),
- (r'%\{\?[A-Za-z0-9_]+\}', Name.Variable),
- (r'\$\{?RPM_[A-Z0-9_]+\}?', Name.Variable.Global),
- (r'%\{[a-zA-Z][a-zA-Z0-9_]+\}', Keyword.Constant),
- ]
- }
-
-
-class AutoItLexer(RegexLexer):
- """
- For `AutoIt <http://www.autoitscript.com/site/autoit/>`_ files.
-
- AutoIt is a freeware BASIC-like scripting language
- designed for automating the Windows GUI and general scripting.
-
- *New in Pygments 1.6.*
- """
- name = 'AutoIt'
- aliases = ['autoit', 'Autoit']
- filenames = ['*.au3']
- mimetypes = ['text/x-autoit']
-
- # Keywords, functions, macros from au3.keywords.properties
- # which can be found in the AutoIt installation directory, e.g.
- # c:\Program Files (x86)\AutoIt3\SciTE\au3.keywords.properties
-
- keywords = """\
- #include-once #include #endregion #forcedef #forceref #region
- and byref case continueloop dim do else elseif endfunc endif
- endselect exit exitloop for func global
- if local next not or return select step
- then to until wend while exit""".split()
-
- functions = """\
- abs acos adlibregister adlibunregister asc ascw asin assign atan
- autoitsetoption autoitwingettitle autoitwinsettitle beep binary binarylen
- binarymid binarytostring bitand bitnot bitor bitrotate bitshift bitxor
- blockinput break call cdtray ceiling chr chrw clipget clipput consoleread
- consolewrite consolewriteerror controlclick controlcommand controldisable
- controlenable controlfocus controlgetfocus controlgethandle controlgetpos
- controlgettext controlhide controllistview controlmove controlsend
- controlsettext controlshow controltreeview cos dec dircopy dircreate
- dirgetsize dirmove dirremove dllcall dllcalladdress dllcallbackfree
- dllcallbackgetptr dllcallbackregister dllclose dllopen dllstructcreate
- dllstructgetdata dllstructgetptr dllstructgetsize dllstructsetdata
- drivegetdrive drivegetfilesystem drivegetlabel drivegetserial drivegettype
- drivemapadd drivemapdel drivemapget drivesetlabel drivespacefree
- drivespacetotal drivestatus envget envset envupdate eval execute exp
- filechangedir fileclose filecopy filecreatentfslink filecreateshortcut
- filedelete fileexists filefindfirstfile filefindnextfile fileflush
- filegetattrib filegetencoding filegetlongname filegetpos filegetshortcut
- filegetshortname filegetsize filegettime filegetversion fileinstall filemove
- fileopen fileopendialog fileread filereadline filerecycle filerecycleempty
- filesavedialog fileselectfolder filesetattrib filesetpos filesettime
- filewrite filewriteline floor ftpsetproxy guicreate guictrlcreateavi
- guictrlcreatebutton guictrlcreatecheckbox guictrlcreatecombo
- guictrlcreatecontextmenu guictrlcreatedate guictrlcreatedummy
- guictrlcreateedit guictrlcreategraphic guictrlcreategroup guictrlcreateicon
- guictrlcreateinput guictrlcreatelabel guictrlcreatelist
- guictrlcreatelistview guictrlcreatelistviewitem guictrlcreatemenu
- guictrlcreatemenuitem guictrlcreatemonthcal guictrlcreateobj
- guictrlcreatepic guictrlcreateprogress guictrlcreateradio
- guictrlcreateslider guictrlcreatetab guictrlcreatetabitem
- guictrlcreatetreeview guictrlcreatetreeviewitem guictrlcreateupdown
- guictrldelete guictrlgethandle guictrlgetstate guictrlread guictrlrecvmsg
- guictrlregisterlistviewsort guictrlsendmsg guictrlsendtodummy
- guictrlsetbkcolor guictrlsetcolor guictrlsetcursor guictrlsetdata
- guictrlsetdefbkcolor guictrlsetdefcolor guictrlsetfont guictrlsetgraphic
- guictrlsetimage guictrlsetlimit guictrlsetonevent guictrlsetpos
- guictrlsetresizing guictrlsetstate guictrlsetstyle guictrlsettip guidelete
- guigetcursorinfo guigetmsg guigetstyle guiregistermsg guisetaccelerators
- guisetbkcolor guisetcoord guisetcursor guisetfont guisethelp guiseticon
- guisetonevent guisetstate guisetstyle guistartgroup guiswitch hex hotkeyset
- httpsetproxy httpsetuseragent hwnd inetclose inetget inetgetinfo inetgetsize
- inetread inidelete iniread inireadsection inireadsectionnames
- inirenamesection iniwrite iniwritesection inputbox int isadmin isarray
- isbinary isbool isdeclared isdllstruct isfloat ishwnd isint iskeyword
- isnumber isobj isptr isstring log memgetstats mod mouseclick mouseclickdrag
- mousedown mousegetcursor mousegetpos mousemove mouseup mousewheel msgbox
- number objcreate objcreateinterface objevent objevent objget objname
- onautoitexitregister onautoitexitunregister opt ping pixelchecksum
- pixelgetcolor pixelsearch pluginclose pluginopen processclose processexists
- processgetstats processlist processsetpriority processwait processwaitclose
- progressoff progresson progressset ptr random regdelete regenumkey
- regenumval regread regwrite round run runas runaswait runwait send
- sendkeepactive seterror setextended shellexecute shellexecutewait shutdown
- sin sleep soundplay soundsetwavevolume splashimageon splashoff splashtexton
- sqrt srandom statusbargettext stderrread stdinwrite stdioclose stdoutread
- string stringaddcr stringcompare stringformat stringfromasciiarray
- stringinstr stringisalnum stringisalpha stringisascii stringisdigit
- stringisfloat stringisint stringislower stringisspace stringisupper
- stringisxdigit stringleft stringlen stringlower stringmid stringregexp
- stringregexpreplace stringreplace stringright stringsplit stringstripcr
- stringstripws stringtoasciiarray stringtobinary stringtrimleft
- stringtrimright stringupper tan tcpaccept tcpclosesocket tcpconnect
- tcplisten tcpnametoip tcprecv tcpsend tcpshutdown tcpstartup timerdiff
- timerinit tooltip traycreateitem traycreatemenu traygetmsg trayitemdelete
- trayitemgethandle trayitemgetstate trayitemgettext trayitemsetonevent
- trayitemsetstate trayitemsettext traysetclick trayseticon traysetonevent
- traysetpauseicon traysetstate traysettooltip traytip ubound udpbind
- udpclosesocket udpopen udprecv udpsend udpshutdown udpstartup vargettype
- winactivate winactive winclose winexists winflash wingetcaretpos
- wingetclasslist wingetclientsize wingethandle wingetpos wingetprocess
- wingetstate wingettext wingettitle winkill winlist winmenuselectitem
- winminimizeall winminimizeallundo winmove winsetontop winsetstate
- winsettitle winsettrans winwait winwaitactive winwaitclose
- winwaitnotactive""".split()
-
- macros = """\
- @appdatacommondir @appdatadir @autoitexe @autoitpid @autoitversion
- @autoitx64 @com_eventobj @commonfilesdir @compiled @computername @comspec
- @cpuarch @cr @crlf @desktopcommondir @desktopdepth @desktopdir
- @desktopheight @desktoprefresh @desktopwidth @documentscommondir @error
- @exitcode @exitmethod @extended @favoritescommondir @favoritesdir
- @gui_ctrlhandle @gui_ctrlid @gui_dragfile @gui_dragid @gui_dropid
- @gui_winhandle @homedrive @homepath @homeshare @hotkeypressed @hour
- @ipaddress1 @ipaddress2 @ipaddress3 @ipaddress4 @kblayout @lf
- @logondnsdomain @logondomain @logonserver @mday @min @mon @msec @muilang
- @mydocumentsdir @numparams @osarch @osbuild @oslang @osservicepack @ostype
- @osversion @programfilesdir @programscommondir @programsdir @scriptdir
- @scriptfullpath @scriptlinenumber @scriptname @sec @startmenucommondir
- @startmenudir @startupcommondir @startupdir @sw_disable @sw_enable @sw_hide
- @sw_lock @sw_maximize @sw_minimize @sw_restore @sw_show @sw_showdefault
- @sw_showmaximized @sw_showminimized @sw_showminnoactive @sw_showna
- @sw_shownoactivate @sw_shownormal @sw_unlock @systemdir @tab @tempdir
- @tray_id @trayiconflashing @trayiconvisible @username @userprofiledir @wday
- @windowsdir @workingdir @yday @year""".split()
-
- tokens = {
- 'root': [
- (r';.*\n', Comment.Single),
- (r'(#comments-start|#cs).*?(#comments-end|#ce)', Comment.Multiline),
- (r'[\[\]{}(),;]', Punctuation),
- (r'(and|or|not)\b', Operator.Word),
- (r'[\$|@][a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),
- (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
- include('commands'),
- include('labels'),
- include('builtInFunctions'),
- include('builtInMarcros'),
- (r'"', String, combined('stringescape', 'dqs')),
- include('numbers'),
- (r'[a-zA-Z_#@$][a-zA-Z0-9_#@$]*', Name),
- (r'\\|\'', Text),
- (r'\`([\,\%\`abfnrtv\-\+;])', String.Escape),
- (r'_\n', Text), # Line continuation
- include('garbage'),
- ],
- 'commands': [
- (r'(?i)(\s*)(%s)\b' % '|'.join(keywords),
- bygroups(Text, Name.Builtin)),
- ],
- 'builtInFunctions': [
- (r'(?i)(%s)\b' % '|'.join(functions),
- Name.Function),
- ],
- 'builtInMarcros': [
- (r'(?i)(%s)\b' % '|'.join(macros),
- Name.Variable.Global),
- ],
- 'labels': [
- # sendkeys
- (r'(^\s*)({\S+?})', bygroups(Text, Name.Label)),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'0\d+', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer)
- ],
- 'stringescape': [
- (r'\"\"|\`([\,\%\`abfnrtv])', String.Escape),
- ],
- 'strings': [
- (r'[^"\n]+', String),
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- include('strings')
- ],
- 'garbage': [
- (r'[^\S\n]', Text),
- ],
- }
-
-
-class RexxLexer(RegexLexer):
- """
- `Rexx <http://www.rexxinfo.org/>`_ is a scripting language available for
- a wide range of different platforms with its roots found on mainframe
- systems. It is popular for I/O- and data-based tasks and can act as a glue
- language to bind different applications together.
-
- *New in Pygments 1.7.*
- """
- name = 'Rexx'
- aliases = ['rexx', 'ARexx', 'arexx']
- filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx']
- mimetypes = ['text/x-rexx']
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\s', Whitespace),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"', String, 'string_double'),
- (r"'", String, 'string_single'),
- (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number),
- (r'([a-z_][a-z0-9_]*)(\s*)(:)(\s*)(procedure)\b',
- bygroups(Name.Function, Whitespace, Operator, Whitespace,
- Keyword.Declaration)),
- (r'([a-z_][a-z0-9_]*)(\s*)(:)',
- bygroups(Name.Label, Whitespace, Operator)),
- include('function'),
- include('keyword'),
- include('operator'),
- (r'[a-z_][a-z0-9_]*', Text),
- ],
- 'function': [
- (r'(abbrev|abs|address|arg|b2x|bitand|bitor|bitxor|c2d|c2x|'
- r'center|charin|charout|chars|compare|condition|copies|d2c|'
- r'd2x|datatype|date|delstr|delword|digits|errortext|form|'
- r'format|fuzz|insert|lastpos|left|length|linein|lineout|lines|'
- r'max|min|overlay|pos|queued|random|reverse|right|sign|'
- r'sourceline|space|stream|strip|substr|subword|symbol|time|'
- r'trace|translate|trunc|value|verify|word|wordindex|'
- r'wordlength|wordpos|words|x2b|x2c|x2d|xrange)(\s*)(\()',
- bygroups(Name.Builtin, Whitespace, Operator)),
- ],
- 'keyword': [
- (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|'
- r'interpret|iterate|leave|nop|numeric|off|on|options|parse|'
- r'pull|push|queue|return|say|select|signal|to|then|trace|until|'
- r'while)\b', Keyword.Reserved),
- ],
- 'operator': [
- (ur'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||'
- ur'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|'
- ur'¬>>|¬>|¬|\.|,)', Operator),
- ],
- 'string_double': [
- (r'[^"\n]+', String),
- (r'""', String),
- (r'"', String, '#pop'),
- (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
- ],
- 'string_single': [
- (r'[^\'\n]', String),
- (r'\'\'', String),
- (r'\'', String, '#pop'),
- (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
- ],
- 'comment': [
- (r'[^*]+', Comment.Multiline),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'\*', Comment.Multiline),
- ]
- }
-
- _c = lambda s: re.compile(s, re.MULTILINE)
- _ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b')
- _ADDRESS_PATTERN = _c(r'^\s*address\s+')
- _DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b')
- _IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$')
- _PROCEDURE_PATTERN = _c(r'^\s*([a-z_][a-z0-9_]*)(\s*)(:)(\s*)(procedure)\b')
- _ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$')
- _PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b')
- PATTERNS_AND_WEIGHTS = (
- (_ADDRESS_COMMAND_PATTERN, 0.2),
- (_ADDRESS_PATTERN, 0.05),
- (_DO_WHILE_PATTERN, 0.1),
- (_ELSE_DO_PATTERN, 0.1),
- (_IF_THEN_DO_PATTERN, 0.1),
- (_PROCEDURE_PATTERN, 0.5),
- (_PARSE_ARG_PATTERN, 0.2),
- )
-
- def analyse_text(text):
- """
- Check for an initial comment and patterns that distinguish Rexx from other
- C-like languages.
- """
- if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE):
- # Header matches MVS Rexx requirements, this is certainly a Rexx
- # script.
- return 1.0
- elif text.startswith('/*'):
- # Header matches general Rexx requirements; the source code might
- # still be any language using C comments such as C++, C# or Java.
- lowerText = text.lower()
- result = sum(weight
- for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS
- if pattern.search(lowerText)) + 0.01
- return min(result, 1.0)
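The weighted patterns only matter for sources that merely open with ``/*``; a header that actually names Rexx short-circuits to 1.0. A small sketch of the effect through ``guess_lexer`` (the sample script is invented, and the result is what the weights are expected to produce, not a guarantee)::

    from pygments.lexers import guess_lexer

    rexx_script = "/* Rexx */\nparse arg name\nsay 'hello' name\nexit 0\n"

    # The comment header matches the "certainly a Rexx script" rule above,
    # so this should report the Rexx lexer.
    print(guess_lexer(rexx_script).name)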
+ TcshLexer
+from pygments.lexers.robotframework import RobotFrameworkLexer
+from pygments.lexers.testing import GherkinLexer
+from pygments.lexers.esoteric import BrainfuckLexer, BefungeLexer, RedcodeLexer
+from pygments.lexers.prolog import LogtalkLexer
+from pygments.lexers.snobol import SnobolLexer
+from pygments.lexers.rebol import RebolLexer
+from pygments.lexers.configs import KconfigLexer, Cfengine3Lexer
+from pygments.lexers.modeling import ModelicaLexer
+from pygments.lexers.scripting import AppleScriptLexer, MOOCodeLexer, \
+ HybrisLexer
+from pygments.lexers.graphics import PostScriptLexer, GnuplotLexer, \
+ AsymptoteLexer, PovrayLexer
+from pygments.lexers.business import ABAPLexer, OpenEdgeLexer, \
+ GoodDataCLLexer, MaqlLexer
+from pygments.lexers.automation import AutoItLexer, AutohotkeyLexer
+from pygments.lexers.dsls import ProtoBufLexer, BroLexer, PuppetLexer, \
+ MscgenLexer, VGLLexer
+from pygments.lexers.basic import CbmBasicV2Lexer
+from pygments.lexers.pawn import SourcePawnLexer, PawnLexer
+from pygments.lexers.ecl import ECLLexer
+from pygments.lexers.urbi import UrbiscriptLexer
+from pygments.lexers.smalltalk import SmalltalkLexer, NewspeakLexer
+from pygments.lexers.installers import NSISLexer, RPMSpecLexer
+from pygments.lexers.textedit import AwkLexer
+
+__all__ = []
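This hunk appears to reduce the old aggregate module to a thin compatibility shim: everything is re-imported from the new per-language modules and ``__all__`` is left empty. Code that resolves lexers through the alias registry is unaffected by the move, as this sketch (not part of the patch; the aliases are taken from the lexer definitions in this diff) shows::

    from pygments.lexers import get_lexer_by_name

    for alias in ('kconfig', 'bro', 'cbmbas', 'mscgen', 'vgl', 'sp',
                  'puppet', 'nsis', 'spec', 'autoit', 'rexx'):
        # Lookup goes through the registry, not the defining module.
        print(alias.ljust(8), '->', type(get_lexer_by_name(alias)).__name__)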
diff --git a/pygments/lexers/parsers.py b/pygments/lexers/parsers.py
index c1ad710f..e1b74dee 100644
--- a/pygments/lexers/parsers.py
+++ b/pygments/lexers/parsers.py
@@ -5,7 +5,7 @@
Lexers for parser generators.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -14,22 +14,24 @@ import re
from pygments.lexer import RegexLexer, DelegatingLexer, \
include, bygroups, using
from pygments.token import Punctuation, Other, Text, Comment, Operator, \
- Keyword, Name, String, Number, Whitespace
-from pygments.lexers.compiled import JavaLexer, CLexer, CppLexer, \
- ObjectiveCLexer, DLexer
+ Keyword, Name, String, Number, Whitespace
+from pygments.lexers.jvm import JavaLexer
+from pygments.lexers.c_cpp import CLexer, CppLexer
+from pygments.lexers.objective import ObjectiveCLexer
+from pygments.lexers.d import DLexer
from pygments.lexers.dotnet import CSharpLexer
-from pygments.lexers.agile import RubyLexer, PythonLexer, PerlLexer
-from pygments.lexers.web import ActionScriptLexer
-
+from pygments.lexers.ruby import RubyLexer
+from pygments.lexers.python import PythonLexer
+from pygments.lexers.perl import PerlLexer
__all__ = ['RagelLexer', 'RagelEmbeddedLexer', 'RagelCLexer', 'RagelDLexer',
'RagelCppLexer', 'RagelObjectiveCLexer', 'RagelRubyLexer',
'RagelJavaLexer', 'AntlrLexer', 'AntlrPythonLexer',
'AntlrPerlLexer', 'AntlrRubyLexer', 'AntlrCppLexer',
- #'AntlrCLexer',
+ # 'AntlrCLexer',
'AntlrCSharpLexer', 'AntlrObjectiveCLexer',
- 'AntlrJavaLexer', "AntlrActionScriptLexer",
- 'TreetopLexer']
+ 'AntlrJavaLexer', 'AntlrActionScriptLexer',
+ 'TreetopLexer', 'EbnfLexer']
class RagelLexer(RegexLexer):
@@ -38,7 +40,7 @@ class RagelLexer(RegexLexer):
fragments of Ragel. For ``.rl`` files, use RagelEmbeddedLexer instead
(or one of the language-specific subclasses).
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'Ragel'
@@ -63,29 +65,29 @@ class RagelLexer(RegexLexer):
(r'[+-]?[0-9]+', Number.Integer),
],
'literals': [
- (r'"(\\\\|\\"|[^"])*"', String), # double quote string
- (r"'(\\\\|\\'|[^'])*'", String), # single quote string
- (r'\[(\\\\|\\\]|[^\]])*\]', String), # square bracket literals
- (r'/(?!\*)(\\\\|\\/|[^/])*/', String.Regex), # regular expressions
+ (r'"(\\\\|\\"|[^"])*"', String), # double quote string
+ (r"'(\\\\|\\'|[^'])*'", String), # single quote string
+ (r'\[(\\\\|\\\]|[^\]])*\]', String), # square bracket literals
+ (r'/(?!\*)(\\\\|\\/|[^/])*/', String.Regex), # regular expressions
],
'identifiers': [
- (r'[a-zA-Z_][a-zA-Z_0-9]*', Name.Variable),
+ (r'[a-zA-Z_]\w*', Name.Variable),
],
'operators': [
- (r',', Operator), # Join
- (r'\||&|--?', Operator), # Union, Intersection and Subtraction
- (r'\.|<:|:>>?', Operator), # Concatenation
- (r':', Operator), # Label
- (r'->', Operator), # Epsilon Transition
- (r'(>|\$|%|<|@|<>)(/|eof\b)', Operator), # EOF Actions
- (r'(>|\$|%|<|@|<>)(!|err\b)', Operator), # Global Error Actions
- (r'(>|\$|%|<|@|<>)(\^|lerr\b)', Operator), # Local Error Actions
- (r'(>|\$|%|<|@|<>)(~|to\b)', Operator), # To-State Actions
- (r'(>|\$|%|<|@|<>)(\*|from\b)', Operator), # From-State Actions
- (r'>|@|\$|%', Operator), # Transition Actions and Priorities
- (r'\*|\?|\+|{[0-9]*,[0-9]*}', Operator), # Repetition
- (r'!|\^', Operator), # Negation
- (r'\(|\)', Operator), # Grouping
+ (r',', Operator), # Join
+ (r'\||&|--?', Operator), # Union, Intersection and Subtraction
+ (r'\.|<:|:>>?', Operator), # Concatenation
+ (r':', Operator), # Label
+ (r'->', Operator), # Epsilon Transition
+ (r'(>|\$|%|<|@|<>)(/|eof\b)', Operator), # EOF Actions
+ (r'(>|\$|%|<|@|<>)(!|err\b)', Operator), # Global Error Actions
+ (r'(>|\$|%|<|@|<>)(\^|lerr\b)', Operator), # Local Error Actions
+ (r'(>|\$|%|<|@|<>)(~|to\b)', Operator), # To-State Actions
+ (r'(>|\$|%|<|@|<>)(\*|from\b)', Operator), # From-State Actions
+ (r'>|@|\$|%', Operator), # Transition Actions and Priorities
+ (r'\*|\?|\+|\{[0-9]*,[0-9]*\}', Operator), # Repetition
+ (r'!|\^', Operator), # Negation
+ (r'\(|\)', Operator), # Grouping
],
'root': [
include('literals'),
@@ -95,21 +97,21 @@ class RagelLexer(RegexLexer):
include('numbers'),
include('identifiers'),
include('operators'),
- (r'{', Punctuation, 'host'),
+ (r'\{', Punctuation, 'host'),
(r'=', Operator),
(r';', Punctuation),
],
'host': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks
- r'[^{}\'"/#]+', # exclude unsafe characters
- r'[^\\][\\][{}]', # allow escaped { or }
+ (r'(' + r'|'.join(( # keep host code in largest possible chunks
+ r'[^{}\'"/#]+', # exclude unsafe characters
+ r'[^\\]\\[{}]', # allow escaped { or }
# strings and comments may safely contain unsafe characters
- r'"(\\\\|\\"|[^"])*"', # double quote string
- r"'(\\\\|\\'|[^'])*'", # single quote string
- r'//.*$\n?', # single line comment
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
- r'\#.*$\n?', # ruby comment
+ r'"(\\\\|\\"|[^"])*"', # double quote string
+ r"'(\\\\|\\'|[^'])*'", # single quote string
+ r'//.*$\n?', # single line comment
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+ r'\#.*$\n?', # ruby comment
# regular expression: There's no reason for it to start
# with a * and this stops confusion with comments.
@@ -119,8 +121,8 @@ class RagelLexer(RegexLexer):
r'/',
)) + r')+', Other),
- (r'{', Punctuation, '#push'),
- (r'}', Punctuation, '#pop'),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
],
}
@@ -132,7 +134,7 @@ class RagelEmbeddedLexer(RegexLexer):
This will only highlight Ragel statements. If you want host language
highlighting then call the language-specific Ragel lexer.
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'Embedded Ragel'
@@ -141,17 +143,17 @@ class RagelEmbeddedLexer(RegexLexer):
tokens = {
'root': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks
- r'[^%\'"/#]+', # exclude unsafe characters
- r'%(?=[^%]|$)', # a single % sign is okay, just not 2 of them
+ (r'(' + r'|'.join(( # keep host code in largest possible chunks
+ r'[^%\'"/#]+', # exclude unsafe characters
+ r'%(?=[^%]|$)', # a single % sign is okay, just not 2 of them
# strings and comments may safely contain unsafe characters
- r'"(\\\\|\\"|[^"])*"', # double quote string
- r"'(\\\\|\\'|[^'])*'", # single quote string
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
- r'//.*$\n?', # single line comment
- r'\#.*$\n?', # ruby/ragel comment
- r'/(?!\*)(\\\\|\\/|[^/])*/', # regular expression
+ r'"(\\\\|\\"|[^"])*"', # double quote string
+ r"'(\\\\|\\'|[^'])*'", # single quote string
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+ r'//.*$\n?', # single line comment
+ r'\#.*$\n?', # ruby/ragel comment
+ r'/(?!\*)(\\\\|\\/|[^/])*/', # regular expression
# / is safe now that we've handled regex and javadoc comments
r'/',
@@ -165,15 +167,15 @@ class RagelEmbeddedLexer(RegexLexer):
Punctuation, Text)),
# Multi Line FSM.
- (r'(%%%%|%%){', Punctuation, 'multi-line-fsm'),
+ (r'(%%%%|%%)\{', Punctuation, 'multi-line-fsm'),
],
'multi-line-fsm': [
- (r'(' + r'|'.join(( # keep ragel code in largest possible chunks.
+ (r'(' + r'|'.join(( # keep ragel code in largest possible chunks.
r'(' + r'|'.join((
- r'[^}\'"\[/#]', # exclude unsafe characters
- r'}(?=[^%]|$)', # } is okay as long as it's not followed by %
- r'}%(?=[^%]|$)', # ...well, one %'s okay, just not two...
- r'[^\\][\\][{}]', # ...and } is okay if it's escaped
+ r'[^}\'"\[/#]', # exclude unsafe characters
+ r'\}(?=[^%]|$)', # } is okay as long as it's not followed by %
+ r'\}%(?=[^%]|$)', # ...well, one %'s okay, just not two...
+ r'[^\\]\\[{}]', # ...and } is okay if it's escaped
# allow / if it's preceded with one of these symbols
# (ragel EOF actions)
@@ -184,35 +186,35 @@ class RagelEmbeddedLexer(RegexLexer):
r'/(?!\*)(\\\\|\\/|[^/])*/\*',
# allow / as long as it's not followed by another / or by a *
- r'/(?=[^/\*]|$)',
+ r'/(?=[^/*]|$)',
# We want to match as many of these as we can in one block.
# Not sure if we need the + sign here,
# does it help performance?
- )) + r')+',
+ )) + r')+',
# strings and comments may safely contain unsafe characters
- r'"(\\\\|\\"|[^"])*"', # double quote string
- r"'(\\\\|\\'|[^'])*'", # single quote string
- r"\[(\\\\|\\\]|[^\]])*\]", # square bracket literal
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
- r'//.*$\n?', # single line comment
- r'\#.*$\n?', # ruby/ragel comment
+ r'"(\\\\|\\"|[^"])*"', # double quote string
+ r"'(\\\\|\\'|[^'])*'", # single quote string
+ r"\[(\\\\|\\\]|[^\]])*\]", # square bracket literal
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+ r'//.*$\n?', # single line comment
+ r'\#.*$\n?', # ruby/ragel comment
)) + r')+', using(RagelLexer)),
- (r'}%%', Punctuation, '#pop'),
+ (r'\}%%', Punctuation, '#pop'),
]
}
def analyse_text(text):
- return '@LANG: indep' in text or 0.1
+ return '@LANG: indep' in text
class RagelRubyLexer(DelegatingLexer):
"""
A lexer for `Ragel`_ in a Ruby host file.
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'Ragel in Ruby Host'
@@ -221,7 +223,7 @@ class RagelRubyLexer(DelegatingLexer):
def __init__(self, **options):
super(RagelRubyLexer, self).__init__(RubyLexer, RagelEmbeddedLexer,
- **options)
+ **options)
def analyse_text(text):
return '@LANG: ruby' in text
@@ -231,7 +233,7 @@ class RagelCLexer(DelegatingLexer):
"""
A lexer for `Ragel`_ in a C host file.
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'Ragel in C Host'
@@ -250,7 +252,7 @@ class RagelDLexer(DelegatingLexer):
"""
A lexer for `Ragel`_ in a D host file.
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'Ragel in D Host'
@@ -268,7 +270,7 @@ class RagelCppLexer(DelegatingLexer):
"""
A lexer for `Ragel`_ in a CPP host file.
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'Ragel in CPP Host'
@@ -286,7 +288,7 @@ class RagelObjectiveCLexer(DelegatingLexer):
"""
A lexer for `Ragel`_ in an Objective C host file.
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'Ragel in Objective C Host'
@@ -306,7 +308,7 @@ class RagelJavaLexer(DelegatingLexer):
"""
A lexer for `Ragel`_ in a Java host file.
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'Ragel in Java Host'
@@ -327,7 +329,7 @@ class AntlrLexer(RegexLexer):
Should not be called directly, instead
use DelegatingLexer for your target language.
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
.. _ANTLR: http://www.antlr.org/
"""
@@ -336,9 +338,9 @@ class AntlrLexer(RegexLexer):
aliases = ['antlr']
filenames = []
- _id = r'[A-Za-z][A-Za-z_0-9]*'
- _TOKEN_REF = r'[A-Z][A-Za-z_0-9]*'
- _RULE_REF = r'[a-z][A-Za-z_0-9]*'
+ _id = r'[A-Za-z]\w*'
+ _TOKEN_REF = r'[A-Z]\w*'
+ _RULE_REF = r'[a-z]\w*'
_STRING_LITERAL = r'\'(?:\\\\|\\\'|[^\']*)\''
_INT = r'[0-9]+'
@@ -362,17 +364,17 @@ class AntlrLexer(RegexLexer):
# tokensSpec
(r'tokens\b', Keyword, 'tokens'),
# attrScope
- (r'(scope)(\s*)(' + _id + ')(\s*)({)',
+ (r'(scope)(\s*)(' + _id + ')(\s*)(\{)',
bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
Punctuation), 'action'),
# exception
(r'(catch|finally)\b', Keyword, 'exception'),
# action
- (r'(@' + _id + ')(\s*)(::)?(\s*)(' + _id + ')(\s*)({)',
+ (r'(@' + _id + ')(\s*)(::)?(\s*)(' + _id + ')(\s*)(\{)',
bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
Name.Label, Whitespace, Punctuation), 'action'),
# rule
- (r'((?:protected|private|public|fragment)\b)?(\s*)(' + _id + ')(!)?', \
+ (r'((?:protected|private|public|fragment)\b)?(\s*)(' + _id + ')(!)?',
bygroups(Keyword, Whitespace, Name.Label, Punctuation),
('rule-alts', 'rule-prelims')),
],
@@ -395,18 +397,18 @@ class AntlrLexer(RegexLexer):
(r'(throws)(\s+)(' + _id + ')',
bygroups(Keyword, Whitespace, Name.Label)),
(r'(,)(\s*)(' + _id + ')',
- bygroups(Punctuation, Whitespace, Name.Label)), # Additional throws
+ bygroups(Punctuation, Whitespace, Name.Label)), # Additional throws
# optionsSpec
(r'options\b', Keyword, 'options'),
# ruleScopeSpec - scope followed by target language code or name of action
# TODO finish implementing other possibilities for scope
# L173 ANTLRv3.g from ANTLR book
- (r'(scope)(\s+)({)', bygroups(Keyword, Whitespace, Punctuation),
- 'action'),
+ (r'(scope)(\s+)(\{)', bygroups(Keyword, Whitespace, Punctuation),
+ 'action'),
(r'(scope)(\s+)(' + _id + ')(\s*)(;)',
bygroups(Keyword, Whitespace, Name.Label, Whitespace, Punctuation)),
# ruleAction
- (r'(@' + _id + ')(\s*)({)',
+ (r'(@' + _id + ')(\s*)(\{)',
bygroups(Name.Label, Whitespace, Punctuation), 'action'),
# finished prelims, go to rule alts!
(r':', Punctuation, '#pop')
@@ -425,9 +427,9 @@ class AntlrLexer(RegexLexer):
(r'<<([^>]|>[^>])>>', String),
# identifiers
# Tokens start with capital letter.
- (r'\$?[A-Z_][A-Za-z_0-9]*', Name.Constant),
+ (r'\$?[A-Z_]\w*', Name.Constant),
# Rules start with small letter.
- (r'\$?[a-z_][A-Za-z_0-9]*', Name.Variable),
+ (r'\$?[a-z_]\w*', Name.Variable),
# operators
(r'(\+|\||->|=>|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)', Operator),
(r',', Punctuation),
@@ -438,32 +440,32 @@ class AntlrLexer(RegexLexer):
'tokens': [
include('whitespace'),
include('comments'),
- (r'{', Punctuation),
+ (r'\{', Punctuation),
(r'(' + _TOKEN_REF + r')(\s*)(=)?(\s*)(' + _STRING_LITERAL
+ ')?(\s*)(;)',
bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
String, Whitespace, Punctuation)),
- (r'}', Punctuation, '#pop'),
+ (r'\}', Punctuation, '#pop'),
],
'options': [
include('whitespace'),
include('comments'),
- (r'{', Punctuation),
+ (r'\{', Punctuation),
(r'(' + _id + r')(\s*)(=)(\s*)(' +
- '|'.join((_id, _STRING_LITERAL, _INT, '\*'))+ ')(\s*)(;)',
+ '|'.join((_id, _STRING_LITERAL, _INT, '\*')) + ')(\s*)(;)',
bygroups(Name.Variable, Whitespace, Punctuation, Whitespace,
Text, Whitespace, Punctuation)),
- (r'}', Punctuation, '#pop'),
+ (r'\}', Punctuation, '#pop'),
],
'action': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks
- r'[^\${}\'"/\\]+', # exclude unsafe characters
+ (r'(' + r'|'.join(( # keep host code in largest possible chunks
+ r'[^${}\'"/\\]+', # exclude unsafe characters
# strings and comments may safely contain unsafe characters
- r'"(\\\\|\\"|[^"])*"', # double quote string
- r"'(\\\\|\\'|[^'])*'", # single quote string
- r'//.*$\n?', # single line comment
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+ r'"(\\\\|\\"|[^"])*"', # double quote string
+ r"'(\\\\|\\'|[^'])*'", # single quote string
+ r'//.*$\n?', # single line comment
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
# regular expression: There's no reason for it to start
# with a * and this stops confusion with comments.
@@ -479,18 +481,18 @@ class AntlrLexer(RegexLexer):
(r'(\\)(%)', bygroups(Punctuation, Other)),
(r'(\$[a-zA-Z]+)(\.?)(text|value)?',
bygroups(Name.Variable, Punctuation, Name.Property)),
- (r'{', Punctuation, '#push'),
- (r'}', Punctuation, '#pop'),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
],
'nested-arg-action': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks.
- r'[^\$\[\]\'"/]+', # exclude unsafe characters
+ (r'(' + r'|'.join(( # keep host code in largest possible chunks.
+ r'[^$\[\]\'"/]+', # exclude unsafe characters
# strings and comments may safely contain unsafe characters
- r'"(\\\\|\\"|[^"])*"', # double quote string
- r"'(\\\\|\\'|[^'])*'", # single quote string
- r'//.*$\n?', # single line comment
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+ r'"(\\\\|\\"|[^"])*"', # double quote string
+ r"'(\\\\|\\'|[^'])*'", # single quote string
+ r'//.*$\n?', # single line comment
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
# regular expression: There's no reason for it to start
# with a * and this stops confusion with comments.
@@ -520,11 +522,11 @@ class AntlrLexer(RegexLexer):
# so just assume they're C++. No idea how to make Objective C work in the
# future.
-#class AntlrCLexer(DelegatingLexer):
+# class AntlrCLexer(DelegatingLexer):
# """
# ANTLR with C Target
#
-# *New in Pygments 1.1*
+# .. versionadded:: 1.1
# """
#
# name = 'ANTLR With C Target'
@@ -537,11 +539,12 @@ class AntlrLexer(RegexLexer):
# def analyse_text(text):
# return re.match(r'^\s*language\s*=\s*C\s*;', text)
+
class AntlrCppLexer(DelegatingLexer):
"""
`ANTLR`_ with CPP Target
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'ANTLR With CPP Target'
@@ -553,14 +556,14 @@ class AntlrCppLexer(DelegatingLexer):
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*C\s*;', text, re.M)
+ re.search(r'^\s*language\s*=\s*C\s*;', text, re.M)
class AntlrObjectiveCLexer(DelegatingLexer):
"""
`ANTLR`_ with Objective-C Target
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'ANTLR With ObjectiveC Target'
@@ -573,14 +576,14 @@ class AntlrObjectiveCLexer(DelegatingLexer):
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*ObjC\s*;', text)
+ re.search(r'^\s*language\s*=\s*ObjC\s*;', text)
class AntlrCSharpLexer(DelegatingLexer):
"""
`ANTLR`_ with C# Target
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'ANTLR With C# Target'
@@ -593,14 +596,14 @@ class AntlrCSharpLexer(DelegatingLexer):
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*CSharp2\s*;', text, re.M)
+ re.search(r'^\s*language\s*=\s*CSharp2\s*;', text, re.M)
class AntlrPythonLexer(DelegatingLexer):
"""
`ANTLR`_ with Python Target
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'ANTLR With Python Target'
@@ -613,14 +616,14 @@ class AntlrPythonLexer(DelegatingLexer):
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*Python\s*;', text, re.M)
+ re.search(r'^\s*language\s*=\s*Python\s*;', text, re.M)
class AntlrJavaLexer(DelegatingLexer):
"""
`ANTLR`_ with Java Target
- *New in Pygments 1.1*
+ .. versionadded:: 1.1
"""
name = 'ANTLR With Java Target'
@@ -640,7 +643,7 @@ class AntlrRubyLexer(DelegatingLexer):
"""
`ANTLR`_ with Ruby Target
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'ANTLR With Ruby Target'
@@ -653,14 +656,14 @@ class AntlrRubyLexer(DelegatingLexer):
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*Ruby\s*;', text, re.M)
+ re.search(r'^\s*language\s*=\s*Ruby\s*;', text, re.M)
class AntlrPerlLexer(DelegatingLexer):
"""
`ANTLR`_ with Perl Target
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'ANTLR With Perl Target'
@@ -673,14 +676,14 @@ class AntlrPerlLexer(DelegatingLexer):
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*Perl5\s*;', text, re.M)
+ re.search(r'^\s*language\s*=\s*Perl5\s*;', text, re.M)
class AntlrActionScriptLexer(DelegatingLexer):
"""
`ANTLR`_ with ActionScript Target
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'ANTLR With ActionScript Target'
@@ -688,19 +691,21 @@ class AntlrActionScriptLexer(DelegatingLexer):
filenames = ['*.G', '*.g']
def __init__(self, **options):
+ from pygments.lexers.actionscript import ActionScriptLexer
super(AntlrActionScriptLexer, self).__init__(ActionScriptLexer,
AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*ActionScript\s*;', text, re.M)
+ re.search(r'^\s*language\s*=\s*ActionScript\s*;', text, re.M)
+
class TreetopBaseLexer(RegexLexer):
"""
A base lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
Not for direct use; use TreetopLexer instead.
- *New in Pygments 1.6.*
+ .. versionadded:: 1.6
"""
tokens = {
@@ -715,43 +720,43 @@ class TreetopBaseLexer(RegexLexer):
include('end'),
(r'module\b', Keyword, '#push'),
(r'grammar\b', Keyword, 'grammar'),
- (r'[A-Z][A-Za-z_0-9]*(?:::[A-Z][A-Za-z_0-9]*)*', Name.Namespace),
+ (r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Namespace),
],
'grammar': [
include('space'),
include('end'),
(r'rule\b', Keyword, 'rule'),
(r'include\b', Keyword, 'include'),
- (r'[A-Z][A-Za-z_0-9]*', Name),
+ (r'[A-Z]\w*', Name),
],
'include': [
include('space'),
- (r'[A-Z][A-Za-z_0-9]*(?:::[A-Z][A-Za-z_0-9]*)*', Name.Class, '#pop'),
+ (r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Class, '#pop'),
],
'rule': [
include('space'),
include('end'),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'([A-Za-z_][A-Za-z_0-9]*)(:)', bygroups(Name.Label, Punctuation)),
- (r'[A-Za-z_][A-Za-z_0-9]*', Name),
+ (r'([A-Za-z_]\w*)(:)', bygroups(Name.Label, Punctuation)),
+ (r'[A-Za-z_]\w*', Name),
(r'[()]', Punctuation),
(r'[?+*/&!~]', Operator),
(r'\[(?:\\.|\[:\^?[a-z]+:\]|[^\\\]])+\]', String.Regex),
(r'([0-9]*)(\.\.)([0-9]*)',
bygroups(Number.Integer, Operator, Number.Integer)),
(r'(<)([^>]+)(>)', bygroups(Punctuation, Name.Class, Punctuation)),
- (r'{', Punctuation, 'inline_module'),
+ (r'\{', Punctuation, 'inline_module'),
(r'\.', String.Regex),
],
'inline_module': [
- (r'{', Other, 'ruby'),
- (r'}', Punctuation, '#pop'),
+ (r'\{', Other, 'ruby'),
+ (r'\}', Punctuation, '#pop'),
(r'[^{}]+', Other),
],
'ruby': [
- (r'{', Other, '#push'),
- (r'}', Other, '#pop'),
+ (r'\{', Other, '#push'),
+ (r'\}', Other, '#pop'),
(r'[^{}]+', Other),
],
'space': [
@@ -763,11 +768,12 @@ class TreetopBaseLexer(RegexLexer):
],
}
+
class TreetopLexer(DelegatingLexer):
"""
A lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
- *New in Pygments 1.6.*
+ .. versionadded:: 1.6
"""
name = 'Treetop'
@@ -776,3 +782,54 @@ class TreetopLexer(DelegatingLexer):
def __init__(self, **options):
super(TreetopLexer, self).__init__(RubyLexer, TreetopBaseLexer, **options)
+
+
+class EbnfLexer(RegexLexer):
+ """
+ Lexer for `ISO/IEC 14977 EBNF
+ <http://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form>`_
+ grammars.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'EBNF'
+ aliases = ['ebnf']
+ filenames = ['*.ebnf']
+ mimetypes = ['text/x-ebnf']
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ include('comment_start'),
+ include('identifier'),
+ (r'=', Operator, 'production'),
+ ],
+ 'production': [
+ include('whitespace'),
+ include('comment_start'),
+ include('identifier'),
+ (r'"[^"]*"', String.Double),
+ (r"'[^']*'", String.Single),
+ (r'(\?[^?]*\?)', Name.Entity),
+ (r'[\[\]{}(),|]', Punctuation),
+ (r'-', Operator),
+ (r';', Punctuation, '#pop'),
+ (r'\.', Punctuation, '#pop'),
+ ],
+ 'whitespace': [
+ (r'\s+', Text),
+ ],
+ 'comment_start': [
+ (r'\(\*', Comment.Multiline, 'comment'),
+ ],
+ 'comment': [
+ (r'[^*)]', Comment.Multiline),
+ include('comment_start'),
+ (r'\*\)', Comment.Multiline, '#pop'),
+ (r'[*)]', Comment.Multiline),
+ ],
+ 'identifier': [
+ (r'([a-zA-Z][\w \-]*)', Keyword),
+ ],
+ }
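A quick way to exercise the new lexer is to tokenize a toy ISO/IEC 14977 grammar; the grammar text below is invented for illustration::

    from pygments.lexers import get_lexer_by_name

    ebnf_source = '(* toy grammar *)\ndigit = "0" | "1" ;\nnumber = digit, { digit } ;\n'

    for token_type, value in get_lexer_by_name('ebnf').get_tokens(ebnf_source):
        if value.strip():
            print(token_type, repr(value))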
diff --git a/pygments/lexers/pascal.py b/pygments/lexers/pascal.py
new file mode 100644
index 00000000..d3ce6a3a
--- /dev/null
+++ b/pygments/lexers/pascal.py
@@ -0,0 +1,641 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.pascal
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Pascal family languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, words, \
+ using, this, default
+from pygments.util import get_bool_opt, get_list_opt
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+from pygments.scanner import Scanner
+
+from pygments.lexers.modula2 import Modula2Lexer
+
+__all__ = ['DelphiLexer', 'AdaLexer']
+
+
+class DelphiLexer(Lexer):
+ """
+ For `Delphi <http://www.borland.com/delphi/>`_ (Borland Object Pascal),
+ Turbo Pascal and Free Pascal source code.
+
+ Additional options accepted:
+
+ `turbopascal`
+ Highlight Turbo Pascal specific keywords (default: ``True``).
+ `delphi`
+ Highlight Borland Delphi specific keywords (default: ``True``).
+ `freepascal`
+ Highlight Free Pascal specific keywords (default: ``True``).
+ `units`
+        A list of units that should be considered builtin; supported are
+        ``System``, ``SysUtils``, ``Classes`` and ``Math``.
+        The default is to consider all of them builtin.
+ """
+ name = 'Delphi'
+ aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
+ filenames = ['*.pas']
+ mimetypes = ['text/x-pascal']
+
+ TURBO_PASCAL_KEYWORDS = (
+ 'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case',
+ 'const', 'constructor', 'continue', 'destructor', 'div', 'do',
+ 'downto', 'else', 'end', 'file', 'for', 'function', 'goto',
+ 'if', 'implementation', 'in', 'inherited', 'inline', 'interface',
+ 'label', 'mod', 'nil', 'not', 'object', 'of', 'on', 'operator',
+ 'or', 'packed', 'procedure', 'program', 'record', 'reintroduce',
+ 'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to',
+ 'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor'
+ )
+
+ DELPHI_KEYWORDS = (
+ 'as', 'class', 'except', 'exports', 'finalization', 'finally',
+ 'initialization', 'is', 'library', 'on', 'property', 'raise',
+ 'threadvar', 'try'
+ )
+
+ FREE_PASCAL_KEYWORDS = (
+ 'dispose', 'exit', 'false', 'new', 'true'
+ )
+
+ BLOCK_KEYWORDS = set((
+ 'begin', 'class', 'const', 'constructor', 'destructor', 'end',
+ 'finalization', 'function', 'implementation', 'initialization',
+ 'label', 'library', 'operator', 'procedure', 'program', 'property',
+ 'record', 'threadvar', 'type', 'unit', 'uses', 'var'
+ ))
+
+ FUNCTION_MODIFIERS = set((
+ 'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
+ 'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
+ 'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
+ 'override', 'assembler'
+ ))
+
+    # XXX: these aren't global, but currently we know of no way to define
+    # them just for the type context.
+ DIRECTIVES = set((
+ 'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
+ 'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
+ 'published', 'public'
+ ))
+
+ BUILTIN_TYPES = set((
+ 'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
+ 'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
+ 'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
+ 'longword', 'pansichar', 'pansistring', 'pbool', 'pboolean',
+ 'pbyte', 'pbytearray', 'pcardinal', 'pchar', 'pcomp', 'pcurrency',
+ 'pdate', 'pdatetime', 'pdouble', 'pdword', 'pextended', 'phandle',
+ 'pint64', 'pinteger', 'plongint', 'plongword', 'pointer',
+ 'ppointer', 'pshortint', 'pshortstring', 'psingle', 'psmallint',
+ 'pstring', 'pvariant', 'pwidechar', 'pwidestring', 'pword',
+ 'pwordarray', 'pwordbool', 'real', 'real48', 'shortint',
+ 'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
+ 'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
+ 'widechar', 'widestring', 'word', 'wordbool'
+ ))
+
+ BUILTIN_UNITS = {
+ 'System': (
+ 'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8',
+ 'append', 'arctan', 'assert', 'assigned', 'assignfile',
+ 'beginthread', 'blockread', 'blockwrite', 'break', 'chdir',
+ 'chr', 'close', 'closefile', 'comptocurrency', 'comptodouble',
+ 'concat', 'continue', 'copy', 'cos', 'dec', 'delete',
+ 'dispose', 'doubletocomp', 'endthread', 'enummodules',
+ 'enumresourcemodules', 'eof', 'eoln', 'erase', 'exceptaddr',
+ 'exceptobject', 'exclude', 'exit', 'exp', 'filepos', 'filesize',
+ 'fillchar', 'finalize', 'findclasshinstance', 'findhinstance',
+ 'findresourcehinstance', 'flush', 'frac', 'freemem',
+ 'get8087cw', 'getdir', 'getlasterror', 'getmem',
+ 'getmemorymanager', 'getmodulefilename', 'getvariantmanager',
+ 'halt', 'hi', 'high', 'inc', 'include', 'initialize', 'insert',
+ 'int', 'ioresult', 'ismemorymanagerset', 'isvariantmanagerset',
+ 'length', 'ln', 'lo', 'low', 'mkdir', 'move', 'new', 'odd',
+ 'olestrtostring', 'olestrtostrvar', 'ord', 'paramcount',
+ 'paramstr', 'pi', 'pos', 'pred', 'ptr', 'pucs4chars', 'random',
+ 'randomize', 'read', 'readln', 'reallocmem',
+ 'releaseexceptionobject', 'rename', 'reset', 'rewrite', 'rmdir',
+ 'round', 'runerror', 'seek', 'seekeof', 'seekeoln',
+ 'set8087cw', 'setlength', 'setlinebreakstyle',
+ 'setmemorymanager', 'setstring', 'settextbuf',
+ 'setvariantmanager', 'sin', 'sizeof', 'slice', 'sqr', 'sqrt',
+ 'str', 'stringofchar', 'stringtoolestr', 'stringtowidechar',
+ 'succ', 'swap', 'trunc', 'truncate', 'typeinfo',
+ 'ucs4stringtowidestring', 'unicodetoutf8', 'uniquestring',
+ 'upcase', 'utf8decode', 'utf8encode', 'utf8toansi',
+ 'utf8tounicode', 'val', 'vararrayredim', 'varclear',
+ 'widecharlentostring', 'widecharlentostrvar',
+ 'widechartostring', 'widechartostrvar',
+ 'widestringtoucs4string', 'write', 'writeln'
+ ),
+ 'SysUtils': (
+ 'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks',
+ 'allocmem', 'ansicomparefilename', 'ansicomparestr',
+ 'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr',
+ 'ansilastchar', 'ansilowercase', 'ansilowercasefilename',
+ 'ansipos', 'ansiquotedstr', 'ansisamestr', 'ansisametext',
+ 'ansistrcomp', 'ansistricomp', 'ansistrlastchar', 'ansistrlcomp',
+ 'ansistrlicomp', 'ansistrlower', 'ansistrpos', 'ansistrrscan',
+ 'ansistrscan', 'ansistrupper', 'ansiuppercase',
+ 'ansiuppercasefilename', 'appendstr', 'assignstr', 'beep',
+ 'booltostr', 'bytetocharindex', 'bytetocharlen', 'bytetype',
+ 'callterminateprocs', 'changefileext', 'charlength',
+ 'chartobyteindex', 'chartobytelen', 'comparemem', 'comparestr',
+ 'comparetext', 'createdir', 'createguid', 'currentyear',
+ 'currtostr', 'currtostrf', 'date', 'datetimetofiledate',
+ 'datetimetostr', 'datetimetostring', 'datetimetosystemtime',
+ 'datetimetotimestamp', 'datetostr', 'dayofweek', 'decodedate',
+ 'decodedatefully', 'decodetime', 'deletefile', 'directoryexists',
+ 'diskfree', 'disksize', 'disposestr', 'encodedate', 'encodetime',
+ 'exceptionerrormessage', 'excludetrailingbackslash',
+ 'excludetrailingpathdelimiter', 'expandfilename',
+ 'expandfilenamecase', 'expanduncfilename', 'extractfiledir',
+ 'extractfiledrive', 'extractfileext', 'extractfilename',
+ 'extractfilepath', 'extractrelativepath', 'extractshortpathname',
+ 'fileage', 'fileclose', 'filecreate', 'filedatetodatetime',
+ 'fileexists', 'filegetattr', 'filegetdate', 'fileisreadonly',
+ 'fileopen', 'fileread', 'filesearch', 'fileseek', 'filesetattr',
+ 'filesetdate', 'filesetreadonly', 'filewrite', 'finalizepackage',
+ 'findclose', 'findcmdlineswitch', 'findfirst', 'findnext',
+ 'floattocurr', 'floattodatetime', 'floattodecimal', 'floattostr',
+ 'floattostrf', 'floattotext', 'floattotextfmt', 'fmtloadstr',
+ 'fmtstr', 'forcedirectories', 'format', 'formatbuf', 'formatcurr',
+ 'formatdatetime', 'formatfloat', 'freeandnil', 'getcurrentdir',
+ 'getenvironmentvariable', 'getfileversion', 'getformatsettings',
+ 'getlocaleformatsettings', 'getmodulename', 'getpackagedescription',
+ 'getpackageinfo', 'gettime', 'guidtostring', 'incamonth',
+ 'includetrailingbackslash', 'includetrailingpathdelimiter',
+ 'incmonth', 'initializepackage', 'interlockeddecrement',
+ 'interlockedexchange', 'interlockedexchangeadd',
+ 'interlockedincrement', 'inttohex', 'inttostr', 'isdelimiter',
+ 'isequalguid', 'isleapyear', 'ispathdelimiter', 'isvalidident',
+ 'languages', 'lastdelimiter', 'loadpackage', 'loadstr',
+ 'lowercase', 'msecstotimestamp', 'newstr', 'nextcharindex', 'now',
+ 'outofmemoryerror', 'quotedstr', 'raiselastoserror',
+ 'raiselastwin32error', 'removedir', 'renamefile', 'replacedate',
+ 'replacetime', 'safeloadlibrary', 'samefilename', 'sametext',
+ 'setcurrentdir', 'showexception', 'sleep', 'stralloc', 'strbufsize',
+ 'strbytetype', 'strcat', 'strcharlength', 'strcomp', 'strcopy',
+ 'strdispose', 'strecopy', 'strend', 'strfmt', 'stricomp',
+ 'stringreplace', 'stringtoguid', 'strlcat', 'strlcomp', 'strlcopy',
+ 'strlen', 'strlfmt', 'strlicomp', 'strlower', 'strmove', 'strnew',
+ 'strnextchar', 'strpas', 'strpcopy', 'strplcopy', 'strpos',
+ 'strrscan', 'strscan', 'strtobool', 'strtobooldef', 'strtocurr',
+ 'strtocurrdef', 'strtodate', 'strtodatedef', 'strtodatetime',
+ 'strtodatetimedef', 'strtofloat', 'strtofloatdef', 'strtoint',
+ 'strtoint64', 'strtoint64def', 'strtointdef', 'strtotime',
+ 'strtotimedef', 'strupper', 'supports', 'syserrormessage',
+ 'systemtimetodatetime', 'texttofloat', 'time', 'timestamptodatetime',
+ 'timestamptomsecs', 'timetostr', 'trim', 'trimleft', 'trimright',
+ 'tryencodedate', 'tryencodetime', 'tryfloattocurr', 'tryfloattodatetime',
+ 'trystrtobool', 'trystrtocurr', 'trystrtodate', 'trystrtodatetime',
+ 'trystrtofloat', 'trystrtoint', 'trystrtoint64', 'trystrtotime',
+ 'unloadpackage', 'uppercase', 'widecomparestr', 'widecomparetext',
+ 'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase',
+ 'widesamestr', 'widesametext', 'wideuppercase', 'win32check',
+ 'wraptext'
+ ),
+ 'Classes': (
+ 'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize',
+ 'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect',
+ 'extractstrings', 'findclass', 'findglobalcomponent', 'getclass',
+ 'groupdescendantswith', 'hextobin', 'identtoint',
+ 'initinheritedcomponent', 'inttoident', 'invalidpoint',
+ 'isuniqueglobalcomponentname', 'linestart', 'objectbinarytotext',
+ 'objectresourcetotext', 'objecttexttobinary', 'objecttexttoresource',
+ 'pointsequal', 'readcomponentres', 'readcomponentresex',
+ 'readcomponentresfile', 'rect', 'registerclass', 'registerclassalias',
+ 'registerclasses', 'registercomponents', 'registerintegerconsts',
+ 'registernoicon', 'registernonactivex', 'smallpoint', 'startclassgroup',
+ 'teststreamformat', 'unregisterclass', 'unregisterclasses',
+ 'unregisterintegerconsts', 'unregistermoduleclasses',
+ 'writecomponentresfile'
+ ),
+ 'Math': (
+ 'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec',
+ 'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil',
+ 'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc',
+ 'csch', 'cycletodeg', 'cycletograd', 'cycletorad', 'degtocycle',
+ 'degtograd', 'degtorad', 'divmod', 'doubledecliningbalance',
+ 'ensurerange', 'floor', 'frexp', 'futurevalue', 'getexceptionmask',
+ 'getprecisionmode', 'getroundmode', 'gradtocycle', 'gradtodeg',
+ 'gradtorad', 'hypot', 'inrange', 'interestpayment', 'interestrate',
+ 'internalrateofreturn', 'intpower', 'isinfinite', 'isnan', 'iszero',
+ 'ldexp', 'lnxp1', 'log10', 'log2', 'logn', 'max', 'maxintvalue',
+ 'maxvalue', 'mean', 'meanandstddev', 'min', 'minintvalue', 'minvalue',
+ 'momentskewkurtosis', 'netpresentvalue', 'norm', 'numberofperiods',
+ 'payment', 'periodpayment', 'poly', 'popnstddev', 'popnvariance',
+ 'power', 'presentvalue', 'radtocycle', 'radtodeg', 'radtograd',
+ 'randg', 'randomrange', 'roundto', 'samevalue', 'sec', 'secant',
+ 'sech', 'setexceptionmask', 'setprecisionmode', 'setroundmode',
+ 'sign', 'simpleroundto', 'sincos', 'sinh', 'slndepreciation', 'stddev',
+ 'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation',
+ 'tan', 'tanh', 'totalvariance', 'variance'
+ )
+ }
+
+ ASM_REGISTERS = set((
+ 'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
+ 'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
+ 'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
+ 'eax', 'ebp', 'ebx', 'ecx', 'edi', 'edx', 'es', 'esi', 'esp',
+ 'fs', 'gs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6',
+ 'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
+ 'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
+ 'xmm6', 'xmm7'
+ ))
+
+ ASM_INSTRUCTIONS = set((
+ 'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
+ 'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
+ 'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
+ 'cmovb', 'cmovbe', 'cmovc', 'cmovcxz', 'cmove', 'cmovg',
+ 'cmovge', 'cmovl', 'cmovle', 'cmovna', 'cmovnae', 'cmovnb',
+ 'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl',
+ 'cmovnle', 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo',
+ 'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', 'cmp', 'cmpsb',
+ 'cmpsd', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', 'cpuid',
+ 'cwd', 'cwde', 'daa', 'das', 'dec', 'div', 'emms', 'enter', 'hlt',
+ 'ibts', 'icebp', 'idiv', 'imul', 'in', 'inc', 'insb', 'insd',
+ 'insw', 'int', 'int01', 'int03', 'int1', 'int3', 'into', 'invd',
+ 'invlpg', 'iret', 'iretd', 'iretw', 'ja', 'jae', 'jb', 'jbe',
+ 'jc', 'jcxz', 'jcxz', 'je', 'jecxz', 'jg', 'jge', 'jl', 'jle',
+ 'jmp', 'jna', 'jnae', 'jnb', 'jnbe', 'jnc', 'jne', 'jng', 'jnge',
+ 'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe',
+ 'jpo', 'js', 'jz', 'lahf', 'lar', 'lcall', 'lds', 'lea', 'leave',
+ 'les', 'lfs', 'lgdt', 'lgs', 'lidt', 'ljmp', 'lldt', 'lmsw',
+ 'loadall', 'loadall286', 'lock', 'lodsb', 'lodsd', 'lodsw',
+ 'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', 'ltr',
+ 'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsw', 'movsx',
+ 'movzx', 'mul', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd',
+ 'outsw', 'pop', 'popa', 'popad', 'popaw', 'popf', 'popfd', 'popfw',
+ 'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfw',
+ 'rcl', 'rcr', 'rdmsr', 'rdpmc', 'rdshr', 'rdtsc', 'rep', 'repe',
+ 'repne', 'repnz', 'repz', 'ret', 'retf', 'retn', 'rol', 'ror',
+ 'rsdc', 'rsldt', 'rsm', 'sahf', 'sal', 'salc', 'sar', 'sbb',
+ 'scasb', 'scasd', 'scasw', 'seta', 'setae', 'setb', 'setbe',
+ 'setc', 'setcxz', 'sete', 'setg', 'setge', 'setl', 'setle',
+ 'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng',
+ 'setnge', 'setnl', 'setnle', 'setno', 'setnp', 'setns', 'setnz',
+ 'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', 'sgdt', 'shl',
+ 'shld', 'shr', 'shrd', 'sidt', 'sldt', 'smi', 'smint', 'smintold',
+ 'smsw', 'stc', 'std', 'sti', 'stosb', 'stosd', 'stosw', 'str',
+ 'sub', 'svdc', 'svldt', 'svts', 'syscall', 'sysenter', 'sysexit',
+ 'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
+ 'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
+ 'xlatb', 'xor'
+ ))
+
+ def __init__(self, **options):
+ Lexer.__init__(self, **options)
+ self.keywords = set()
+ if get_bool_opt(options, 'turbopascal', True):
+ self.keywords.update(self.TURBO_PASCAL_KEYWORDS)
+ if get_bool_opt(options, 'delphi', True):
+ self.keywords.update(self.DELPHI_KEYWORDS)
+ if get_bool_opt(options, 'freepascal', True):
+ self.keywords.update(self.FREE_PASCAL_KEYWORDS)
+ self.builtins = set()
+ for unit in get_list_opt(options, 'units', list(self.BUILTIN_UNITS)):
+ self.builtins.update(self.BUILTIN_UNITS[unit])
+
+ def get_tokens_unprocessed(self, text):
+ scanner = Scanner(text, re.DOTALL | re.MULTILINE | re.IGNORECASE)
+ stack = ['initial']
+ in_function_block = False
+ in_property_block = False
+ was_dot = False
+ next_token_is_function = False
+ next_token_is_property = False
+ collect_labels = False
+ block_labels = set()
+ brace_balance = [0, 0]
+
+ while not scanner.eos:
+ token = Error
+
+ if stack[-1] == 'initial':
+ if scanner.scan(r'\s+'):
+ token = Text
+ elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
+ if scanner.match.startswith('$'):
+ token = Comment.Preproc
+ else:
+ token = Comment.Multiline
+ elif scanner.scan(r'//.*?$'):
+ token = Comment.Single
+ elif scanner.scan(r'[-+*\/=<>:;,.@\^]'):
+ token = Operator
+ # stop label highlighting on next ";"
+ if collect_labels and scanner.match == ';':
+ collect_labels = False
+ elif scanner.scan(r'[\(\)\[\]]+'):
+ token = Punctuation
+ # abort function naming ``foo = Function(...)``
+ next_token_is_function = False
+ # if we are in a function block we count the open
+                    # braces because otherwise it's impossible to
+ # determine the end of the modifier context
+ if in_function_block or in_property_block:
+ if scanner.match == '(':
+ brace_balance[0] += 1
+ elif scanner.match == ')':
+ brace_balance[0] -= 1
+ elif scanner.match == '[':
+ brace_balance[1] += 1
+ elif scanner.match == ']':
+ brace_balance[1] -= 1
+ elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
+ lowercase_name = scanner.match.lower()
+ if lowercase_name == 'result':
+ token = Name.Builtin.Pseudo
+ elif lowercase_name in self.keywords:
+ token = Keyword
+                        # if we are in a special block and a
+                        # block-ending keyword occurs (and the parentheses
+                        # are balanced) we end the current block context
+ if (in_function_block or in_property_block) and \
+ lowercase_name in self.BLOCK_KEYWORDS and \
+ brace_balance[0] <= 0 and \
+ brace_balance[1] <= 0:
+ in_function_block = False
+ in_property_block = False
+ brace_balance = [0, 0]
+ block_labels = set()
+ if lowercase_name in ('label', 'goto'):
+ collect_labels = True
+ elif lowercase_name == 'asm':
+ stack.append('asm')
+ elif lowercase_name == 'property':
+ in_property_block = True
+ next_token_is_property = True
+ elif lowercase_name in ('procedure', 'operator',
+ 'function', 'constructor',
+ 'destructor'):
+ in_function_block = True
+ next_token_is_function = True
+ # we are in a function block and the current name
+ # is in the set of registered modifiers. highlight
+ # it as pseudo keyword
+ elif in_function_block and \
+ lowercase_name in self.FUNCTION_MODIFIERS:
+ token = Keyword.Pseudo
+ # if we are in a property highlight some more
+ # modifiers
+ elif in_property_block and \
+ lowercase_name in ('read', 'write'):
+ token = Keyword.Pseudo
+ next_token_is_function = True
+ # if the last iteration set next_token_is_function
+ # to true we now want this name highlighted as
+ # function. so do that and reset the state
+ elif next_token_is_function:
+                        # Check whether the next token is a dot. If so, this
+                        # is not a function but a class name, and the part
+                        # after the dot is the function name
+ if scanner.test(r'\s*\.\s*'):
+ token = Name.Class
+ # it's not a dot, our job is done
+ else:
+ token = Name.Function
+ next_token_is_function = False
+ # same for properties
+ elif next_token_is_property:
+ token = Name.Property
+ next_token_is_property = False
+ # Highlight this token as label and add it
+ # to the list of known labels
+ elif collect_labels:
+ token = Name.Label
+ block_labels.add(scanner.match.lower())
+ # name is in list of known labels
+ elif lowercase_name in block_labels:
+ token = Name.Label
+ elif lowercase_name in self.BUILTIN_TYPES:
+ token = Keyword.Type
+ elif lowercase_name in self.DIRECTIVES:
+ token = Keyword.Pseudo
+ # builtins are just builtins if the token
+ # before isn't a dot
+ elif not was_dot and lowercase_name in self.builtins:
+ token = Name.Builtin
+ else:
+ token = Name
+ elif scanner.scan(r"'"):
+ token = String
+ stack.append('string')
+ elif scanner.scan(r'\#(\d+|\$[0-9A-Fa-f]+)'):
+ token = String.Char
+ elif scanner.scan(r'\$[0-9A-Fa-f]+'):
+ token = Number.Hex
+ elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
+ token = Number.Integer
+ elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
+ token = Number.Float
+ else:
+                    # if the stack is deeper than one level, pop
+ if len(stack) > 1:
+ stack.pop()
+ scanner.get_char()
+
+ elif stack[-1] == 'string':
+ if scanner.scan(r"''"):
+ token = String.Escape
+ elif scanner.scan(r"'"):
+ token = String
+ stack.pop()
+ elif scanner.scan(r"[^']*"):
+ token = String
+ else:
+ scanner.get_char()
+ stack.pop()
+
+ elif stack[-1] == 'asm':
+ if scanner.scan(r'\s+'):
+ token = Text
+ elif scanner.scan(r'end'):
+ token = Keyword
+ stack.pop()
+ elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
+ if scanner.match.startswith('$'):
+ token = Comment.Preproc
+ else:
+ token = Comment.Multiline
+ elif scanner.scan(r'//.*?$'):
+ token = Comment.Single
+ elif scanner.scan(r"'"):
+ token = String
+ stack.append('string')
+ elif scanner.scan(r'@@[A-Za-z_][A-Za-z_0-9]*'):
+ token = Name.Label
+ elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
+ lowercase_name = scanner.match.lower()
+ if lowercase_name in self.ASM_INSTRUCTIONS:
+ token = Keyword
+ elif lowercase_name in self.ASM_REGISTERS:
+ token = Name.Builtin
+ else:
+ token = Name
+ elif scanner.scan(r'[-+*\/=<>:;,.@\^]+'):
+ token = Operator
+ elif scanner.scan(r'[\(\)\[\]]+'):
+ token = Punctuation
+ elif scanner.scan(r'\$[0-9A-Fa-f]+'):
+ token = Number.Hex
+ elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
+ token = Number.Integer
+ elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
+ token = Number.Float
+ else:
+ scanner.get_char()
+ stack.pop()
+
+            # remember whether the last significant token was a dot
+ if scanner.match.strip():
+ was_dot = scanner.match == '.'
+ yield scanner.start_pos, token, scanner.match or ''
+
+
+class AdaLexer(RegexLexer):
+ """
+ For Ada source code.
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Ada'
+ aliases = ['ada', 'ada95', 'ada2005']
+ filenames = ['*.adb', '*.ads', '*.ada']
+ mimetypes = ['text/x-ada']
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+ (r'--.*?\n', Comment.Single),
+ (r'[^\S\n]+', Text),
+ (r'function|procedure|entry', Keyword.Declaration, 'subprogram'),
+ (r'(subtype|type)(\s+)(\w+)',
+ bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
+ (r'task|protected', Keyword.Declaration),
+ (r'(subtype)(\s+)', bygroups(Keyword.Declaration, Text)),
+ (r'(end)(\s+)', bygroups(Keyword.Reserved, Text), 'end'),
+ (r'(pragma)(\s+)(\w+)', bygroups(Keyword.Reserved, Text,
+ Comment.Preproc)),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (words((
+ 'Address', 'Byte', 'Boolean', 'Character', 'Controlled', 'Count', 'Cursor',
+ 'Duration', 'File_Mode', 'File_Type', 'Float', 'Generator', 'Integer', 'Long_Float',
+ 'Long_Integer', 'Long_Long_Float', 'Long_Long_Integer', 'Natural', 'Positive',
+ 'Reference_Type', 'Short_Float', 'Short_Integer', 'Short_Short_Float',
+ 'Short_Short_Integer', 'String', 'Wide_Character', 'Wide_String'), suffix=r'\b'),
+ Keyword.Type),
+            (r'(and(\s+then)?|in|mod|not|or(\s+else)?|rem)\b', Operator.Word),
+ (r'generic|private', Keyword.Declaration),
+ (r'package', Keyword.Declaration, 'package'),
+ (r'array\b', Keyword.Reserved, 'array_def'),
+ (r'(with|use)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'(\w+)(\s*)(:)(\s*)(constant)',
+ bygroups(Name.Constant, Text, Punctuation, Text,
+ Keyword.Reserved)),
+ (r'<<\w+>>', Name.Label),
+ (r'(\w+)(\s*)(:)(\s*)(declare|begin|loop|for|while)',
+ bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)),
+ (words((
+ 'abort', 'abs', 'abstract', 'accept', 'access', 'aliased', 'all',
+ 'array', 'at', 'begin', 'body', 'case', 'constant', 'declare',
+ 'delay', 'delta', 'digits', 'do', 'else', 'elsif', 'end', 'entry',
+ 'exception', 'exit', 'interface', 'for', 'goto', 'if', 'is', 'limited',
+ 'loop', 'new', 'null', 'of', 'or', 'others', 'out', 'overriding',
+ 'pragma', 'protected', 'raise', 'range', 'record', 'renames', 'requeue',
+ 'return', 'reverse', 'select', 'separate', 'subtype', 'synchronized',
+ 'task', 'tagged', 'terminate', 'then', 'type', 'until', 'when',
+ 'while', 'xor'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ (r'"[^"]*"', String),
+ include('attribute'),
+ include('numbers'),
+ (r"'[^']'", String.Character),
+ (r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
+ (r"(<>|=>|:=|[()|:;,.'])", Punctuation),
+ (r'[*<>+=/&-]', Operator),
+ (r'\n+', Text),
+ ],
+ 'numbers': [
+ (r'[0-9_]+#[0-9a-f]+#', Number.Hex),
+ (r'[0-9_]+\.[0-9_]*', Number.Float),
+ (r'[0-9_]+', Number.Integer),
+ ],
+ 'attribute': [
+ (r"(')(\w+)", bygroups(Punctuation, Name.Attribute)),
+ ],
+ 'subprogram': [
+ (r'\(', Punctuation, ('#pop', 'formal_part')),
+ (r';', Punctuation, '#pop'),
+ (r'is\b', Keyword.Reserved, '#pop'),
+ (r'"[^"]+"|\w+', Name.Function),
+ include('root'),
+ ],
+ 'end': [
+ ('(if|case|record|loop|select)', Keyword.Reserved),
+ ('"[^"]+"|[\w.]+', Name.Function),
+ ('\s+', Text),
+ (';', Punctuation, '#pop'),
+ ],
+ 'type_def': [
+ (r';', Punctuation, '#pop'),
+ (r'\(', Punctuation, 'formal_part'),
+ (r'with|and|use', Keyword.Reserved),
+ (r'array\b', Keyword.Reserved, ('#pop', 'array_def')),
+ (r'record\b', Keyword.Reserved, ('record_def')),
+ (r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation), '#pop'),
+ include('root'),
+ ],
+ 'array_def': [
+ (r';', Punctuation, '#pop'),
+ (r'(\w+)(\s+)(range)', bygroups(Keyword.Type, Text, Keyword.Reserved)),
+ include('root'),
+ ],
+ 'record_def': [
+ (r'end record', Keyword.Reserved, '#pop'),
+ include('root'),
+ ],
+ 'import': [
+ (r'[\w.]+', Name.Namespace, '#pop'),
+ default('#pop'),
+ ],
+ 'formal_part': [
+ (r'\)', Punctuation, '#pop'),
+ (r'\w+', Name.Variable),
+ (r',|:[^=]', Punctuation),
+ (r'(in|not|null|out|access)\b', Keyword.Reserved),
+ include('root'),
+ ],
+ 'package': [
+ ('body', Keyword.Declaration),
+ ('is\s+new|renames', Keyword.Reserved),
+ ('is', Keyword.Reserved, '#pop'),
+ (';', Punctuation, '#pop'),
+ ('\(', Punctuation, 'package_instantiation'),
+ ('([\w.]+)', Name.Class),
+ include('root'),
+ ],
+ 'package_instantiation': [
+ (r'("[^"]+"|\w+)(\s+)(=>)', bygroups(Name.Variable, Text, Punctuation)),
+ (r'[\w.\'"]', Text),
+ (r'\)', Punctuation, '#pop'),
+ include('root'),
+ ],
+ }
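
Not part of the diff: a minimal sketch of how the DelphiLexer options documented in its
docstring (turbopascal, delphi, freepascal, units) are meant to be passed in; they are consumed
by get_bool_opt/get_list_opt in __init__ above. The Pascal snippet and the choice of formatter
are assumptions for illustration only.

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers.pascal import DelphiLexer  # module introduced by this diff

# Turn off Delphi-only keywords and restrict builtin highlighting to the
# System unit; an unknown unit name would raise a KeyError in __init__.
lexer = DelphiLexer(delphi=False, units=['System'])

code = "program Demo;\nbegin\n  WriteLn('hello');\nend.\n"
print(highlight(code, lexer, HtmlFormatter()))

With these options, WriteLn still comes out as Name.Builtin (it lives in the System unit list
above), while Delphi-specific keywords such as ``try`` fall back to plain names.
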
diff --git a/pygments/lexers/pawn.py b/pygments/lexers/pawn.py
new file mode 100644
index 00000000..f32fdbed
--- /dev/null
+++ b/pygments/lexers/pawn.py
@@ -0,0 +1,199 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.pawn
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Pawn languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+from pygments.util import get_bool_opt
+
+__all__ = ['SourcePawnLexer', 'PawnLexer']
+
+
+class SourcePawnLexer(RegexLexer):
+ """
+ For SourcePawn source code with preprocessor directives.
+
+ .. versionadded:: 1.6
+ """
+ name = 'SourcePawn'
+ aliases = ['sp']
+ filenames = ['*.sp']
+ mimetypes = ['text/x-sourcepawn']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
+ #: only one /* */ style comment
+ _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
+
+ tokens = {
+ 'root': [
+ # preprocessor directives: without whitespace
+ ('^#if\s+0', Comment.Preproc, 'if0'),
+ ('^#', Comment.Preproc, 'macro'),
+ # or with whitespace
+ ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
+ ('^' + _ws1 + '#', Comment.Preproc, 'macro'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
+ (r'[{}]', Punctuation),
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'0[0-7]+[LlUu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'\*/', Error),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.;]', Punctuation),
+ (r'(case|const|continue|native|'
+ r'default|else|enum|for|if|new|operator|'
+ r'public|return|sizeof|static|decl|struct|switch)\b', Keyword),
+ (r'(bool|Float)\b', Keyword.Type),
+ (r'(true|false)\b', Keyword.Constant),
+ ('[a-zA-Z_]\w*', Name),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/\*(.|\n)*?\*/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
+
+ SM_TYPES = set(('Action', 'bool', 'Float', 'Plugin', 'String', 'any',
+ 'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType',
+ 'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart',
+ 'CookieAccess', 'CookieMenu', 'CookieMenuAction', 'NetFlow',
+ 'ConVarBounds', 'QueryCookie', 'ReplySource',
+ 'ConVarQueryResult', 'ConVarQueryFinished', 'Function',
+ 'Action', 'Identity', 'PluginStatus', 'PluginInfo', 'DBResult',
+ 'DBBindType', 'DBPriority', 'PropType', 'PropFieldType',
+ 'MoveType', 'RenderMode', 'RenderFx', 'EventHookMode',
+ 'EventHook', 'FileType', 'FileTimeMode', 'PathType',
+ 'ParamType', 'ExecType', 'DialogType', 'Handle', 'KvDataTypes',
+ 'NominateResult', 'MapChange', 'MenuStyle', 'MenuAction',
+ 'MenuSource', 'RegexError', 'SDKCallType', 'SDKLibrary',
+ 'SDKFuncConfSource', 'SDKType', 'SDKPassMethod', 'RayType',
+ 'TraceEntityFilter', 'ListenOverride', 'SortOrder', 'SortType',
+ 'SortFunc2D', 'APLRes', 'FeatureType', 'FeatureStatus',
+ 'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond',
+ 'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType',
+ 'TopMenuPosition', 'TopMenuObject', 'UserMsg'))
+
+ def __init__(self, **options):
+ self.smhighlighting = get_bool_opt(options,
+ 'sourcemod', True)
+
+ self._functions = set()
+ if self.smhighlighting:
+ from pygments.lexers._sourcemod_builtins import FUNCTIONS
+ self._functions.update(FUNCTIONS)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if self.smhighlighting:
+ if value in self.SM_TYPES:
+ token = Keyword.Type
+ elif value in self._functions:
+ token = Name.Builtin
+ yield index, token, value
+
+
+class PawnLexer(RegexLexer):
+ """
+ For Pawn source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Pawn'
+ aliases = ['pawn']
+ filenames = ['*.p', '*.pwn', '*.inc']
+ mimetypes = ['text/x-pawn']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*][\w\W]*?[*]/)+'
+ #: only one /* */ style comment
+ _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
+
+ tokens = {
+ 'root': [
+ # preprocessor directives: without whitespace
+ ('^#if\s+0', Comment.Preproc, 'if0'),
+ ('^#', Comment.Preproc, 'macro'),
+ # or with whitespace
+ ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
+ ('^' + _ws1 + '#', Comment.Preproc, 'macro'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?\*[\w\W]*?\*(\\\n)?/', Comment.Multiline),
+ (r'[{}]', Punctuation),
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'0[0-7]+[LlUu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'\*/', Error),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.;]', Punctuation),
+ (r'(switch|case|default|const|new|static|char|continue|break|'
+ r'if|else|for|while|do|operator|enum|'
+ r'public|return|sizeof|tagof|state|goto)\b', Keyword),
+ (r'(bool|Float)\b', Keyword.Type),
+ (r'(true|false)\b', Keyword.Constant),
+ ('[a-zA-Z_]\w*', Name),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/\*(.|\n)*?\*/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
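
Not part of the diff: a sketch of what the `sourcemod` option of SourcePawnLexer controls.
When it is left at its default of True, get_tokens_unprocessed above re-tags plain Name tokens
that appear in SM_TYPES or in the FUNCTIONS list from pygments.lexers._sourcemod_builtins; with
sourcemod=False they stay plain Name. The SourcePawn snippet and the printed expectations are
illustrative assumptions.

from pygments.lexers.pawn import SourcePawnLexer  # module introduced by this diff
from pygments.token import Keyword

code = "public Action OnPluginStart() { return Plugin_Handled; }\n"

with_sm = list(SourcePawnLexer().get_tokens(code))
without_sm = list(SourcePawnLexer(sourcemod=False).get_tokens(code))

# 'Action' is in SM_TYPES, so it should be Keyword.Type only when the
# SourceMod-specific highlighting is enabled.
print((Keyword.Type, 'Action') in with_sm)     # expected: True
print((Keyword.Type, 'Action') in without_sm)  # expected: False
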
diff --git a/pygments/lexers/perl.py b/pygments/lexers/perl.py
new file mode 100644
index 00000000..b78963d0
--- /dev/null
+++ b/pygments/lexers/perl.py
@@ -0,0 +1,615 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.perl
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Perl and related languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
+ using, this, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+from pygments.util import shebang_matches
+
+__all__ = ['PerlLexer', 'Perl6Lexer']
+
+
+class PerlLexer(RegexLexer):
+ """
+ For `Perl <http://www.perl.org>`_ source code.
+ """
+
+ name = 'Perl'
+ aliases = ['perl', 'pl']
+ filenames = ['*.pl', '*.pm', '*.t']
+ mimetypes = ['text/x-perl', 'application/x-perl']
+
+ flags = re.DOTALL | re.MULTILINE
+ # TODO: give this to a perl guy who knows how to parse perl...
+ tokens = {
+ 'balanced-regex': [
+ (r'/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*', String.Regex, '#pop'),
+ (r'!(\\\\|\\[^\\]|[^\\!])*![egimosx]*', String.Regex, '#pop'),
+ (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
+ (r'\{(\\\\|\\[^\\]|[^\\}])*\}[egimosx]*', String.Regex, '#pop'),
+ (r'<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*', String.Regex, '#pop'),
+ (r'\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*', String.Regex, '#pop'),
+ (r'\((\\\\|\\[^\\]|[^\\)])*\)[egimosx]*', String.Regex, '#pop'),
+ (r'@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*', String.Regex, '#pop'),
+ (r'%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*', String.Regex, '#pop'),
+ (r'\$(\\\\|\\[^\\]|[^\\$])*\$[egimosx]*', String.Regex, '#pop'),
+ ],
+ 'root': [
+ (r'\A\#!.+?$', Comment.Hashbang),
+ (r'\#.*?$', Comment.Single),
+ (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline),
+ (words((
+ 'case', 'continue', 'do', 'else', 'elsif', 'for', 'foreach',
+ 'if', 'last', 'my', 'next', 'our', 'redo', 'reset', 'then',
+ 'unless', 'until', 'while', 'use', 'print', 'new', 'BEGIN',
+ 'CHECK', 'INIT', 'END', 'return'), suffix=r'\b'),
+ Keyword),
+ (r'(format)(\s+)(\w+)(\s*)(=)(\s*\n)',
+ bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'),
+ (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
+ # common delimiters
+ (r's/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*',
+ String.Regex),
+ (r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex),
+ (r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex),
+ (r's@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*',
+ String.Regex),
+ (r's%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*',
+ String.Regex),
+ # balanced delimiters
+ (r's\{(\\\\|\\[^\\]|[^\\}])*\}\s*', String.Regex, 'balanced-regex'),
+ (r's<(\\\\|\\[^\\]|[^\\>])*>\s*', String.Regex, 'balanced-regex'),
+ (r's\[(\\\\|\\[^\\]|[^\\\]])*\]\s*', String.Regex,
+ 'balanced-regex'),
+ (r's\((\\\\|\\[^\\]|[^\\)])*\)\s*', String.Regex,
+ 'balanced-regex'),
+
+ (r'm?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*', String.Regex),
+ (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
+ (r'((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*',
+ String.Regex),
+ (r'\s+', Text),
+ (words((
+ 'abs', 'accept', 'alarm', 'atan2', 'bind', 'binmode', 'bless', 'caller', 'chdir',
+ 'chmod', 'chomp', 'chop', 'chown', 'chr', 'chroot', 'close', 'closedir', 'connect',
+ 'continue', 'cos', 'crypt', 'dbmclose', 'dbmopen', 'defined', 'delete', 'die',
+ 'dump', 'each', 'endgrent', 'endhostent', 'endnetent', 'endprotoent',
+ 'endpwent', 'endservent', 'eof', 'eval', 'exec', 'exists', 'exit', 'exp', 'fcntl',
+ 'fileno', 'flock', 'fork', 'format', 'formline', 'getc', 'getgrent', 'getgrgid',
+ 'getgrnam', 'gethostbyaddr', 'gethostbyname', 'gethostent', 'getlogin',
+ 'getnetbyaddr', 'getnetbyname', 'getnetent', 'getpeername', 'getpgrp',
+ 'getppid', 'getpriority', 'getprotobyname', 'getprotobynumber',
+ 'getprotoent', 'getpwent', 'getpwnam', 'getpwuid', 'getservbyname',
+ 'getservbyport', 'getservent', 'getsockname', 'getsockopt', 'glob', 'gmtime',
+ 'goto', 'grep', 'hex', 'import', 'index', 'int', 'ioctl', 'join', 'keys', 'kill', 'last',
+ 'lc', 'lcfirst', 'length', 'link', 'listen', 'local', 'localtime', 'log', 'lstat',
+ 'map', 'mkdir', 'msgctl', 'msgget', 'msgrcv', 'msgsnd', 'my', 'next', 'no', 'oct', 'open',
+ 'opendir', 'ord', 'our', 'pack', 'package', 'pipe', 'pop', 'pos', 'printf',
+ 'prototype', 'push', 'quotemeta', 'rand', 'read', 'readdir',
+ 'readline', 'readlink', 'readpipe', 'recv', 'redo', 'ref', 'rename', 'require',
+ 'reverse', 'rewinddir', 'rindex', 'rmdir', 'scalar', 'seek', 'seekdir',
+ 'select', 'semctl', 'semget', 'semop', 'send', 'setgrent', 'sethostent', 'setnetent',
+ 'setpgrp', 'setpriority', 'setprotoent', 'setpwent', 'setservent',
+ 'setsockopt', 'shift', 'shmctl', 'shmget', 'shmread', 'shmwrite', 'shutdown',
+ 'sin', 'sleep', 'socket', 'socketpair', 'sort', 'splice', 'split', 'sprintf', 'sqrt',
+ 'srand', 'stat', 'study', 'substr', 'symlink', 'syscall', 'sysopen', 'sysread',
+ 'sysseek', 'system', 'syswrite', 'tell', 'telldir', 'tie', 'tied', 'time', 'times', 'tr',
+ 'truncate', 'uc', 'ucfirst', 'umask', 'undef', 'unlink', 'unpack', 'unshift', 'untie',
+ 'utime', 'values', 'vec', 'wait', 'waitpid', 'wantarray', 'warn', 'write'), suffix=r'\b'),
+ Name.Builtin),
+ (r'((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b', Name.Builtin.Pseudo),
+ (r'<<([\'"]?)([a-zA-Z_]\w*)\1;?\n.*?\n\2\n', String),
+ (r'__END__', Comment.Preproc, 'end-part'),
+ (r'\$\^[ADEFHILMOPSTWX]', Name.Variable.Global),
+ (r"\$[\\\"\[\]'&`+*.,;=%~?@$!<>(^|/-](?!\w)", Name.Variable.Global),
+ (r'[$@%#]+', Name.Variable, 'varname'),
+ (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
+ (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
+ (r'0b[01]+(_[01]+)*', Number.Bin),
+ (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
+ Number.Float),
+ (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
+ (r'\d+(_\d+)*', Number.Integer),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'`(\\\\|\\[^\\]|[^`\\])*`', String.Backtick),
+ (r'<([^\s>]+)>', String.Regex),
+ (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
+ (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
+ (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
+ (r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'),
+ (r'(q|qq|qw|qr|qx)([\W_])(.|\n)*?\2', String.Other),
+ (r'package\s+', Keyword, 'modulename'),
+ (r'sub\s+', Keyword, 'funcname'),
+ (r'(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|'
+ r'!~|&&?|\|\||\.{1,3})', Operator),
+ (r'[-+/*%=<>&^|!\\~]=?', Operator),
+ (r'[()\[\]:;,<>/?{}]', Punctuation), # yes, there's no shortage
+ # of punctuation in Perl!
+ (r'(?=\w)', Name, 'name'),
+ ],
+ 'format': [
+ (r'\.\n', String.Interpol, '#pop'),
+ (r'[^\n]*\n', String.Interpol),
+ ],
+ 'varname': [
+ (r'\s+', Text),
+ (r'\{', Punctuation, '#pop'), # hash syntax?
+ (r'\)|,', Punctuation, '#pop'), # argument specifier
+ (r'\w+::', Name.Namespace),
+ (r'[\w:]+', Name.Variable, '#pop'),
+ ],
+ 'name': [
+ (r'\w+::', Name.Namespace),
+ (r'[\w:]+', Name, '#pop'),
+ (r'[A-Z_]+(?=\W)', Name.Constant, '#pop'),
+ (r'(?=\W)', Text, '#pop'),
+ ],
+ 'modulename': [
+ (r'[a-zA-Z_]\w*', Name.Namespace, '#pop')
+ ],
+ 'funcname': [
+ (r'[a-zA-Z_]\w*[!?]?', Name.Function),
+ (r'\s+', Text),
+ # argument declaration
+ (r'(\([$@%]*\))(\s*)', bygroups(Punctuation, Text)),
+ (r';', Punctuation, '#pop'),
+ (r'.*?\{', Punctuation, '#pop'),
+ ],
+ 'cb-string': [
+ (r'\\[{}\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\{', String.Other, 'cb-string'),
+ (r'\}', String.Other, '#pop'),
+ (r'[^{}\\]+', String.Other)
+ ],
+ 'rb-string': [
+ (r'\\[()\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\(', String.Other, 'rb-string'),
+ (r'\)', String.Other, '#pop'),
+ (r'[^()]+', String.Other)
+ ],
+ 'sb-string': [
+ (r'\\[\[\]\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\[', String.Other, 'sb-string'),
+ (r'\]', String.Other, '#pop'),
+ (r'[^\[\]]+', String.Other)
+ ],
+ 'lt-string': [
+ (r'\\[<>\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\<', String.Other, 'lt-string'),
+ (r'\>', String.Other, '#pop'),
+ (r'[^<>]+', String.Other)
+ ],
+ 'end-part': [
+ (r'.+', Comment.Preproc, '#pop')
+ ]
+ }
+
+ def analyse_text(text):
+ if shebang_matches(text, r'perl'):
+ return True
+ if re.search('(?:my|our)\s+[$@%(]', text):
+ return 0.9
+
+
+class Perl6Lexer(ExtendedRegexLexer):
+ """
+ For `Perl 6 <http://www.perl6.org>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Perl6'
+ aliases = ['perl6', 'pl6']
+ filenames = ['*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6',
+ '*.6pm', '*.p6m', '*.pm6', '*.t']
+ mimetypes = ['text/x-perl6', 'application/x-perl6']
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ PERL6_IDENTIFIER_RANGE = "['\w:-]"
+
+ PERL6_KEYWORDS = (
+ 'BEGIN', 'CATCH', 'CHECK', 'CONTROL', 'END', 'ENTER', 'FIRST', 'INIT',
+ 'KEEP', 'LAST', 'LEAVE', 'NEXT', 'POST', 'PRE', 'START', 'TEMP',
+ 'UNDO', 'as', 'assoc', 'async', 'augment', 'binary', 'break', 'but',
+ 'cached', 'category', 'class', 'constant', 'contend', 'continue',
+ 'copy', 'deep', 'default', 'defequiv', 'defer', 'die', 'do', 'else',
+ 'elsif', 'enum', 'equiv', 'exit', 'export', 'fail', 'fatal', 'for',
+ 'gather', 'given', 'goto', 'grammar', 'handles', 'has', 'if', 'inline',
+ 'irs', 'is', 'last', 'leave', 'let', 'lift', 'loop', 'looser', 'macro',
+ 'make', 'maybe', 'method', 'module', 'multi', 'my', 'next', 'of',
+ 'ofs', 'only', 'oo', 'ors', 'our', 'package', 'parsed', 'prec',
+ 'proto', 'readonly', 'redo', 'ref', 'regex', 'reparsed', 'repeat',
+ 'require', 'required', 'return', 'returns', 'role', 'rule', 'rw',
+ 'self', 'slang', 'state', 'sub', 'submethod', 'subset', 'supersede',
+ 'take', 'temp', 'tighter', 'token', 'trusts', 'try', 'unary',
+ 'unless', 'until', 'use', 'warn', 'when', 'where', 'while', 'will',
+ )
+
+ PERL6_BUILTINS = (
+ 'ACCEPTS', 'HOW', 'REJECTS', 'VAR', 'WHAT', 'WHENCE', 'WHERE', 'WHICH',
+ 'WHO', 'abs', 'acos', 'acosec', 'acosech', 'acosh', 'acotan', 'acotanh',
+ 'all', 'any', 'approx', 'arity', 'asec', 'asech', 'asin', 'asinh',
+ 'assuming', 'atan', 'atan2', 'atanh', 'attr', 'bless', 'body', 'by',
+ 'bytes', 'caller', 'callsame', 'callwith', 'can', 'capitalize', 'cat',
+ 'ceiling', 'chars', 'chmod', 'chomp', 'chop', 'chr', 'chroot',
+ 'circumfix', 'cis', 'classify', 'clone', 'close', 'cmp_ok', 'codes',
+ 'comb', 'connect', 'contains', 'context', 'cos', 'cosec', 'cosech',
+ 'cosh', 'cotan', 'cotanh', 'count', 'defined', 'delete', 'diag',
+ 'dies_ok', 'does', 'e', 'each', 'eager', 'elems', 'end', 'eof', 'eval',
+ 'eval_dies_ok', 'eval_elsewhere', 'eval_lives_ok', 'evalfile', 'exists',
+ 'exp', 'first', 'flip', 'floor', 'flunk', 'flush', 'fmt', 'force_todo',
+ 'fork', 'from', 'getc', 'gethost', 'getlogin', 'getpeername', 'getpw',
+ 'gmtime', 'graphs', 'grep', 'hints', 'hyper', 'im', 'index', 'infix',
+ 'invert', 'is_approx', 'is_deeply', 'isa', 'isa_ok', 'isnt', 'iterator',
+ 'join', 'key', 'keys', 'kill', 'kv', 'lastcall', 'lazy', 'lc', 'lcfirst',
+ 'like', 'lines', 'link', 'lives_ok', 'localtime', 'log', 'log10', 'map',
+ 'max', 'min', 'minmax', 'name', 'new', 'nextsame', 'nextwith', 'nfc',
+ 'nfd', 'nfkc', 'nfkd', 'nok_error', 'nonce', 'none', 'normalize', 'not',
+ 'nothing', 'ok', 'once', 'one', 'open', 'opendir', 'operator', 'ord',
+ 'p5chomp', 'p5chop', 'pack', 'pair', 'pairs', 'pass', 'perl', 'pi',
+ 'pick', 'plan', 'plan_ok', 'polar', 'pop', 'pos', 'postcircumfix',
+ 'postfix', 'pred', 'prefix', 'print', 'printf', 'push', 'quasi',
+ 'quotemeta', 'rand', 're', 'read', 'readdir', 'readline', 'reduce',
+ 'reverse', 'rewind', 'rewinddir', 'rindex', 'roots', 'round',
+ 'roundrobin', 'run', 'runinstead', 'sameaccent', 'samecase', 'say',
+ 'sec', 'sech', 'sech', 'seek', 'shape', 'shift', 'sign', 'signature',
+ 'sin', 'sinh', 'skip', 'skip_rest', 'sleep', 'slurp', 'sort', 'splice',
+ 'split', 'sprintf', 'sqrt', 'srand', 'strand', 'subst', 'substr', 'succ',
+ 'sum', 'symlink', 'tan', 'tanh', 'throws_ok', 'time', 'times', 'to',
+ 'todo', 'trim', 'trim_end', 'trim_start', 'true', 'truncate', 'uc',
+ 'ucfirst', 'undef', 'undefine', 'uniq', 'unlike', 'unlink', 'unpack',
+ 'unpolar', 'unshift', 'unwrap', 'use_ok', 'value', 'values', 'vec',
+ 'version_lt', 'void', 'wait', 'want', 'wrap', 'write', 'zip',
+ )
+
+ PERL6_BUILTIN_CLASSES = (
+ 'Abstraction', 'Any', 'AnyChar', 'Array', 'Associative', 'Bag', 'Bit',
+ 'Blob', 'Block', 'Bool', 'Buf', 'Byte', 'Callable', 'Capture', 'Char', 'Class',
+ 'Code', 'Codepoint', 'Comparator', 'Complex', 'Decreasing', 'Exception',
+ 'Failure', 'False', 'Grammar', 'Grapheme', 'Hash', 'IO', 'Increasing',
+ 'Int', 'Junction', 'KeyBag', 'KeyExtractor', 'KeyHash', 'KeySet',
+ 'KitchenSink', 'List', 'Macro', 'Mapping', 'Match', 'Matcher', 'Method',
+ 'Module', 'Num', 'Object', 'Ordered', 'Ordering', 'OrderingPair',
+ 'Package', 'Pair', 'Positional', 'Proxy', 'Range', 'Rat', 'Regex',
+ 'Role', 'Routine', 'Scalar', 'Seq', 'Set', 'Signature', 'Str', 'StrLen',
+ 'StrPos', 'Sub', 'Submethod', 'True', 'UInt', 'Undef', 'Version', 'Void',
+ 'Whatever', 'bit', 'bool', 'buf', 'buf1', 'buf16', 'buf2', 'buf32',
+ 'buf4', 'buf64', 'buf8', 'complex', 'int', 'int1', 'int16', 'int2',
+ 'int32', 'int4', 'int64', 'int8', 'num', 'rat', 'rat1', 'rat16', 'rat2',
+ 'rat32', 'rat4', 'rat64', 'rat8', 'uint', 'uint1', 'uint16', 'uint2',
+ 'uint32', 'uint4', 'uint64', 'uint8', 'utf16', 'utf32', 'utf8',
+ )
+
+ PERL6_OPERATORS = (
+ 'X', 'Z', 'after', 'also', 'and', 'andthen', 'before', 'cmp', 'div',
+ 'eq', 'eqv', 'extra', 'ff', 'fff', 'ge', 'gt', 'le', 'leg', 'lt', 'm',
+ 'mm', 'mod', 'ne', 'or', 'orelse', 'rx', 's', 'tr', 'x', 'xor', 'xx',
+ '++', '--', '**', '!', '+', '-', '~', '?', '|', '||', '+^', '~^', '?^',
+ '^', '*', '/', '%', '%%', '+&', '+<', '+>', '~&', '~<', '~>', '?&',
+ 'gcd', 'lcm', '+', '-', '+|', '+^', '~|', '~^', '?|', '?^',
+ '~', '&', '^', 'but', 'does', '<=>', '..', '..^', '^..', '^..^',
+ '!=', '==', '<', '<=', '>', '>=', '~~', '===', '!eqv',
+ '&&', '||', '^^', '//', 'min', 'max', '??', '!!', 'ff', 'fff', 'so',
+ 'not', '<==', '==>', '<<==', '==>>',
+ )
+
+ # Perl 6 has a *lot* of possible bracketing characters
+ # this list was lifted from STD.pm6 (https://github.com/perl6/std)
+ PERL6_BRACKETS = {
+ u'\u0028': u'\u0029', u'\u003c': u'\u003e', u'\u005b': u'\u005d',
+ u'\u007b': u'\u007d', u'\u00ab': u'\u00bb', u'\u0f3a': u'\u0f3b',
+ u'\u0f3c': u'\u0f3d', u'\u169b': u'\u169c', u'\u2018': u'\u2019',
+ u'\u201a': u'\u2019', u'\u201b': u'\u2019', u'\u201c': u'\u201d',
+ u'\u201e': u'\u201d', u'\u201f': u'\u201d', u'\u2039': u'\u203a',
+ u'\u2045': u'\u2046', u'\u207d': u'\u207e', u'\u208d': u'\u208e',
+ u'\u2208': u'\u220b', u'\u2209': u'\u220c', u'\u220a': u'\u220d',
+ u'\u2215': u'\u29f5', u'\u223c': u'\u223d', u'\u2243': u'\u22cd',
+ u'\u2252': u'\u2253', u'\u2254': u'\u2255', u'\u2264': u'\u2265',
+ u'\u2266': u'\u2267', u'\u2268': u'\u2269', u'\u226a': u'\u226b',
+ u'\u226e': u'\u226f', u'\u2270': u'\u2271', u'\u2272': u'\u2273',
+ u'\u2274': u'\u2275', u'\u2276': u'\u2277', u'\u2278': u'\u2279',
+ u'\u227a': u'\u227b', u'\u227c': u'\u227d', u'\u227e': u'\u227f',
+ u'\u2280': u'\u2281', u'\u2282': u'\u2283', u'\u2284': u'\u2285',
+ u'\u2286': u'\u2287', u'\u2288': u'\u2289', u'\u228a': u'\u228b',
+ u'\u228f': u'\u2290', u'\u2291': u'\u2292', u'\u2298': u'\u29b8',
+ u'\u22a2': u'\u22a3', u'\u22a6': u'\u2ade', u'\u22a8': u'\u2ae4',
+ u'\u22a9': u'\u2ae3', u'\u22ab': u'\u2ae5', u'\u22b0': u'\u22b1',
+ u'\u22b2': u'\u22b3', u'\u22b4': u'\u22b5', u'\u22b6': u'\u22b7',
+ u'\u22c9': u'\u22ca', u'\u22cb': u'\u22cc', u'\u22d0': u'\u22d1',
+ u'\u22d6': u'\u22d7', u'\u22d8': u'\u22d9', u'\u22da': u'\u22db',
+ u'\u22dc': u'\u22dd', u'\u22de': u'\u22df', u'\u22e0': u'\u22e1',
+ u'\u22e2': u'\u22e3', u'\u22e4': u'\u22e5', u'\u22e6': u'\u22e7',
+ u'\u22e8': u'\u22e9', u'\u22ea': u'\u22eb', u'\u22ec': u'\u22ed',
+ u'\u22f0': u'\u22f1', u'\u22f2': u'\u22fa', u'\u22f3': u'\u22fb',
+ u'\u22f4': u'\u22fc', u'\u22f6': u'\u22fd', u'\u22f7': u'\u22fe',
+ u'\u2308': u'\u2309', u'\u230a': u'\u230b', u'\u2329': u'\u232a',
+ u'\u23b4': u'\u23b5', u'\u2768': u'\u2769', u'\u276a': u'\u276b',
+ u'\u276c': u'\u276d', u'\u276e': u'\u276f', u'\u2770': u'\u2771',
+ u'\u2772': u'\u2773', u'\u2774': u'\u2775', u'\u27c3': u'\u27c4',
+ u'\u27c5': u'\u27c6', u'\u27d5': u'\u27d6', u'\u27dd': u'\u27de',
+ u'\u27e2': u'\u27e3', u'\u27e4': u'\u27e5', u'\u27e6': u'\u27e7',
+ u'\u27e8': u'\u27e9', u'\u27ea': u'\u27eb', u'\u2983': u'\u2984',
+ u'\u2985': u'\u2986', u'\u2987': u'\u2988', u'\u2989': u'\u298a',
+ u'\u298b': u'\u298c', u'\u298d': u'\u298e', u'\u298f': u'\u2990',
+ u'\u2991': u'\u2992', u'\u2993': u'\u2994', u'\u2995': u'\u2996',
+ u'\u2997': u'\u2998', u'\u29c0': u'\u29c1', u'\u29c4': u'\u29c5',
+ u'\u29cf': u'\u29d0', u'\u29d1': u'\u29d2', u'\u29d4': u'\u29d5',
+ u'\u29d8': u'\u29d9', u'\u29da': u'\u29db', u'\u29f8': u'\u29f9',
+ u'\u29fc': u'\u29fd', u'\u2a2b': u'\u2a2c', u'\u2a2d': u'\u2a2e',
+ u'\u2a34': u'\u2a35', u'\u2a3c': u'\u2a3d', u'\u2a64': u'\u2a65',
+ u'\u2a79': u'\u2a7a', u'\u2a7d': u'\u2a7e', u'\u2a7f': u'\u2a80',
+ u'\u2a81': u'\u2a82', u'\u2a83': u'\u2a84', u'\u2a8b': u'\u2a8c',
+ u'\u2a91': u'\u2a92', u'\u2a93': u'\u2a94', u'\u2a95': u'\u2a96',
+ u'\u2a97': u'\u2a98', u'\u2a99': u'\u2a9a', u'\u2a9b': u'\u2a9c',
+ u'\u2aa1': u'\u2aa2', u'\u2aa6': u'\u2aa7', u'\u2aa8': u'\u2aa9',
+ u'\u2aaa': u'\u2aab', u'\u2aac': u'\u2aad', u'\u2aaf': u'\u2ab0',
+ u'\u2ab3': u'\u2ab4', u'\u2abb': u'\u2abc', u'\u2abd': u'\u2abe',
+ u'\u2abf': u'\u2ac0', u'\u2ac1': u'\u2ac2', u'\u2ac3': u'\u2ac4',
+ u'\u2ac5': u'\u2ac6', u'\u2acd': u'\u2ace', u'\u2acf': u'\u2ad0',
+ u'\u2ad1': u'\u2ad2', u'\u2ad3': u'\u2ad4', u'\u2ad5': u'\u2ad6',
+ u'\u2aec': u'\u2aed', u'\u2af7': u'\u2af8', u'\u2af9': u'\u2afa',
+ u'\u2e02': u'\u2e03', u'\u2e04': u'\u2e05', u'\u2e09': u'\u2e0a',
+ u'\u2e0c': u'\u2e0d', u'\u2e1c': u'\u2e1d', u'\u2e20': u'\u2e21',
+ u'\u3008': u'\u3009', u'\u300a': u'\u300b', u'\u300c': u'\u300d',
+ u'\u300e': u'\u300f', u'\u3010': u'\u3011', u'\u3014': u'\u3015',
+ u'\u3016': u'\u3017', u'\u3018': u'\u3019', u'\u301a': u'\u301b',
+ u'\u301d': u'\u301e', u'\ufd3e': u'\ufd3f', u'\ufe17': u'\ufe18',
+ u'\ufe35': u'\ufe36', u'\ufe37': u'\ufe38', u'\ufe39': u'\ufe3a',
+ u'\ufe3b': u'\ufe3c', u'\ufe3d': u'\ufe3e', u'\ufe3f': u'\ufe40',
+ u'\ufe41': u'\ufe42', u'\ufe43': u'\ufe44', u'\ufe47': u'\ufe48',
+ u'\ufe59': u'\ufe5a', u'\ufe5b': u'\ufe5c', u'\ufe5d': u'\ufe5e',
+ u'\uff08': u'\uff09', u'\uff1c': u'\uff1e', u'\uff3b': u'\uff3d',
+ u'\uff5b': u'\uff5d', u'\uff5f': u'\uff60', u'\uff62': u'\uff63',
+ }
+
+ def _build_word_match(words, boundary_regex_fragment=None, prefix='', suffix=''):
+ if boundary_regex_fragment is None:
+ return r'\b(' + prefix + r'|'.join(re.escape(x) for x in words) + \
+ suffix + r')\b'
+ else:
+ return r'(?<!' + boundary_regex_fragment + r')' + prefix + r'(' + \
+ r'|'.join(re.escape(x) for x in words) + r')' + suffix + r'(?!' + \
+ boundary_regex_fragment + r')'
+
+ def brackets_callback(token_class):
+ def callback(lexer, match, context):
+ groups = match.groupdict()
+ opening_chars = groups['delimiter']
+ n_chars = len(opening_chars)
+ adverbs = groups.get('adverbs')
+
+ closer = Perl6Lexer.PERL6_BRACKETS.get(opening_chars[0])
+ text = context.text
+
+ if closer is None: # it's not a mirrored character, which means we
+ # just need to look for the next occurrence
+
+ end_pos = text.find(opening_chars, match.start('delimiter') + n_chars)
+ else: # we need to look for the corresponding closing character,
+ # keep nesting in mind
+ closing_chars = closer * n_chars
+ nesting_level = 1
+
+ search_pos = match.start('delimiter')
+
+ while nesting_level > 0:
+ next_open_pos = text.find(opening_chars, search_pos + n_chars)
+ next_close_pos = text.find(closing_chars, search_pos + n_chars)
+
+ if next_close_pos == -1:
+ next_close_pos = len(text)
+ nesting_level = 0
+ elif next_open_pos != -1 and next_open_pos < next_close_pos:
+ nesting_level += 1
+ search_pos = next_open_pos
+ else: # next_close_pos < next_open_pos
+ nesting_level -= 1
+ search_pos = next_close_pos
+
+ end_pos = next_close_pos
+
+ if end_pos < 0: # if we didn't find a closer, just highlight the
+ # rest of the text in this class
+ end_pos = len(text)
+
+ if adverbs is not None and re.search(r':to\b', adverbs):
+ heredoc_terminator = text[match.start('delimiter') + n_chars:end_pos]
+ end_heredoc = re.search(r'^\s*' + re.escape(heredoc_terminator) +
+ r'\s*$', text[end_pos:], re.MULTILINE)
+
+ if end_heredoc:
+ end_pos += end_heredoc.end()
+ else:
+ end_pos = len(text)
+
+ yield match.start(), token_class, text[match.start():end_pos + n_chars]
+ context.pos = end_pos + n_chars
+
+ return callback
+
+ def opening_brace_callback(lexer, match, context):
+ stack = context.stack
+
+ yield match.start(), Text, context.text[match.start():match.end()]
+ context.pos = match.end()
+
+ # if we encounter an opening brace and we're one level
+ # below a token state, it means we need to increment
+ # the nesting level for braces so we know later when
+ # we should return to the token rules.
+ if len(stack) > 2 and stack[-2] == 'token':
+ context.perl6_token_nesting_level += 1
+
+ def closing_brace_callback(lexer, match, context):
+ stack = context.stack
+
+ yield match.start(), Text, context.text[match.start():match.end()]
+ context.pos = match.end()
+
+ # if we encounter a free closing brace and we're one level
+ # below a token state, it means we need to check the nesting
+ # level to see if we need to return to the token state.
+ if len(stack) > 2 and stack[-2] == 'token':
+ context.perl6_token_nesting_level -= 1
+ if context.perl6_token_nesting_level == 0:
+ stack.pop()
+
+ def embedded_perl6_callback(lexer, match, context):
+ context.perl6_token_nesting_level = 1
+ yield match.start(), Text, context.text[match.start():match.end()]
+ context.pos = match.end()
+ context.stack.append('root')
+
+ # If you're modifying these rules, be careful if you need to process '{' or '}'
+ # characters. We have special logic for processing these characters (due to the fact
+ # that you can nest Perl 6 code in regex blocks), so if you need to process one of
+ # them, make sure you also process the corresponding one!
+ tokens = {
+ 'common': [
+ (r'#[`|=](?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + r'])(?P=first_char)*)',
+ brackets_callback(Comment.Multiline)),
+ (r'#[^\n]*$', Comment.Singleline),
+ (r'^(\s*)=begin\s+(\w+)\b.*?^\1=end\s+\2', Comment.Multiline),
+ (r'^(\s*)=for.*?\n\s*?\n', Comment.Multiline),
+ (r'^=.*?\n\s*?\n', Comment.Multiline),
+ (r'(regex|token|rule)(\s*' + PERL6_IDENTIFIER_RANGE + '+:sym)',
+ bygroups(Keyword, Name), 'token-sym-brackets'),
+ (r'(regex|token|rule)(?!' + PERL6_IDENTIFIER_RANGE + ')(\s*' + PERL6_IDENTIFIER_RANGE + '+)?',
+ bygroups(Keyword, Name), 'pre-token'),
+ # deal with a special case in the Perl 6 grammar (role q { ... })
+ (r'(role)(\s+)(q)(\s*)', bygroups(Keyword, Text, Name, Text)),
+ (_build_word_match(PERL6_KEYWORDS, PERL6_IDENTIFIER_RANGE), Keyword),
+ (_build_word_match(PERL6_BUILTIN_CLASSES, PERL6_IDENTIFIER_RANGE, suffix='(?::[UD])?'),
+ Name.Builtin),
+ (_build_word_match(PERL6_BUILTINS, PERL6_IDENTIFIER_RANGE), Name.Builtin),
+ # copied from PerlLexer
+ (r'[$@%&][.^:?=!~]?' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*',
+ Name.Variable),
+ (r'\$[!/](?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global),
+ (r'::\?\w+', Name.Variable.Global),
+ (r'[$@%&]\*' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*',
+ Name.Variable.Global),
+ (r'\$(?:<.*?>)+', Name.Variable),
+ (r'(?:q|qq|Q)[a-zA-Z]?\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^0-9a-zA-Z:\s])'
+ r'(?P=first_char)*)', brackets_callback(String)),
+ # copied from PerlLexer
+ (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
+ (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
+ (r'0b[01]+(_[01]+)*', Number.Bin),
+ (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
+ Number.Float),
+ (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
+ (r'\d+(_\d+)*', Number.Integer),
+ (r'(?<=~~)\s*/(?:\\\\|\\/|.)*?/', String.Regex),
+ (r'(?<=[=(,])\s*/(?:\\\\|\\/|.)*?/', String.Regex),
+ (r'm\w+(?=\()', Name),
+ (r'(?:m|ms|rx)\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^\w:\s])'
+ r'(?P=first_char)*)', brackets_callback(String.Regex)),
+ (r'(?:s|ss|tr)\s*(?::[\w\s:]+)?\s*/(?:\\\\|\\/|.)*?/(?:\\\\|\\/|.)*?/',
+ String.Regex),
+ (r'<[^\s=].*?\S>', String),
+ (_build_word_match(PERL6_OPERATORS), Operator),
+ (r'\w' + PERL6_IDENTIFIER_RANGE + '*', Name),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ ],
+ 'root': [
+ include('common'),
+ (r'\{', opening_brace_callback),
+ (r'\}', closing_brace_callback),
+ (r'.+?', Text),
+ ],
+ 'pre-token': [
+ include('common'),
+ (r'\{', Text, ('#pop', 'token')),
+ (r'.+?', Text),
+ ],
+ 'token-sym-brackets': [
+ (r'(?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + '])(?P=first_char)*)',
+ brackets_callback(Name), ('#pop', 'pre-token')),
+ default(('#pop', 'pre-token')),
+ ],
+ 'token': [
+ (r'\}', Text, '#pop'),
+ (r'(?<=:)(?:my|our|state|constant|temp|let).*?;', using(this)),
+ # make sure that quotes in character classes aren't treated as strings
+ (r'<(?:[-!?+.]\s*)?\[.*?\]>', String.Regex),
+ # make sure that '#' characters in quotes aren't treated as comments
+ (r"(?<!\\)'(\\\\|\\[^\\]|[^'\\])*'", String.Regex),
+ (r'(?<!\\)"(\\\\|\\[^\\]|[^"\\])*"', String.Regex),
+ (r'#.*?$', Comment.Singleline),
+ (r'\{', embedded_perl6_callback),
+ ('.+?', String.Regex),
+ ],
+ }
+
+ def analyse_text(text):
+ def strip_pod(lines):
+ in_pod = False
+ stripped_lines = []
+
+ for line in lines:
+ if re.match(r'^=(?:end|cut)', line):
+ in_pod = False
+ elif re.match(r'^=\w+', line):
+ in_pod = True
+ elif not in_pod:
+ stripped_lines.append(line)
+
+ return stripped_lines
+
+ # XXX handle block comments
+ lines = text.splitlines()
+ lines = strip_pod(lines)
+ text = '\n'.join(lines)
+
+ if shebang_matches(text, r'perl6|rakudo|niecza|pugs'):
+ return True
+
+ saw_perl_decl = False
+ rating = 0.0
+
+ # check for my/our/has declarations
+ if re.search("(?:my|our|has)\s+(?:" + Perl6Lexer.PERL6_IDENTIFIER_RANGE +
+ "+\s+)?[$@%&(]", text):
+ rating = 0.8
+ saw_perl_decl = True
+
+ for line in lines:
+ line = re.sub('#.*', '', line)
+ if re.match('^\s*$', line):
+ continue
+
+ # match v6; use v6; use v6.0; use v6.0.0;
+ if re.match('^\s*(?:use\s+)?v6(?:\.\d(?:\.\d)?)?;', line):
+ return True
+ # match class, module, role, enum, grammar declarations
+ class_decl = re.match('^\s*(?:(?P<scope>my|our)\s+)?(?:module|class|role|enum|grammar)', line)
+ if class_decl:
+ if saw_perl_decl or class_decl.group('scope') is not None:
+ return True
+ rating = 0.05
+ continue
+ break
+
+ return rating
+
+ def __init__(self, **options):
+ super(Perl6Lexer, self).__init__(**options)
+ self.encoding = options.get('encoding', 'utf-8')
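
(Illustrative only, not part of the patch: a minimal tokenization sketch for the Perl 6 lexer above, assuming this Pygments tree is importable.)

    from pygments.lexers import Perl6Lexer

    # Dump the token stream for a small Perl 6 snippet; the brace-tracking
    # callbacks above decide when a '{' inside a token body starts embedded
    # Perl 6 code and when the matching '}' returns to the regex rules.
    code = 'use v6;\ntoken word { \\w+ }\nsay "hi";\n'
    for tok_type, value in Perl6Lexer().get_tokens(code):
        print('%s %r' % (tok_type, value))
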
diff --git a/pygments/lexers/php.py b/pygments/lexers/php.py
new file mode 100644
index 00000000..75b662cb
--- /dev/null
+++ b/pygments/lexers/php.py
@@ -0,0 +1,245 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.php
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for PHP and related languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default, using, this
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Other
+from pygments.util import get_bool_opt, get_list_opt, iteritems
+
+__all__ = ['ZephirLexer', 'PhpLexer']
+
+
+class ZephirLexer(RegexLexer):
+ """
+ For `Zephir language <http://zephir-lang.com/>`_ source code.
+
+ Zephir is a compiled high-level language aimed
+ at creating C extensions for PHP.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Zephir'
+ aliases = ['zephir']
+ filenames = ['*.zep']
+
+ zephir_keywords = ['fetch', 'echo', 'isset', 'empty']
+ zephir_type = ['bit', 'bits', 'string']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|->|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|loop|'
+ r'require|inline|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
+ r'namespace|use|extends|this|fetch|isset|unset|echo|fetch|likely|unlikely|'
+ r'empty)\b', Keyword, 'slashstartsregex'),
+ (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(abstract|boolean|bool|char|class|const|double|enum|export|extends|final|'
+ r'native|goto|implements|import|int|string|interface|long|ulong|char|uchar|'
+ r'float|unsigned|private|protected|public|short|static|self|throws|reverse|'
+ r'transient|volatile)\b', Keyword.Reserved),
+ (r'(true|false|null|undefined)\b', Keyword.Constant),
+ (r'(Array|Boolean|Date|_REQUEST|_COOKIE|_SESSION|'
+ r'_GET|_POST|_SERVER|this|stdClass|range|count|iterator|'
+ r'window)\b', Name.Builtin),
+ (r'[$a-zA-Z_][\w\\]*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
+
+
+class PhpLexer(RegexLexer):
+ """
+ For `PHP <http://www.php.net/>`_ source code.
+ For PHP embedded in HTML, use the `HtmlPhpLexer`.
+
+ Additional options accepted:
+
+ `startinline`
+ If given and ``True``, the lexer starts highlighting with
+ PHP code (i.e. no opening ``<?php`` tag is required). The default
+ is ``False``.
+ `funcnamehighlighting`
+ If given and ``True``, highlight builtin function names
+ (default: ``True``).
+ `disabledmodules`
+ If given, must be a list of module names whose function names
+ should not be highlighted. By default all modules are highlighted
+ except the special ``'unknown'`` module, which contains functions
+ that are known to PHP but are undocumented.
+
+ To get a list of allowed modules have a look into the
+ `_php_builtins` module:
+
+ .. sourcecode:: pycon
+
+ >>> from pygments.lexers._php_builtins import MODULES
+ >>> MODULES.keys()
+ ['PHP Options/Info', 'Zip', 'dba', ...]
+
+ The names of these modules match the module names used in
+ the PHP documentation.
+ """
+
+ name = 'PHP'
+ aliases = ['php', 'php3', 'php4', 'php5']
+ filenames = ['*.php', '*.php[345]', '*.inc']
+ mimetypes = ['text/x-php']
+
+ # Note that a backslash is included in the following two patterns
+ # PHP uses a backslash as a namespace separator
+ _ident_char = r'[\\\w]|[^\x00-\x7f]'
+ _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
+ _ident_end = r'(?:' + _ident_char + ')*'
+ _ident_inner = _ident_begin + _ident_end
+
+ flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
+ tokens = {
+ 'root': [
+ (r'<\?(php)?', Comment.Preproc, 'php'),
+ (r'[^<]+', Other),
+ (r'<', Other)
+ ],
+ 'php': [
+ (r'\?>', Comment.Preproc, '#pop'),
+ (r'<<<([\'"]?)(' + _ident_inner + r')\1\n.*?\n\s*\2;?\n', String),
+ (r'\s+', Text),
+ (r'#.*?\n', Comment.Single),
+ (r'//.*?\n', Comment.Single),
+ # put the empty comment here; otherwise it is seen as
+ # the start of a docstring
+ (r'/\*\*/', Comment.Multiline),
+ (r'/\*\*.*?\*/', String.Doc),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'(->|::)(\s*)(' + _ident_inner + ')',
+ bygroups(Operator, Text, Name.Attribute)),
+ (r'[~!%^&*+=|:.<>/@-]+', Operator),
+ (r'\?', Operator), # don't add to the charclass above!
+ (r'[\[\]{}();,]+', Punctuation),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'(function)(\s*)(?=\()', bygroups(Keyword, Text)),
+ (r'(function)(\s+)(&?)(\s*)',
+ bygroups(Keyword, Text, Operator, Text), 'functionname'),
+ (r'(const)(\s+)(' + _ident_inner + ')',
+ bygroups(Keyword, Text, Name.Constant)),
+ (r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
+ r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
+ r'FALSE|print|for|require|continue|foreach|require_once|'
+ r'declare|return|default|static|do|switch|die|stdClass|'
+ r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
+ r'virtual|endfor|include_once|while|endforeach|global|__FILE__|'
+ r'endif|list|__LINE__|endswitch|new|__sleep|endwhile|not|'
+ r'array|__wakeup|E_ALL|NULL|final|php_user_filter|interface|'
+ r'implements|public|private|protected|abstract|clone|try|'
+ r'catch|throw|this|use|namespace|trait|yield|'
+ r'finally)\b', Keyword),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'\$\{\$+' + _ident_inner + '\}', Name.Variable),
+ (r'\$+' + _ident_inner, Name.Variable),
+ (_ident_inner, Name.Other),
+ (r'(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?', Number.Float),
+ (r'\d+e[+-]?[0-9]+', Number.Float),
+ (r'0[0-7]+', Number.Oct),
+ (r'0x[a-f0-9]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'0b[01]+', Number.Bin),
+ (r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
+ (r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
+ (r'"', String.Double, 'string'),
+ ],
+ 'classname': [
+ (_ident_inner, Name.Class, '#pop')
+ ],
+ 'functionname': [
+ (_ident_inner, Name.Function, '#pop')
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'[^{$"\\]+', String.Double),
+ (r'\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})', String.Escape),
+ (r'\$' + _ident_inner + '(\[\S+?\]|->' + _ident_inner + ')?',
+ String.Interpol),
+ (r'(\{\$\{)(.*?)(\}\})',
+ bygroups(String.Interpol, using(this, _startinline=True),
+ String.Interpol)),
+ (r'(\{)(\$.*?)(\})',
+ bygroups(String.Interpol, using(this, _startinline=True),
+ String.Interpol)),
+ (r'(\$\{)(\S+)(\})',
+ bygroups(String.Interpol, Name.Variable, String.Interpol)),
+ (r'[${\\]+', String.Double)
+ ],
+ }
+
+ def __init__(self, **options):
+ self.funcnamehighlighting = get_bool_opt(
+ options, 'funcnamehighlighting', True)
+ self.disabledmodules = get_list_opt(
+ options, 'disabledmodules', ['unknown'])
+ self.startinline = get_bool_opt(options, 'startinline', False)
+
+ # private option argument for the lexer itself
+ if '_startinline' in options:
+ self.startinline = options.pop('_startinline')
+
+ # collect activated functions in a set
+ self._functions = set()
+ if self.funcnamehighlighting:
+ from pygments.lexers._php_builtins import MODULES
+ for key, value in iteritems(MODULES):
+ if key not in self.disabledmodules:
+ self._functions.update(value)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ if self.startinline:
+ stack.append('php')
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if token is Name.Other:
+ if value in self._functions:
+ yield index, Name.Builtin, value
+ continue
+ yield index, token, value
+
+ def analyse_text(text):
+ rv = 0.0
+ if re.search(r'<\?(?!xml)', text):
+ rv += 0.3
+ return rv
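
(Illustrative only, not part of the patch: a usage sketch for the PhpLexer options documented above, assuming this Pygments tree is on the path.)

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import PhpLexer

    # startinline=True highlights the snippet as PHP without a leading <?php tag;
    # funcnamehighlighting defaults to True, so known builtins such as strtoupper
    # come out as Name.Builtin tokens.
    code = '$greeting = strtoupper("hello");\necho $greeting;\n'
    print(highlight(code, PhpLexer(startinline=True), HtmlFormatter()))
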
diff --git a/pygments/lexers/prolog.py b/pygments/lexers/prolog.py
new file mode 100644
index 00000000..2b1c7634
--- /dev/null
+++ b/pygments/lexers/prolog.py
@@ -0,0 +1,306 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.prolog
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Prolog and Prolog-like languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['PrologLexer', 'LogtalkLexer']
+
+
+class PrologLexer(RegexLexer):
+ """
+ Lexer for Prolog files.
+ """
+ name = 'Prolog'
+ aliases = ['prolog']
+ filenames = ['*.ecl', '*.prolog', '*.pro', '*.pl']
+ mimetypes = ['text/x-prolog']
+
+ flags = re.UNICODE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'^#.*', Comment.Single),
+ (r'/\*', Comment.Multiline, 'nested-comment'),
+ (r'%.*', Comment.Single),
+ # character literal
+ (r'0\'.', String.Char),
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ # literal with prepended base
+ (r'\d\d?\'[a-zA-Z0-9]+', Number.Integer),
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'[\[\](){}|.,;!]', Punctuation),
+ (r':-|-->', Punctuation),
+ (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
+ r'\\[0-7]+\\|\\["\nabcefnrstv]|[^\\"])*"', String.Double),
+ (r"'(?:''|[^'])*'", String.Atom), # quoted atom
+ # Must not be followed by an atom.
+ # (r'=(?=\s|[a-zA-Z\[])', Operator),
+ (r'is\b', Operator),
+ (r'(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])',
+ Operator),
+ (r'(mod|div|not)\b', Operator),
+ (r'_', Keyword), # The don't-care variable
+ (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)),
+ (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+ u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
+ u'(\\s*)(:-|-->)',
+ bygroups(Name.Function, Text, Operator)), # function defn
+ (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+ u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
+ u'(\\s*)(\\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (u'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+ u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*',
+ String.Atom), # atom, characters
+ # This one includes !
+ (u'[#&*+\\-./:<=>?@\\\\^~\u00a1-\u00bf\u2010-\u303f]+',
+ String.Atom), # atom, graphics
+ (r'[A-Z_]\w*', Name.Variable),
+ (u'\\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text),
+ ],
+ 'nested-comment': [
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'[^*/]+', Comment.Multiline),
+ (r'[*/]', Comment.Multiline),
+ ],
+ }
+
+ def analyse_text(text):
+ return ':-' in text
+
+
+class LogtalkLexer(RegexLexer):
+ """
+ For `Logtalk <http://logtalk.org/>`_ source code.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'Logtalk'
+ aliases = ['logtalk']
+ filenames = ['*.lgt', '*.logtalk']
+ mimetypes = ['text/x-logtalk']
+
+ tokens = {
+ 'root': [
+ # Directives
+ (r'^\s*:-\s', Punctuation, 'directive'),
+ # Comments
+ (r'%.*?\n', Comment),
+ (r'/\*(.|\n)*?\*/', Comment),
+ # Whitespace
+ (r'\n', Text),
+ (r'\s+', Text),
+ # Numbers
+ (r"0'.", Number),
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
+ # Variables
+ (r'([A-Z_]\w*)', Name.Variable),
+ # Event handlers
+ (r'(after|before)(?=[(])', Keyword),
+ # Message forwarding handler
+ (r'forward(?=[(])', Keyword),
+ # Execution-context methods
+ (r'(parameter|this|se(lf|nder))(?=[(])', Keyword),
+ # Reflection
+ (r'(current_predicate|predicate_property)(?=[(])', Keyword),
+ # DCGs and term expansion
+ (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])', Keyword),
+ # Entity
+ (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword),
+ (r'(object|protocol|category)_property(?=[(])', Keyword),
+ # Entity relations
+ (r'co(mplements_object|nforms_to_protocol)(?=[(])', Keyword),
+ (r'extends_(object|protocol|category)(?=[(])', Keyword),
+ (r'imp(lements_protocol|orts_category)(?=[(])', Keyword),
+ (r'(instantiat|specializ)es_class(?=[(])', Keyword),
+ # Events
+ (r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
+ # Flags
+ (r'(current|set)_logtalk_flag(?=[(])', Keyword),
+ # Compiling, loading, and library paths
+ (r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make)(?=[(])', Keyword),
+ (r'\blogtalk_make\b', Keyword),
+ # Database
+ (r'(clause|retract(all)?)(?=[(])', Keyword),
+ (r'a(bolish|ssert(a|z))(?=[(])', Keyword),
+ # Control constructs
+ (r'(ca(ll|tch)|throw)(?=[(])', Keyword),
+ (r'(fa(il|lse)|true)\b', Keyword),
+ # All solutions
+ (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
+ # Multi-threading meta-predicates
+ (r'threaded(_(call|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
+ # Term unification
+ (r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword),
+ # Term creation and decomposition
+ (r'(functor|arg|copy_term|numbervars|term_variables)(?=[(])', Keyword),
+ # Evaluable functors
+ (r'(rem|m(ax|in|od)|abs|sign)(?=[(])', Keyword),
+ (r'float(_(integer|fractional)_part)?(?=[(])', Keyword),
+ (r'(floor|truncate|round|ceiling)(?=[(])', Keyword),
+ # Other arithmetic functors
+ (r'(cos|a(cos|sin|tan)|exp|log|s(in|qrt))(?=[(])', Keyword),
+ # Term testing
+ (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|'
+ r'ground|acyclic_term)(?=[(])', Keyword),
+ # Term comparison
+ (r'compare(?=[(])', Keyword),
+ # Stream selection and control
+ (r'(curren|se)t_(in|out)put(?=[(])', Keyword),
+ (r'(open|close)(?=[(])', Keyword),
+ (r'flush_output(?=[(])', Keyword),
+ (r'(at_end_of_stream|flush_output)\b', Keyword),
+ (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword),
+ # Character and byte input/output
+ (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
+ (r'\bnl\b', Keyword),
+ # Term input/output
+ (r'read(_term)?(?=[(])', Keyword),
+ (r'write(q|_(canonical|term))?(?=[(])', Keyword),
+ (r'(current_)?op(?=[(])', Keyword),
+ (r'(current_)?char_conversion(?=[(])', Keyword),
+ # Atomic term processing
+ (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword),
+ (r'(char_code|sub_atom)(?=[(])', Keyword),
+ (r'number_c(har|ode)s(?=[(])', Keyword),
+ # Implementation-defined hook functions
+ (r'(se|curren)t_prolog_flag(?=[(])', Keyword),
+ (r'\bhalt\b', Keyword),
+ (r'halt(?=[(])', Keyword),
+ # Message sending operators
+ (r'(::|:|\^\^)', Operator),
+ # External call
+ (r'[{}]', Keyword),
+ # Logic and control
+ (r'(ignore|once)(?=[(])', Keyword),
+ (r'\brepeat\b', Keyword),
+ # Sorting
+ (r'(key)?sort(?=[(])', Keyword),
+ # Bitwise functors
+ (r'(>>|<<|/\\|\\\\|\\)', Operator),
+ # Predicate aliases
+ (r'\bas\b', Operator),
+ # Arithmetic evaluation
+ (r'\bis\b', Keyword),
+ # Arithmetic comparison
+ (r'(=:=|=\\=|<|=<|>=|>)', Operator),
+ # Term creation and decomposition
+ (r'=\.\.', Operator),
+ # Term unification
+ (r'(=|\\=)', Operator),
+ # Term comparison
+ (r'(==|\\==|@=<|@<|@>=|@>)', Operator),
+ # Evaluable functors
+ (r'(//|[-+*/])', Operator),
+ (r'\b(e|pi|mod|rem)\b', Operator),
+ # Other arithmetic functors
+ (r'\b\*\*\b', Operator),
+ # DCG rules
+ (r'-->', Operator),
+ # Control constructs
+ (r'([!;]|->)', Operator),
+ # Logic and control
+ (r'\\+', Operator),
+ # Mode operators
+ (r'[?@]', Operator),
+ # Existential quantifier
+ (r'\^', Operator),
+ # Strings
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # Punctuation
+ (r'[()\[\],.|]', Text),
+ # Atoms
+ (r"[a-z]\w*", Text),
+ (r"'", String, 'quoted_atom'),
+ ],
+
+ 'quoted_atom': [
+ (r"''", String),
+ (r"'", String, '#pop'),
+ (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
+ (r"[^\\'\n]+", String),
+ (r'\\', String),
+ ],
+
+ 'directive': [
+ # Conditional compilation directives
+ (r'(el)?if(?=[(])', Keyword, 'root'),
+ (r'(e(lse|ndif))[.]', Keyword, 'root'),
+ # Entity directives
+ (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
+ (r'(end_(category|object|protocol))[.]', Keyword, 'root'),
+ # Predicate scope directives
+ (r'(public|protected|private)(?=[(])', Keyword, 'root'),
+ # Other directives
+ (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
+ (r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'),
+ (r'(built_in|dynamic|synchronized|threaded)[.]', Keyword, 'root'),
+ (r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|'
+ r's(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'),
+ (r'op(?=[(])', Keyword, 'root'),
+ (r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'),
+ (r'[a-z]\w*(?=[(])', Text, 'root'),
+ (r'[a-z]\w*[.]', Text, 'root'),
+ ],
+
+ 'entityrelations': [
+ (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword),
+ # Numbers
+ (r"0'.", Number),
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
+ # Variables
+ (r'([A-Z_]\w*)', Name.Variable),
+ # Atoms
+ (r"[a-z]\w*", Text),
+ (r"'", String, 'quoted_atom'),
+ # Strings
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # End of entity-opening directive
+ (r'([)]\.)', Text, 'root'),
+ # Scope operator
+ (r'(::)', Operator),
+ # Punctuation
+ (r'[()\[\],.|]', Text),
+ # Comments
+ (r'%.*?\n', Comment),
+ (r'/\*(.|\n)*?\*/', Comment),
+ # Whitespace
+ (r'\n', Text),
+ (r'\s+', Text),
+ ]
+ }
+
+ def analyse_text(text):
+ if ':- object(' in text:
+ return 1.0
+ elif ':- protocol(' in text:
+ return 1.0
+ elif ':- category(' in text:
+ return 1.0
+ elif re.search('^:-\s[a-z]', text, re.M):
+ return 0.9
+ else:
+ return 0.0
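
(Illustrative only, not part of the patch: a sketch of how the Logtalk lexer above handles an entity-opening directive, assuming this Pygments tree is importable.)

    from pygments.lexers import get_lexer_by_name

    # ':- object(' makes LogtalkLexer.analyse_text return 1.0 (see above),
    # the highest possible rating for lexer guessing.
    lexer = get_lexer_by_name('logtalk')
    for tok_type, value in lexer.get_tokens(':- object(hello).\n:- end_object.\n'):
        print('%s %r' % (tok_type, value))
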
diff --git a/pygments/lexers/python.py b/pygments/lexers/python.py
new file mode 100644
index 00000000..ea97b855
--- /dev/null
+++ b/pygments/lexers/python.py
@@ -0,0 +1,848 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.python
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Python and related languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
+ default, words, combined, do_insertions
+from pygments.util import get_bool_opt, shebang_matches
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Other, Error
+from pygments import unistring as uni
+
+__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
+ 'Python3Lexer', 'Python3TracebackLexer', 'CythonLexer',
+ 'DgLexer', 'NumPyLexer']
+
+line_re = re.compile('.*?\n')
+
+
+class PythonLexer(RegexLexer):
+ """
+ For `Python <http://www.python.org>`_ source code.
+ """
+
+ name = 'Python'
+ aliases = ['python', 'py', 'sage']
+ filenames = ['*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage']
+ mimetypes = ['text/x-python', 'application/x-python']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
+ (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
+ (r'[^\S\n]+', Text),
+ (r'\A#!.+$', Comment.Hashbang),
+ (r'#.*$', Comment.Single),
+ (r'[]{}:(),;[]', Punctuation),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
+ include('keywords'),
+ (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
+ (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
+ (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+ 'fromimport'),
+ (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+ 'import'),
+ include('builtins'),
+ include('backtick'),
+ ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
+ ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
+ ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
+ ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
+ ('[uU]?"""', String, combined('stringescape', 'tdqs')),
+ ("[uU]?'''", String, combined('stringescape', 'tsqs')),
+ ('[uU]?"', String, combined('stringescape', 'dqs')),
+ ("[uU]?'", String, combined('stringescape', 'sqs')),
+ include('name'),
+ include('numbers'),
+ ],
+ 'keywords': [
+ (words((
+ 'assert', 'break', 'continue', 'del', 'elif', 'else', 'except',
+ 'exec', 'finally', 'for', 'global', 'if', 'lambda', 'pass',
+ 'print', 'raise', 'return', 'try', 'while', 'yield',
+ 'yield from', 'as', 'with'), suffix=r'\b'),
+ Keyword),
+ ],
+ 'builtins': [
+ (words((
+ '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',
+ 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', 'classmethod',
+ 'cmp', 'coerce', 'compile', 'complex', 'delattr', 'dict', 'dir', 'divmod',
+ 'enumerate', 'eval', 'execfile', 'exit', 'file', 'filter', 'float',
+ 'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id',
+ 'input', 'int', 'intern', 'isinstance', 'issubclass', 'iter', 'len',
+ 'list', 'locals', 'long', 'map', 'max', 'min', 'next', 'object',
+ 'oct', 'open', 'ord', 'pow', 'property', 'range', 'raw_input', 'reduce',
+ 'reload', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice',
+ 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type',
+ 'unichr', 'unicode', 'vars', 'xrange', 'zip'),
+ prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Builtin),
+ (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True'
+ r')\b', Name.Builtin.Pseudo),
+ (words((
+ 'ArithmeticError', 'AssertionError', 'AttributeError',
+ 'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',
+ 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
+ 'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
+ 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
+ 'MemoryError', 'NameError', 'NotImplemented', 'NotImplementedError',
+ 'OSError', 'OverflowError', 'OverflowWarning', 'PendingDeprecationWarning',
+ 'ReferenceError', 'RuntimeError', 'RuntimeWarning', 'StandardError',
+ 'StopIteration', 'SyntaxError', 'SyntaxWarning', 'SystemError',
+ 'SystemExit', 'TabError', 'TypeError', 'UnboundLocalError',
+ 'UnicodeDecodeError', 'UnicodeEncodeError', 'UnicodeError',
+ 'UnicodeTranslateError', 'UnicodeWarning', 'UserWarning',
+ 'ValueError', 'VMSError', 'Warning', 'WindowsError',
+ 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Exception),
+ ],
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
+ (r'0[0-7]+j?', Number.Oct),
+ (r'0[bB][01]+', Number.Bin),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+j?', Number.Integer)
+ ],
+ 'backtick': [
+ ('`.*?`', String.Backtick),
+ ],
+ 'name': [
+ (r'@[\w.]+', Name.Decorator),
+ ('[a-zA-Z_]\w*', Name),
+ ],
+ 'funcname': [
+ ('[a-zA-Z_]\w*', Name.Function, '#pop')
+ ],
+ 'classname': [
+ ('[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'(?:[ \t]|\\\n)+', Text),
+ (r'as\b', Keyword.Namespace),
+ (r',', Operator),
+ (r'[a-zA-Z_][\w.]*', Name.Namespace),
+ default('#pop') # all else: go back
+ ],
+ 'fromimport': [
+ (r'(?:[ \t]|\\\n)+', Text),
+ (r'import\b', Keyword.Namespace, '#pop'),
+ # if None occurs here, it's "raise x from None", since None can
+ # never be a module name
+ (r'None\b', Name.Builtin.Pseudo, '#pop'),
+ # sadly, in "raise x from y" y will be highlighted as namespace too
+ (r'[a-zA-Z_.][\w.]*', Name.Namespace),
+ # anything else here also means "raise x from y" and is therefore
+ # not an error
+ default('#pop'),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'strings': [
+ # the old style '%s' % (...) string formatting
+ (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
+ # backslashes, quotes and formatting signs must be parsed one at a time
+ (r'[^\\\'"%\n]+', String),
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'%', String)
+ # newlines are an error (use "nl" state)
+ ],
+ 'nl': [
+ (r'\n', String)
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ }
+
+ def analyse_text(text):
+ return shebang_matches(text, r'pythonw?(2(\.\d)?)?') or \
+ 'import ' in text[:1000]
+
+
+class Python3Lexer(RegexLexer):
+ """
+ For `Python <http://www.python.org>`_ source code (version 3.0).
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'Python 3'
+ aliases = ['python3', 'py3']
+ filenames = [] # Nothing until Python 3 gets widespread
+ mimetypes = ['text/x-python3', 'application/x-python3']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
+
+ tokens = PythonLexer.tokens.copy()
+ tokens['keywords'] = [
+ (words((
+ 'assert', 'async', 'await', 'break', 'continue', 'del', 'elif',
+ 'else', 'except', 'finally', 'for', 'global', 'if', 'lambda', 'pass',
+ 'raise', 'nonlocal', 'return', 'try', 'while', 'yield', 'yield from',
+ 'as', 'with'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'True', 'False', 'None'), suffix=r'\b'),
+ Keyword.Constant),
+ ]
+ tokens['builtins'] = [
+ (words((
+ '__import__', 'abs', 'all', 'any', 'bin', 'bool', 'bytearray', 'bytes',
+ 'chr', 'classmethod', 'cmp', 'compile', 'complex', 'delattr', 'dict',
+ 'dir', 'divmod', 'enumerate', 'eval', 'filter', 'float', 'format',
+ 'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id',
+ 'input', 'int', 'isinstance', 'issubclass', 'iter', 'len', 'list',
+ 'locals', 'map', 'max', 'memoryview', 'min', 'next', 'object', 'oct',
+ 'open', 'ord', 'pow', 'print', 'property', 'range', 'repr', 'reversed',
+ 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod', 'str',
+ 'sum', 'super', 'tuple', 'type', 'vars', 'zip'), prefix=r'(?<!\.)',
+ suffix=r'\b'),
+ Name.Builtin),
+ (r'(?<!\.)(self|Ellipsis|NotImplemented)\b', Name.Builtin.Pseudo),
+ (words((
+ 'ArithmeticError', 'AssertionError', 'AttributeError',
+ 'BaseException', 'BufferError', 'BytesWarning', 'DeprecationWarning',
+ 'EOFError', 'EnvironmentError', 'Exception', 'FloatingPointError',
+ 'FutureWarning', 'GeneratorExit', 'IOError', 'ImportError',
+ 'ImportWarning', 'IndentationError', 'IndexError', 'KeyError',
+ 'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError',
+ 'NotImplementedError', 'OSError', 'OverflowError',
+ 'PendingDeprecationWarning', 'ReferenceError', 'ResourceWarning',
+ 'RuntimeError', 'RuntimeWarning', 'StopIteration',
+ 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit', 'TabError',
+ 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
+ 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
+ 'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError', 'Warning',
+ 'WindowsError', 'ZeroDivisionError',
+ # new builtin exceptions from PEP 3151
+ 'BlockingIOError', 'ChildProcessError', 'ConnectionError',
+ 'BrokenPipeError', 'ConnectionAbortedError', 'ConnectionRefusedError',
+ 'ConnectionResetError', 'FileExistsError', 'FileNotFoundError',
+ 'InterruptedError', 'IsADirectoryError', 'NotADirectoryError',
+ 'PermissionError', 'ProcessLookupError', 'TimeoutError'),
+ prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Exception),
+ ]
+ tokens['numbers'] = [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'0[oO][0-7]+', Number.Oct),
+ (r'0[bB][01]+', Number.Bin),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+', Number.Integer)
+ ]
+ tokens['backtick'] = []
+ tokens['name'] = [
+ (r'@\w+', Name.Decorator),
+ (r'@', Operator), # new matrix multiplication operator
+ (uni_name, Name),
+ ]
+ tokens['funcname'] = [
+ (uni_name, Name.Function, '#pop')
+ ]
+ tokens['classname'] = [
+ (uni_name, Name.Class, '#pop')
+ ]
+ tokens['import'] = [
+ (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
+ (r'\.', Name.Namespace),
+ (uni_name, Name.Namespace),
+ (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
+ default('#pop') # all else: go back
+ ]
+ tokens['fromimport'] = [
+ (r'(\s+)(import)\b', bygroups(Text, Keyword), '#pop'),
+ (r'\.', Name.Namespace),
+ (uni_name, Name.Namespace),
+ default('#pop'),
+ ]
+ tokens['strings'] = [
+ # the old style '%s' % (...) string formatting (still valid in Py3)
+ (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
+ # the new style '{}'.format(...) string formatting
+ (r'\{'
+ '((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name
+ '(\![sra])?' # conversion
+ '(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[bcdeEfFgGnosxX%]?)?'
+ '\}', String.Interpol),
+ # backslashes, quotes and formatting signs must be parsed one at a time
+ (r'[^\\\'"%\{\n]+', String),
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'%|(\{{1,2})', String)
+ # newlines are an error (use "nl" state)
+ ]
+
+ def analyse_text(text):
+ return shebang_matches(text, r'pythonw?3(\.\d)?')
+
+
+class PythonConsoleLexer(Lexer):
+ """
+ For Python console output or doctests, such as:
+
+ .. sourcecode:: pycon
+
+ >>> a = 'foo'
+ >>> print a
+ foo
+ >>> 1 / 0
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ ZeroDivisionError: integer division or modulo by zero
+
+ Additional options:
+
+ `python3`
+ Use Python 3 lexer for code. Default is ``False``.
+
+ .. versionadded:: 1.0
+ """
+ name = 'Python console session'
+ aliases = ['pycon']
+ mimetypes = ['text/x-python-doctest']
+
+ def __init__(self, **options):
+ self.python3 = get_bool_opt(options, 'python3', False)
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ if self.python3:
+ pylexer = Python3Lexer(**self.options)
+ tblexer = Python3TracebackLexer(**self.options)
+ else:
+ pylexer = PythonLexer(**self.options)
+ tblexer = PythonTracebackLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ curtb = ''
+ tbindex = 0
+ tb = 0
+ for match in line_re.finditer(text):
+ line = match.group()
+ if line.startswith(u'>>> ') or line.startswith(u'... '):
+ tb = 0
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:4])]))
+ curcode += line[4:]
+ elif line.rstrip() == u'...' and not tb:
+ # only a new >>> prompt can end an exception block
+ # otherwise an ellipsis in place of the traceback frames
+ # will be mishandled
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, u'...')]))
+ curcode += line[3:]
+ else:
+ if curcode:
+ for item in do_insertions(
+ insertions, pylexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ if (line.startswith(u'Traceback (most recent call last):') or
+ re.match(u' File "[^"]+", line \\d+\\n$', line)):
+ tb = 1
+ curtb = line
+ tbindex = match.start()
+ elif line == 'KeyboardInterrupt\n':
+ yield match.start(), Name.Class, line
+ elif tb:
+ curtb += line
+ if not (line.startswith(' ') or line.strip() == u'...'):
+ tb = 0
+ for i, t, v in tblexer.get_tokens_unprocessed(curtb):
+ yield tbindex+i, t, v
+ curtb = ''
+ else:
+ yield match.start(), Generic.Output, line
+ if curcode:
+ for item in do_insertions(insertions,
+ pylexer.get_tokens_unprocessed(curcode)):
+ yield item
+ if curtb:
+ for i, t, v in tblexer.get_tokens_unprocessed(curtb):
+ yield tbindex+i, t, v
+
+
+class PythonTracebackLexer(RegexLexer):
+ """
+ For Python tracebacks.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'Python Traceback'
+ aliases = ['pytb']
+ filenames = ['*.pytb']
+ mimetypes = ['text/x-python-traceback']
+
+ tokens = {
+ 'root': [
+ (r'^Traceback \(most recent call last\):\n',
+ Generic.Traceback, 'intb'),
+ # SyntaxError starts with this.
+ (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
+ (r'^.*\n', Other),
+ ],
+ 'intb': [
+ (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
+ (r'^( File )("[^"]+")(, line )(\d+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text)),
+ (r'^( )(.+)(\n)',
+ bygroups(Text, using(PythonLexer), Text)),
+ (r'^([ \t]*)(\.\.\.)(\n)',
+ bygroups(Text, Comment, Text)), # for doctests...
+ (r'^([^:]+)(: )(.+)(\n)',
+ bygroups(Generic.Error, Text, Name, Text), '#pop'),
+ (r'^([a-zA-Z_]\w*)(:?\n)',
+ bygroups(Generic.Error, Text), '#pop')
+ ],
+ }
+
+
+class Python3TracebackLexer(RegexLexer):
+ """
+ For Python 3.0 tracebacks, with support for chained exceptions.
+
+ .. versionadded:: 1.0
+ """
+
+ name = 'Python 3.0 Traceback'
+ aliases = ['py3tb']
+ filenames = ['*.py3tb']
+ mimetypes = ['text/x-python3-traceback']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
+ (r'^During handling of the above exception, another '
+ r'exception occurred:\n\n', Generic.Traceback),
+ (r'^The above exception was the direct cause of the '
+ r'following exception:\n\n', Generic.Traceback),
+ (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
+ ],
+ 'intb': [
+ (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
+ (r'^( File )("[^"]+")(, line )(\d+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text)),
+ (r'^( )(.+)(\n)',
+ bygroups(Text, using(Python3Lexer), Text)),
+ (r'^([ \t]*)(\.\.\.)(\n)',
+ bygroups(Text, Comment, Text)), # for doctests...
+ (r'^([^:]+)(: )(.+)(\n)',
+ bygroups(Generic.Error, Text, Name, Text), '#pop'),
+ (r'^([a-zA-Z_]\w*)(:?\n)',
+ bygroups(Generic.Error, Text), '#pop')
+ ],
+ }
+
+
+class CythonLexer(RegexLexer):
+ """
+ For Pyrex and `Cython <http://cython.org>`_ source code.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Cython'
+ aliases = ['cython', 'pyx', 'pyrex']
+ filenames = ['*.pyx', '*.pxd', '*.pxi']
+ mimetypes = ['text/x-cython', 'application/x-cython']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
+ (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
+ (r'[^\S\n]+', Text),
+ (r'#.*$', Comment),
+ (r'[]{}:(),;[]', Punctuation),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'(<)([a-zA-Z0-9.?]+)(>)',
+ bygroups(Punctuation, Keyword.Type, Punctuation)),
+ (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator),
+ (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)',
+ bygroups(Keyword, Number.Integer, Operator, Name, Operator,
+ Name, Punctuation)),
+ include('keywords'),
+ (r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'),
+ (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'),
+ (r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'),
+ include('builtins'),
+ include('backtick'),
+ ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
+ ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
+ ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
+ ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
+ ('[uU]?"""', String, combined('stringescape', 'tdqs')),
+ ("[uU]?'''", String, combined('stringescape', 'tsqs')),
+ ('[uU]?"', String, combined('stringescape', 'dqs')),
+ ("[uU]?'", String, combined('stringescape', 'sqs')),
+ include('name'),
+ include('numbers'),
+ ],
+ 'keywords': [
+ (words((
+ 'assert', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif',
+ 'else', 'except', 'except?', 'exec', 'finally', 'for', 'gil',
+ 'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print',
+ 'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'),
+ Keyword),
+ (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc),
+ ],
+ 'builtins': [
+ (words((
+ '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',
+ 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr',
+ 'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr',
+ 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit',
+ 'file', 'filter', 'float', 'frozenset', 'getattr', 'globals',
+ 'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance',
+ 'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max',
+ 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property',
+ 'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed',
+ 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod',
+ 'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode',
+ 'vars', 'xrange', 'zip'), prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Builtin),
+ (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|NULL'
+ r')\b', Name.Builtin.Pseudo),
+ (words((
+ 'ArithmeticError', 'AssertionError', 'AttributeError',
+ 'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',
+ 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit', 'IOError',
+ 'ImportError', 'ImportWarning', 'IndentationError', 'IndexError', 'KeyError',
+ 'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError',
+ 'NotImplemented', 'NotImplementedError', 'OSError', 'OverflowError',
+ 'OverflowWarning', 'PendingDeprecationWarning', 'ReferenceError',
+ 'RuntimeError', 'RuntimeWarning', 'StandardError', 'StopIteration',
+ 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit', 'TabError',
+ 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
+ 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
+ 'UnicodeWarning', 'UserWarning', 'ValueError', 'Warning',
+ 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Exception),
+ ],
+ 'numbers': [
+ (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'0\d+', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+', Number.Integer)
+ ],
+ 'backtick': [
+ ('`.*?`', String.Backtick),
+ ],
+ 'name': [
+ (r'@\w+', Name.Decorator),
+ ('[a-zA-Z_]\w*', Name),
+ ],
+ 'funcname': [
+ ('[a-zA-Z_]\w*', Name.Function, '#pop')
+ ],
+ 'cdef': [
+ (r'(public|readonly|extern|api|inline)\b', Keyword.Reserved),
+ (r'(struct|enum|union|class)\b', Keyword),
+ (r'([a-zA-Z_]\w*)(\s*)(?=[(:#=]|$)',
+ bygroups(Name.Function, Text), '#pop'),
+ (r'([a-zA-Z_]\w*)(\s*)(,)',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'from\b', Keyword, '#pop'),
+ (r'as\b', Keyword),
+ (r':', Punctuation, '#pop'),
+ (r'(?=["\'])', Text, '#pop'),
+ (r'[a-zA-Z_]\w*', Keyword.Type),
+ (r'.', Text),
+ ],
+ 'classname': [
+ ('[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
+ (r'[a-zA-Z_][\w.]*', Name.Namespace),
+ (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
+ default('#pop') # all else: go back
+ ],
+ 'fromimport': [
+ (r'(\s+)(c?import)\b', bygroups(Text, Keyword), '#pop'),
+ (r'[a-zA-Z_.][\w.]*', Name.Namespace),
+ # ``cdef foo from "header"``, or ``for foo from 0 < i < 10``
+ default('#pop'),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'strings': [
+ (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
+ (r'[^\\\'"%\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'%', String)
+ # newlines are an error (use "nl" state)
+ ],
+ 'nl': [
+ (r'\n', String)
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here again for raw strings
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r"\\\\|\\'|\\\n", String.Escape), # included here again for raw strings
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ }
+
+
+class DgLexer(RegexLexer):
+ """
+ Lexer for `dg <http://pyos.github.com/dg>`_,
+ a functional and object-oriented programming language
+ running on the CPython 3 VM.
+
+ .. versionadded:: 1.6
+ """
+ name = 'dg'
+ aliases = ['dg']
+ filenames = ['*.dg']
+ mimetypes = ['text/x-dg']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'#.*?$', Comment.Single),
+
+ (r'(?i)0b[01]+', Number.Bin),
+ (r'(?i)0o[0-7]+', Number.Oct),
+ (r'(?i)0x[0-9a-f]+', Number.Hex),
+ (r'(?i)[+-]?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?j?', Number.Float),
+ (r'(?i)[+-]?[0-9]+e[+-]?\d+j?', Number.Float),
+ (r'(?i)[+-]?[0-9]+j?', Number.Integer),
+
+ (r"(?i)(br|r?b?)'''", String, combined('stringescape', 'tsqs', 'string')),
+ (r'(?i)(br|r?b?)"""', String, combined('stringescape', 'tdqs', 'string')),
+ (r"(?i)(br|r?b?)'", String, combined('stringescape', 'sqs', 'string')),
+ (r'(?i)(br|r?b?)"', String, combined('stringescape', 'dqs', 'string')),
+
+ (r"`\w+'*`", Operator),
+ (r'\b(and|in|is|or|where)\b', Operator.Word),
+ (r'[!$%&*+\-./:<-@\\^|~;,]+', Operator),
+
+ (words((
+ 'bool', 'bytearray', 'bytes', 'classmethod', 'complex', 'dict', 'dict\'',
+ 'float', 'frozenset', 'int', 'list', 'list\'', 'memoryview', 'object',
+ 'property', 'range', 'set', 'set\'', 'slice', 'staticmethod', 'str', 'super',
+ 'tuple', 'tuple\'', 'type'), prefix=r'(?<!\.)', suffix=r'(?![\'\w])'),
+ Name.Builtin),
+ (words((
+ '__import__', 'abs', 'all', 'any', 'bin', 'bind', 'chr', 'cmp', 'compile',
+ 'complex', 'delattr', 'dir', 'divmod', 'drop', 'dropwhile', 'enumerate',
+ 'eval', 'exhaust', 'filter', 'flip', 'foldl1?', 'format', 'fst', 'getattr',
+ 'globals', 'hasattr', 'hash', 'head', 'hex', 'id', 'init', 'input',
+ 'isinstance', 'issubclass', 'iter', 'iterate', 'last', 'len', 'locals',
+ 'map', 'max', 'min', 'next', 'oct', 'open', 'ord', 'pow', 'print', 'repr',
+ 'reversed', 'round', 'setattr', 'scanl1?', 'snd', 'sorted', 'sum', 'tail',
+ 'take', 'takewhile', 'vars', 'zip'), prefix=r'(?<!\.)', suffix=r'(?![\'\w])'),
+ Name.Builtin),
+ (r"(?<!\.)(self|Ellipsis|NotImplemented|None|True|False)(?!['\w])",
+ Name.Builtin.Pseudo),
+
+ (r"(?<!\.)[A-Z]\w*(Error|Exception|Warning)'*(?!['\w])",
+ Name.Exception),
+ (r"(?<!\.)(Exception|GeneratorExit|KeyboardInterrupt|StopIteration|"
+ r"SystemExit)(?!['\w])", Name.Exception),
+
+ (r"(?<![\w.])(except|finally|for|if|import|not|otherwise|raise|"
+ r"subclass|while|with|yield)(?!['\w])", Keyword.Reserved),
+
+ (r"[A-Z_]+'*(?!['\w])", Name),
+ (r"[A-Z]\w+'*(?!['\w])", Keyword.Type),
+ (r"\w+'*", Name),
+
+ (r'[()]', Punctuation),
+ (r'.', Error),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'string': [
+ (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
+ (r'[^\\\'"%\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'%', String),
+ (r'\n', String)
+ ],
+ 'dqs': [
+ (r'"', String, '#pop')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop')
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop')
+ ],
+ }
+
+
+class NumPyLexer(PythonLexer):
+ """
+ A Python lexer recognizing Numerical Python builtins.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'NumPy'
+ aliases = ['numpy']
+
+ # override the mimetypes to not inherit them from python
+ mimetypes = []
+ filenames = []
+
+ EXTRA_KEYWORDS = set((
+ 'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
+ 'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
+ 'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh',
+ 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'argmax', 'argmin',
+ 'argsort', 'argwhere', 'around', 'array', 'array2string', 'array_equal',
+ 'array_equiv', 'array_repr', 'array_split', 'array_str', 'arrayrange',
+ 'asanyarray', 'asarray', 'asarray_chkfinite', 'ascontiguousarray',
+ 'asfarray', 'asfortranarray', 'asmatrix', 'asscalar', 'astype',
+ 'atleast_1d', 'atleast_2d', 'atleast_3d', 'average', 'bartlett',
+ 'base_repr', 'beta', 'binary_repr', 'bincount', 'binomial',
+ 'bitwise_and', 'bitwise_not', 'bitwise_or', 'bitwise_xor', 'blackman',
+ 'bmat', 'broadcast', 'byte_bounds', 'bytes', 'byteswap', 'c_',
+ 'can_cast', 'ceil', 'choose', 'clip', 'column_stack', 'common_type',
+ 'compare_chararrays', 'compress', 'concatenate', 'conj', 'conjugate',
+ 'convolve', 'copy', 'corrcoef', 'correlate', 'cos', 'cosh', 'cov',
+ 'cross', 'cumprod', 'cumproduct', 'cumsum', 'delete', 'deprecate',
+ 'diag', 'diagflat', 'diagonal', 'diff', 'digitize', 'disp', 'divide',
+ 'dot', 'dsplit', 'dstack', 'dtype', 'dump', 'dumps', 'ediff1d', 'empty',
+ 'empty_like', 'equal', 'exp', 'expand_dims', 'expm1', 'extract', 'eye',
+ 'fabs', 'fastCopyAndTranspose', 'fft', 'fftfreq', 'fftshift', 'fill',
+ 'finfo', 'fix', 'flat', 'flatnonzero', 'flatten', 'fliplr', 'flipud',
+ 'floor', 'floor_divide', 'fmod', 'frexp', 'fromarrays', 'frombuffer',
+ 'fromfile', 'fromfunction', 'fromiter', 'frompyfunc', 'fromstring',
+ 'generic', 'get_array_wrap', 'get_include', 'get_numarray_include',
+ 'get_numpy_include', 'get_printoptions', 'getbuffer', 'getbufsize',
+ 'geterr', 'geterrcall', 'geterrobj', 'getfield', 'gradient', 'greater',
+ 'greater_equal', 'gumbel', 'hamming', 'hanning', 'histogram',
+ 'histogram2d', 'histogramdd', 'hsplit', 'hstack', 'hypot', 'i0',
+ 'identity', 'ifft', 'imag', 'index_exp', 'indices', 'inf', 'info',
+ 'inner', 'insert', 'int_asbuffer', 'interp', 'intersect1d',
+ 'intersect1d_nu', 'inv', 'invert', 'iscomplex', 'iscomplexobj',
+ 'isfinite', 'isfortran', 'isinf', 'isnan', 'isneginf', 'isposinf',
+ 'isreal', 'isrealobj', 'isscalar', 'issctype', 'issubclass_',
+ 'issubdtype', 'issubsctype', 'item', 'itemset', 'iterable', 'ix_',
+ 'kaiser', 'kron', 'ldexp', 'left_shift', 'less', 'less_equal', 'lexsort',
+ 'linspace', 'load', 'loads', 'loadtxt', 'log', 'log10', 'log1p', 'log2',
+ 'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'logspace',
+ 'lstsq', 'mat', 'matrix', 'max', 'maximum', 'maximum_sctype',
+ 'may_share_memory', 'mean', 'median', 'meshgrid', 'mgrid', 'min',
+ 'minimum', 'mintypecode', 'mod', 'modf', 'msort', 'multiply', 'nan',
+ 'nan_to_num', 'nanargmax', 'nanargmin', 'nanmax', 'nanmin', 'nansum',
+ 'ndenumerate', 'ndim', 'ndindex', 'negative', 'newaxis', 'newbuffer',
+ 'newbyteorder', 'nonzero', 'not_equal', 'obj2sctype', 'ogrid', 'ones',
+ 'ones_like', 'outer', 'permutation', 'piecewise', 'pinv', 'pkgload',
+ 'place', 'poisson', 'poly', 'poly1d', 'polyadd', 'polyder', 'polydiv',
+ 'polyfit', 'polyint', 'polymul', 'polysub', 'polyval', 'power', 'prod',
+ 'product', 'ptp', 'put', 'putmask', 'r_', 'randint', 'random_integers',
+ 'random_sample', 'ranf', 'rank', 'ravel', 'real', 'real_if_close',
+ 'recarray', 'reciprocal', 'reduce', 'remainder', 'repeat', 'require',
+ 'reshape', 'resize', 'restoredot', 'right_shift', 'rint', 'roll',
+ 'rollaxis', 'roots', 'rot90', 'round', 'round_', 'row_stack', 's_',
+ 'sample', 'savetxt', 'sctype2char', 'searchsorted', 'seed', 'select',
+ 'set_numeric_ops', 'set_printoptions', 'set_string_function',
+ 'setbufsize', 'setdiff1d', 'seterr', 'seterrcall', 'seterrobj',
+ 'setfield', 'setflags', 'setmember1d', 'setxor1d', 'shape',
+ 'show_config', 'shuffle', 'sign', 'signbit', 'sin', 'sinc', 'sinh',
+ 'size', 'slice', 'solve', 'sometrue', 'sort', 'sort_complex', 'source',
+ 'split', 'sqrt', 'square', 'squeeze', 'standard_normal', 'std',
+ 'subtract', 'sum', 'svd', 'swapaxes', 'take', 'tan', 'tanh', 'tensordot',
+ 'test', 'tile', 'tofile', 'tolist', 'tostring', 'trace', 'transpose',
+ 'trapz', 'tri', 'tril', 'trim_zeros', 'triu', 'true_divide', 'typeDict',
+ 'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index',
+ 'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises',
+ 'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like'
+ ))
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ PythonLexer.get_tokens_unprocessed(self, text):
+ if token is Name and value in self.EXTRA_KEYWORDS:
+ yield index, Keyword.Pseudo, value
+ else:
+ yield index, token, value
+
+ def analyse_text(text):
+ return (shebang_matches(text, r'pythonw?(2(\.\d)?)?') or
+ 'import ' in text[:1000]) \
+ and ('import numpy' in text or 'from numpy import' in text)
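
(Illustrative only, not part of the patch: a sketch of the PythonConsoleLexer option described above, assuming this Pygments tree is importable.)

    from pygments.lexers import PythonConsoleLexer

    session = ('>>> 1 / 0\n'
               'Traceback (most recent call last):\n'
               '  File "<stdin>", line 1, in <module>\n'
               'ZeroDivisionError: division by zero\n')
    # python3=True makes the console lexer delegate code and traceback lines to
    # Python3Lexer and Python3TracebackLexer, as implemented in
    # get_tokens_unprocessed above.
    for tok_type, value in PythonConsoleLexer(python3=True).get_tokens(session):
        print('%s %r' % (tok_type, value))
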
diff --git a/pygments/lexers/r.py b/pygments/lexers/r.py
new file mode 100644
index 00000000..1a47ca26
--- /dev/null
+++ b/pygments/lexers/r.py
@@ -0,0 +1,453 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.r
+ ~~~~~~~~~~~~~~~~~
+
+ Lexers for the R/S languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, words, do_insertions
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['RConsoleLexer', 'SLexer', 'RdLexer']
+
+
+line_re = re.compile('.*?\n')
+
+
+class RConsoleLexer(Lexer):
+ """
+ For R console transcripts or R CMD BATCH output files.
+ """
+
+ name = 'RConsole'
+ aliases = ['rconsole', 'rout']
+ filenames = ['*.Rout']
+
+ def get_tokens_unprocessed(self, text):
+ slexer = SLexer(**self.options)
+
+ current_code_block = ''
+ insertions = []
+
+ for match in line_re.finditer(text):
+ line = match.group()
+ if line.startswith('>') or line.startswith('+'):
+ # Colorize the prompt itself, then put the rest of
+ # the line into current_code_block
+ insertions.append((len(current_code_block),
+ [(0, Generic.Prompt, line[:2])]))
+ current_code_block += line[2:]
+ else:
+ # We have reached a non-prompt line!
+ # If we have stored prompt lines, need to process them first.
+ if current_code_block:
+ # Weave together the prompts and highlight code.
+ for item in do_insertions(
+ insertions, slexer.get_tokens_unprocessed(current_code_block)):
+ yield item
+ # Reset vars for next code block.
+ current_code_block = ''
+ insertions = []
+ # Now process the actual line itself; this is output from R.
+ yield match.start(), Generic.Output, line
+
+ # If we happen to end on a code block with nothing after it, we still
+ # need to process the last code block. This is neither elegant nor DRY
+ # and should be changed.
+ if current_code_block:
+ for item in do_insertions(
+ insertions, slexer.get_tokens_unprocessed(current_code_block)):
+ yield item
+
+
+class SLexer(RegexLexer):
+ """
+ For S, S-plus, and R source code.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'S'
+ aliases = ['splus', 's', 'r']
+ filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron']
+ mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
+ 'text/x-R', 'text/x-r-history', 'text/x-r-profile']
+
+ builtins_base = (
+ 'Arg', 'Conj', 'Cstack_info', 'Encoding', 'FALSE',
+ 'Filter', 'Find', 'I', 'ISOdate', 'ISOdatetime', 'Im', 'Inf',
+ 'La.svd', 'Map', 'Math.Date', 'Math.POSIXt', 'Math.data.frame',
+ 'Math.difftime', 'Math.factor', 'Mod', 'NA_character_',
+ 'NA_complex_', 'NA_real_', 'NCOL', 'NROW', 'NULLNA_integer_', 'NaN',
+ 'Negate', 'NextMethod', 'Ops.Date', 'Ops.POSIXt', 'Ops.data.frame',
+ 'Ops.difftime', 'Ops.factor', 'Ops.numeric_version', 'Ops.ordered',
+ 'Position', 'R.Version', 'R.home', 'R.version', 'R.version.string',
+ 'RNGkind', 'RNGversion', 'R_system_version', 'Re', 'Recall',
+ 'Reduce', 'Summary.Date', 'Summary.POSIXct', 'Summary.POSIXlt',
+ 'Summary.data.frame', 'Summary.difftime', 'Summary.factor',
+ 'Summary.numeric_version', 'Summary.ordered', 'Sys.Date',
+ 'Sys.chmod', 'Sys.getenv', 'Sys.getlocale', 'Sys.getpid',
+ 'Sys.glob', 'Sys.info', 'Sys.localeconv', 'Sys.readlink',
+ 'Sys.setFileTime', 'Sys.setenv', 'Sys.setlocale', 'Sys.sleep',
+ 'Sys.time', 'Sys.timezone', 'Sys.umask', 'Sys.unsetenv',
+ 'Sys.which', 'TRUE', 'UseMethod', 'Vectorize', 'abbreviate', 'abs',
+ 'acos', 'acosh', 'addNA', 'addTaskCallback', 'agrep', 'alist',
+ 'all', 'all.equal', 'all.equal.POSIXct', 'all.equal.character',
+ 'all.equal.default', 'all.equal.factor', 'all.equal.formula',
+ 'all.equal.language', 'all.equal.list', 'all.equal.numeric',
+ 'all.equal.raw', 'all.names', 'all.vars', 'any', 'anyDuplicated',
+ 'anyDuplicated.array', 'anyDuplicated.data.frame',
+ 'anyDuplicated.default', 'anyDuplicated.matrix', 'aperm',
+ 'aperm.default', 'aperm.table', 'append', 'apply', 'args',
+ 'arrayInd', 'as.Date', 'as.Date.POSIXct', 'as.Date.POSIXlt',
+ 'as.Date.character', 'as.Date.date', 'as.Date.dates',
+ 'as.Date.default', 'as.Date.factor', 'as.Date.numeric',
+ 'as.POSIXct', 'as.POSIXct.Date', 'as.POSIXct.POSIXlt',
+ 'as.POSIXct.date', 'as.POSIXct.dates', 'as.POSIXct.default',
+ 'as.POSIXct.numeric', 'as.POSIXlt', 'as.POSIXlt.Date',
+ 'as.POSIXlt.POSIXct', 'as.POSIXlt.character', 'as.POSIXlt.date',
+ 'as.POSIXlt.dates', 'as.POSIXlt.default', 'as.POSIXlt.factor',
+ 'as.POSIXlt.numeric', 'as.array', 'as.array.default', 'as.call',
+ 'as.character', 'as.character.Date', 'as.character.POSIXt',
+ 'as.character.condition', 'as.character.default',
+ 'as.character.error', 'as.character.factor', 'as.character.hexmode',
+ 'as.character.numeric_version', 'as.character.octmode',
+ 'as.character.srcref', 'as.complex', 'as.data.frame',
+ 'as.data.frame.AsIs', 'as.data.frame.Date', 'as.data.frame.POSIXct',
+ 'as.data.frame.POSIXlt', 'as.data.frame.array',
+ 'as.data.frame.character', 'as.data.frame.complex',
+ 'as.data.frame.data.frame', 'as.data.frame.default',
+ 'as.data.frame.difftime', 'as.data.frame.factor',
+ 'as.data.frame.integer', 'as.data.frame.list',
+ 'as.data.frame.logical', 'as.data.frame.matrix',
+ 'as.data.frame.model.matrix', 'as.data.frame.numeric',
+ 'as.data.frame.numeric_version', 'as.data.frame.ordered',
+ 'as.data.frame.raw', 'as.data.frame.table', 'as.data.frame.ts',
+ 'as.data.frame.vector', 'as.difftime', 'as.double',
+ 'as.double.POSIXlt', 'as.double.difftime', 'as.environment',
+ 'as.expression', 'as.expression.default', 'as.factor',
+ 'as.function', 'as.function.default', 'as.hexmode', 'as.integer',
+ 'as.list', 'as.list.Date', 'as.list.POSIXct', 'as.list.data.frame',
+ 'as.list.default', 'as.list.environment', 'as.list.factor',
+ 'as.list.function', 'as.list.numeric_version', 'as.logical',
+ 'as.logical.factor', 'as.matrix', 'as.matrix.POSIXlt',
+ 'as.matrix.data.frame', 'as.matrix.default', 'as.matrix.noquote',
+ 'as.name', 'as.null', 'as.null.default', 'as.numeric',
+ 'as.numeric_version', 'as.octmode', 'as.ordered',
+ 'as.package_version', 'as.pairlist', 'as.qr', 'as.raw', 'as.single',
+ 'as.single.default', 'as.symbol', 'as.table', 'as.table.default',
+ 'as.vector', 'as.vector.factor', 'asNamespace', 'asS3', 'asS4',
+ 'asin', 'asinh', 'assign', 'atan', 'atan2', 'atanh',
+ 'attachNamespace', 'attr', 'attr.all.equal', 'attributes',
+ 'autoload', 'autoloader', 'backsolve', 'baseenv', 'basename',
+ 'besselI', 'besselJ', 'besselK', 'besselY', 'beta',
+ 'bindingIsActive', 'bindingIsLocked', 'bindtextdomain', 'bitwAnd',
+ 'bitwNot', 'bitwOr', 'bitwShiftL', 'bitwShiftR', 'bitwXor', 'body',
+ 'bquote', 'browser', 'browserCondition', 'browserSetDebug',
+ 'browserText', 'builtins', 'by', 'by.data.frame', 'by.default',
+ 'bzfile', 'c.Date', 'c.POSIXct', 'c.POSIXlt', 'c.noquote',
+ 'c.numeric_version', 'call', 'callCC', 'capabilities', 'casefold',
+ 'cat', 'category', 'cbind', 'cbind.data.frame', 'ceiling',
+ 'char.expand', 'charToRaw', 'charmatch', 'chartr', 'check_tzones',
+ 'chol', 'chol.default', 'chol2inv', 'choose', 'class',
+ 'clearPushBack', 'close', 'close.connection', 'close.srcfile',
+ 'close.srcfilealias', 'closeAllConnections', 'col', 'colMeans',
+ 'colSums', 'colnames', 'commandArgs', 'comment', 'computeRestarts',
+ 'conditionCall', 'conditionCall.condition', 'conditionMessage',
+ 'conditionMessage.condition', 'conflicts', 'contributors', 'cos',
+ 'cosh', 'crossprod', 'cummax', 'cummin', 'cumprod', 'cumsum', 'cut',
+ 'cut.Date', 'cut.POSIXt', 'cut.default', 'dQuote', 'data.class',
+ 'data.matrix', 'date', 'debug', 'debugonce',
+ 'default.stringsAsFactors', 'delayedAssign', 'deparse', 'det',
+ 'determinant', 'determinant.matrix', 'dget', 'diag', 'diff',
+ 'diff.Date', 'diff.POSIXt', 'diff.default', 'difftime', 'digamma',
+ 'dim', 'dim.data.frame', 'dimnames', 'dimnames.data.frame', 'dir',
+ 'dir.create', 'dirname', 'do.call', 'dput', 'drop', 'droplevels',
+ 'droplevels.data.frame', 'droplevels.factor', 'dump', 'duplicated',
+ 'duplicated.POSIXlt', 'duplicated.array', 'duplicated.data.frame',
+ 'duplicated.default', 'duplicated.matrix',
+ 'duplicated.numeric_version', 'dyn.load', 'dyn.unload', 'eapply',
+ 'eigen', 'else', 'emptyenv', 'enc2native', 'enc2utf8',
+ 'encodeString', 'enquote', 'env.profile', 'environment',
+ 'environmentIsLocked', 'environmentName', 'eval', 'eval.parent',
+ 'evalq', 'exists', 'exp', 'expand.grid', 'expm1', 'expression',
+ 'factor', 'factorial', 'fifo', 'file', 'file.access', 'file.append',
+ 'file.choose', 'file.copy', 'file.create', 'file.exists',
+ 'file.info', 'file.link', 'file.path', 'file.remove', 'file.rename',
+ 'file.show', 'file.symlink', 'find.package', 'findInterval',
+ 'findPackageEnv', 'findRestart', 'floor', 'flush',
+ 'flush.connection', 'force', 'formals', 'format',
+ 'format.AsIs', 'format.Date', 'format.POSIXct', 'format.POSIXlt',
+ 'format.data.frame', 'format.default', 'format.difftime',
+ 'format.factor', 'format.hexmode', 'format.info',
+ 'format.libraryIQR', 'format.numeric_version', 'format.octmode',
+ 'format.packageInfo', 'format.pval', 'format.summaryDefault',
+ 'formatC', 'formatDL', 'forwardsolve', 'gamma', 'gc', 'gc.time',
+ 'gcinfo', 'gctorture', 'gctorture2', 'get', 'getAllConnections',
+ 'getCallingDLL', 'getCallingDLLe', 'getConnection',
+ 'getDLLRegisteredRoutines', 'getDLLRegisteredRoutines.DLLInfo',
+ 'getDLLRegisteredRoutines.character', 'getElement',
+ 'getExportedValue', 'getHook', 'getLoadedDLLs', 'getNamespace',
+ 'getNamespaceExports', 'getNamespaceImports', 'getNamespaceInfo',
+ 'getNamespaceName', 'getNamespaceUsers', 'getNamespaceVersion',
+ 'getNativeSymbolInfo', 'getOption', 'getRversion', 'getSrcLines',
+ 'getTaskCallbackNames', 'geterrmessage', 'gettext', 'gettextf',
+ 'getwd', 'gl', 'globalenv', 'gregexpr', 'grep', 'grepRaw', 'grepl',
+ 'gsub', 'gzcon', 'gzfile', 'head', 'iconv', 'iconvlist',
+ 'icuSetCollate', 'identical', 'identity', 'ifelse', 'importIntoEnv',
+ 'in', 'inherits', 'intToBits', 'intToUtf8', 'interaction', 'interactive',
+ 'intersect', 'inverse.rle', 'invisible', 'invokeRestart',
+ 'invokeRestartInteractively', 'is.R', 'is.array', 'is.atomic',
+ 'is.call', 'is.character', 'is.complex', 'is.data.frame',
+ 'is.double', 'is.element', 'is.environment', 'is.expression',
+ 'is.factor', 'is.finite', 'is.function', 'is.infinite',
+ 'is.integer', 'is.language', 'is.list', 'is.loaded', 'is.logical',
+ 'is.matrix', 'is.na', 'is.na.POSIXlt', 'is.na.data.frame',
+ 'is.na.numeric_version', 'is.name', 'is.nan', 'is.null',
+ 'is.numeric', 'is.numeric.Date', 'is.numeric.POSIXt',
+ 'is.numeric.difftime', 'is.numeric_version', 'is.object',
+ 'is.ordered', 'is.package_version', 'is.pairlist', 'is.primitive',
+ 'is.qr', 'is.raw', 'is.recursive', 'is.single', 'is.symbol',
+ 'is.table', 'is.unsorted', 'is.vector', 'isBaseNamespace',
+ 'isIncomplete', 'isNamespace', 'isOpen', 'isRestart', 'isS4',
+ 'isSeekable', 'isSymmetric', 'isSymmetric.matrix', 'isTRUE',
+ 'isatty', 'isdebugged', 'jitter', 'julian', 'julian.Date',
+ 'julian.POSIXt', 'kappa', 'kappa.default', 'kappa.lm', 'kappa.qr',
+ 'kronecker', 'l10n_info', 'labels', 'labels.default', 'lapply',
+ 'lazyLoad', 'lazyLoadDBexec', 'lazyLoadDBfetch', 'lbeta', 'lchoose',
+ 'length', 'length.POSIXlt', 'letters', 'levels', 'levels.default',
+ 'lfactorial', 'lgamma', 'library.dynam', 'library.dynam.unload',
+ 'licence', 'license', 'list.dirs', 'list.files', 'list2env', 'load',
+ 'loadNamespace', 'loadedNamespaces', 'loadingNamespaceInfo',
+ 'local', 'lockBinding', 'lockEnvironment', 'log', 'log10', 'log1p',
+ 'log2', 'logb', 'lower.tri', 'ls', 'make.names', 'make.unique',
+ 'makeActiveBinding', 'mapply', 'margin.table', 'mat.or.vec',
+ 'match', 'match.arg', 'match.call', 'match.fun', 'max', 'max.col',
+ 'mean', 'mean.Date', 'mean.POSIXct', 'mean.POSIXlt', 'mean.default',
+ 'mean.difftime', 'mem.limits', 'memCompress', 'memDecompress',
+ 'memory.profile', 'merge', 'merge.data.frame', 'merge.default',
+ 'message', 'mget', 'min', 'missing', 'mode', 'month.abb',
+ 'month.name', 'months', 'months.Date', 'months.POSIXt',
+ 'months.abb', 'months.nameletters', 'names', 'names.POSIXlt',
+ 'namespaceExport', 'namespaceImport', 'namespaceImportClasses',
+ 'namespaceImportFrom', 'namespaceImportMethods', 'nargs', 'nchar',
+ 'ncol', 'new.env', 'ngettext', 'nlevels', 'noquote', 'norm',
+ 'normalizePath', 'nrow', 'numeric_version', 'nzchar', 'objects',
+ 'oldClass', 'on.exit', 'open', 'open.connection', 'open.srcfile',
+ 'open.srcfilealias', 'open.srcfilecopy', 'options', 'order',
+ 'ordered', 'outer', 'packBits', 'packageEvent',
+ 'packageHasNamespace', 'packageStartupMessage', 'package_version',
+ 'pairlist', 'parent.env', 'parent.frame', 'parse',
+ 'parseNamespaceFile', 'paste', 'paste0', 'path.expand',
+ 'path.package', 'pipe', 'pmatch', 'pmax', 'pmax.int', 'pmin',
+ 'pmin.int', 'polyroot', 'pos.to.env', 'pretty', 'pretty.default',
+ 'prettyNum', 'print', 'print.AsIs', 'print.DLLInfo',
+ 'print.DLLInfoList', 'print.DLLRegisteredRoutines', 'print.Date',
+ 'print.NativeRoutineList', 'print.POSIXct', 'print.POSIXlt',
+ 'print.by', 'print.condition', 'print.connection',
+ 'print.data.frame', 'print.default', 'print.difftime',
+ 'print.factor', 'print.function', 'print.hexmode',
+ 'print.libraryIQR', 'print.listof', 'print.noquote',
+ 'print.numeric_version', 'print.octmode', 'print.packageInfo',
+ 'print.proc_time', 'print.restart', 'print.rle',
+ 'print.simple.list', 'print.srcfile', 'print.srcref',
+ 'print.summary.table', 'print.summaryDefault', 'print.table',
+ 'print.warnings', 'prmatrix', 'proc.time', 'prod', 'prop.table',
+ 'provideDimnames', 'psigamma', 'pushBack', 'pushBackLength', 'q',
+ 'qr', 'qr.Q', 'qr.R', 'qr.X', 'qr.coef', 'qr.default', 'qr.fitted',
+ 'qr.qty', 'qr.qy', 'qr.resid', 'qr.solve', 'quarters',
+ 'quarters.Date', 'quarters.POSIXt', 'quit', 'quote', 'range',
+ 'range.default', 'rank', 'rapply', 'raw', 'rawConnection',
+ 'rawConnectionValue', 'rawShift', 'rawToBits', 'rawToChar', 'rbind',
+ 'rbind.data.frame', 'rcond', 'read.dcf', 'readBin', 'readChar',
+ 'readLines', 'readRDS', 'readRenviron', 'readline', 'reg.finalizer',
+ 'regexec', 'regexpr', 'registerS3method', 'registerS3methods',
+ 'regmatches', 'remove', 'removeTaskCallback', 'rep', 'rep.Date',
+ 'rep.POSIXct', 'rep.POSIXlt', 'rep.factor', 'rep.int',
+ 'rep.numeric_version', 'rep_len', 'replace', 'replicate',
+ 'requireNamespace', 'restartDescription', 'restartFormals',
+ 'retracemem', 'rev', 'rev.default', 'rle', 'rm', 'round',
+ 'round.Date', 'round.POSIXt', 'row', 'row.names',
+ 'row.names.data.frame', 'row.names.default', 'rowMeans', 'rowSums',
+ 'rownames', 'rowsum', 'rowsum.data.frame', 'rowsum.default',
+ 'sQuote', 'sample', 'sample.int', 'sapply', 'save', 'save.image',
+ 'saveRDS', 'scale', 'scale.default', 'scan', 'search',
+ 'searchpaths', 'seek', 'seek.connection', 'seq', 'seq.Date',
+ 'seq.POSIXt', 'seq.default', 'seq.int', 'seq_along', 'seq_len',
+ 'sequence', 'serialize', 'set.seed', 'setHook', 'setNamespaceInfo',
+ 'setSessionTimeLimit', 'setTimeLimit', 'setdiff', 'setequal',
+ 'setwd', 'shQuote', 'showConnections', 'sign', 'signalCondition',
+ 'signif', 'simpleCondition', 'simpleError', 'simpleMessage',
+ 'simpleWarning', 'simplify2array', 'sin', 'single',
+ 'sinh', 'sink', 'sink.number', 'slice.index', 'socketConnection',
+ 'socketSelect', 'solve', 'solve.default', 'solve.qr', 'sort',
+ 'sort.POSIXlt', 'sort.default', 'sort.int', 'sort.list', 'split',
+ 'split.Date', 'split.POSIXct', 'split.data.frame', 'split.default',
+ 'sprintf', 'sqrt', 'srcfile', 'srcfilealias', 'srcfilecopy',
+ 'srcref', 'standardGeneric', 'stderr', 'stdin', 'stdout', 'stop',
+ 'stopifnot', 'storage.mode', 'strftime', 'strptime', 'strsplit',
+ 'strtoi', 'strtrim', 'structure', 'strwrap', 'sub', 'subset',
+ 'subset.data.frame', 'subset.default', 'subset.matrix',
+ 'substitute', 'substr', 'substring', 'sum', 'summary',
+ 'summary.Date', 'summary.POSIXct', 'summary.POSIXlt',
+ 'summary.connection', 'summary.data.frame', 'summary.default',
+ 'summary.factor', 'summary.matrix', 'summary.proc_time',
+ 'summary.srcfile', 'summary.srcref', 'summary.table',
+ 'suppressMessages', 'suppressPackageStartupMessages',
+ 'suppressWarnings', 'svd', 'sweep', 'sys.call', 'sys.calls',
+ 'sys.frame', 'sys.frames', 'sys.function', 'sys.load.image',
+ 'sys.nframe', 'sys.on.exit', 'sys.parent', 'sys.parents',
+ 'sys.save.image', 'sys.source', 'sys.status', 'system',
+ 'system.file', 'system.time', 'system2', 't', 't.data.frame',
+ 't.default', 'table', 'tabulate', 'tail', 'tan', 'tanh', 'tapply',
+ 'taskCallbackManager', 'tcrossprod', 'tempdir', 'tempfile',
+ 'testPlatformEquivalence', 'textConnection', 'textConnectionValue',
+ 'toString', 'toString.default', 'tolower', 'topenv', 'toupper',
+ 'trace', 'traceback', 'tracemem', 'tracingState', 'transform',
+ 'transform.data.frame', 'transform.default', 'trigamma', 'trunc',
+ 'trunc.Date', 'trunc.POSIXt', 'truncate', 'truncate.connection',
+ 'try', 'tryCatch', 'typeof', 'unclass', 'undebug', 'union',
+ 'unique', 'unique.POSIXlt', 'unique.array', 'unique.data.frame',
+ 'unique.default', 'unique.matrix', 'unique.numeric_version',
+ 'units', 'units.difftime', 'unix.time', 'unlink', 'unlist',
+ 'unloadNamespace', 'unlockBinding', 'unname', 'unserialize',
+ 'unsplit', 'untrace', 'untracemem', 'unz', 'upper.tri', 'url',
+ 'utf8ToInt', 'vapply', 'version', 'warning', 'warnings', 'weekdays',
+ 'weekdays.Date', 'weekdays.POSIXt', 'which', 'which.max',
+ 'which.min', 'with', 'with.default', 'withCallingHandlers',
+ 'withRestarts', 'withVisible', 'within', 'within.data.frame',
+ 'within.list', 'write', 'write.dcf', 'writeBin', 'writeChar',
+ 'writeLines', 'xor', 'xor.hexmode', 'xor.octmode',
+ 'xpdrows.data.frame', 'xtfrm', 'xtfrm.AsIs', 'xtfrm.Date',
+ 'xtfrm.POSIXct', 'xtfrm.POSIXlt', 'xtfrm.Surv', 'xtfrm.default',
+ 'xtfrm.difftime', 'xtfrm.factor', 'xtfrm.numeric_version', 'xzfile',
+ 'zapsmall'
+ )
+
+ tokens = {
+ 'comments': [
+ (r'#.*$', Comment.Single),
+ ],
+ 'valid_name': [
+ (r'[a-zA-Z][\w.]*', Text),
+ # can begin with ., but not if that is followed by a digit
+ (r'\.[a-zA-Z_][\w.]*', Text),
+ ],
+ 'punctuation': [
+ (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
+ ],
+ 'keywords': [
+ (words(builtins_base, suffix=r'(?![\w. =])'),
+ Keyword.Pseudo),
+ (r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
+ r'(?![\w.])',
+ Keyword.Reserved),
+ (r'(array|category|character|complex|double|function|integer|list|'
+ r'logical|matrix|numeric|vector|data.frame|c)'
+ r'(?![\w.])',
+ Keyword.Type),
+ (r'(library|require|attach|detach|source)'
+ r'(?![\w.])',
+ Keyword.Namespace)
+ ],
+ 'operators': [
+ (r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
+ (r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator)
+ ],
+ 'builtin_symbols': [
+ (r'(NULL|NA(_(integer|real|complex|character)_)?|'
+ r'letters|LETTERS|Inf|TRUE|FALSE|NaN|pi|\.\.(\.|[0-9]+))'
+ r'(?![\w.])',
+ Keyword.Constant),
+ (r'(T|F)\b', Name.Builtin.Pseudo),
+ ],
+ 'numbers': [
+ # hex number
+ (r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
+ # decimal number
+ (r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[Li]?',
+ Number),
+ ],
+ 'statements': [
+ include('comments'),
+ # whitespaces
+ (r'\s+', Text),
+ (r'`.*?`', String.Backtick),
+ (r'\'', String, 'string_squote'),
+ (r'\"', String, 'string_dquote'),
+ include('builtin_symbols'),
+ include('numbers'),
+ include('keywords'),
+ include('punctuation'),
+ include('operators'),
+ include('valid_name'),
+ ],
+ 'root': [
+ include('statements'),
+ # blocks:
+ (r'\{|\}', Punctuation),
+ # (r'\{', Punctuation, 'block'),
+ (r'.', Text),
+ ],
+ # 'block': [
+ # include('statements'),
+ # ('\{', Punctuation, '#push'),
+ # ('\}', Punctuation, '#pop')
+ # ],
+ 'string_squote': [
+ (r'([^\'\\]|\\.)*\'', String, '#pop'),
+ ],
+ 'string_dquote': [
+ (r'([^"\\]|\\.)*"', String, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if re.search(r'[a-z0-9_\])\s]<-(?!-)', text):
+ return 0.11
+
+
+class RdLexer(RegexLexer):
+ """
+ Pygments Lexer for R documentation (Rd) files.
+
+ This is a very minimal implementation, highlighting little more
+ than the macros. A description of Rd syntax is found in `Writing R
+ Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
+ and `Parsing Rd files <http://developer.r-project.org/parseRd.pdf>`_.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Rd'
+ aliases = ['rd']
+ filenames = ['*.Rd']
+ mimetypes = ['text/x-r-doc']
+
+ # To account for verbatim / LaTeX-like / and R-like areas
+ # would require parsing.
+ tokens = {
+ 'root': [
+ # catch escaped brackets and percent sign
+ (r'\\[\\{}%]', String.Escape),
+ # comments
+ (r'%.*$', Comment),
+ # special macros with no arguments
+ (r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
+ # macros
+ (r'\\[a-zA-Z]+\b', Keyword),
+ # special preprocessor macros
+ (r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
+ # non-escaped brackets
+ (r'[{}]', Name.Builtin),
+ # everything else
+ (r'[^\\%\n{}]+', Text),
+ (r'.', Text),
+ ]
+ }
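
The new R/S and Rd lexers plug into the standard Pygments entry points. The sketch below is illustrative only (not part of the patch); it assumes the 'r' alias registered above and shows both direct highlighting and the weak 0.11 hint that analyse_text() contributes to lexer guessing.

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name, guess_lexer
    from pygments.formatters import TerminalFormatter

    # Small R snippet; the assignment arrow is what analyse_text() keys on.
    r_code = 'x <- c(1, 2, 3)\nmean(x)  # arithmetic mean\n'

    # Highlight using the 'r' alias declared by the lexer above.
    print(highlight(r_code, get_lexer_by_name('r'), TerminalFormatter()))

    # analyse_text() returns 0.11 for the '<-' arrow, giving guess_lexer()
    # a weak hint that this is R/S code.
    print(guess_lexer(r_code).name)
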
diff --git a/pygments/lexers/rdf.py b/pygments/lexers/rdf.py
new file mode 100644
index 00000000..fb14629a
--- /dev/null
+++ b/pygments/lexers/rdf.py
@@ -0,0 +1,99 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.rdf
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for semantic web and RDF query languages and markup.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, default
+from pygments.token import Keyword, Punctuation, String, Number, Operator, \
+ Whitespace, Name, Literal, Comment, Text
+
+__all__ = ['SparqlLexer']
+
+
+class SparqlLexer(RegexLexer):
+ """
+ Lexer for `SPARQL <http://www.w3.org/TR/rdf-sparql-query/>`_ query language.
+
+ .. versionadded:: 2.0
+ """
+ name = 'SPARQL'
+ aliases = ['sparql']
+ filenames = ['*.rq', '*.sparql']
+ mimetypes = ['application/sparql-query']
+
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\s+', Whitespace),
+ (r'(select|construct|describe|ask|where|filter|group\s+by|minus|'
+ r'distinct|reduced|from named|from|order\s+by|limit|'
+ r'offset|bindings|load|clear|drop|create|add|move|copy|'
+ r'insert\s+data|delete\s+data|delete\s+where|delete|insert|'
+ r'using named|using|graph|default|named|all|optional|service|'
+ r'silent|bind|union|not in|in|as|a)', Keyword),
+ (r'(prefix|base)(\s+)([a-z][\w-]*)(\s*)(\:)',
+ bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
+ Punctuation)),
+ (r'\?[a-z_]\w*', Name.Variable),
+ (r'<[^>]+>', Name.Label),
+ (r'([a-z][\w-]*)(\:)([a-z][\w-]*)',
+ bygroups(Name.Namespace, Punctuation, Name.Tag)),
+ (r'(str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|'
+ r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|'
+ r'contains|strstarts|strends|strbefore|strafter|year|month|day|'
+ r'hours|minutes|seconds|timezone|tz|now|md5|sha1|sha256|sha384|'
+ r'sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|'
+ r'isliteral|isnumeric|regex|substr|replace|exists|not exists|'
+ r'count|sum|min|max|avg|sample|group_concat|separator)\b',
+ Name.Function),
+ (r'(true|false)', Literal),
+ (r'[+\-]?\d*(?:\.\d+)?E[+\-]?\d+', Number.Float),
+ (r'[+\-]?\d*\.\d+', Number.Float),
+ (r'[+\-]?\d+', Number.Integer),
+ (r'(\|\||&&|=|\*|\-|\+|/)', Operator),
+ (r'[(){}.;,:^]', Punctuation),
+ (r'#[^\n]+', Comment),
+ (r'"""', String, 'triple-double-quoted-string'),
+ (r'"', String, 'single-double-quoted-string'),
+ (r"'''", String, 'triple-single-quoted-string'),
+ (r"'", String, 'single-single-quoted-string'),
+ ],
+ 'triple-double-quoted-string': [
+ (r'"""', String, 'end-of-string'),
+ (r'[^\\]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'single-double-quoted-string': [
+ (r'"', String, 'end-of-string'),
+ (r'[^"\\\n]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'triple-single-quoted-string': [
+ (r"'''", String, 'end-of-string'),
+ (r'[^\\]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'single-single-quoted-string': [
+ (r"'", String, 'end-of-string'),
+ (r"[^'\\\n]+", String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'string-escape': [
+ (r'.', String, '#pop'),
+ ],
+ 'end-of-string': [
+ (r'(@)([a-z]+(?:-[a-z0-9]+)*)',
+ bygroups(Operator, Name.Function), '#pop:2'),
+ (r'\^\^', Operator, '#pop:2'),
+ default('#pop:2'),
+ ],
+ }
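
For reference, a usage sketch (illustrative, not part of the patch) that runs the new SPARQL lexer over a query exercising the string rules above, including a language-tagged literal handled by the '@' rule in 'end-of-string' and a '^^'-typed literal; the query data is made up.

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter

    query = '''
    PREFIX ex: <http://example.org/>
    SELECT ?s WHERE {
      ?s ex:label "chat"@fr ;
         ex:size "42"^^ex:integer .
    }
    '''

    print(highlight(query, get_lexer_by_name('sparql'), HtmlFormatter()))
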
diff --git a/pygments/lexers/rebol.py b/pygments/lexers/rebol.py
new file mode 100644
index 00000000..b844ad96
--- /dev/null
+++ b/pygments/lexers/rebol.py
@@ -0,0 +1,431 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.rebol
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the REBOL and related languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Generic, Whitespace
+
+__all__ = ['RebolLexer', 'RedLexer']
+
+
+class RebolLexer(RegexLexer):
+ """
+ A `REBOL <http://www.rebol.com/>`_ lexer.
+
+ .. versionadded:: 1.1
+ """
+ name = 'REBOL'
+ aliases = ['rebol']
+ filenames = ['*.r', '*.r3', '*.reb']
+ mimetypes = ['text/x-rebol']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
+
+ def word_callback(lexer, match):
+ word = match.group()
+
+ if re.match(".*:$", word):
+ yield match.start(), Generic.Subheading, word
+ elif re.match(
+ r'(native|alias|all|any|as-string|as-binary|bind|bound\?|case|'
+ r'catch|checksum|comment|debase|dehex|exclude|difference|disarm|'
+ r'either|else|enbase|foreach|remove-each|form|free|get|get-env|if|'
+ r'in|intersect|loop|minimum-of|maximum-of|mold|new-line|'
+ r'new-line\?|not|now|prin|print|reduce|compose|construct|repeat|'
+ r'reverse|save|script\?|set|shift|switch|throw|to-hex|trace|try|'
+ r'type\?|union|unique|unless|unprotect|unset|until|use|value\?|'
+ r'while|compress|decompress|secure|open|close|read|read-io|'
+ r'write-io|write|update|query|wait|input\?|exp|log-10|log-2|'
+ r'log-e|square-root|cosine|sine|tangent|arccosine|arcsine|'
+ r'arctangent|protect|lowercase|uppercase|entab|detab|connected\?|'
+ r'browse|launch|stats|get-modes|set-modes|to-local-file|'
+ r'to-rebol-file|encloak|decloak|create-link|do-browser|bind\?|'
+ r'hide|draw|show|size-text|textinfo|offset-to-caret|'
+ r'caret-to-offset|local-request-file|rgb-to-hsv|hsv-to-rgb|'
+ r'crypt-strength\?|dh-make-key|dh-generate-key|dh-compute-key|'
+ r'dsa-make-key|dsa-generate-key|dsa-make-signature|'
+ r'dsa-verify-signature|rsa-make-key|rsa-generate-key|'
+ r'rsa-encrypt)$', word):
+ yield match.start(), Name.Builtin, word
+ elif re.match(
+ r'(add|subtract|multiply|divide|remainder|power|and~|or~|xor~|'
+ r'minimum|maximum|negate|complement|absolute|random|head|tail|'
+ r'next|back|skip|at|pick|first|second|third|fourth|fifth|sixth|'
+ r'seventh|eighth|ninth|tenth|last|path|find|select|make|to|copy\*|'
+ r'insert|remove|change|poke|clear|trim|sort|min|max|abs|cp|'
+ r'copy)$', word):
+ yield match.start(), Name.Function, word
+ elif re.match(
+ r'(error|source|input|license|help|install|echo|Usage|with|func|'
+ r'throw-on-error|function|does|has|context|probe|\?\?|as-pair|'
+ r'mod|modulo|round|repend|about|set-net|append|join|rejoin|reform|'
+ r'remold|charset|array|replace|move|extract|forskip|forall|alter|'
+ r'first+|also|take|for|forever|dispatch|attempt|what-dir|'
+ r'change-dir|clean-path|list-dir|dirize|rename|split-path|delete|'
+ r'make-dir|delete-dir|in-dir|confirm|dump-obj|upgrade|what|'
+ r'build-tag|process-source|build-markup|decode-cgi|read-cgi|'
+ r'write-user|save-user|set-user-name|protect-system|parse-xml|'
+ r'cvs-date|cvs-version|do-boot|get-net-info|desktop|layout|'
+ r'scroll-para|get-face|alert|set-face|uninstall|unfocus|'
+ r'request-dir|center-face|do-events|net-error|decode-url|'
+ r'parse-header|parse-header-date|parse-email-addrs|import-email|'
+ r'send|build-attach-body|resend|show-popup|hide-popup|open-events|'
+ r'find-key-face|do-face|viewtop|confine|find-window|'
+ r'insert-event-func|remove-event-func|inform|dump-pane|dump-face|'
+ r'flag-face|deflag-face|clear-fields|read-net|vbug|path-thru|'
+ r'read-thru|load-thru|do-thru|launch-thru|load-image|'
+ r'request-download|do-face-alt|set-font|set-para|get-style|'
+ r'set-style|make-face|stylize|choose|hilight-text|hilight-all|'
+ r'unlight-text|focus|scroll-drag|clear-face|reset-face|scroll-face|'
+ r'resize-face|load-stock|load-stock-block|notify|request|flash|'
+ r'request-color|request-pass|request-text|request-list|'
+ r'request-date|request-file|dbug|editor|link-relative-path|'
+ r'emailer|parse-error)$', word):
+ yield match.start(), Keyword.Namespace, word
+ elif re.match(
+ r'(halt|quit|do|load|q|recycle|call|run|ask|parse|view|unview|'
+ r'return|exit|break)$', word):
+ yield match.start(), Name.Exception, word
+ elif re.match('REBOL$', word):
+ yield match.start(), Generic.Heading, word
+ elif re.match("to-.*", word):
+ yield match.start(), Keyword, word
+ elif re.match('(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$',
+ word):
+ yield match.start(), Operator, word
+ elif re.match(".*\?$", word):
+ yield match.start(), Keyword, word
+ elif re.match(".*\!$", word):
+ yield match.start(), Keyword.Type, word
+ elif re.match("'.*", word):
+ yield match.start(), Name.Variable.Instance, word # lit-word
+ elif re.match("#.*", word):
+ yield match.start(), Name.Label, word # issue
+ elif re.match("%.*", word):
+ yield match.start(), Name.Decorator, word # file
+ else:
+ yield match.start(), Name.Variable, word
+
+ tokens = {
+ 'root': [
+ (r'[^R]+', Comment),
+ (r'REBOL\s+\[', Generic.Strong, 'script'),
+ (r'R', Comment)
+ ],
+ 'script': [
+ (r'\s+', Text),
+ (r'#"', String.Char, 'char'),
+ (r'#\{[0-9a-f]*\}', Number.Hex),
+ (r'2#\{', Number.Hex, 'bin2'),
+ (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
+ (r'"', String, 'string'),
+ (r'\{', String, 'string2'),
+ (r';#+.*\n', Comment.Special),
+ (r';\*+.*\n', Comment.Preproc),
+ (r';.*\n', Comment),
+ (r'%"', Name.Decorator, 'stringFile'),
+ (r'%[^(^{")\s\[\]]+', Name.Decorator),
+ (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
+ (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
+ (r'\d+[\-/][0-9a-z]+[\-/]\d+(\/\d+\:\d+((\:\d+)?'
+ r'([.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
+ (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
+ (r'\d+X\d+', Keyword.Constant), # pair
+ (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
+ (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
+ (r'[+-]?\d+(\'\d+)?', Number),
+ (r'[\[\]()]', Generic.Strong),
+ (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator), # url
+ (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # url
+ (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # email
+ (r'comment\s"', Comment, 'commentString1'),
+ (r'comment\s\{', Comment, 'commentString2'),
+ (r'comment\s\[', Comment, 'commentBlock'),
+ (r'comment\s[^(\s{"\[]+', Comment),
+ (r'/[^(^{")\s/[\]]*', Name.Attribute),
+ (r'([^(^{")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
+ (r'<[\w:.-]*>', Name.Tag),
+ (r'<[^(<>\s")]+', Name.Tag, 'tag'),
+ (r'([^(^{")\s]+)', Text),
+ ],
+ 'string': [
+ (r'[^(^")]+', String),
+ (escape_re, String.Escape),
+ (r'[(|)]+', String),
+ (r'\^.', String.Escape),
+ (r'"', String, '#pop'),
+ ],
+ 'string2': [
+ (r'[^(^{})]+', String),
+ (escape_re, String.Escape),
+ (r'[(|)]+', String),
+ (r'\^.', String.Escape),
+ (r'\{', String, '#push'),
+ (r'\}', String, '#pop'),
+ ],
+ 'stringFile': [
+ (r'[^(^")]+', Name.Decorator),
+ (escape_re, Name.Decorator),
+ (r'\^.', Name.Decorator),
+ (r'"', Name.Decorator, '#pop'),
+ ],
+ 'char': [
+ (escape_re + '"', String.Char, '#pop'),
+ (r'\^."', String.Char, '#pop'),
+ (r'."', String.Char, '#pop'),
+ ],
+ 'tag': [
+ (escape_re, Name.Tag),
+ (r'"', Name.Tag, 'tagString'),
+ (r'[^(<>\r\n")]+', Name.Tag),
+ (r'>', Name.Tag, '#pop'),
+ ],
+ 'tagString': [
+ (r'[^(^")]+', Name.Tag),
+ (escape_re, Name.Tag),
+ (r'[(|)]+', Name.Tag),
+ (r'\^.', Name.Tag),
+ (r'"', Name.Tag, '#pop'),
+ ],
+ 'tuple': [
+ (r'(\d+\.)+', Keyword.Constant),
+ (r'\d+', Keyword.Constant, '#pop'),
+ ],
+ 'bin2': [
+ (r'\s+', Number.Hex),
+ (r'([01]\s*){8}', Number.Hex),
+ (r'\}', Number.Hex, '#pop'),
+ ],
+ 'commentString1': [
+ (r'[^(^")]+', Comment),
+ (escape_re, Comment),
+ (r'[(|)]+', Comment),
+ (r'\^.', Comment),
+ (r'"', Comment, '#pop'),
+ ],
+ 'commentString2': [
+ (r'[^(^{})]+', Comment),
+ (escape_re, Comment),
+ (r'[(|)]+', Comment),
+ (r'\^.', Comment),
+ (r'\{', Comment, '#push'),
+ (r'\}', Comment, '#pop'),
+ ],
+ 'commentBlock': [
+ (r'\[', Comment, '#push'),
+ (r'\]', Comment, '#pop'),
+ (r'"', Comment, "commentString1"),
+ (r'\{', Comment, "commentString2"),
+ (r'[^(\[\]"{)]+', Comment),
+ ],
+ }
+
+ def analyse_text(text):
+ """
+ Check if code contains a REBOL header, in which case it is probably not R code.
+ """
+ if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE):
+ # The code starts with REBOL header
+ return 1.0
+ elif re.search(r'\s*REBOL\s*\[', text, re.IGNORECASE):
+ # The code contains REBOL header but also some text before it
+ return 0.5
+
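
A minimal sketch of this heuristic in action (illustrative only): guess_lexer() should prefer REBOL for input that carries a REBOL [...] header, which is exactly the *.r ambiguity with R source that analyse_text() is there to resolve.

.. sourcecode:: python

    from pygments.lexers import guess_lexer

    rebol_src = 'REBOL [Title: "Example"]\nprint "hello"\n'
    r_src = 'x <- 1:10\nsummary(x)\n'

    # The header check above returns 1.0, so REBOL should win the guess.
    print(guess_lexer(rebol_src).name)
    # No REBOL header here, so other lexers (e.g. the R lexer) can win.
    print(guess_lexer(r_src).name)
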
+
+class RedLexer(RegexLexer):
+ """
+ A `Red-language <http://www.red-lang.org/>`_ lexer.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Red'
+ aliases = ['red', 'red/system']
+ filenames = ['*.red', '*.reds']
+ mimetypes = ['text/x-red', 'text/x-red-system']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
+
+ def word_callback(lexer, match):
+ word = match.group()
+
+ if re.match(".*:$", word):
+ yield match.start(), Generic.Subheading, word
+ elif re.match(r'(if|unless|either|any|all|while|until|loop|repeat|'
+ r'foreach|forall|func|function|does|has|switch|'
+ r'case|reduce|compose|get|set|print|prin|equal\?|'
+ r'not-equal\?|strict-equal\?|lesser\?|greater\?|lesser-or-equal\?|'
+ r'greater-or-equal\?|same\?|not|type\?|stats|'
+ r'bind|union|replace|charset|routine)$', word):
+ yield match.start(), Name.Builtin, word
+ elif re.match(r'(make|random|reflect|to|form|mold|absolute|add|divide|multiply|negate|'
+ r'power|remainder|round|subtract|even\?|odd\?|and~|complement|or~|xor~|'
+ r'append|at|back|change|clear|copy|find|head|head\?|index\?|insert|'
+ r'length\?|next|pick|poke|remove|reverse|select|sort|skip|swap|tail|tail\?|'
+ r'take|trim|create|close|delete|modify|open|open\?|query|read|rename|'
+ r'update|write)$', word):
+ yield match.start(), Name.Function, word
+ elif re.match(r'(yes|on|no|off|true|false|tab|cr|lf|newline|escape|slash|sp|space|null|'
+ r'none|crlf|dot|null-byte)$', word):
+ yield match.start(), Name.Builtin.Pseudo, word
+ elif re.match(r'(#system-global|#include|#enum|#define|#either|#if|#import|#export|'
+ r'#switch|#default|#get-definition)$', word):
+ yield match.start(), Keyword.Namespace, word
+ elif re.match(r'(system|halt|quit|quit-return|do|load|q|recycle|call|run|ask|parse|'
+ r'raise-error|return|exit|break|alias|push|pop|probe|\?\?|spec-of|body-of|'
+ r'quote|forever)$', word):
+ yield match.start(), Name.Exception, word
+ elif re.match(r'(action\?|block\?|char\?|datatype\?|file\?|function\?|get-path\?|zero\?|'
+ r'get-word\?|integer\?|issue\?|lit-path\?|lit-word\?|logic\?|native\?|'
+ r'op\?|paren\?|path\?|refinement\?|set-path\?|set-word\?|string\?|unset\?|'
+ r'any-struct\?|none\?|word\?|any-series\?)$', word):
+ yield match.start(), Keyword, word
+ elif re.match(r'(JNICALL|stdcall|cdecl|infix)$', word):
+ yield match.start(), Keyword.Namespace, word
+ elif re.match("to-.*", word):
+ yield match.start(), Keyword, word
+ elif re.match('(\+|-\*\*|-|\*\*|//|/|\*|and|or|xor|=\?|===|==|=|<>|<=|>=|'
+ '<<<|>>>|<<|>>|<|>%)$', word):
+ yield match.start(), Operator, word
+ elif re.match(".*\!$", word):
+ yield match.start(), Keyword.Type, word
+ elif re.match("'.*", word):
+ yield match.start(), Name.Variable.Instance, word # lit-word
+ elif re.match("#.*", word):
+ yield match.start(), Name.Label, word # issue
+ elif re.match("%.*", word):
+ yield match.start(), Name.Decorator, word # file
+ elif re.match(":.*", word):
+ yield match.start(), Generic.Subheading, word # get-word
+ else:
+ yield match.start(), Name.Variable, word
+
+ tokens = {
+ 'root': [
+ (r'[^R]+', Comment),
+ (r'Red/System\s+\[', Generic.Strong, 'script'),
+ (r'Red\s+\[', Generic.Strong, 'script'),
+ (r'R', Comment)
+ ],
+ 'script': [
+ (r'\s+', Text),
+ (r'#"', String.Char, 'char'),
+ (r'#\{[0-9a-f\s]*\}', Number.Hex),
+ (r'2#\{', Number.Hex, 'bin2'),
+ (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
+ (r'([0-9a-f]+)(h)((\s)|(?=[\[\]{}"()]))',
+ bygroups(Number.Hex, Name.Variable, Whitespace)),
+ (r'"', String, 'string'),
+ (r'\{', String, 'string2'),
+ (r';#+.*\n', Comment.Special),
+ (r';\*+.*\n', Comment.Preproc),
+ (r';.*\n', Comment),
+ (r'%"', Name.Decorator, 'stringFile'),
+ (r'%[^(^{")\s\[\]]+', Name.Decorator),
+ (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
+ (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
+ (r'\d+[\-/][0-9a-z]+[\-/]\d+(/\d+:\d+((:\d+)?'
+ r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
+ (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
+ (r'\d+X\d+', Keyword.Constant), # pair
+ (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
+ (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
+ (r'[+-]?\d+(\'\d+)?', Number),
+ (r'[\[\]()]', Generic.Strong),
+ (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator), # url
+ (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # url
+ (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # email
+ (r'comment\s"', Comment, 'commentString1'),
+ (r'comment\s\{', Comment, 'commentString2'),
+ (r'comment\s\[', Comment, 'commentBlock'),
+ (r'comment\s[^(\s{"\[]+', Comment),
+ (r'/[^(^{^")\s/[\]]*', Name.Attribute),
+ (r'([^(^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
+ (r'<[\w:.-]*>', Name.Tag),
+ (r'<[^(<>\s")]+', Name.Tag, 'tag'),
+ (r'([^(^{")\s]+)', Text),
+ ],
+ 'string': [
+ (r'[^(^")]+', String),
+ (escape_re, String.Escape),
+ (r'[(|)]+', String),
+ (r'\^.', String.Escape),
+ (r'"', String, '#pop'),
+ ],
+ 'string2': [
+ (r'[^(^{})]+', String),
+ (escape_re, String.Escape),
+ (r'[(|)]+', String),
+ (r'\^.', String.Escape),
+ (r'\{', String, '#push'),
+ (r'\}', String, '#pop'),
+ ],
+ 'stringFile': [
+ (r'[^(^")]+', Name.Decorator),
+ (escape_re, Name.Decorator),
+ (r'\^.', Name.Decorator),
+ (r'"', Name.Decorator, '#pop'),
+ ],
+ 'char': [
+ (escape_re + '"', String.Char, '#pop'),
+ (r'\^."', String.Char, '#pop'),
+ (r'."', String.Char, '#pop'),
+ ],
+ 'tag': [
+ (escape_re, Name.Tag),
+ (r'"', Name.Tag, 'tagString'),
+ (r'[^(<>\r\n")]+', Name.Tag),
+ (r'>', Name.Tag, '#pop'),
+ ],
+ 'tagString': [
+ (r'[^(^")]+', Name.Tag),
+ (escape_re, Name.Tag),
+ (r'[(|)]+', Name.Tag),
+ (r'\^.', Name.Tag),
+ (r'"', Name.Tag, '#pop'),
+ ],
+ 'tuple': [
+ (r'(\d+\.)+', Keyword.Constant),
+ (r'\d+', Keyword.Constant, '#pop'),
+ ],
+ 'bin2': [
+ (r'\s+', Number.Hex),
+ (r'([01]\s*){8}', Number.Hex),
+ (r'\}', Number.Hex, '#pop'),
+ ],
+ 'commentString1': [
+ (r'[^(^")]+', Comment),
+ (escape_re, Comment),
+ (r'[(|)]+', Comment),
+ (r'\^.', Comment),
+ (r'"', Comment, '#pop'),
+ ],
+ 'commentString2': [
+ (r'[^(^{})]+', Comment),
+ (escape_re, Comment),
+ (r'[(|)]+', Comment),
+ (r'\^.', Comment),
+ (r'\{', Comment, '#push'),
+ (r'\}', Comment, '#pop'),
+ ],
+ 'commentBlock': [
+ (r'\[', Comment, '#push'),
+ (r'\]', Comment, '#pop'),
+ (r'"', Comment, "commentString1"),
+ (r'\{', Comment, "commentString2"),
+ (r'[^(\[\]"{)]+', Comment),
+ ],
+ }
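
Like the REBOL lexer, the Red lexer stays in its comment-swallowing 'root' state until it sees a Red [...] or Red/System [...] header. A short usage sketch with made-up input (not part of the patch):

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    red_src = 'Red [Title: "demo"]\nprint 1 + 2\n'
    print(highlight(red_src, get_lexer_by_name('red'), TerminalFormatter()))
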
diff --git a/pygments/lexers/resource.py b/pygments/lexers/resource.py
new file mode 100644
index 00000000..4647bef8
--- /dev/null
+++ b/pygments/lexers/resource.py
@@ -0,0 +1,84 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.resource
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for resource definition files.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Comment, String, Number, Operator, Text, \
+ Keyword, Name
+
+__all__ = ['ResourceLexer']
+
+
+class ResourceLexer(RegexLexer):
+ """Lexer for `ICU Resource bundles
+ <http://userguide.icu-project.org/locale/resources>`_.
+
+ .. versionadded:: 2.0
+ """
+ name = 'ResourceBundle'
+ aliases = ['resource', 'resourcebundle']
+ filenames = ['*.txt']
+
+ _types = (':table', ':array', ':string', ':bin', ':import', ':intvector',
+ ':int', ':alias')
+
+ flags = re.MULTILINE | re.IGNORECASE
+ tokens = {
+ 'root': [
+ (r'//.*?$', Comment),
+ (r'"', String, 'string'),
+ (r'-?\d+', Number.Integer),
+ (r'[,{}]', Operator),
+ (r'([^\s{:]+)(\s*)((?:%s)?)' % '|'.join(_types),
+ bygroups(Name, Text, Keyword)),
+ (r'\s+', Text),
+ (words(_types), Keyword),
+ ],
+ 'string': [
+ (r'(\\x[0-9a-f]{2}|\\u[0-9a-f]{4}|\\U00[0-9a-f]{6}|'
+ r'\\[0-7]{1,3}|\\c.|\\[abtnvfre\'"?\\]|\\\{|[^"{\\])+', String),
+ (r'\{', String.Escape, 'msgname'),
+ (r'"', String, '#pop')
+ ],
+ 'msgname': [
+ (r'([^{},]+)(\s*)', bygroups(Name, String.Escape), ('#pop', 'message'))
+ ],
+ 'message': [
+ (r'\{', String.Escape, 'msgname'),
+ (r'\}', String.Escape, '#pop'),
+ (r'(,)(\s*)([a-z]+)(\s*\})',
+ bygroups(Operator, String.Escape, Keyword, String.Escape), '#pop'),
+ (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)(offset)(\s*)(:)(\s*)(-?\d+)(\s*)',
+ bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
+ String.Escape, Operator.Word, String.Escape, Operator,
+ String.Escape, Number.Integer, String.Escape), 'choice'),
+ (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)',
+ bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
+ String.Escape), 'choice'),
+ (r'\s+', String.Escape)
+ ],
+ 'choice': [
+ (r'(=|<|>|<=|>=|!=)(-?\d+)(\s*\{)',
+ bygroups(Operator, Number.Integer, String.Escape), 'message'),
+ (r'([a-z]+)(\s*\{)', bygroups(Keyword.Type, String.Escape), 'str'),
+ (r'\}', String.Escape, ('#pop', '#pop')),
+ (r'\s+', String.Escape)
+ ],
+ 'str': [
+ (r'\}', String.Escape, '#pop'),
+ (r'\{', String.Escape, 'msgname'),
+ (r'[^{}]+', String)
+ ]
+ }
+
+ def analyse_text(text):
+ return text.startswith('root:table')
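
The nested 'string' -> 'msgname' -> 'message' -> 'choice' states above follow ICU MessageFormat patterns embedded in resource strings. The sketch below uses made-up bundle data and the 'resourcebundle' alias declared above; it is illustrative only.

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    bundle = '''root:table {
        hello:string {"Hello, world"}
        files:string {"{count, plural, one {one file} other {# files}}"}
    }
    '''
    print(highlight(bundle, get_lexer_by_name('resourcebundle'),
                    TerminalFormatter()))
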
diff --git a/pygments/lexers/_robotframeworklexer.py b/pygments/lexers/robotframework.py
index bc64e12b..eab06efe 100644
--- a/pygments/lexers/_robotframeworklexer.py
+++ b/pygments/lexers/robotframework.py
@@ -1,11 +1,11 @@
# -*- coding: utf-8 -*-
"""
- pygments.lexers._robotframeworklexer
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ pygments.lexers.robotframework
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lexer for Robot Framework.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -27,6 +27,9 @@ import re
from pygments.lexer import Lexer
from pygments.token import Token
+from pygments.util import text_type
+
+__all__ = ['RobotFrameworkLexer']
HEADING = Token.Generic.Heading
@@ -57,10 +60,10 @@ class RobotFrameworkLexer(Lexer):
Supports both space and pipe separated plain text formats.
- *New in Pygments 1.6.*
+ .. versionadded:: 1.6
"""
name = 'RobotFramework'
- aliases = ['RobotFramework', 'robotframework']
+ aliases = ['robotframework']
filenames = ['*.txt', '*.robot']
mimetypes = ['text/x-robotframework']
@@ -77,14 +80,14 @@ class RobotFrameworkLexer(Lexer):
for value, token in row_tokenizer.tokenize(row):
for value, token in var_tokenizer.tokenize(value, token):
if value:
- yield index, token, unicode(value)
+ yield index, token, text_type(value)
index += len(value)
class VariableTokenizer(object):
def tokenize(self, string, token):
- var = VariableSplitter(string, identifiers='$@%')
+ var = VariableSplitter(string, identifiers='$@%&')
if var.start < 0 or token in (COMMENT, ERROR):
yield string, token
return
@@ -202,7 +205,7 @@ class Tokenizer(object):
def _is_assign(self, value):
if value.endswith('='):
value = value[:-1].strip()
- var = VariableSplitter(value, identifiers='$@')
+ var = VariableSplitter(value, identifiers='$@&')
return var.start == 0 and var.end == len(value)
@@ -259,7 +262,7 @@ class TestCaseSetting(Setting):
class KeywordSetting(TestCaseSetting):
_keyword_settings = ('teardown',)
- _other_settings = ('documentation', 'arguments', 'return', 'timeout')
+ _other_settings = ('documentation', 'arguments', 'return', 'timeout', 'tags')
class Variable(Tokenizer):
@@ -462,13 +465,13 @@ class VariableSplitter:
self.identifier = self._variable_chars[0]
self.base = ''.join(self._variable_chars[2:-1])
self.end = self.start + len(self._variable_chars)
- if self._has_list_variable_index():
- self.index = ''.join(self._list_variable_index_chars[1:-1])
- self.end += len(self._list_variable_index_chars)
+ if self._has_list_or_dict_variable_index():
+ self.index = ''.join(self._list_and_dict_variable_index_chars[1:-1])
+ self.end += len(self._list_and_dict_variable_index_chars)
- def _has_list_variable_index(self):
- return self._list_variable_index_chars\
- and self._list_variable_index_chars[-1] == ']'
+ def _has_list_or_dict_variable_index(self):
+ return self._list_and_dict_variable_index_chars\
+ and self._list_and_dict_variable_index_chars[-1] == ']'
def _split(self, string):
start_index, max_index = self._find_variable(string)
@@ -476,7 +479,7 @@ class VariableSplitter:
self._open_curly = 1
self._state = self._variable_state
self._variable_chars = [string[start_index], '{']
- self._list_variable_index_chars = []
+ self._list_and_dict_variable_index_chars = []
self._string = string
start_index += 2
for index, char in enumerate(string[start_index:]):
@@ -527,14 +530,14 @@ class VariableSplitter:
if char == '}' and not self._is_escaped(self._string, index):
self._open_curly -= 1
if self._open_curly == 0:
- if not self._is_list_variable():
+ if not self._is_list_or_dict_variable():
raise StopIteration
self._state = self._waiting_list_variable_index_state
elif char in self._identifiers:
self._state = self._internal_variable_start_state
- def _is_list_variable(self):
- return self._variable_chars[0] == '@'
+ def _is_list_or_dict_variable(self):
+ return self._variable_chars[0] in ('@','&')
def _internal_variable_start_state(self, char, index):
self._state = self._variable_state
@@ -548,10 +551,10 @@ class VariableSplitter:
def _waiting_list_variable_index_state(self, char, index):
if char != '[':
raise StopIteration
- self._list_variable_index_chars.append(char)
+ self._list_and_dict_variable_index_chars.append(char)
self._state = self._list_variable_index_state
def _list_variable_index_state(self, char, index):
- self._list_variable_index_chars.append(char)
+ self._list_and_dict_variable_index_chars.append(char)
if char == ']':
raise StopIteration
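
The net effect of the '&' additions above is that Robot Framework dictionary variables (&{...}) are now split and tokenized like scalar and list variables. A small sketch with made-up test data (illustrative only):

.. sourcecode:: python

    from pygments.lexers import get_lexer_by_name

    robot_src = '''*** Test Cases ***
    Dict Variable Demo
        Log    &{user}[name]
    '''
    lexer = get_lexer_by_name('robotframework')
    for token, value in lexer.get_tokens(robot_src):
        if value.strip():
            print(token, repr(value))
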
diff --git a/pygments/lexers/ruby.py b/pygments/lexers/ruby.py
new file mode 100644
index 00000000..63fed60f
--- /dev/null
+++ b/pygments/lexers/ruby.py
@@ -0,0 +1,519 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.ruby
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Ruby and related languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, include, \
+ bygroups, default, LexerContext, do_insertions, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error, Generic
+from pygments.util import shebang_matches
+
+__all__ = ['RubyLexer', 'RubyConsoleLexer', 'FancyLexer']
+
+line_re = re.compile('.*?\n')
+
+
+RUBY_OPERATORS = (
+ '*', '**', '-', '+', '-@', '+@', '/', '%', '&', '|', '^', '`', '~',
+ '[]', '[]=', '<<', '>>', '<', '<>', '<=>', '>', '>=', '==', '==='
+)
+
+
+class RubyLexer(ExtendedRegexLexer):
+ """
+ For `Ruby <http://www.ruby-lang.org>`_ source code.
+ """
+
+ name = 'Ruby'
+ aliases = ['rb', 'ruby', 'duby']
+ filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
+ '*.rbx', '*.duby']
+ mimetypes = ['text/x-ruby', 'application/x-ruby']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ def heredoc_callback(self, match, ctx):
+ # okay, this is the hardest part of parsing Ruby...
+ # match: 1 = <<-?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
+
+ start = match.start(1)
+ yield start, Operator, match.group(1) # <<-?
+ yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
+ yield match.start(3), Name.Constant, match.group(3) # heredoc name
+ yield match.start(4), String.Heredoc, match.group(4) # quote again
+
+ heredocstack = ctx.__dict__.setdefault('heredocstack', [])
+ outermost = not bool(heredocstack)
+ heredocstack.append((match.group(1) == '<<-', match.group(3)))
+
+ ctx.pos = match.start(5)
+ ctx.end = match.end(5)
+ # this may find other heredocs
+ for i, t, v in self.get_tokens_unprocessed(context=ctx):
+ yield i, t, v
+ ctx.pos = match.end()
+
+ if outermost:
+ # this is the outer heredoc again, now we can process them all
+ for tolerant, hdname in heredocstack:
+ lines = []
+ for match in line_re.finditer(ctx.text, ctx.pos):
+ if tolerant:
+ check = match.group().strip()
+ else:
+ check = match.group().rstrip()
+ if check == hdname:
+ for amatch in lines:
+ yield amatch.start(), String.Heredoc, amatch.group()
+ yield match.start(), Name.Constant, match.group()
+ ctx.pos = match.end()
+ break
+ else:
+ lines.append(match)
+ else:
+ # end of heredoc not found -- error!
+ for amatch in lines:
+ yield amatch.start(), Error, amatch.group()
+ ctx.end = len(ctx.text)
+ del heredocstack[:]
+
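
In practice the callback above defers heredoc bodies until the rest of the opening line has been tokenized, and the <<- form tolerates an indented terminator. A short usage sketch with made-up Ruby input (not part of the patch):

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    ruby_src = '''greeting = <<-EOS
      Hello, #{name}!
      EOS
    puts greeting
    '''
    print(highlight(ruby_src, get_lexer_by_name('ruby'), TerminalFormatter()))
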
+ def gen_rubystrings_rules():
+ def intp_regex_callback(self, match, ctx):
+ yield match.start(1), String.Regex, match.group(1) # begin
+ nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
+ for i, t, v in self.get_tokens_unprocessed(context=nctx):
+ yield match.start(3)+i, t, v
+ yield match.start(4), String.Regex, match.group(4) # end[mixounse]*
+ ctx.pos = match.end()
+
+ def intp_string_callback(self, match, ctx):
+ yield match.start(1), String.Other, match.group(1)
+ nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
+ for i, t, v in self.get_tokens_unprocessed(context=nctx):
+ yield match.start(3)+i, t, v
+ yield match.start(4), String.Other, match.group(4) # end
+ ctx.pos = match.end()
+
+ states = {}
+ states['strings'] = [
+ # easy ones
+ (r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol),
+ (words(RUBY_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol),
+ (r":'(\\\\|\\'|[^'])*'", String.Symbol),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r':"', String.Symbol, 'simple-sym'),
+ (r'([a-zA-Z_]\w*)(:)(?!:)',
+ bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9
+ (r'"', String.Double, 'simple-string'),
+ (r'(?<!\.)`', String.Backtick, 'simple-backtick'),
+ ]
+
+ # double-quoted string and symbol
+ for name, ttype, end in ('string', String.Double, '"'), \
+ ('sym', String.Symbol, '"'), \
+ ('backtick', String.Backtick, '`'):
+ states['simple-'+name] = [
+ include('string-intp-escaped'),
+ (r'[^\\%s#]+' % end, ttype),
+ (r'[\\#]', ttype),
+ (end, ttype, '#pop'),
+ ]
+
+ # braced quoted strings
+ for lbrace, rbrace, bracecc, name in \
+ ('\\{', '\\}', '{}', 'cb'), \
+ ('\\[', '\\]', '\\[\\]', 'sb'), \
+ ('\\(', '\\)', '()', 'pa'), \
+ ('<', '>', '<>', 'ab'):
+ states[name+'-intp-string'] = [
+ (r'\\[\\' + bracecc + ']', String.Other),
+ (lbrace, String.Other, '#push'),
+ (rbrace, String.Other, '#pop'),
+ include('string-intp-escaped'),
+ (r'[\\#' + bracecc + ']', String.Other),
+ (r'[^\\#' + bracecc + ']+', String.Other),
+ ]
+ states['strings'].append((r'%[QWx]?' + lbrace, String.Other,
+ name+'-intp-string'))
+ states[name+'-string'] = [
+ (r'\\[\\' + bracecc + ']', String.Other),
+ (lbrace, String.Other, '#push'),
+ (rbrace, String.Other, '#pop'),
+ (r'[\\#' + bracecc + ']', String.Other),
+ (r'[^\\#' + bracecc + ']+', String.Other),
+ ]
+ states['strings'].append((r'%[qsw]' + lbrace, String.Other,
+ name+'-string'))
+ states[name+'-regex'] = [
+ (r'\\[\\' + bracecc + ']', String.Regex),
+ (lbrace, String.Regex, '#push'),
+ (rbrace + '[mixounse]*', String.Regex, '#pop'),
+ include('string-intp'),
+ (r'[\\#' + bracecc + ']', String.Regex),
+ (r'[^\\#' + bracecc + ']+', String.Regex),
+ ]
+ states['strings'].append((r'%r' + lbrace, String.Regex,
+ name+'-regex'))
+
+ # these must come after %<brace>!
+ states['strings'] += [
+ # %r regex
+ (r'(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)',
+ intp_regex_callback),
+ # regular fancy strings with qsw
+ (r'%[qsw]([\W_])((?:\\\1|(?!\1).)*)\1', String.Other),
+ (r'(%[QWx]([\W_]))((?:\\\2|(?!\2).)*)(\2)',
+ intp_string_callback),
+ # special forms of fancy strings after operators or
+ # in method calls with braces
+ (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
+ bygroups(Text, String.Other, None)),
+ # and because lookbehinds must be fixed-width, the whole thing is
+ # repeated a second time for line beginnings...
+ (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
+ bygroups(Text, String.Other, None)),
+ # all regular fancy strings without qsw
+ (r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)',
+ intp_string_callback),
+ ]
+
+ return states
+
+ tokens = {
+ 'root': [
+ (r'\A#!.+?$', Comment.Hashbang),
+ (r'#.*?$', Comment.Single),
+ (r'=begin\s.*?\n=end.*?$', Comment.Multiline),
+ # keywords
+ (words((
+ 'BEGIN', 'END', 'alias', 'begin', 'break', 'case', 'defined?',
+ 'do', 'else', 'elsif', 'end', 'ensure', 'for', 'if', 'in', 'next', 'redo',
+ 'rescue', 'raise', 'retry', 'return', 'super', 'then', 'undef',
+ 'unless', 'until', 'when', 'while', 'yield'), suffix=r'\b'),
+ Keyword),
+ # start of function, class and module names
+ (r'(module)(\s+)([a-zA-Z_]\w*'
+ r'(?:::[a-zA-Z_]\w*)*)',
+ bygroups(Keyword, Text, Name.Namespace)),
+ (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ # special methods
+ (words((
+ 'initialize', 'new', 'loop', 'include', 'extend', 'raise', 'attr_reader',
+ 'attr_writer', 'attr_accessor', 'attr', 'catch', 'throw', 'private',
+ 'module_function', 'public', 'protected', 'true', 'false', 'nil'),
+ suffix=r'\b'),
+ Keyword.Pseudo),
+ (r'(not|and|or)\b', Operator.Word),
+ (words((
+ 'autoload', 'block_given', 'const_defined', 'eql', 'equal', 'frozen', 'include',
+ 'instance_of', 'is_a', 'iterator', 'kind_of', 'method_defined', 'nil',
+ 'private_method_defined', 'protected_method_defined',
+ 'public_method_defined', 'respond_to', 'tainted'), suffix=r'\?'),
+ Name.Builtin),
+ (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
+ (words((
+ 'Array', 'Float', 'Integer', 'String', '__id__', '__send__', 'abort',
+ 'ancestors', 'at_exit', 'autoload', 'binding', 'callcc', 'caller',
+ 'catch', 'chomp', 'chop', 'class_eval', 'class_variables',
+ 'clone', 'const_defined?', 'const_get', 'const_missing', 'const_set',
+ 'constants', 'display', 'dup', 'eval', 'exec', 'exit', 'extend', 'fail', 'fork',
+ 'format', 'freeze', 'getc', 'gets', 'global_variables', 'gsub',
+ 'hash', 'id', 'included_modules', 'inspect', 'instance_eval',
+ 'instance_method', 'instance_methods',
+ 'instance_variable_get', 'instance_variable_set', 'instance_variables',
+ 'lambda', 'load', 'local_variables', 'loop',
+ 'method', 'method_missing', 'methods', 'module_eval', 'name',
+ 'object_id', 'open', 'p', 'print', 'printf', 'private_class_method',
+ 'private_instance_methods',
+ 'private_methods', 'proc', 'protected_instance_methods',
+ 'protected_methods', 'public_class_method',
+ 'public_instance_methods', 'public_methods',
+ 'putc', 'puts', 'raise', 'rand', 'readline', 'readlines', 'require',
+ 'scan', 'select', 'self', 'send', 'set_trace_func', 'singleton_methods', 'sleep',
+ 'split', 'sprintf', 'srand', 'sub', 'syscall', 'system', 'taint',
+ 'test', 'throw', 'to_a', 'to_s', 'trace_var', 'trap', 'untaint',
+ 'untrace_var', 'warn'), prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Builtin),
+ (r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
+ # normal heredocs
+ (r'(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
+ heredoc_callback),
+ # empty string heredocs
+ (r'(<<-?)("|\')()(\2)(.*?\n)', heredoc_callback),
+ (r'__END__', Comment.Preproc, 'end-part'),
+ # multiline regex (after keywords or assignments)
+ (r'(?:^|(?<=[=<>~!:])|'
+ r'(?<=(?:\s|;)when\s)|'
+ r'(?<=(?:\s|;)or\s)|'
+ r'(?<=(?:\s|;)and\s)|'
+ r'(?<=\.index\s)|'
+ r'(?<=\.scan\s)|'
+ r'(?<=\.sub\s)|'
+ r'(?<=\.sub!\s)|'
+ r'(?<=\.gsub\s)|'
+ r'(?<=\.gsub!\s)|'
+ r'(?<=\.match\s)|'
+ r'(?<=(?:\s|;)if\s)|'
+ r'(?<=(?:\s|;)elsif\s)|'
+ r'(?<=^when\s)|'
+ r'(?<=^index\s)|'
+ r'(?<=^scan\s)|'
+ r'(?<=^sub\s)|'
+ r'(?<=^gsub\s)|'
+ r'(?<=^sub!\s)|'
+ r'(?<=^gsub!\s)|'
+ r'(?<=^match\s)|'
+ r'(?<=^if\s)|'
+ r'(?<=^elsif\s)'
+ r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
+ # multiline regex (in method calls or subscripts)
+ (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
+ # multiline regex (this time the funny no whitespace rule)
+ (r'(\s+)(/)(?![\s=])', bygroups(Text, String.Regex),
+ 'multiline-regex'),
+ # lex numbers and ignore following regular expressions which
+ # are division operators in fact (grrrr. i hate that. any
+ # better ideas?)
+ # since pygments 0.7 we also eat a "?" operator after numbers
+ # so that the char operator does not work. Chars are not allowed
+ # there so that you can use the ternary operator.
+ # stupid example:
+ # x>=0?n[x]:""
+ (r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
+ bygroups(Number.Oct, Text, Operator)),
+ (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
+ bygroups(Number.Hex, Text, Operator)),
+ (r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?',
+ bygroups(Number.Bin, Text, Operator)),
+ (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
+ bygroups(Number.Integer, Text, Operator)),
+ # Names
+ (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
+ (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
+ (r'\$\w+', Name.Variable.Global),
+ (r'\$[!@&`\'+~=/\\,;.<>_*$?:"^-]', Name.Variable.Global),
+ (r'\$-[0adFiIlpvw]', Name.Variable.Global),
+ (r'::', Operator),
+ include('strings'),
+ # chars
+ (r'\?(\\[MC]-)*' # modifiers
+ r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
+ r'(?!\w)',
+ String.Char),
+ (r'[A-Z]\w+', Name.Constant),
+ # this is needed because ruby attributes can look
+ # like keywords (class) or like this: ` ?!?
+ (words(RUBY_OPERATORS, prefix=r'(\.|::)'),
+ bygroups(Operator, Name.Operator)),
+ (r'(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])',
+ bygroups(Operator, Name)),
+ (r'[a-zA-Z_]\w*[!?]?', Name),
+ (r'(\[|\]|\*\*|<<?|>>?|>=|<=|<=>|=~|={3}|'
+ r'!~|&&?|\|\||\.{1,3})', Operator),
+ (r'[-+/*%=<>&!^|~]=?', Operator),
+ (r'[(){};,/?:\\]', Punctuation),
+ (r'\s+', Text)
+ ],
+ 'funcname': [
+ (r'\(', Punctuation, 'defexpr'),
+ (r'(?:([a-zA-Z_]\w*)(\.))?'
+ r'([a-zA-Z_]\w*[!?]?|\*\*?|[-+]@?|'
+ r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)',
+ bygroups(Name.Class, Operator, Name.Function), '#pop'),
+ default('#pop')
+ ],
+ 'classname': [
+ (r'\(', Punctuation, 'defexpr'),
+ (r'<<', Operator, '#pop'),
+ (r'[A-Z_]\w*', Name.Class, '#pop'),
+ default('#pop')
+ ],
+ 'defexpr': [
+ (r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'),
+ (r'\(', Operator, '#push'),
+ include('root')
+ ],
+ 'in-intp': [
+ (r'\{', String.Interpol, '#push'),
+ (r'\}', String.Interpol, '#pop'),
+ include('root'),
+ ],
+ 'string-intp': [
+ (r'#\{', String.Interpol, 'in-intp'),
+ (r'#@@?[a-zA-Z_]\w*', String.Interpol),
+ (r'#\$[a-zA-Z_]\w*', String.Interpol)
+ ],
+ 'string-intp-escaped': [
+ include('string-intp'),
+ (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})',
+ String.Escape)
+ ],
+ 'interpolated-regex': [
+ include('string-intp'),
+ (r'[\\#]', String.Regex),
+ (r'[^\\#]+', String.Regex),
+ ],
+ 'interpolated-string': [
+ include('string-intp'),
+ (r'[\\#]', String.Other),
+ (r'[^\\#]+', String.Other),
+ ],
+ 'multiline-regex': [
+ include('string-intp'),
+ (r'\\\\', String.Regex),
+ (r'\\/', String.Regex),
+ (r'[\\#]', String.Regex),
+ (r'[^\\/#]+', String.Regex),
+ (r'/[mixounse]*', String.Regex, '#pop'),
+ ],
+ 'end-part': [
+ (r'.+', Comment.Preproc, '#pop')
+ ]
+ }
+ tokens.update(gen_rubystrings_rules())
+
+ def analyse_text(text):
+ return shebang_matches(text, r'ruby(1\.\d)?')
+
+
+class RubyConsoleLexer(Lexer):
+ """
+ For Ruby interactive console (**irb**) output like:
+
+ .. sourcecode:: rbcon
+
+ irb(main):001:0> a = 1
+ => 1
+ irb(main):002:0> puts a
+ 1
+ => nil
+ """
+ name = 'Ruby irb session'
+ aliases = ['rbcon', 'irb']
+ mimetypes = ['text/x-ruby-shellsession']
+
+ _prompt_re = re.compile('irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] '
+ '|>> |\?> ')
+
+ def get_tokens_unprocessed(self, text):
+ rblexer = RubyLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in line_re.finditer(text):
+ line = match.group()
+ m = self._prompt_re.match(line)
+ if m is not None:
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
+ for item in do_insertions(
+ insertions, rblexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ yield match.start(), Generic.Output, line
+ if curcode:
+ for item in do_insertions(
+ insertions, rblexer.get_tokens_unprocessed(curcode)):
+ yield item
+
+
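
For completeness, a sketch that drives the console lexer with the irb transcript from its docstring; do_insertions() re-lexes the code after each prompt with RubyLexer. Illustrative only:

.. sourcecode:: python

    from pygments.lexers import get_lexer_by_name

    session = '''irb(main):001:0> a = 1
    => 1
    irb(main):002:0> puts a
    1
    => nil
    '''
    for token, value in get_lexer_by_name('rbcon').get_tokens(session):
        print(token, repr(value))
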
+class FancyLexer(RegexLexer):
+ """
+ Pygments lexer for `Fancy <http://www.fancy-lang.org/>`_.
+
+ Fancy is a self-hosted, pure object-oriented, dynamic,
+ class-based, concurrent general-purpose programming language
+ running on Rubinius, the Ruby VM.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Fancy'
+ filenames = ['*.fy', '*.fancypack']
+ aliases = ['fancy', 'fy']
+ mimetypes = ['text/x-fancysrc']
+
+ tokens = {
+ # copied from PerlLexer:
+ 'balanced-regex': [
+ (r'/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex, '#pop'),
+ (r'!(\\\\|\\!|[^!])*![egimosx]*', String.Regex, '#pop'),
+ (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
+ (r'\{(\\\\|\\\}|[^}])*\}[egimosx]*', String.Regex, '#pop'),
+ (r'<(\\\\|\\>|[^>])*>[egimosx]*', String.Regex, '#pop'),
+ (r'\[(\\\\|\\\]|[^\]])*\][egimosx]*', String.Regex, '#pop'),
+ (r'\((\\\\|\\\)|[^)])*\)[egimosx]*', String.Regex, '#pop'),
+ (r'@(\\\\|\\@|[^@])*@[egimosx]*', String.Regex, '#pop'),
+ (r'%(\\\\|\\%|[^%])*%[egimosx]*', String.Regex, '#pop'),
+ (r'\$(\\\\|\\\$|[^$])*\$[egimosx]*', String.Regex, '#pop'),
+ ],
+ 'root': [
+ (r'\s+', Text),
+
+ # balanced delimiters (copied from PerlLexer):
+ (r's\{(\\\\|\\\}|[^}])*\}\s*', String.Regex, 'balanced-regex'),
+ (r's<(\\\\|\\>|[^>])*>\s*', String.Regex, 'balanced-regex'),
+ (r's\[(\\\\|\\\]|[^\]])*\]\s*', String.Regex, 'balanced-regex'),
+ (r's\((\\\\|\\\)|[^)])*\)\s*', String.Regex, 'balanced-regex'),
+ (r'm?/(\\\\|\\/|[^/\n])*/[gcimosx]*', String.Regex),
+ (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
+
+ # Comments
+ (r'#(.*?)\n', Comment.Single),
+ # Symbols
+ (r'\'([^\'\s\[\](){}]+|\[\])', String.Symbol),
+ # Multi-line DoubleQuotedString
+ (r'"""(\\\\|\\"|[^"])*"""', String),
+ # DoubleQuotedString
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # keywords
+ (r'(def|class|try|catch|finally|retry|return|return_local|match|'
+ r'case|->|=>)\b', Keyword),
+ # constants
+ (r'(self|super|nil|false|true)\b', Name.Constant),
+ (r'[(){};,/?|:\\]', Punctuation),
+ # names
+ (words((
+ 'Object', 'Array', 'Hash', 'Directory', 'File', 'Class', 'String',
+ 'Number', 'Enumerable', 'FancyEnumerable', 'Block', 'TrueClass',
+ 'NilClass', 'FalseClass', 'Tuple', 'Symbol', 'Stack', 'Set',
+ 'FancySpec', 'Method', 'Package', 'Range'), suffix=r'\b'),
+ Name.Builtin),
+ # functions
+ (r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function),
+ # operators, must be below functions
+ (r'[-+*/~,<>=&!?%^\[\].$]+', Operator),
+ ('[A-Z]\w*', Name.Constant),
+ ('@[a-zA-Z_]\w*', Name.Variable.Instance),
+ ('@@[a-zA-Z_]\w*', Name.Variable.Class),
+ ('@@?', Operator),
+ ('[a-zA-Z_]\w*', Name),
+ # numbers - / checks are necessary to avoid mismarking regexes,
+ # see comment in RubyLexer
+ (r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
+ bygroups(Number.Oct, Text, Operator)),
+ (r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
+ bygroups(Number.Hex, Text, Operator)),
+ (r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?',
+ bygroups(Number.Bin, Text, Operator)),
+ (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
+ bygroups(Number.Integer, Text, Operator)),
+ (r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer)
+ ]
+ }
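Before the next file, a small lookup sketch (not part of the diff; the file name is invented) showing that the alias and filename tables above are enough for the usual helpers to find the new lexer:

    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

    # Both lookups should resolve to FancyLexer once the lexer mapping
    # has been regenerated for this release.
    print(get_lexer_by_name('fancy').name)          # 'Fancy'
    print(get_lexer_for_filename('hello.fy').name)  # 'Fancy'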
diff --git a/pygments/lexers/rust.py b/pygments/lexers/rust.py
new file mode 100644
index 00000000..d8939678
--- /dev/null
+++ b/pygments/lexers/rust.py
@@ -0,0 +1,190 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.rust
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Rust language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, words, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Whitespace
+
+__all__ = ['RustLexer']
+
+
+class RustLexer(RegexLexer):
+ """
+ Lexer for the Rust programming language (version 1.0).
+
+ .. versionadded:: 1.6
+ """
+ name = 'Rust'
+ filenames = ['*.rs']
+ aliases = ['rust']
+ mimetypes = ['text/rust']
+
+ tokens = {
+ 'root': [
+ # Rust allows a file to start with a shebang, but if the first line
+ # starts with #![ then it’s not a shebang but a crate attribute.
+ (r'#![^[\r\n].*$', Comment.Preproc),
+ default('base'),
+ ],
+ 'base': [
+ # Whitespace and Comments
+ (r'\n', Whitespace),
+ (r'\s+', Whitespace),
+ (r'//!.*?\n', String.Doc),
+ (r'///(\n|[^/].*?\n)', String.Doc),
+ (r'//(.*?)\n', Comment.Single),
+ (r'/\*\*(\n|[^/*])', String.Doc, 'doccomment'),
+ (r'/\*!', String.Doc, 'doccomment'),
+ (r'/\*', Comment.Multiline, 'comment'),
+
+ # Macro parameters
+ (r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
+ # Keywords
+ (words((
+ 'as', 'box', 'crate', 'do', 'else', 'enum', 'extern', # break and continue are in labels
+ 'fn', 'for', 'if', 'impl', 'in', 'loop', 'match', 'mut', 'priv',
+ 'proc', 'pub', 'ref', 'return', 'static', 'struct',
+ 'trait', 'true', 'type', 'unsafe', 'while'), suffix=r'\b'),
+ Keyword),
+ (words(('alignof', 'be', 'const', 'offsetof', 'pure', 'sizeof',
+ 'typeof', 'once', 'unsized', 'yield'), suffix=r'\b'),
+ Keyword.Reserved),
+ (r'(mod|use)\b', Keyword.Namespace),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'let\b', Keyword.Declaration),
+ (words(('u8', 'u16', 'u32', 'u64', 'i8', 'i16', 'i32', 'i64', 'usize',
+ 'isize', 'f32', 'f64', 'str', 'bool'), suffix=r'\b'),
+ Keyword.Type),
+ (r'self\b', Name.Builtin.Pseudo),
+ # Prelude (taken from Rust’s src/libstd/prelude.rs)
+ (words((
+ # Reexported core operators
+ 'Copy', 'Send', 'Sized', 'Sync',
+ 'Drop', 'Fn', 'FnMut', 'FnOnce',
+
+ # Reexported functions
+ 'drop',
+
+ # Reexported types and traits
+ 'Box',
+ 'ToOwned',
+ 'Clone',
+ 'PartialEq', 'PartialOrd', 'Eq', 'Ord',
+ 'AsRef', 'AsMut', 'Into', 'From',
+ 'Default',
+ 'Iterator', 'Extend', 'IntoIterator',
+ 'DoubleEndedIterator', 'ExactSizeIterator',
+ 'Option',
+ 'Some', 'None',
+ 'Result',
+ 'Ok', 'Err',
+ 'SliceConcatExt',
+ 'String', 'ToString',
+ 'Vec',
+ ), suffix=r'\b'),
+ Name.Builtin),
+ # Labels
+ (r'(break|continue)(\s*)(\'[A-Za-z_]\w*)?', bygroups(Keyword, Text.Whitespace, Name.Label)),
+ # Character Literal
+ (r"""'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
+ r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
+ String.Char),
+ (r"""b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0"""
+ r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
+ String.Char),
+ # Binary Literal
+ (r'0b[01_]+', Number.Bin, 'number_lit'),
+ # Octal Literal
+ (r'0o[0-7_]+', Number.Oct, 'number_lit'),
+ # Hexadecimal Literal
+ (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
+ # Decimal Literal
+ (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+ r'\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)', Number.Float, 'number_lit'),
+ (r'[0-9][0-9_]*', Number.Integer, 'number_lit'),
+ # String Literal
+ (r'b"', String, 'bytestring'),
+ (r'"', String, 'string'),
+ (r'b?r(#*)".*?"\1', String),
+
+ # Lifetime
+ (r"""'static""", Name.Builtin),
+ (r"""'[a-zA-Z_]\w*""", Name.Attribute),
+
+ # Operators and Punctuation
+ (r'[{}()\[\],.;]', Punctuation),
+ (r'[+\-*/%&|<>^!~@=:?]', Operator),
+
+ # Identifier
+ (r'[a-zA-Z_]\w*', Name),
+
+ # Attributes
+ (r'#!?\[', Comment.Preproc, 'attribute['),
+ # Macros
+ (r'([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\s*)(\{)',
+ bygroups(Comment.Preproc, Punctuation, Whitespace, Name,
+ Whitespace, Punctuation), 'macro{'),
+ (r'([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\()',
+ bygroups(Comment.Preproc, Punctuation, Whitespace, Name,
+ Punctuation), 'macro('),
+ ],
+ 'comment': [
+ (r'[^*/]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ 'doccomment': [
+ (r'[^*/]+', String.Doc),
+ (r'/\*', String.Doc, '#push'),
+ (r'\*/', String.Doc, '#pop'),
+ (r'[*/]', String.Doc),
+ ],
+ 'number_lit': [
+ (r'[ui](8|16|32|64|size)', Keyword, '#pop'),
+ (r'f(32|64)', Keyword, '#pop'),
+ default('#pop'),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r"""\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
+ r"""|\\u\{[0-9a-fA-F]{1,6}\}""", String.Escape),
+ (r'[^\\"]+', String),
+ (r'\\', String),
+ ],
+ 'bytestring': [
+ (r"""\\x[89a-fA-F][0-9a-fA-F]""", String.Escape),
+ include('string'),
+ ],
+ 'macro{': [
+ (r'\{', Operator, '#push'),
+ (r'\}', Operator, '#pop'),
+ ],
+ 'macro(': [
+ (r'\(', Operator, '#push'),
+ (r'\)', Operator, '#pop'),
+ ],
+ 'attribute_common': [
+ (r'"', String, 'string'),
+ (r'\[', Comment.Preproc, 'attribute['),
+ (r'\(', Comment.Preproc, 'attribute('),
+ ],
+ 'attribute[': [
+ include('attribute_common'),
+ (r'\];?', Comment.Preproc, '#pop'),
+ (r'[^"\]]+', Comment.Preproc),
+ ],
+ 'attribute(': [
+ include('attribute_common'),
+ (r'\);?', Comment.Preproc, '#pop'),
+ (r'[^")]+', Comment.Preproc),
+ ],
+ }
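The shebang-versus-attribute comment in 'root' above is easy to check by hand; this is only a sketch (the file contents are invented, not taken from the patch):

    from pygments.lexers import RustLexer

    lexer = RustLexer()

    shebang = '#!/usr/bin/env rust-runner\nfn main() {}\n'
    attribute = '#![allow(dead_code)]\nfn main() {}\n'

    # A real shebang line should come back as a single Comment.Preproc token.
    print(next(iter(lexer.get_tokens(shebang))))

    # With a leading "#![" the root rule does not match, so the first line
    # is handled by the attribute states in 'base' instead.
    print(list(lexer.get_tokens(attribute))[:4])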
diff --git a/pygments/lexers/scripting.py b/pygments/lexers/scripting.py
new file mode 100644
index 00000000..473ea7eb
--- /dev/null
+++ b/pygments/lexers/scripting.py
@@ -0,0 +1,923 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.scripting
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for scripting and embedded languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default, combined, \
+ words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error, Whitespace
+from pygments.util import get_bool_opt, get_list_opt, iteritems
+
+__all__ = ['LuaLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer',
+ 'AppleScriptLexer', 'RexxLexer', 'MOOCodeLexer', 'HybrisLexer']
+
+
+class LuaLexer(RegexLexer):
+ """
+ For `Lua <http://www.lua.org>`_ source code.
+
+ Additional options accepted:
+
+ `func_name_highlighting`
+ If given and ``True``, highlight builtin function names
+ (default: ``True``).
+ `disabled_modules`
+ If given, must be a list of module names whose function names
+ should not be highlighted. By default all modules are highlighted.
+
+ To get a list of allowed modules have a look into the
+ `_lua_builtins` module:
+
+ .. sourcecode:: pycon
+
+ >>> from pygments.lexers._lua_builtins import MODULES
+ >>> MODULES.keys()
+ ['string', 'coroutine', 'modules', 'io', 'basic', ...]
+ """
+
+ name = 'Lua'
+ aliases = ['lua']
+ filenames = ['*.lua', '*.wlua']
+ mimetypes = ['text/x-lua', 'application/x-lua']
+
+ tokens = {
+ 'root': [
+ # lua allows a file to start with a shebang
+ (r'#!(.*?)$', Comment.Preproc),
+ default('base'),
+ ],
+ 'base': [
+ (r'(?s)--\[(=*)\[.*?\]\1\]', Comment.Multiline),
+ ('--.*$', Comment.Single),
+
+ (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
+ (r'(?i)\d+e[+-]?\d+', Number.Float),
+ ('(?i)0x[0-9a-f]*', Number.Hex),
+ (r'\d+', Number.Integer),
+
+ (r'\n', Text),
+ (r'[^\S\n]', Text),
+ # multiline strings
+ (r'(?s)\[(=*)\[.*?\]\1\]', String),
+
+ (r'(==|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#])', Operator),
+ (r'[\[\]{}().,:;]', Punctuation),
+ (r'(and|or|not)\b', Operator.Word),
+
+ ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|'
+ r'while)\b', Keyword),
+ (r'(local)\b', Keyword.Declaration),
+ (r'(true|false|nil)\b', Keyword.Constant),
+
+ (r'(function)\b', Keyword, 'funcname'),
+
+ (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
+
+ ("'", String.Single, combined('stringescape', 'sqs')),
+ ('"', String.Double, combined('stringescape', 'dqs'))
+ ],
+
+ 'funcname': [
+ (r'\s+', Text),
+ ('(?:([A-Za-z_]\w*)(\.))?([A-Za-z_]\w*)',
+ bygroups(Name.Class, Punctuation, Name.Function), '#pop'),
+ # inline function
+ ('\(', Punctuation, '#pop'),
+ ],
+
+ # if I understand correctly, every character is valid in a lua string,
+ # so this state is only for later corrections
+ 'string': [
+ ('.', String)
+ ],
+
+ 'stringescape': [
+ (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
+ ],
+
+ 'sqs': [
+ ("'", String, '#pop'),
+ include('string')
+ ],
+
+ 'dqs': [
+ ('"', String, '#pop'),
+ include('string')
+ ]
+ }
+
+ def __init__(self, **options):
+ self.func_name_highlighting = get_bool_opt(
+ options, 'func_name_highlighting', True)
+ self.disabled_modules = get_list_opt(options, 'disabled_modules', [])
+
+ self._functions = set()
+ if self.func_name_highlighting:
+ from pygments.lexers._lua_builtins import MODULES
+ for mod, func in iteritems(MODULES):
+ if mod not in self.disabled_modules:
+ self._functions.update(func)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if value in self._functions:
+ yield index, Name.Builtin, value
+ continue
+ elif '.' in value:
+ a, b = value.split('.')
+ yield index, Name, a
+ yield index + len(a), Punctuation, u'.'
+ yield index + len(a) + 1, Name, b
+ continue
+ yield index, token, value
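The two options documented in the docstring are plain constructor arguments; a short sketch (not part of the patch) of how they affect the output:

    from pygments.lexers import LuaLexer

    # With the 'io' module disabled, 'io.write' is no longer treated as a
    # builtin and get_tokens_unprocessed() splits it into Name '.' Name,
    # while 'print' (from the 'basic' module) stays Name.Builtin.
    lexer = LuaLexer(func_name_highlighting=True, disabled_modules=['io'])

    for tok, val in lexer.get_tokens('io.write("hi")\nprint("hi")\n'):
        print(tok, repr(val))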
+
+
+class MoonScriptLexer(LuaLexer):
+ """
+ For `MoonScript <http://moonscript.org>`_ source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = "MoonScript"
+ aliases = ["moon", "moonscript"]
+ filenames = ["*.moon"]
+ mimetypes = ['text/x-moonscript', 'application/x-moonscript']
+
+ tokens = {
+ 'root': [
+ (r'#!(.*?)$', Comment.Preproc),
+ default('base'),
+ ],
+ 'base': [
+ ('--.*$', Comment.Single),
+ (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
+ (r'(?i)\d+e[+-]?\d+', Number.Float),
+ (r'(?i)0x[0-9a-f]*', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+ (r'(?s)\[(=*)\[.*?\]\1\]', String),
+ (r'(->|=>)', Name.Function),
+ (r':[a-zA-Z_]\w*', Name.Variable),
+ (r'(==|!=|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#!.\\:])', Operator),
+ (r'[;,]', Punctuation),
+ (r'[\[\]{}()]', Keyword.Type),
+ (r'[a-zA-Z_]\w*:', Name.Variable),
+ (words((
+ 'class', 'extends', 'if', 'then', 'super', 'do', 'with',
+ 'import', 'export', 'while', 'elseif', 'return', 'for', 'in',
+ 'from', 'when', 'using', 'else', 'and', 'or', 'not', 'switch',
+ 'break'), suffix=r'\b'),
+ Keyword),
+ (r'(true|false|nil)\b', Keyword.Constant),
+ (r'(and|or|not)\b', Operator.Word),
+ (r'(self)\b', Name.Builtin.Pseudo),
+ (r'@@?([a-zA-Z_]\w*)?', Name.Variable.Class),
+ (r'[A-Z]\w*', Name.Class), # proper name
+ (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
+ ("'", String.Single, combined('stringescape', 'sqs')),
+ ('"', String.Double, combined('stringescape', 'dqs'))
+ ],
+ 'stringescape': [
+ (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
+ ],
+ 'sqs': [
+ ("'", String.Single, '#pop'),
+ (".", String)
+ ],
+ 'dqs': [
+ ('"', String.Double, '#pop'),
+ (".", String)
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text):
+ # set . as Operator instead of Punctuation
+ for index, token, value in LuaLexer.get_tokens_unprocessed(self, text):
+ if token == Punctuation and value == ".":
+ token = Operator
+ yield index, token, value
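A tiny check of the override above (sketch only, not in the diff): dots between ordinary identifiers are re-tagged from Punctuation to Operator.

    from pygments.lexers import MoonScriptLexer
    from pygments.token import Operator

    toks = list(MoonScriptLexer().get_tokens('foo.bar 42\n'))
    # The '.' between the two names should now carry the Operator type.
    assert all(tok is Operator for tok, val in toks if val == '.')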
+
+
+class ChaiscriptLexer(RegexLexer):
+ """
+ For `ChaiScript <http://chaiscript.com/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'ChaiScript'
+ aliases = ['chai', 'chaiscript']
+ filenames = ['*.chai']
+ mimetypes = ['text/x-chaiscript', 'application/x-chaiscript']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'^\#.*?\n', Comment.Single)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ include('commentsandwhitespace'),
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'[=+\-*/]', Operator),
+ (r'(for|in|while|do|break|return|continue|if|else|'
+ r'throw|try|catch'
+ r')\b', Keyword, 'slashstartsregex'),
+ (r'(var)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(attr|def|fun)\b', Keyword.Reserved),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'(eval|throw)\b', Name.Builtin),
+ (r'`\S+`', Name.Builtin),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"', String.Double, 'dqstring'),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ],
+ 'dqstring': [
+ (r'\$\{[^"}]+?\}', String.Interpol),
+ (r'\$', String.Double),
+ (r'\\\\', String.Double),
+ (r'\\"', String.Double),
+ (r'[^\\"$]+', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ }
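An illustrative sketch (not part of the patch) of the 'dqstring' state: ${...} inside a double-quoted string is emitted as String.Interpol.

    from pygments.lexers import ChaiscriptLexer
    from pygments.token import String

    for tok, val in ChaiscriptLexer().get_tokens('print("x is ${x + 1}")\n'):
        if tok is String.Interpol:
            print(repr(val))    # expected: '${x + 1}'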
+
+
+class LSLLexer(RegexLexer):
+ """
+ For Second Life's Linden Scripting Language source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'LSL'
+ aliases = ['lsl']
+ filenames = ['*.lsl']
+ mimetypes = ['text/x-lsl']
+
+ flags = re.MULTILINE
+
+ lsl_keywords = r'\b(?:do|else|for|if|jump|return|while)\b'
+ lsl_types = r'\b(?:float|integer|key|list|quaternion|rotation|string|vector)\b'
+ lsl_states = r'\b(?:(?:state)\s+\w+|default)\b'
+ lsl_events = r'\b(?:state_(?:entry|exit)|touch(?:_(?:start|end))?|(?:land_)?collision(?:_(?:start|end))?|timer|listen|(?:no_)?sensor|control|(?:not_)?at_(?:rot_)?target|money|email|run_time_permissions|changed|attach|dataserver|moving_(?:start|end)|link_message|(?:on|object)_rez|remote_data|http_re(?:sponse|quest)|path_update|transaction_result)\b'
+ lsl_functions_builtin = r'\b(?:ll(?:ReturnObjectsBy(?:ID|Owner)|Json(?:2List|[GS]etValue|ValueType)|Sin|Cos|Tan|Atan2|Sqrt|Pow|Abs|Fabs|Frand|Floor|Ceil|Round|Vec(?:Mag|Norm|Dist)|Rot(?:Between|2(?:Euler|Fwd|Left|Up))|(?:Euler|Axes)2Rot|Whisper|(?:Region|Owner)?Say|Shout|Listen(?:Control|Remove)?|Sensor(?:Repeat|Remove)?|Detected(?:Name|Key|Owner|Type|Pos|Vel|Grab|Rot|Group|LinkNumber)|Die|Ground|Wind|(?:[GS]et)(?:AnimationOverride|MemoryLimit|PrimMediaParams|ParcelMusicURL|Object(?:Desc|Name)|PhysicsMaterial|Status|Scale|Color|Alpha|Texture|Pos|Rot|Force|Torque)|ResetAnimationOverride|(?:Scale|Offset|Rotate)Texture|(?:Rot)?Target(?:Remove)?|(?:Stop)?MoveToTarget|Apply(?:Rotational)?Impulse|Set(?:KeyframedMotion|ContentType|RegionPos|(?:Angular)?Velocity|Buoyancy|HoverHeight|ForceAndTorque|TimerEvent|ScriptState|Damage|TextureAnim|Sound(?:Queueing|Radius)|Vehicle(?:Type|(?:Float|Vector|Rotation)Param)|(?:Touch|Sit)?Text|Camera(?:Eye|At)Offset|PrimitiveParams|ClickAction|Link(?:Alpha|Color|PrimitiveParams(?:Fast)?|Texture(?:Anim)?|Camera|Media)|RemoteScriptAccessPin|PayPrice|LocalRot)|ScaleByFactor|Get(?:(?:Max|Min)ScaleFactor|ClosestNavPoint|StaticPath|SimStats|Env|PrimitiveParams|Link(?:PrimitiveParams|Number(?:OfSides)?|Key|Name|Media)|HTTPHeader|FreeURLs|Object(?:Details|PermMask|PrimCount)|Parcel(?:MaxPrims|Details|Prim(?:Count|Owners))|Attached|(?:SPMax|Free|Used)Memory|Region(?:Name|TimeDilation|FPS|Corner|AgentCount)|Root(?:Position|Rotation)|UnixTime|(?:Parcel|Region)Flags|(?:Wall|GMT)clock|SimulatorHostname|BoundingBox|GeometricCenter|Creator|NumberOf(?:Prims|NotecardLines|Sides)|Animation(?:List)?|(?:Camera|Local)(?:Pos|Rot)|Vel|Accel|Omega|Time(?:stamp|OfDay)|(?:Object|CenterOf)?Mass|MassMKS|Energy|Owner|(?:Owner)?Key|SunDirection|Texture(?:Offset|Scale|Rot)|Inventory(?:Number|Name|Key|Type|Creator|PermMask)|Permissions(?:Key)?|StartParameter|List(?:Length|EntryType)|Date|Agent(?:Size|Info|Language|List)|LandOwnerAt|NotecardLine|Script(?:Name|State))|(?:Get|Reset|GetAndReset)Time|PlaySound(?:Slave)?|LoopSound(?:Master|Slave)?|(?:Trigger|Stop|Preload)Sound|(?:(?:Get|Delete)Sub|Insert)String|To(?:Upper|Lower)|Give(?:InventoryList|Money)|RezObject|(?:Stop)?LookAt|Sleep|CollisionFilter|(?:Take|Release)Controls|DetachFromAvatar|AttachToAvatar(?:Temp)?|InstantMessage|(?:GetNext)?Email|StopHover|MinEventDelay|RotLookAt|String(?:Length|Trim)|(?:Start|Stop)Animation|TargetOmega|RequestPermissions|(?:Create|Break)Link|BreakAllLinks|(?:Give|Remove)Inventory|Water|PassTouches|Request(?:Agent|Inventory)Data|TeleportAgent(?:Home|GlobalCoords)?|ModifyLand|CollisionSound|ResetScript|MessageLinked|PushObject|PassCollisions|AxisAngle2Rot|Rot2(?:Axis|Angle)|A(?:cos|sin)|AngleBetween|AllowInventoryDrop|SubStringIndex|List2(?:CSV|Integer|Json|Float|String|Key|Vector|Rot|List(?:Strided)?)|DeleteSubList|List(?:Statistics|Sort|Randomize|(?:Insert|Find|Replace)List)|EdgeOfWorld|AdjustSoundVolume|Key2Name|TriggerSoundLimited|EjectFromLand|(?:CSV|ParseString)2List|OverMyLand|SameGroup|UnSit|Ground(?:Slope|Normal|Contour)|GroundRepel|(?:Set|Remove)VehicleFlags|(?:AvatarOn)?(?:Link)?SitTarget|Script(?:Danger|Profiler)|Dialog|VolumeDetect|ResetOtherScript|RemoteLoadScriptPin|(?:Open|Close)RemoteDataChannel|SendRemoteData|RemoteDataReply|(?:Integer|String)ToBase64|XorBase64|Log(?:10)?|Base64To(?:String|Integer)|ParseStringKeepNulls|RezAtRoot|RequestSimulatorData|ForceMouselook|(?:Load|Release|(?:E|Une)scape)URL|ParcelMedia(?:CommandList|Query)|ModPow|MapDestination|(?:RemoveFrom|AddTo|Reset)Land(?:Pass|Ban)List|(?:Set|Clear)CameraParams|HTTP(?:Request|Response)|TextBox|DetectedTouch(?:UV|Face|Pos|(?:N|Bin)ormal|ST)|(?:MD5|SHA1|DumpList2)String|Request(?:Secure)?URL|Clear(?:Prim|Link)Media|(?:Link)?ParticleSystem|(?:Get|Request)(?:Username|DisplayName)|RegionSayTo|CastRay|GenerateKey|TransferLindenDollars|ManageEstateAccess|(?:Create|Delete)Character|ExecCharacterCmd|Evade|FleeFrom|NavigateTo|PatrolPoints|Pursue|UpdateCharacter|WanderWithin))\b'
+ lsl_constants_float = r'\b(?:DEG_TO_RAD|PI(?:_BY_TWO)?|RAD_TO_DEG|SQRT2|TWO_PI)\b'
+ lsl_constants_integer = r'\b(?:JSON_APPEND|STATUS_(?:PHYSICS|ROTATE_[XYZ]|PHANTOM|SANDBOX|BLOCK_GRAB(?:_OBJECT)?|(?:DIE|RETURN)_AT_EDGE|CAST_SHADOWS|OK|MALFORMED_PARAMS|TYPE_MISMATCH|BOUNDS_ERROR|NOT_(?:FOUND|SUPPORTED)|INTERNAL_ERROR|WHITELIST_FAILED)|AGENT(?:_(?:BY_(?:LEGACY_|USER)NAME|FLYING|ATTACHMENTS|SCRIPTED|MOUSELOOK|SITTING|ON_OBJECT|AWAY|WALKING|IN_AIR|TYPING|CROUCHING|BUSY|ALWAYS_RUN|AUTOPILOT|LIST_(?:PARCEL(?:_OWNER)?|REGION)))?|CAMERA_(?:PITCH|DISTANCE|BEHINDNESS_(?:ANGLE|LAG)|(?:FOCUS|POSITION)(?:_(?:THRESHOLD|LOCKED|LAG))?|FOCUS_OFFSET|ACTIVE)|ANIM_ON|LOOP|REVERSE|PING_PONG|SMOOTH|ROTATE|SCALE|ALL_SIDES|LINK_(?:ROOT|SET|ALL_(?:OTHERS|CHILDREN)|THIS)|ACTIVE|PASSIVE|SCRIPTED|CONTROL_(?:FWD|BACK|(?:ROT_)?(?:LEFT|RIGHT)|UP|DOWN|(?:ML_)?LBUTTON)|PERMISSION_(?:RETURN_OBJECTS|DEBIT|OVERRIDE_ANIMATIONS|SILENT_ESTATE_MANAGEMENT|TAKE_CONTROLS|TRIGGER_ANIMATION|ATTACH|CHANGE_LINKS|(?:CONTROL|TRACK)_CAMERA|TELEPORT)|INVENTORY_(?:TEXTURE|SOUND|OBJECT|SCRIPT|LANDMARK|CLOTHING|NOTECARD|BODYPART|ANIMATION|GESTURE|ALL|NONE)|CHANGED_(?:INVENTORY|COLOR|SHAPE|SCALE|TEXTURE|LINK|ALLOWED_DROP|OWNER|REGION(?:_START)?|TELEPORT|MEDIA)|OBJECT_(?:(?:PHYSICS|SERVER|STREAMING)_COST|UNKNOWN_DETAIL|CHARACTER_TIME|PHANTOM|PHYSICS|TEMP_ON_REZ|NAME|DESC|POS|PRIM_EQUIVALENCE|RETURN_(?:PARCEL(?:_OWNER)?|REGION)|ROO?T|VELOCITY|OWNER|GROUP|CREATOR|ATTACHED_POINT|RENDER_WEIGHT|PATHFINDING_TYPE|(?:RUNNING|TOTAL)_SCRIPT_COUNT|SCRIPT_(?:MEMORY|TIME))|TYPE_(?:INTEGER|FLOAT|STRING|KEY|VECTOR|ROTATION|INVALID)|(?:DEBUG|PUBLIC)_CHANNEL|ATTACH_(?:AVATAR_CENTER|CHEST|HEAD|BACK|PELVIS|MOUTH|CHIN|NECK|NOSE|BELLY|[LR](?:SHOULDER|HAND|FOOT|EAR|EYE|[UL](?:ARM|LEG)|HIP)|(?:LEFT|RIGHT)_PEC|HUD_(?:CENTER_[12]|TOP_(?:RIGHT|CENTER|LEFT)|BOTTOM(?:_(?:RIGHT|LEFT))?))|LAND_(?:LEVEL|RAISE|LOWER|SMOOTH|NOISE|REVERT)|DATA_(?:ONLINE|NAME|BORN|SIM_(?:POS|STATUS|RATING)|PAYINFO)|PAYMENT_INFO_(?:ON_FILE|USED)|REMOTE_DATA_(?:CHANNEL|REQUEST|REPLY)|PSYS_(?:PART_(?:BF_(?:ZERO|ONE(?:_MINUS_(?:DEST_COLOR|SOURCE_(ALPHA|COLOR)))?|DEST_COLOR|SOURCE_(ALPHA|COLOR))|BLEND_FUNC_(DEST|SOURCE)|FLAGS|(?:START|END)_(?:COLOR|ALPHA|SCALE|GLOW)|MAX_AGE|(?:RIBBON|WIND|INTERP_(?:COLOR|SCALE)|BOUNCE|FOLLOW_(?:SRC|VELOCITY)|TARGET_(?:POS|LINEAR)|EMISSIVE)_MASK)|SRC_(?:MAX_AGE|PATTERN|ANGLE_(?:BEGIN|END)|BURST_(?:RATE|PART_COUNT|RADIUS|SPEED_(?:MIN|MAX))|ACCEL|TEXTURE|TARGET_KEY|OMEGA|PATTERN_(?:DROP|EXPLODE|ANGLE(?:_CONE(?:_EMPTY)?)?)))|VEHICLE_(?:REFERENCE_FRAME|TYPE_(?:NONE|SLED|CAR|BOAT|AIRPLANE|BALLOON)|(?:LINEAR|ANGULAR)_(?:FRICTION_TIMESCALE|MOTOR_DIRECTION)|LINEAR_MOTOR_OFFSET|HOVER_(?:HEIGHT|EFFICIENCY|TIMESCALE)|BUOYANCY|(?:LINEAR|ANGULAR)_(?:DEFLECTION_(?:EFFICIENCY|TIMESCALE)|MOTOR_(?:DECAY_)?TIMESCALE)|VERTICAL_ATTRACTION_(?:EFFICIENCY|TIMESCALE)|BANKING_(?:EFFICIENCY|MIX|TIMESCALE)|FLAG_(?:NO_DEFLECTION_UP|LIMIT_(?:ROLL_ONLY|MOTOR_UP)|HOVER_(?:(?:WATER|TERRAIN|UP)_ONLY|GLOBAL_HEIGHT)|MOUSELOOK_(?:STEER|BANK)|CAMERA_DECOUPLED))|PRIM_(?:TYPE(?:_(?:BOX|CYLINDER|PRISM|SPHERE|TORUS|TUBE|RING|SCULPT))?|HOLE_(?:DEFAULT|CIRCLE|SQUARE|TRIANGLE)|MATERIAL(?:_(?:STONE|METAL|GLASS|WOOD|FLESH|PLASTIC|RUBBER))?|SHINY_(?:NONE|LOW|MEDIUM|HIGH)|BUMP_(?:NONE|BRIGHT|DARK|WOOD|BARK|BRICKS|CHECKER|CONCRETE|TILE|STONE|DISKS|GRAVEL|BLOBS|SIDING|LARGETILE|STUCCO|SUCTION|WEAVE)|TEXGEN_(?:DEFAULT|PLANAR)|SCULPT_(?:TYPE_(?:SPHERE|TORUS|PLANE|CYLINDER|MASK)|FLAG_(?:MIRROR|INVERT))|PHYSICS(?:_(?:SHAPE_(?:CONVEX|NONE|PRIM|TYPE)))?|(?:POS|ROT)_LOCAL|SLICE|TEXT|FLEXIBLE|POINT_LIGHT|TEMP_ON_REZ|PHANTOM|POSITION|SIZE|ROTATION|TEXTURE|NAME|OMEGA|DESC|LINK_TARGET|COLOR|BUMP_SHINY|FULLBRIGHT|TEXGEN|GLOW|MEDIA_(?:ALT_IMAGE_ENABLE|CONTROLS|(?:CURRENT|HOME)_URL|AUTO_(?:LOOP|PLAY|SCALE|ZOOM)|FIRST_CLICK_INTERACT|(?:WIDTH|HEIGHT)_PIXELS|WHITELIST(?:_ENABLE)?|PERMS_(?:INTERACT|CONTROL)|PARAM_MAX|CONTROLS_(?:STANDARD|MINI)|PERM_(?:NONE|OWNER|GROUP|ANYONE)|MAX_(?:URL_LENGTH|WHITELIST_(?:SIZE|COUNT)|(?:WIDTH|HEIGHT)_PIXELS)))|MASK_(?:BASE|OWNER|GROUP|EVERYONE|NEXT)|PERM_(?:TRANSFER|MODIFY|COPY|MOVE|ALL)|PARCEL_(?:MEDIA_COMMAND_(?:STOP|PAUSE|PLAY|LOOP|TEXTURE|URL|TIME|AGENT|UNLOAD|AUTO_ALIGN|TYPE|SIZE|DESC|LOOP_SET)|FLAG_(?:ALLOW_(?:FLY|(?:GROUP_)?SCRIPTS|LANDMARK|TERRAFORM|DAMAGE|CREATE_(?:GROUP_)?OBJECTS)|USE_(?:ACCESS_(?:GROUP|LIST)|BAN_LIST|LAND_PASS_LIST)|LOCAL_SOUND_ONLY|RESTRICT_PUSHOBJECT|ALLOW_(?:GROUP|ALL)_OBJECT_ENTRY)|COUNT_(?:TOTAL|OWNER|GROUP|OTHER|SELECTED|TEMP)|DETAILS_(?:NAME|DESC|OWNER|GROUP|AREA|ID|SEE_AVATARS))|LIST_STAT_(?:MAX|MIN|MEAN|MEDIAN|STD_DEV|SUM(?:_SQUARES)?|NUM_COUNT|GEOMETRIC_MEAN|RANGE)|PAY_(?:HIDE|DEFAULT)|REGION_FLAG_(?:ALLOW_DAMAGE|FIXED_SUN|BLOCK_TERRAFORM|SANDBOX|DISABLE_(?:COLLISIONS|PHYSICS)|BLOCK_FLY|ALLOW_DIRECT_TELEPORT|RESTRICT_PUSHOBJECT)|HTTP_(?:METHOD|MIMETYPE|BODY_(?:MAXLENGTH|TRUNCATED)|CUSTOM_HEADER|PRAGMA_NO_CACHE|VERBOSE_THROTTLE|VERIFY_CERT)|STRING_(?:TRIM(?:_(?:HEAD|TAIL))?)|CLICK_ACTION_(?:NONE|TOUCH|SIT|BUY|PAY|OPEN(?:_MEDIA)?|PLAY|ZOOM)|TOUCH_INVALID_FACE|PROFILE_(?:NONE|SCRIPT_MEMORY)|RC_(?:DATA_FLAGS|DETECT_PHANTOM|GET_(?:LINK_NUM|NORMAL|ROOT_KEY)|MAX_HITS|REJECT_(?:TYPES|AGENTS|(?:NON)?PHYSICAL|LAND))|RCERR_(?:CAST_TIME_EXCEEDED|SIM_PERF_LOW|UNKNOWN)|ESTATE_ACCESS_(?:ALLOWED_(?:AGENT|GROUP)_(?:ADD|REMOVE)|BANNED_AGENT_(?:ADD|REMOVE))|DENSITY|FRICTION|RESTITUTION|GRAVITY_MULTIPLIER|KFM_(?:COMMAND|CMD_(?:PLAY|STOP|PAUSE|SET_MODE)|MODE|FORWARD|LOOP|PING_PONG|REVERSE|DATA|ROTATION|TRANSLATION)|ERR_(?:GENERIC|PARCEL_PERMISSIONS|MALFORMED_PARAMS|RUNTIME_PERMISSIONS|THROTTLED)|CHARACTER_(?:CMD_(?:(?:SMOOTH_)?STOP|JUMP)|DESIRED_(?:TURN_)?SPEED|RADIUS|STAY_WITHIN_PARCEL|LENGTH|ORIENTATION|ACCOUNT_FOR_SKIPPED_FRAMES|AVOIDANCE_MODE|TYPE(?:_(?:[A-D]|NONE))?|MAX_(?:DECEL|TURN_RADIUS|(?:ACCEL|SPEED)))|PURSUIT_(?:OFFSET|FUZZ_FACTOR|GOAL_TOLERANCE|INTERCEPT)|REQUIRE_LINE_OF_SIGHT|FORCE_DIRECT_PATH|VERTICAL|HORIZONTAL|AVOID_(?:CHARACTERS|DYNAMIC_OBSTACLES|NONE)|PU_(?:EVADE_(?:HIDDEN|SPOTTED)|FAILURE_(?:DYNAMIC_PATHFINDING_DISABLED|INVALID_(?:GOAL|START)|NO_(?:NAVMESH|VALID_DESTINATION)|OTHER|TARGET_GONE|(?:PARCEL_)?UNREACHABLE)|(?:GOAL|SLOWDOWN_DISTANCE)_REACHED)|TRAVERSAL_TYPE(?:_(?:FAST|NONE|SLOW))?|CONTENT_TYPE_(?:ATOM|FORM|HTML|JSON|LLSD|RSS|TEXT|XHTML|XML)|GCNP_(?:RADIUS|STATIC)|(?:PATROL|WANDER)_PAUSE_AT_WAYPOINTS|OPT_(?:AVATAR|CHARACTER|EXCLUSION_VOLUME|LEGACY_LINKSET|MATERIAL_VOLUME|OTHER|STATIC_OBSTACLE|WALKABLE)|SIM_STAT_PCT_CHARS_STEPPED)\b'
+ lsl_constants_integer_boolean = r'\b(?:FALSE|TRUE)\b'
+ lsl_constants_rotation = r'\b(?:ZERO_ROTATION)\b'
+ lsl_constants_string = r'\b(?:EOF|JSON_(?:ARRAY|DELETE|FALSE|INVALID|NULL|NUMBER|OBJECT|STRING|TRUE)|NULL_KEY|TEXTURE_(?:BLANK|DEFAULT|MEDIA|PLYWOOD|TRANSPARENT)|URL_REQUEST_(?:GRANTED|DENIED))\b'
+ lsl_constants_vector = r'\b(?:TOUCH_INVALID_(?:TEXCOORD|VECTOR)|ZERO_VECTOR)\b'
+ lsl_invalid_broken = r'\b(?:LAND_(?:LARGE|MEDIUM|SMALL)_BRUSH)\b'
+ lsl_invalid_deprecated = r'\b(?:ATTACH_[LR]PEC|DATA_RATING|OBJECT_ATTACHMENT_(?:GEOMETRY_BYTES|SURFACE_AREA)|PRIM_(?:CAST_SHADOWS|MATERIAL_LIGHT|TYPE_LEGACY)|PSYS_SRC_(?:INNER|OUTER)ANGLE|VEHICLE_FLAG_NO_FLY_UP|ll(?:Cloud|Make(?:Explosion|Fountain|Smoke|Fire)|RemoteDataSetRegion|Sound(?:Preload)?|XorBase64Strings(?:Correct)?))\b'
+ lsl_invalid_illegal = r'\b(?:event)\b'
+ lsl_invalid_unimplemented = r'\b(?:CHARACTER_(?:MAX_ANGULAR_(?:ACCEL|SPEED)|TURN_SPEED_MULTIPLIER)|PERMISSION_(?:CHANGE_(?:JOINTS|PERMISSIONS)|RELEASE_OWNERSHIP|REMAP_CONTROLS)|PRIM_PHYSICS_MATERIAL|PSYS_SRC_OBJ_REL_MASK|ll(?:CollisionSprite|(?:Stop)?PointAt|(?:(?:Refresh|Set)Prim)URL|(?:Take|Release)Camera|RemoteLoadScript))\b'
+ lsl_reserved_godmode = r'\b(?:ll(?:GodLikeRezObject|Set(?:Inventory|Object)PermMask))\b'
+ lsl_reserved_log = r'\b(?:print)\b'
+ lsl_operators = r'\+\+|\-\-|<<|>>|&&?|\|\|?|\^|~|[!%<>=*+\-/]=?'
+
+ tokens = {
+ 'root':
+ [
+ (r'//.*?\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'"', String.Double, 'string'),
+ (lsl_keywords, Keyword),
+ (lsl_types, Keyword.Type),
+ (lsl_states, Name.Class),
+ (lsl_events, Name.Builtin),
+ (lsl_functions_builtin, Name.Function),
+ (lsl_constants_float, Keyword.Constant),
+ (lsl_constants_integer, Keyword.Constant),
+ (lsl_constants_integer_boolean, Keyword.Constant),
+ (lsl_constants_rotation, Keyword.Constant),
+ (lsl_constants_string, Keyword.Constant),
+ (lsl_constants_vector, Keyword.Constant),
+ (lsl_invalid_broken, Error),
+ (lsl_invalid_deprecated, Error),
+ (lsl_invalid_illegal, Error),
+ (lsl_invalid_unimplemented, Error),
+ (lsl_reserved_godmode, Keyword.Reserved),
+ (lsl_reserved_log, Keyword.Reserved),
+ (r'\b([a-zA-Z_]\w*)\b', Name.Variable),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d*', Number.Float),
+ (r'(\d+\.\d*|\.\d+)', Number.Float),
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (lsl_operators, Operator),
+ (r':=?', Error),
+ (r'[,;{}()\[\]]', Punctuation),
+ (r'\n+', Whitespace),
+ (r'\s+', Whitespace)
+ ],
+ 'comment':
+ [
+ (r'[^*/]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'string':
+ [
+ (r'\\([nt"\\])', String.Escape),
+ (r'"', String.Double, '#pop'),
+ (r'\\.', Error),
+ (r'[^"\\]+', String.Double),
+ ]
+ }
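One detail worth showing (sketch only, sample call invented): identifiers from the lsl_invalid_* tables are deliberately emitted as Error tokens, so deprecated calls stand out in the output.

    from pygments.lexers import LSLLexer

    source = 'llSay(0, "hi");\nllMakeExplosion();\n'
    # llSay should come out as Name.Function, while the deprecated
    # llMakeExplosion is flagged with the Error token type.
    print([(tok, val) for tok, val in LSLLexer().get_tokens(source)
           if val.startswith('ll')])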
+
+
+class AppleScriptLexer(RegexLexer):
+ """
+ For `AppleScript source code
+ <http://developer.apple.com/documentation/AppleScript/
+ Conceptual/AppleScriptLangGuide>`_,
+ including `AppleScript Studio
+ <http://developer.apple.com/documentation/AppleScript/
+ Reference/StudioReference>`_.
+ Contributed by Andreas Amann <aamann@mac.com>.
+
+ .. versionadded:: 1.0
+ """
+
+ name = 'AppleScript'
+ aliases = ['applescript']
+ filenames = ['*.applescript']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ Identifiers = r'[a-zA-Z]\w*'
+
+ # XXX: use words() for all of these
+ Literals = ('AppleScript', 'current application', 'false', 'linefeed',
+ 'missing value', 'pi', 'quote', 'result', 'return', 'space',
+ 'tab', 'text item delimiters', 'true', 'version')
+ Classes = ('alias ', 'application ', 'boolean ', 'class ', 'constant ',
+ 'date ', 'file ', 'integer ', 'list ', 'number ', 'POSIX file ',
+ 'real ', 'record ', 'reference ', 'RGB color ', 'script ',
+ 'text ', 'unit types', '(?:Unicode )?text', 'string')
+ BuiltIn = ('attachment', 'attribute run', 'character', 'day', 'month',
+ 'paragraph', 'word', 'year')
+ HandlerParams = ('about', 'above', 'against', 'apart from', 'around',
+ 'aside from', 'at', 'below', 'beneath', 'beside',
+ 'between', 'for', 'given', 'instead of', 'on', 'onto',
+ 'out of', 'over', 'since')
+ Commands = ('ASCII (character|number)', 'activate', 'beep', 'choose URL',
+ 'choose application', 'choose color', 'choose file( name)?',
+ 'choose folder', 'choose from list',
+ 'choose remote application', 'clipboard info',
+ 'close( access)?', 'copy', 'count', 'current date', 'delay',
+ 'delete', 'display (alert|dialog)', 'do shell script',
+ 'duplicate', 'exists', 'get eof', 'get volume settings',
+ 'info for', 'launch', 'list (disks|folder)', 'load script',
+ 'log', 'make', 'mount volume', 'new', 'offset',
+ 'open( (for access|location))?', 'path to', 'print', 'quit',
+ 'random number', 'read', 'round', 'run( script)?',
+ 'say', 'scripting components',
+ 'set (eof|the clipboard to|volume)', 'store script',
+ 'summarize', 'system attribute', 'system info',
+ 'the clipboard', 'time to GMT', 'write', 'quoted form')
+ References = ('(in )?back of', '(in )?front of', '[0-9]+(st|nd|rd|th)',
+ 'first', 'second', 'third', 'fourth', 'fifth', 'sixth',
+ 'seventh', 'eighth', 'ninth', 'tenth', 'after', 'back',
+ 'before', 'behind', 'every', 'front', 'index', 'last',
+ 'middle', 'some', 'that', 'through', 'thru', 'where', 'whose')
+ Operators = ("and", "or", "is equal", "equals", "(is )?equal to", "is not",
+ "isn't", "isn't equal( to)?", "is not equal( to)?",
+ "doesn't equal", "does not equal", "(is )?greater than",
+ "comes after", "is not less than or equal( to)?",
+ "isn't less than or equal( to)?", "(is )?less than",
+ "comes before", "is not greater than or equal( to)?",
+ "isn't greater than or equal( to)?",
+ "(is )?greater than or equal( to)?", "is not less than",
+ "isn't less than", "does not come before",
+ "doesn't come before", "(is )?less than or equal( to)?",
+ "is not greater than", "isn't greater than",
+ "does not come after", "doesn't come after", "starts? with",
+ "begins? with", "ends? with", "contains?", "does not contain",
+ "doesn't contain", "is in", "is contained by", "is not in",
+ "is not contained by", "isn't contained by", "div", "mod",
+ "not", "(a )?(ref( to)?|reference to)", "is", "does")
+ Control = ('considering', 'else', 'error', 'exit', 'from', 'if',
+ 'ignoring', 'in', 'repeat', 'tell', 'then', 'times', 'to',
+ 'try', 'until', 'using terms from', 'while', 'whith',
+ 'with timeout( of)?', 'with transaction', 'by', 'continue',
+ 'end', 'its?', 'me', 'my', 'return', 'of', 'as')
+ Declarations = ('global', 'local', 'prop(erty)?', 'set', 'get')
+ Reserved = ('but', 'put', 'returning', 'the')
+ StudioClasses = ('action cell', 'alert reply', 'application', 'box',
+ 'browser( cell)?', 'bundle', 'button( cell)?', 'cell',
+ 'clip view', 'color well', 'color-panel',
+ 'combo box( item)?', 'control',
+ 'data( (cell|column|item|row|source))?', 'default entry',
+ 'dialog reply', 'document', 'drag info', 'drawer',
+ 'event', 'font(-panel)?', 'formatter',
+ 'image( (cell|view))?', 'matrix', 'menu( item)?', 'item',
+ 'movie( view)?', 'open-panel', 'outline view', 'panel',
+ 'pasteboard', 'plugin', 'popup button',
+ 'progress indicator', 'responder', 'save-panel',
+ 'scroll view', 'secure text field( cell)?', 'slider',
+ 'sound', 'split view', 'stepper', 'tab view( item)?',
+ 'table( (column|header cell|header view|view))',
+ 'text( (field( cell)?|view))?', 'toolbar( item)?',
+ 'user-defaults', 'view', 'window')
+ StudioEvents = ('accept outline drop', 'accept table drop', 'action',
+ 'activated', 'alert ended', 'awake from nib', 'became key',
+ 'became main', 'begin editing', 'bounds changed',
+ 'cell value', 'cell value changed', 'change cell value',
+ 'change item value', 'changed', 'child of item',
+ 'choose menu item', 'clicked', 'clicked toolbar item',
+ 'closed', 'column clicked', 'column moved',
+ 'column resized', 'conclude drop', 'data representation',
+ 'deminiaturized', 'dialog ended', 'document nib name',
+ 'double clicked', 'drag( (entered|exited|updated))?',
+ 'drop', 'end editing', 'exposed', 'idle', 'item expandable',
+ 'item value', 'item value changed', 'items changed',
+ 'keyboard down', 'keyboard up', 'launched',
+ 'load data representation', 'miniaturized', 'mouse down',
+ 'mouse dragged', 'mouse entered', 'mouse exited',
+ 'mouse moved', 'mouse up', 'moved',
+ 'number of browser rows', 'number of items',
+ 'number of rows', 'open untitled', 'opened', 'panel ended',
+ 'parameters updated', 'plugin loaded', 'prepare drop',
+ 'prepare outline drag', 'prepare outline drop',
+ 'prepare table drag', 'prepare table drop',
+ 'read from file', 'resigned active', 'resigned key',
+ 'resigned main', 'resized( sub views)?',
+ 'right mouse down', 'right mouse dragged',
+ 'right mouse up', 'rows changed', 'scroll wheel',
+ 'selected tab view item', 'selection changed',
+ 'selection changing', 'should begin editing',
+ 'should close', 'should collapse item',
+ 'should end editing', 'should expand item',
+ 'should open( untitled)?',
+ 'should quit( after last window closed)?',
+ 'should select column', 'should select item',
+ 'should select row', 'should select tab view item',
+ 'should selection change', 'should zoom', 'shown',
+ 'update menu item', 'update parameters',
+ 'update toolbar item', 'was hidden', 'was miniaturized',
+ 'will become active', 'will close', 'will dismiss',
+ 'will display browser cell', 'will display cell',
+ 'will display item cell', 'will display outline cell',
+ 'will finish launching', 'will hide', 'will miniaturize',
+ 'will move', 'will open', 'will pop up', 'will quit',
+ 'will resign active', 'will resize( sub views)?',
+ 'will select tab view item', 'will show', 'will zoom',
+ 'write to file', 'zoomed')
+ StudioCommands = ('animate', 'append', 'call method', 'center',
+ 'close drawer', 'close panel', 'display',
+ 'display alert', 'display dialog', 'display panel', 'go',
+ 'hide', 'highlight', 'increment', 'item for',
+ 'load image', 'load movie', 'load nib', 'load panel',
+ 'load sound', 'localized string', 'lock focus', 'log',
+ 'open drawer', 'path for', 'pause', 'perform action',
+ 'play', 'register', 'resume', 'scroll', 'select( all)?',
+ 'show', 'size to fit', 'start', 'step back',
+ 'step forward', 'stop', 'synchronize', 'unlock focus',
+ 'update')
+ StudioProperties = ('accepts arrow key', 'action method', 'active',
+ 'alignment', 'allowed identifiers',
+ 'allows branch selection', 'allows column reordering',
+ 'allows column resizing', 'allows column selection',
+ 'allows customization',
+ 'allows editing text attributes',
+ 'allows empty selection', 'allows mixed state',
+ 'allows multiple selection', 'allows reordering',
+ 'allows undo', 'alpha( value)?', 'alternate image',
+ 'alternate increment value', 'alternate title',
+ 'animation delay', 'associated file name',
+ 'associated object', 'auto completes', 'auto display',
+ 'auto enables items', 'auto repeat',
+ 'auto resizes( outline column)?',
+ 'auto save expanded items', 'auto save name',
+ 'auto save table columns', 'auto saves configuration',
+ 'auto scroll', 'auto sizes all columns to fit',
+ 'auto sizes cells', 'background color', 'bezel state',
+ 'bezel style', 'bezeled', 'border rect', 'border type',
+ 'bordered', 'bounds( rotation)?', 'box type',
+ 'button returned', 'button type',
+ 'can choose directories', 'can choose files',
+ 'can draw', 'can hide',
+ 'cell( (background color|size|type))?', 'characters',
+ 'class', 'click count', 'clicked( data)? column',
+ 'clicked data item', 'clicked( data)? row',
+ 'closeable', 'collating', 'color( (mode|panel))',
+ 'command key down', 'configuration',
+ 'content(s| (size|view( margins)?))?', 'context',
+ 'continuous', 'control key down', 'control size',
+ 'control tint', 'control view',
+ 'controller visible', 'coordinate system',
+ 'copies( on scroll)?', 'corner view', 'current cell',
+ 'current column', 'current( field)? editor',
+ 'current( menu)? item', 'current row',
+ 'current tab view item', 'data source',
+ 'default identifiers', 'delta (x|y|z)',
+ 'destination window', 'directory', 'display mode',
+ 'displayed cell', 'document( (edited|rect|view))?',
+ 'double value', 'dragged column', 'dragged distance',
+ 'dragged items', 'draws( cell)? background',
+ 'draws grid', 'dynamically scrolls', 'echos bullets',
+ 'edge', 'editable', 'edited( data)? column',
+ 'edited data item', 'edited( data)? row', 'enabled',
+ 'enclosing scroll view', 'ending page',
+ 'error handling', 'event number', 'event type',
+ 'excluded from windows menu', 'executable path',
+ 'expanded', 'fax number', 'field editor', 'file kind',
+ 'file name', 'file type', 'first responder',
+ 'first visible column', 'flipped', 'floating',
+ 'font( panel)?', 'formatter', 'frameworks path',
+ 'frontmost', 'gave up', 'grid color', 'has data items',
+ 'has horizontal ruler', 'has horizontal scroller',
+ 'has parent data item', 'has resize indicator',
+ 'has shadow', 'has sub menu', 'has vertical ruler',
+ 'has vertical scroller', 'header cell', 'header view',
+ 'hidden', 'hides when deactivated', 'highlights by',
+ 'horizontal line scroll', 'horizontal page scroll',
+ 'horizontal ruler view', 'horizontally resizable',
+ 'icon image', 'id', 'identifier',
+ 'ignores multiple clicks',
+ 'image( (alignment|dims when disabled|frame style|scaling))?',
+ 'imports graphics', 'increment value',
+ 'indentation per level', 'indeterminate', 'index',
+ 'integer value', 'intercell spacing', 'item height',
+ 'key( (code|equivalent( modifier)?|window))?',
+ 'knob thickness', 'label', 'last( visible)? column',
+ 'leading offset', 'leaf', 'level', 'line scroll',
+ 'loaded', 'localized sort', 'location', 'loop mode',
+ 'main( (bunde|menu|window))?', 'marker follows cell',
+ 'matrix mode', 'maximum( content)? size',
+ 'maximum visible columns',
+ 'menu( form representation)?', 'miniaturizable',
+ 'miniaturized', 'minimized image', 'minimized title',
+ 'minimum column width', 'minimum( content)? size',
+ 'modal', 'modified', 'mouse down state',
+ 'movie( (controller|file|rect))?', 'muted', 'name',
+ 'needs display', 'next state', 'next text',
+ 'number of tick marks', 'only tick mark values',
+ 'opaque', 'open panel', 'option key down',
+ 'outline table column', 'page scroll', 'pages across',
+ 'pages down', 'palette label', 'pane splitter',
+ 'parent data item', 'parent window', 'pasteboard',
+ 'path( (names|separator))?', 'playing',
+ 'plays every frame', 'plays selection only', 'position',
+ 'preferred edge', 'preferred type', 'pressure',
+ 'previous text', 'prompt', 'properties',
+ 'prototype cell', 'pulls down', 'rate',
+ 'released when closed', 'repeated',
+ 'requested print time', 'required file type',
+ 'resizable', 'resized column', 'resource path',
+ 'returns records', 'reuses columns', 'rich text',
+ 'roll over', 'row height', 'rulers visible',
+ 'save panel', 'scripts path', 'scrollable',
+ 'selectable( identifiers)?', 'selected cell',
+ 'selected( data)? columns?', 'selected data items?',
+ 'selected( data)? rows?', 'selected item identifier',
+ 'selection by rect', 'send action on arrow key',
+ 'sends action when done editing', 'separates columns',
+ 'separator item', 'sequence number', 'services menu',
+ 'shared frameworks path', 'shared support path',
+ 'sheet', 'shift key down', 'shows alpha',
+ 'shows state by', 'size( mode)?',
+ 'smart insert delete enabled', 'sort case sensitivity',
+ 'sort column', 'sort order', 'sort type',
+ 'sorted( data rows)?', 'sound', 'source( mask)?',
+ 'spell checking enabled', 'starting page', 'state',
+ 'string value', 'sub menu', 'super menu', 'super view',
+ 'tab key traverses cells', 'tab state', 'tab type',
+ 'tab view', 'table view', 'tag', 'target( printer)?',
+ 'text color', 'text container insert',
+ 'text container origin', 'text returned',
+ 'tick mark position', 'time stamp',
+ 'title(d| (cell|font|height|position|rect))?',
+ 'tool tip', 'toolbar', 'trailing offset', 'transparent',
+ 'treat packages as directories', 'truncated labels',
+ 'types', 'unmodified characters', 'update views',
+ 'use sort indicator', 'user defaults',
+ 'uses data source', 'uses ruler',
+ 'uses threaded animation',
+ 'uses title from previous column', 'value wraps',
+ 'version',
+ 'vertical( (line scroll|page scroll|ruler view))?',
+ 'vertically resizable', 'view',
+ 'visible( document rect)?', 'volume', 'width', 'window',
+ 'windows menu', 'wraps', 'zoomable', 'zoomed')
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (u'¬\\n', String.Escape),
+ (r"'s\s+", Text), # This is a possessive, consider moving
+ (r'(--|#).*?$', Comment),
+ (r'\(\*', Comment.Multiline, 'comment'),
+ (r'[(){}!,.:]', Punctuation),
+ (u'(«)([^»]+)(»)',
+ bygroups(Text, Name.Builtin, Text)),
+ (r'\b((?:considering|ignoring)\s*)'
+ r'(application responses|case|diacriticals|hyphens|'
+ r'numeric strings|punctuation|white space)',
+ bygroups(Keyword, Name.Builtin)),
+ (u'(-|\\*|\\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\\^)', Operator),
+ (r"\b(%s)\b" % '|'.join(Operators), Operator.Word),
+ (r'^(\s*(?:on|end)\s+)'
+ r'(%s)' % '|'.join(StudioEvents[::-1]),
+ bygroups(Keyword, Name.Function)),
+ (r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)),
+ (r'\b(as )(%s)\b' % '|'.join(Classes),
+ bygroups(Keyword, Name.Class)),
+ (r'\b(%s)\b' % '|'.join(Literals), Name.Constant),
+ (r'\b(%s)\b' % '|'.join(Commands), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(Control), Keyword),
+ (r'\b(%s)\b' % '|'.join(Declarations), Keyword),
+ (r'\b(%s)\b' % '|'.join(Reserved), Name.Builtin),
+ (r'\b(%s)s?\b' % '|'.join(BuiltIn), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(HandlerParams), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(StudioProperties), Name.Attribute),
+ (r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(References), Name.Builtin),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r'\b(%s)\b' % Identifiers, Name.Variable),
+ (r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float),
+ (r'[-+]?\d+', Number.Integer),
+ ],
+ 'comment': [
+ ('\(\*', Comment.Multiline, '#push'),
+ ('\*\)', Comment.Multiline, '#pop'),
+ ('[^*(]+', Comment.Multiline),
+ ('[*(]', Comment.Multiline),
+ ],
+ }
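The Operators tuple above turns AppleScript's English comparison phrases into Operator.Word tokens; a short sketch (not part of the diff, sample code invented):

    from pygments.lexers import AppleScriptLexer
    from pygments.token import Operator

    code = 'if x is not equal to 3 then return x'
    found = [val for tok, val in AppleScriptLexer().get_tokens(code)
             if tok is Operator.Word]
    # The phrase may be matched in pieces (e.g. 'is not', 'equal to'),
    # but every piece carries the Operator.Word type.
    print(found)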
+
+
+class RexxLexer(RegexLexer):
+ """
+ `Rexx <http://www.rexxinfo.org/>`_ is a scripting language available for
+ a wide range of platforms, with its roots on mainframe systems. It is
+ popular for I/O- and data-based tasks and can act as a glue language to
+ bind different applications together.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Rexx'
+ aliases = ['rexx', 'arexx']
+ filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx']
+ mimetypes = ['text/x-rexx']
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\s', Whitespace),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'"', String, 'string_double'),
+ (r"'", String, 'string_single'),
+ (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number),
+ (r'([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b',
+ bygroups(Name.Function, Whitespace, Operator, Whitespace,
+ Keyword.Declaration)),
+ (r'([a-z_]\w*)(\s*)(:)',
+ bygroups(Name.Label, Whitespace, Operator)),
+ include('function'),
+ include('keyword'),
+ include('operator'),
+ (r'[a-z_]\w*', Text),
+ ],
+ 'function': [
+ (words((
+ 'abbrev', 'abs', 'address', 'arg', 'b2x', 'bitand', 'bitor', 'bitxor',
+ 'c2d', 'c2x', 'center', 'charin', 'charout', 'chars', 'compare',
+ 'condition', 'copies', 'd2c', 'd2x', 'datatype', 'date', 'delstr',
+ 'delword', 'digits', 'errortext', 'form', 'format', 'fuzz', 'insert',
+ 'lastpos', 'left', 'length', 'linein', 'lineout', 'lines', 'max',
+ 'min', 'overlay', 'pos', 'queued', 'random', 'reverse', 'right', 'sign',
+ 'sourceline', 'space', 'stream', 'strip', 'substr', 'subword', 'symbol',
+ 'time', 'trace', 'translate', 'trunc', 'value', 'verify', 'word',
+ 'wordindex', 'wordlength', 'wordpos', 'words', 'x2b', 'x2c', 'x2d',
+ 'xrange'), suffix=r'(\s*)(\()'),
+ bygroups(Name.Builtin, Whitespace, Operator)),
+ ],
+ 'keyword': [
+ (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|'
+ r'interpret|iterate|leave|nop|numeric|off|on|options|parse|'
+ r'pull|push|queue|return|say|select|signal|to|then|trace|until|'
+ r'while)\b', Keyword.Reserved),
+ ],
+ 'operator': [
+ (r'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||'
+ r'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|'
+ r'¬>>|¬>|¬|\.|,)', Operator),
+ ],
+ 'string_double': [
+ (r'[^"\n]+', String),
+ (r'""', String),
+ (r'"', String, '#pop'),
+ (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
+ ],
+ 'string_single': [
+ (r'[^\'\n]', String),
+ (r'\'\'', String),
+ (r'\'', String, '#pop'),
+ (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
+ ],
+ 'comment': [
+ (r'[^*]+', Comment.Multiline),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'\*', Comment.Multiline),
+ ]
+ }
+
+ _c = lambda s: re.compile(s, re.MULTILINE)
+ _ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b')
+ _ADDRESS_PATTERN = _c(r'^\s*address\s+')
+ _DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b')
+ _IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$')
+ _PROCEDURE_PATTERN = _c(r'^\s*([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b')
+ _ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$')
+ _PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b')
+ PATTERNS_AND_WEIGHTS = (
+ (_ADDRESS_COMMAND_PATTERN, 0.2),
+ (_ADDRESS_PATTERN, 0.05),
+ (_DO_WHILE_PATTERN, 0.1),
+ (_ELSE_DO_PATTERN, 0.1),
+ (_IF_THEN_DO_PATTERN, 0.1),
+ (_PROCEDURE_PATTERN, 0.5),
+ (_PARSE_ARG_PATTERN, 0.2),
+ )
+
+ def analyse_text(text):
+ """
+ Check for initial comment and patterns that distinguish Rexx from other
+ C-like languages.
+ """
+ if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE):
+ # Header matches MVS Rexx requirements, this is certainly a Rexx
+ # script.
+ return 1.0
+ elif text.startswith('/*'):
+ # Header matches general Rexx requirements; the source code might
+ # still be any language using C comments such as C++, C# or Java.
+ lowerText = text.lower()
+ result = sum(weight
+ for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS
+ if pattern.search(lowerText)) + 0.01
+ return min(result, 1.0)
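Since the weighted heuristic is the interesting part here, a small sketch (not part of the patch, both snippets invented) of how analyse_text() behaves:

    from pygments.lexers import RexxLexer

    mvs_style = '/* REXX */\nsay "hello"\n'
    c_comment = '/* just a C comment */\nint main(void) { return 0; }\n'

    # The MVS-style header is matched directly and scores 1.0; the plain
    # C-style comment only collects the small base weight (0.01 here).
    print(RexxLexer.analyse_text(mvs_style))
    print(RexxLexer.analyse_text(c_comment))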
+
+
+class MOOCodeLexer(RegexLexer):
+ """
+ For `MOOCode <http://www.moo.mud.org/>`_ (the MOO scripting
+ language).
+
+ .. versionadded:: 0.9
+ """
+ name = 'MOOCode'
+ filenames = ['*.moo']
+ aliases = ['moocode', 'moo']
+ mimetypes = ['text/x-moocode']
+
+ tokens = {
+ 'root': [
+ # Numbers
+ (r'(0|[1-9][0-9_]*)', Number.Integer),
+ # Strings
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # exceptions
+ (r'(E_PERM|E_DIV)', Name.Exception),
+ # db-refs
+ (r'((#[-0-9]+)|(\$\w+))', Name.Entity),
+ # Keywords
+ (r'\b(if|else|elseif|endif|for|endfor|fork|endfork|while'
+ r'|endwhile|break|continue|return|try'
+ r'|except|endtry|finally|in)\b', Keyword),
+ # builtins
+ (r'(random|length)', Name.Builtin),
+ # special variables
+ (r'(player|caller|this|args)', Name.Variable.Instance),
+ # skip whitespace
+ (r'\s+', Text),
+ (r'\n', Text),
+ # other operators
+ (r'([!;=,{}&|:.\[\]@()<>?]+)', Operator),
+ # function call
+ (r'(\w+)(\()', bygroups(Name.Function, Operator)),
+ # variables
+ (r'(\w+)', Text),
+ ]
+ }
+
+
+class HybrisLexer(RegexLexer):
+ """
+ For `Hybris <http://www.hybris-lang.org>`_ source code.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Hybris'
+ aliases = ['hybris', 'hy']
+ filenames = ['*.hy', '*.hyb']
+ mimetypes = ['text/x-hybris', 'application/x-hybris']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:function|method|operator\s+)+?)'
+ r'([a-zA-Z_]\w*)'
+ r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@[a-zA-Z_][\w.]*', Name.Decorator),
+ (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|'
+ r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword),
+ (r'(extends|private|protected|public|static|throws|function|method|'
+ r'operator)\b', Keyword.Declaration),
+ (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|'
+ r'__INC_PATH__)\b', Keyword.Constant),
+ (r'(class|struct)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'class'),
+ (r'(import|include)(\s+)',
+ bygroups(Keyword.Namespace, Text), 'import'),
+ (words((
+ 'gc_collect', 'gc_mm_items', 'gc_mm_usage', 'gc_collect_threshold',
+ 'urlencode', 'urldecode', 'base64encode', 'base64decode', 'sha1', 'crc32', 'sha2',
+ 'md5', 'md5_file', 'acos', 'asin', 'atan', 'atan2', 'ceil', 'cos', 'cosh', 'exp',
+ 'fabs', 'floor', 'fmod', 'log', 'log10', 'pow', 'sin', 'sinh', 'sqrt', 'tan', 'tanh',
+ 'isint', 'isfloat', 'ischar', 'isstring', 'isarray', 'ismap', 'isalias', 'typeof',
+ 'sizeof', 'toint', 'tostring', 'fromxml', 'toxml', 'binary', 'pack', 'load', 'eval',
+ 'var_names', 'var_values', 'user_functions', 'dyn_functions', 'methods', 'call',
+ 'call_method', 'mknod', 'mkfifo', 'mount', 'umount2', 'umount', 'ticks', 'usleep',
+ 'sleep', 'time', 'strtime', 'strdate', 'dllopen', 'dlllink', 'dllcall', 'dllcall_argv',
+ 'dllclose', 'env', 'exec', 'fork', 'getpid', 'wait', 'popen', 'pclose', 'exit', 'kill',
+ 'pthread_create', 'pthread_create_argv', 'pthread_exit', 'pthread_join', 'pthread_kill',
+ 'smtp_send', 'http_get', 'http_post', 'http_download', 'socket', 'bind', 'listen',
+ 'accept', 'getsockname', 'getpeername', 'settimeout', 'connect', 'server', 'recv',
+ 'send', 'close', 'print', 'println', 'printf', 'input', 'readline', 'serial_open',
+ 'serial_fcntl', 'serial_get_attr', 'serial_get_ispeed', 'serial_get_ospeed',
+ 'serial_set_attr', 'serial_set_ispeed', 'serial_set_ospeed', 'serial_write',
+ 'serial_read', 'serial_close', 'xml_load', 'xml_parse', 'fopen', 'fseek', 'ftell',
+ 'fsize', 'fread', 'fwrite', 'fgets', 'fclose', 'file', 'readdir', 'pcre_replace', 'size',
+ 'pop', 'unmap', 'has', 'keys', 'values', 'length', 'find', 'substr', 'replace', 'split',
+ 'trim', 'remove', 'contains', 'join'), suffix=r'\b'),
+ Name.Builtin),
+ (words((
+ 'MethodReference', 'Runner', 'Dll', 'Thread', 'Pipe', 'Process',
+ 'Runnable', 'CGI', 'ClientSocket', 'Socket', 'ServerSocket',
+ 'File', 'Console', 'Directory', 'Exception'), suffix=r'\b'),
+ Keyword.Type),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
+ (r'(\.)([a-zA-Z_]\w*)',
+ bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'[~^*!%&\[\](){}<>|+=:;,./?\-@]+', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text),
+ ],
+ 'class': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
+ ],
+ }
diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py
index 5ec9deaa..1e3640bf 100644
--- a/pygments/lexers/shell.py
+++ b/pygments/lexers/shell.py
@@ -5,7 +5,7 @@
Lexers for various shells.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -27,28 +27,32 @@ class BashLexer(RegexLexer):
"""
Lexer for (ba|k|)sh shell scripts.
- *New in Pygments 0.6.*
+ .. versionadded:: 0.6
"""
name = 'Bash'
- aliases = ['bash', 'sh', 'ksh']
+ aliases = ['bash', 'sh', 'ksh', 'shell']
filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
- '.bashrc', 'bashrc', '.bash_*', 'bash_*']
+ '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD']
mimetypes = ['application/x-sh', 'application/x-shellscript']
tokens = {
'root': [
include('basic'),
- (r'\$\(\(', Keyword, 'math'),
- (r'\$\(', Keyword, 'paren'),
- (r'\${#?', Keyword, 'curly'),
(r'`', String.Backtick, 'backticks'),
include('data'),
+ include('interp'),
+ ],
+ 'interp': [
+ (r'\$\(\(', Keyword, 'math'),
+ (r'\$\(', Keyword, 'paren'),
+ (r'\$\{#?', String.Interpol, 'curly'),
+ (r'\$(\w+|.)', Name.Variable),
],
'basic': [
(r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
- r'select|continue|until|esac|elif)\s*\b',
- Keyword),
+ r'select|continue|until|esac|elif)(\s*)\b',
+ bygroups(Keyword, Text)),
(r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
@@ -56,7 +60,8 @@ class BashLexer(RegexLexer):
r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
r'ulimit|umask|unalias|unset|wait)\s*\b(?!\.)',
Name.Builtin),
- (r'#.*\n', Comment),
+ (r'\A#!.+\n', Comment.Hashbang),
+ (r'#.*\n', Comment.Single),
(r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
(r'[\[\]{}()=]', Operator),
@@ -65,22 +70,28 @@ class BashLexer(RegexLexer):
(r'&&|\|\|', Operator),
],
'data': [
- (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)\$?'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
+ (r'"', String.Double, 'string'),
+ (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r"(?s)'.*?'", String.Single),
(r';', Punctuation),
(r'&', Punctuation),
(r'\|', Punctuation),
(r'\s+', Text),
- (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
(r'\d+(?= |\Z)', Number),
- (r'\$#?(\w+|.)', Name.Variable),
+ (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
(r'<', Text),
],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
+ include('interp'),
+ ],
'curly': [
- (r'}', Keyword, '#pop'),
+ (r'\}', String.Interpol, '#pop'),
(r':-', Keyword),
- (r'[a-zA-Z0-9_]+', Name.Variable),
- (r'[^}:"\'`$]+', Punctuation),
+ (r'\w+', Name.Variable),
+ (r'[^}:"\'`$\\]+', Punctuation),
(r':', Punctuation),
include('root'),
],
@@ -91,6 +102,8 @@ class BashLexer(RegexLexer):
'math': [
(r'\)\)', Keyword, '#pop'),
(r'[-+*/%^|&]|\*\*|\|\|', Operator),
+ (r'\d+#\d+', Number),
+ (r'\d+#(?! )', Number),
(r'\d+', Number),
include('root'),
],
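A quick lookup sketch (not part of the diff) of what the alias and filename additions above mean in practice:

    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

    # 'shell' is a new alias for the Bash lexer.
    print(get_lexer_by_name('shell').name)          # 'Bash'
    # PKGBUILD files should now be recognised as shell scripts too.
    print(get_lexer_for_filename('PKGBUILD').name)  # 'Bash'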
@@ -111,7 +124,7 @@ class BashSessionLexer(Lexer):
"""
Lexer for simplistic shell sessions.
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'Bash Session'
@@ -162,7 +175,7 @@ class ShellSessionLexer(Lexer):
"""
Lexer for shell sessions that works with different command prompts
- *New in Pygments 1.6.*
+ .. versionadded:: 1.6
"""
name = 'Shell Session'
@@ -179,7 +192,7 @@ class ShellSessionLexer(Lexer):
for match in line_re.finditer(text):
line = match.group()
- m = re.match(r'^((?:\[?\S+@[^$#%]+)[$#%])(.*\n?)', line)
+ m = re.match(r'^((?:\[?\S+@[^$#%]+\]?\s*)[$#%])(.*\n?)', line)
if m:
# To support output lexers (say diff output), the output
# needs to be broken by prompts whenever the output lexer
@@ -208,10 +221,10 @@ class BatchLexer(RegexLexer):
"""
Lexer for the DOS/Windows Batch file format.
- *New in Pygments 0.7.*
+ .. versionadded:: 0.7
"""
name = 'Batchfile'
- aliases = ['bat', 'dosbatch', 'winbatch']
+ aliases = ['bat', 'batch', 'dosbatch', 'winbatch']
filenames = ['*.bat', '*.cmd']
mimetypes = ['application/x-dos-batch']
@@ -228,9 +241,9 @@ class BatchLexer(RegexLexer):
# like %~$VAR:zlt
(r'%%?[~$:\w]+%?', Name.Variable),
(r'::.*', Comment), # Technically :: only works at BOL
- (r'(set)(\s+)(\w+)', bygroups(Keyword, Text, Name.Variable)),
- (r'(call)(\s+)(:\w+)', bygroups(Keyword, Text, Name.Label)),
- (r'(goto)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
+ (r'\b(set)(\s+)(\w+)', bygroups(Keyword, Text, Name.Variable)),
+ (r'\b(call)(\s+)(:\w+)', bygroups(Keyword, Text, Name.Label)),
+ (r'\b(goto)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
(r'\b(set|call|echo|on|off|endlocal|for|do|goto|if|pause|'
r'setlocal|shift|errorlevel|exist|defined|cmdextversion|'
r'errorlevel|else|cd|md|del|deltree|cls|choice)\b', Keyword),
@@ -264,7 +277,7 @@ class TcshLexer(RegexLexer):
"""
Lexer for tcsh scripts.
- *New in Pygments 0.10.*
+ .. versionadded:: 0.10
"""
name = 'Tcsh'
@@ -276,7 +289,7 @@ class TcshLexer(RegexLexer):
'root': [
include('basic'),
(r'\$\(', Keyword, 'paren'),
- (r'\${#?', Keyword, 'curly'),
+ (r'\$\{#?', Keyword, 'curly'),
(r'`', String.Backtick, 'backticks'),
include('data'),
],
@@ -294,24 +307,25 @@ class TcshLexer(RegexLexer):
r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
r'ver|wait|warp|watchlog|where|which)\s*\b',
Name.Builtin),
- (r'#.*\n', Comment),
+ (r'#.*', Comment),
(r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
(r'[\[\]{}()=]+', Operator),
(r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ (r';', Punctuation),
],
'data': [
(r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
(r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r'\s+', Text),
- (r'[^=\s\[\]{}()$"\'`\\]+', Text),
+ (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
(r'\d+(?= |\Z)', Number),
(r'\$#?(\w+|.)', Name.Variable),
],
'curly': [
- (r'}', Keyword, '#pop'),
+ (r'\}', Keyword, '#pop'),
(r':-', Keyword),
- (r'[a-zA-Z0-9_]+', Name.Variable),
+ (r'\w+', Name.Variable),
(r'[^}:"\'`$]+', Punctuation),
(r':', Punctuation),
include('root'),
@@ -331,7 +345,7 @@ class PowerShellLexer(RegexLexer):
"""
For Windows PowerShell code.
- *New in Pygments 1.5.*
+ .. versionadded:: 1.5
"""
name = 'PowerShell'
aliases = ['powershell', 'posh', 'ps1', 'psm1']
@@ -387,13 +401,13 @@ class PowerShellLexer(RegexLexer):
(r'`[\'"$@-]', Punctuation),
(r'"', String.Double, 'string'),
(r"'([^']|'')*'", String.Single),
- (r'(\$|@@|@)((global|script|private|env):)?[a-z0-9_]+',
+ (r'(\$|@@|@)((global|script|private|env):)?\w+',
Name.Variable),
(r'(%s)\b' % '|'.join(keywords), Keyword),
(r'-(%s)\b' % '|'.join(operators), Operator),
- (r'(%s)-[a-z_][a-z0-9_]*\b' % '|'.join(verbs), Name.Builtin),
- (r'\[[a-z_\[][a-z0-9_. `,\[\]]*\]', Name.Constant), # .net [type]s
- (r'-[a-z_][a-z0-9_]*', Name),
+ (r'(%s)-[a-z_]\w*\b' % '|'.join(verbs), Name.Builtin),
+ (r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant), # .net [type]s
+ (r'-[a-z_]\w*', Name),
(r'\w+', Name),
(r'[.,;@{}\[\]$()=+*/\\&%!~?^`|<>-]|::', Punctuation),
],
@@ -408,7 +422,7 @@ class PowerShellLexer(RegexLexer):
(r'[#&.]', Comment.Multiline),
],
'string': [
- (r"`[0abfnrtv'\"\$]", String.Escape),
+ (r"`[0abfnrtv'\"$`]", String.Escape),
(r'[^$`"]+', String.Double),
(r'\$\(', Punctuation, 'child'),
(r'""', String.Double),
diff --git a/pygments/lexers/smalltalk.py b/pygments/lexers/smalltalk.py
new file mode 100644
index 00000000..ebeb6320
--- /dev/null
+++ b/pygments/lexers/smalltalk.py
@@ -0,0 +1,195 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.smalltalk
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Smalltalk and related languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['SmalltalkLexer', 'NewspeakLexer']
+
+
+class SmalltalkLexer(RegexLexer):
+ """
+ For `Smalltalk <http://www.smalltalk.org/>`_ syntax.
+ Contributed by Stefan Matthias Aust.
+ Rewritten by Nils Winter.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Smalltalk'
+ filenames = ['*.st']
+ aliases = ['smalltalk', 'squeak', 'st']
+ mimetypes = ['text/x-smalltalk']
+
+ tokens = {
+ 'root': [
+ (r'(<)(\w+:)(.*?)(>)', bygroups(Text, Keyword, Text, Text)),
+ include('squeak fileout'),
+ include('whitespaces'),
+ include('method definition'),
+ (r'(\|)([\w\s]*)(\|)', bygroups(Operator, Name.Variable, Operator)),
+ include('objects'),
+ (r'\^|\:=|\_', Operator),
+ # temporaries
+ (r'[\]({}.;!]', Text),
+ ],
+ 'method definition': [
+ # Not perfect: can't allow whitespace at the beginning and the end
+ # without breaking everything
+ (r'([a-zA-Z]+\w*:)(\s*)(\w+)',
+ bygroups(Name.Function, Text, Name.Variable)),
+ (r'^(\b[a-zA-Z]+\w*\b)(\s*)$', bygroups(Name.Function, Text)),
+ (r'^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$',
+ bygroups(Name.Function, Text, Name.Variable, Text)),
+ ],
+ 'blockvariables': [
+ include('whitespaces'),
+ (r'(:)(\s*)(\w+)',
+ bygroups(Operator, Text, Name.Variable)),
+ (r'\|', Operator, '#pop'),
+ default('#pop'), # else pop
+ ],
+ 'literals': [
+ (r"'(''|[^'])*'", String, 'afterobject'),
+ (r'\$.', String.Char, 'afterobject'),
+ (r'#\(', String.Symbol, 'parenth'),
+ (r'\)', Text, 'afterobject'),
+ (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'),
+ ],
+ '_parenth_helper': [
+ include('whitespaces'),
+ (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
+ (r'[-+*/\\~<>=|&#!?,@%\w:]+', String.Symbol),
+ # literals
+ (r"'(''|[^'])*'", String),
+ (r'\$.', String.Char),
+ (r'#*\(', String.Symbol, 'inner_parenth'),
+ ],
+ 'parenth': [
+ # This state is a bit tricky since
+ # we can't just pop this state
+ (r'\)', String.Symbol, ('root', 'afterobject')),
+ include('_parenth_helper'),
+ ],
+ 'inner_parenth': [
+ (r'\)', String.Symbol, '#pop'),
+ include('_parenth_helper'),
+ ],
+ 'whitespaces': [
+ # skip whitespace and comments
+ (r'\s+', Text),
+ (r'"(""|[^"])*"', Comment),
+ ],
+ 'objects': [
+ (r'\[', Text, 'blockvariables'),
+ (r'\]', Text, 'afterobject'),
+ (r'\b(self|super|true|false|nil|thisContext)\b',
+ Name.Builtin.Pseudo, 'afterobject'),
+ (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'),
+ (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'),
+ (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)',
+ String.Symbol, 'afterobject'),
+ include('literals'),
+ ],
+ 'afterobject': [
+ (r'! !$', Keyword, '#pop'), # squeak chunk delimiter
+ include('whitespaces'),
+ (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
+ Name.Builtin, '#pop'),
+ (r'\b(new\b(?!:))', Name.Builtin),
+ (r'\:=|\_', Operator, '#pop'),
+ (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'),
+ (r'\b[a-zA-Z]+\w*', Name.Function),
+ (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'),
+ (r'\.', Punctuation, '#pop'),
+ (r';', Punctuation),
+ (r'[\])}]', Text),
+ (r'[\[({]', Text, '#pop'),
+ ],
+ 'squeak fileout': [
+ # Squeak fileout format (optional)
+ (r'^"(""|[^"])*"!', Keyword),
+ (r"^'(''|[^'])*'!", Keyword),
+ (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)',
+ bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)),
+ (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)",
+ bygroups(Keyword, Name.Class, Keyword, String, Keyword)),
+ (r'^(\w+)( subclass: )(#\w+)'
+ r'(\s+instanceVariableNames: )(.*?)'
+ r'(\s+classVariableNames: )(.*?)'
+ r'(\s+poolDictionaries: )(.*?)'
+ r'(\s+category: )(.*?)(!)',
+ bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword,
+ String, Keyword, String, Keyword, String, Keyword)),
+ (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)',
+ bygroups(Name.Class, Keyword, String, Keyword)),
+ (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)),
+ (r'! !$', Keyword),
+ ],
+ }
+
+
+class NewspeakLexer(RegexLexer):
+ """
+ For `Newspeak <http://newspeaklanguage.org/>`_ syntax.
+
+ .. versionadded:: 1.1
+ """
+ name = 'Newspeak'
+ filenames = ['*.ns2']
+ aliases = ['newspeak', ]
+ mimetypes = ['text/x-newspeak']
+
+ tokens = {
+ 'root': [
+ (r'\b(Newsqueak2)\b', Keyword.Declaration),
+ (r"'[^']*'", String),
+ (r'\b(class)(\s+)(\w+)(\s*)',
+ bygroups(Keyword.Declaration, Text, Name.Class, Text)),
+ (r'\b(mixin|self|super|private|public|protected|nil|true|false)\b',
+ Keyword),
+ (r'(\w+\:)(\s*)([a-zA-Z_]\w+)',
+ bygroups(Name.Function, Text, Name.Variable)),
+ (r'(\w+)(\s*)(=)',
+ bygroups(Name.Attribute, Text, Operator)),
+ (r'<\w+>', Comment.Special),
+ include('expressionstat'),
+ include('whitespace')
+ ],
+
+ 'expressionstat': [
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'\d+', Number.Integer),
+ (r':\w+', Name.Variable),
+ (r'(\w+)(::)', bygroups(Name.Variable, Operator)),
+ (r'\w+:', Name.Function),
+ (r'\w+', Name.Variable),
+ (r'\(|\)', Punctuation),
+ (r'\[|\]', Punctuation),
+ (r'\{|\}', Punctuation),
+
+ (r'(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)', Operator),
+ (r'\.|;', Punctuation),
+ include('whitespace'),
+ include('literals'),
+ ],
+ 'literals': [
+ (r'\$.', String),
+ (r"'[^']*'", String),
+ (r"#'[^']*'", String.Symbol),
+ (r"#\w+:?", String.Symbol),
+ (r"#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+", String.Symbol)
+ ],
+ 'whitespace': [
+ (r'\s+', Text),
+ (r'"[^"]*"', Comment)
+ ],
+ }
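For orientation, a short sketch (assumed usage, not from the patch) of how the relocated SmalltalkLexer tokenizes a simple expression: capitalized identifiers become Name.Class and keyword messages become Name.Function, per the 'objects' and 'afterobject' states above.

    from pygments.lexers.smalltalk import SmalltalkLexer
    from pygments.token import Name

    toks = list(SmalltalkLexer().get_tokens("Transcript showCr: 'hi'."))
    assert toks[0] == (Name.Class, 'Transcript')      # capitalized receiver
    assert toks[2] == (Name.Function, 'showCr:')      # keyword message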
diff --git a/pygments/lexers/snobol.py b/pygments/lexers/snobol.py
new file mode 100644
index 00000000..e4178f9c
--- /dev/null
+++ b/pygments/lexers/snobol.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.snobol
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the SNOBOL language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['SnobolLexer']
+
+
+class SnobolLexer(RegexLexer):
+ """
+ Lexer for the SNOBOL4 programming language.
+
+ Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
+ Does not require spaces around binary operators.
+
+ .. versionadded:: 1.5
+ """
+
+ name = "Snobol"
+ aliases = ["snobol"]
+ filenames = ['*.snobol']
+ mimetypes = ['text/x-snobol']
+
+ tokens = {
+ # root state, start of line
+ # comments, continuation lines, and directives start in column 1
+ # as do labels
+ 'root': [
+ (r'\*.*\n', Comment),
+ (r'[+.] ', Punctuation, 'statement'),
+ (r'-.*\n', Comment),
+ (r'END\s*\n', Name.Label, 'heredoc'),
+ (r'[A-Za-z$][\w$]*', Name.Label, 'statement'),
+ (r'\s+', Text, 'statement'),
+ ],
+ # statement state, line after continuation or label
+ 'statement': [
+ (r'\s*\n', Text, '#pop'),
+ (r'\s+', Text),
+ (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|'
+ r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|'
+ r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|'
+ r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])',
+ Name.Builtin),
+ (r'[A-Za-z][\w.]*', Name),
+ # ASCII equivalents of original operators
+ # | for the EBCDIC equivalent, ! likewise
+ # \ for EBCDIC negation
+ (r'\*\*|[?$.!%*/#+\-@|&\\=]', Operator),
+ (r'"[^"]*"', String),
+ (r"'[^']*'", String),
+ # Accept SPITBOL syntax for real numbers
+ # as well as Macro SNOBOL4
+ (r'[0-9]+(?=[^.EeDd])', Number.Integer),
+ (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float),
+ # Goto
+ (r':', Punctuation, 'goto'),
+ (r'[()<>,;]', Punctuation),
+ ],
+ # Goto block
+ 'goto': [
+ (r'\s*\n', Text, "#pop:2"),
+ (r'\s+', Text),
+ (r'F|S', Keyword),
+ (r'(\()([A-Za-z][\w.]*)(\))',
+ bygroups(Punctuation, Name.Label, Punctuation))
+ ],
+ # everything after the END statement is basically one
+ # big heredoc.
+ 'heredoc': [
+ (r'.*\n', String.Heredoc)
+ ]
+ }
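A rough smoke test for the new module (assumption: the existing 'snobol' alias in the lexer mapping now points here); it exercises the column-1 label rule and the builtin-name rule from the 'root' and 'statement' states above.

    from pygments.lexers import get_lexer_by_name
    from pygments.token import Name

    toks = list(get_lexer_by_name('snobol').get_tokens('LOOP OUTPUT = TRIM(INPUT)\n'))
    assert toks[0] == (Name.Label, 'LOOP')      # identifier in column 1 is a label
    assert toks[2] == (Name.Builtin, 'OUTPUT')  # recognized builtin name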
diff --git a/pygments/lexers/special.py b/pygments/lexers/special.py
index 9b3cd508..d3a168e7 100644
--- a/pygments/lexers/special.py
+++ b/pygments/lexers/special.py
@@ -5,16 +5,15 @@
Special lexers.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-import cStringIO
from pygments.lexer import Lexer
from pygments.token import Token, Error, Text
-from pygments.util import get_choice_opt, b
+from pygments.util import get_choice_opt, text_type, BytesIO
__all__ = ['TextLexer', 'RawTokenLexer']
@@ -35,7 +34,8 @@ class TextLexer(Lexer):
_ttype_cache = {}
-line_re = re.compile(b('.*?\n'))
+line_re = re.compile(b'.*?\n')
+
class RawTokenLexer(Lexer):
"""
@@ -60,12 +60,12 @@ class RawTokenLexer(Lexer):
Lexer.__init__(self, **options)
def get_tokens(self, text):
- if isinstance(text, unicode):
+ if isinstance(text, text_type):
# raw token stream never has any non-ASCII characters
text = text.encode('ascii')
if self.compress == 'gz':
import gzip
- gzipfile = gzip.GzipFile('', 'rb', 9, cStringIO.StringIO(text))
+ gzipfile = gzip.GzipFile('', 'rb', 9, BytesIO(text))
text = gzipfile.read()
elif self.compress == 'bz2':
import bz2
@@ -73,7 +73,7 @@ class RawTokenLexer(Lexer):
# do not call Lexer.get_tokens() because we do not want Unicode
# decoding to occur, and stripping is not optional.
- text = text.strip(b('\n')) + b('\n')
+ text = text.strip(b'\n') + b'\n'
for i, t, v in self.get_tokens_unprocessed(text):
yield t, v
@@ -81,9 +81,9 @@ class RawTokenLexer(Lexer):
length = 0
for match in line_re.finditer(text):
try:
- ttypestr, val = match.group().split(b('\t'), 1)
+ ttypestr, val = match.group().split(b'\t', 1)
except ValueError:
- val = match.group().decode(self.encoding)
+ val = match.group().decode('ascii', 'replace')
ttype = Error
else:
ttype = _ttype_cache.get(ttypestr)
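The switch from the old b() wrapper to plain bytes literals is the core of this hunk; a tiny sketch (not from the patch) of what the rewritten pattern accepts:

    import re

    line_re = re.compile(b'.*?\n')              # same literal as above
    assert line_re.match(b'Token.Text\t"x"\n')  # matches one raw-token line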
diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py
index 546d1f87..f575ed38 100644
--- a/pygments/lexers/sql.py
+++ b/pygments/lexers/sql.py
@@ -34,28 +34,30 @@
The ``tests/examplefiles`` contains a few test files with data to be
parsed by these lexers.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups
+from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, words
from pygments.token import Punctuation, \
- Text, Comment, Operator, Keyword, Name, String, Number, Generic
+ Text, Comment, Operator, Keyword, Name, String, Number, Generic
from pygments.lexers import get_lexer_by_name, ClassNotFound
+from pygments.util import iteritems
from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
- PSEUDO_TYPES, PLPGSQL_KEYWORDS
+ PSEUDO_TYPES, PLPGSQL_KEYWORDS
__all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer',
- 'SqlLexer', 'MySqlLexer', 'SqliteConsoleLexer']
+ 'SqlLexer', 'MySqlLexer', 'SqliteConsoleLexer', 'RqlLexer']
line_re = re.compile('.*?\n')
language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)
+
def language_callback(lexer, match):
"""Parse the content of a $-string using a lexer
@@ -101,7 +103,7 @@ class PostgresBase(object):
if lang.lower() == 'sql':
return get_lexer_by_name('postgresql', **self.options)
- tries = [ lang ]
+ tries = [lang]
if lang.startswith('pl'):
tries.append(lang[2:])
if lang.endswith('u'):
@@ -124,7 +126,7 @@ class PostgresLexer(PostgresBase, RegexLexer):
"""
Lexer for the PostgreSQL dialect of SQL.
- *New in Pygments 1.5.*
+ .. versionadded:: 1.5
"""
name = 'PostgreSQL SQL dialect'
@@ -137,29 +139,29 @@ class PostgresLexer(PostgresBase, RegexLexer):
(r'\s+', Text),
(r'--.*?\n', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'(' + '|'.join([s.replace(" ", "\s+")
- for s in DATATYPES + PSEUDO_TYPES])
- + r')\b', Name.Builtin),
- (r'(' + '|'.join(KEYWORDS) + r')\b', Keyword),
+ (r'(' + '|'.join(s.replace(" ", "\s+")
+ for s in DATATYPES + PSEUDO_TYPES)
+ + r')\b', Name.Builtin),
+ (words(KEYWORDS, suffix=r'\b'), Keyword),
(r'[+*/<>=~!@#%^&|`?-]+', Operator),
(r'::', Operator), # cast
(r'\$\d+', Name.Variable),
(r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
(r'[0-9]+', Number.Integer),
(r"(E|U&)?'(''|[^'])*'", String.Single),
- (r'(U&)?"(""|[^"])*"', String.Name), # quoted identifier
- (r'(?s)(\$[^\$]*\$)(.*?)(\1)', language_callback),
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
+ (r'(U&)?"(""|[^"])*"', String.Name), # quoted identifier
+ (r'(?s)(\$[^$]*\$)(.*?)(\1)', language_callback),
+ (r'[a-z_]\w*', Name),
# psql variable in SQL
- (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable),
+ (r""":(['"]?)[a-z]\w*\b\1""", Name.Variable),
- (r'[;:()\[\]\{\},\.]', Punctuation),
+ (r'[;:()\[\]{},.]', Punctuation),
],
'multiline-comments': [
(r'/\*', Comment.Multiline, 'multiline-comments'),
(r'\*/', Comment.Multiline, '#pop'),
- (r'[^/\*]+', Comment.Multiline),
+ (r'[^/*]+', Comment.Multiline),
(r'[/*]', Comment.Multiline)
],
}
@@ -169,20 +171,20 @@ class PlPgsqlLexer(PostgresBase, RegexLexer):
"""
Handle the extra syntax in Pl/pgSQL language.
- *New in Pygments 1.5.*
+ .. versionadded:: 1.5
"""
name = 'PL/pgSQL'
aliases = ['plpgsql']
mimetypes = ['text/x-plpgsql']
flags = re.IGNORECASE
- tokens = dict((k, l[:]) for (k, l) in PostgresLexer.tokens.iteritems())
+ tokens = dict((k, l[:]) for (k, l) in iteritems(PostgresLexer.tokens))
# extend the keywords list
for i, pattern in enumerate(tokens['root']):
if pattern[1] == Keyword:
tokens['root'][i] = (
- r'(' + '|'.join(KEYWORDS + PLPGSQL_KEYWORDS) + r')\b',
+ words(KEYWORDS + PLPGSQL_KEYWORDS, suffix=r'\b'),
Keyword)
del i
break
@@ -191,10 +193,10 @@ class PlPgsqlLexer(PostgresBase, RegexLexer):
# Add specific PL/pgSQL rules (before the SQL ones)
tokens['root'][:0] = [
- (r'\%[a-z][a-z0-9_]*\b', Name.Builtin), # actually, a datatype
+ (r'\%[a-z]\w*\b', Name.Builtin), # actually, a datatype
(r':=', Operator),
- (r'\<\<[a-z][a-z0-9_]*\>\>', Name.Label),
- (r'\#[a-z][a-z0-9_]*\b', Keyword.Pseudo), # #variable_conflict
+ (r'\<\<[a-z]\w*\>\>', Name.Label),
+ (r'\#[a-z]\w*\b', Keyword.Pseudo), # #variable_conflict
]
@@ -210,7 +212,7 @@ class PsqlRegexLexer(PostgresBase, RegexLexer):
aliases = [] # not public
flags = re.IGNORECASE
- tokens = dict((k, l[:]) for (k, l) in PostgresLexer.tokens.iteritems())
+ tokens = dict((k, l[:]) for (k, l) in iteritems(PostgresLexer.tokens))
tokens['root'].append(
(r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
@@ -218,7 +220,7 @@ class PsqlRegexLexer(PostgresBase, RegexLexer):
(r'\n', Text, 'root'),
(r'\s+', Text),
(r'\\[^\s]+', Keyword.Pseudo),
- (r""":(['"]?)[a-z][a-z0-9_]*\b\1""", Name.Variable),
+ (r""":(['"]?)[a-z]\w*\b\1""", Name.Variable),
(r"'(''|[^'])*'", String.Single),
(r"`([^`])*`", String.Backtick),
(r"[^\s]+", String.Symbol),
@@ -239,24 +241,28 @@ class lookahead(object):
def __init__(self, x):
self.iter = iter(x)
self._nextitem = None
+
def __iter__(self):
return self
+
def send(self, i):
self._nextitem = i
return i
- def next(self):
+
+ def __next__(self):
if self._nextitem is not None:
ni = self._nextitem
self._nextitem = None
return ni
- return self.iter.next()
+ return next(self.iter)
+ next = __next__
class PostgresConsoleLexer(Lexer):
"""
Lexer for psql sessions.
- *New in Pygments 1.5.*
+ .. versionadded:: 1.5
"""
name = 'PostgreSQL console (psql)'
@@ -277,7 +283,7 @@ class PostgresConsoleLexer(Lexer):
insertions = []
while 1:
try:
- line = lines.next()
+ line = next(lines)
except StopIteration:
# allow the emission of partially collected items
# the repl loop will be broken below
@@ -303,18 +309,18 @@ class PostgresConsoleLexer(Lexer):
# TODO: better handle multiline comments at the end with
# a lexer with an external state?
if re_psql_command.match(curcode) \
- or re_end_command.search(curcode):
+ or re_end_command.search(curcode):
break
# Emit the combined stream of command and prompt(s)
for item in do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode)):
+ sql.get_tokens_unprocessed(curcode)):
yield item
# Emit the output lines
out_token = Generic.Output
while 1:
- line = lines.next()
+ line = next(lines)
mprompt = re_prompt.match(line)
if mprompt is not None:
# push the line back to have it processed by the prompt
@@ -324,7 +330,7 @@ class PostgresConsoleLexer(Lexer):
mmsg = re_message.match(line)
if mmsg is not None:
if mmsg.group(1).startswith("ERROR") \
- or mmsg.group(1).startswith("FATAL"):
+ or mmsg.group(1).startswith("FATAL"):
out_token = Generic.Error
yield (mmsg.start(1), Generic.Strong, mmsg.group(1))
yield (mmsg.start(2), out_token, mmsg.group(2))
@@ -349,97 +355,100 @@ class SqlLexer(RegexLexer):
(r'\s+', Text),
(r'--.*?\n', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'(ABORT|ABS|ABSOLUTE|ACCESS|ADA|ADD|ADMIN|AFTER|AGGREGATE|'
- r'ALIAS|ALL|ALLOCATE|ALTER|ANALYSE|ANALYZE|AND|ANY|ARE|AS|'
- r'ASC|ASENSITIVE|ASSERTION|ASSIGNMENT|ASYMMETRIC|AT|ATOMIC|'
- r'AUTHORIZATION|AVG|BACKWARD|BEFORE|BEGIN|BETWEEN|BITVAR|'
- r'BIT_LENGTH|BOTH|BREADTH|BY|C|CACHE|CALL|CALLED|CARDINALITY|'
- r'CASCADE|CASCADED|CASE|CAST|CATALOG|CATALOG_NAME|CHAIN|'
- r'CHARACTERISTICS|CHARACTER_LENGTH|CHARACTER_SET_CATALOG|'
- r'CHARACTER_SET_NAME|CHARACTER_SET_SCHEMA|CHAR_LENGTH|CHECK|'
- r'CHECKED|CHECKPOINT|CLASS|CLASS_ORIGIN|CLOB|CLOSE|CLUSTER|'
- r'COALSECE|COBOL|COLLATE|COLLATION|COLLATION_CATALOG|'
- r'COLLATION_NAME|COLLATION_SCHEMA|COLUMN|COLUMN_NAME|'
- r'COMMAND_FUNCTION|COMMAND_FUNCTION_CODE|COMMENT|COMMIT|'
- r'COMMITTED|COMPLETION|CONDITION_NUMBER|CONNECT|CONNECTION|'
- r'CONNECTION_NAME|CONSTRAINT|CONSTRAINTS|CONSTRAINT_CATALOG|'
- r'CONSTRAINT_NAME|CONSTRAINT_SCHEMA|CONSTRUCTOR|CONTAINS|'
- r'CONTINUE|CONVERSION|CONVERT|COPY|CORRESPONTING|COUNT|'
- r'CREATE|CREATEDB|CREATEUSER|CROSS|CUBE|CURRENT|CURRENT_DATE|'
- r'CURRENT_PATH|CURRENT_ROLE|CURRENT_TIME|CURRENT_TIMESTAMP|'
- r'CURRENT_USER|CURSOR|CURSOR_NAME|CYCLE|DATA|DATABASE|'
- r'DATETIME_INTERVAL_CODE|DATETIME_INTERVAL_PRECISION|DAY|'
- r'DEALLOCATE|DECLARE|DEFAULT|DEFAULTS|DEFERRABLE|DEFERRED|'
- r'DEFINED|DEFINER|DELETE|DELIMITER|DELIMITERS|DEREF|DESC|'
- r'DESCRIBE|DESCRIPTOR|DESTROY|DESTRUCTOR|DETERMINISTIC|'
- r'DIAGNOSTICS|DICTIONARY|DISCONNECT|DISPATCH|DISTINCT|DO|'
- r'DOMAIN|DROP|DYNAMIC|DYNAMIC_FUNCTION|DYNAMIC_FUNCTION_CODE|'
- r'EACH|ELSE|ENCODING|ENCRYPTED|END|END-EXEC|EQUALS|ESCAPE|EVERY|'
- r'EXCEPTION|EXCEPT|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|'
- r'EXISTS|EXPLAIN|EXTERNAL|EXTRACT|FALSE|FETCH|FINAL|FIRST|FOR|'
- r'FORCE|FOREIGN|FORTRAN|FORWARD|FOUND|FREE|FREEZE|FROM|FULL|'
- r'FUNCTION|G|GENERAL|GENERATED|GET|GLOBAL|GO|GOTO|GRANT|GRANTED|'
- r'GROUP|GROUPING|HANDLER|HAVING|HIERARCHY|HOLD|HOST|IDENTITY|'
- r'IGNORE|ILIKE|IMMEDIATE|IMMUTABLE|IMPLEMENTATION|IMPLICIT|IN|'
- r'INCLUDING|INCREMENT|INDEX|INDITCATOR|INFIX|INHERITS|INITIALIZE|'
- r'INITIALLY|INNER|INOUT|INPUT|INSENSITIVE|INSERT|INSTANTIABLE|'
- r'INSTEAD|INTERSECT|INTO|INVOKER|IS|ISNULL|ISOLATION|ITERATE|JOIN|'
- r'KEY|KEY_MEMBER|KEY_TYPE|LANCOMPILER|LANGUAGE|LARGE|LAST|'
- r'LATERAL|LEADING|LEFT|LENGTH|LESS|LEVEL|LIKE|LIMIT|LISTEN|LOAD|'
- r'LOCAL|LOCALTIME|LOCALTIMESTAMP|LOCATION|LOCATOR|LOCK|LOWER|'
- r'MAP|MATCH|MAX|MAXVALUE|MESSAGE_LENGTH|MESSAGE_OCTET_LENGTH|'
- r'MESSAGE_TEXT|METHOD|MIN|MINUTE|MINVALUE|MOD|MODE|MODIFIES|'
- r'MODIFY|MONTH|MORE|MOVE|MUMPS|NAMES|NATIONAL|NATURAL|NCHAR|'
- r'NCLOB|NEW|NEXT|NO|NOCREATEDB|NOCREATEUSER|NONE|NOT|NOTHING|'
- r'NOTIFY|NOTNULL|NULL|NULLABLE|NULLIF|OBJECT|OCTET_LENGTH|OF|OFF|'
- r'OFFSET|OIDS|OLD|ON|ONLY|OPEN|OPERATION|OPERATOR|OPTION|OPTIONS|'
- r'OR|ORDER|ORDINALITY|OUT|OUTER|OUTPUT|OVERLAPS|OVERLAY|OVERRIDING|'
- r'OWNER|PAD|PARAMETER|PARAMETERS|PARAMETER_MODE|PARAMATER_NAME|'
- r'PARAMATER_ORDINAL_POSITION|PARAMETER_SPECIFIC_CATALOG|'
- r'PARAMETER_SPECIFIC_NAME|PARAMATER_SPECIFIC_SCHEMA|PARTIAL|'
- r'PASCAL|PENDANT|PLACING|PLI|POSITION|POSTFIX|PRECISION|PREFIX|'
- r'PREORDER|PREPARE|PRESERVE|PRIMARY|PRIOR|PRIVILEGES|PROCEDURAL|'
- r'PROCEDURE|PUBLIC|READ|READS|RECHECK|RECURSIVE|REF|REFERENCES|'
- r'REFERENCING|REINDEX|RELATIVE|RENAME|REPEATABLE|REPLACE|RESET|'
- r'RESTART|RESTRICT|RESULT|RETURN|RETURNED_LENGTH|'
- r'RETURNED_OCTET_LENGTH|RETURNED_SQLSTATE|RETURNS|REVOKE|RIGHT|'
- r'ROLE|ROLLBACK|ROLLUP|ROUTINE|ROUTINE_CATALOG|ROUTINE_NAME|'
- r'ROUTINE_SCHEMA|ROW|ROWS|ROW_COUNT|RULE|SAVE_POINT|SCALE|SCHEMA|'
- r'SCHEMA_NAME|SCOPE|SCROLL|SEARCH|SECOND|SECURITY|SELECT|SELF|'
- r'SENSITIVE|SERIALIZABLE|SERVER_NAME|SESSION|SESSION_USER|SET|'
- r'SETOF|SETS|SHARE|SHOW|SIMILAR|SIMPLE|SIZE|SOME|SOURCE|SPACE|'
- r'SPECIFIC|SPECIFICTYPE|SPECIFIC_NAME|SQL|SQLCODE|SQLERROR|'
- r'SQLEXCEPTION|SQLSTATE|SQLWARNINIG|STABLE|START|STATE|STATEMENT|'
- r'STATIC|STATISTICS|STDIN|STDOUT|STORAGE|STRICT|STRUCTURE|STYPE|'
- r'SUBCLASS_ORIGIN|SUBLIST|SUBSTRING|SUM|SYMMETRIC|SYSID|SYSTEM|'
- r'SYSTEM_USER|TABLE|TABLE_NAME| TEMP|TEMPLATE|TEMPORARY|TERMINATE|'
- r'THAN|THEN|TIMESTAMP|TIMEZONE_HOUR|TIMEZONE_MINUTE|TO|TOAST|'
- r'TRAILING|TRANSATION|TRANSACTIONS_COMMITTED|'
- r'TRANSACTIONS_ROLLED_BACK|TRANSATION_ACTIVE|TRANSFORM|'
- r'TRANSFORMS|TRANSLATE|TRANSLATION|TREAT|TRIGGER|TRIGGER_CATALOG|'
- r'TRIGGER_NAME|TRIGGER_SCHEMA|TRIM|TRUE|TRUNCATE|TRUSTED|TYPE|'
- r'UNCOMMITTED|UNDER|UNENCRYPTED|UNION|UNIQUE|UNKNOWN|UNLISTEN|'
- r'UNNAMED|UNNEST|UNTIL|UPDATE|UPPER|USAGE|USER|'
- r'USER_DEFINED_TYPE_CATALOG|USER_DEFINED_TYPE_NAME|'
- r'USER_DEFINED_TYPE_SCHEMA|USING|VACUUM|VALID|VALIDATOR|VALUES|'
- r'VARIABLE|VERBOSE|VERSION|VIEW|VOLATILE|WHEN|WHENEVER|WHERE|'
- r'WITH|WITHOUT|WORK|WRITE|YEAR|ZONE)\b', Keyword),
- (r'(ARRAY|BIGINT|BINARY|BIT|BLOB|BOOLEAN|CHAR|CHARACTER|DATE|'
- r'DEC|DECIMAL|FLOAT|INT|INTEGER|INTERVAL|NUMBER|NUMERIC|REAL|'
- r'SERIAL|SMALLINT|VARCHAR|VARYING|INT8|SERIAL8|TEXT)\b',
+ (words((
+ 'ABORT', 'ABS', 'ABSOLUTE', 'ACCESS', 'ADA', 'ADD', 'ADMIN', 'AFTER', 'AGGREGATE',
+ 'ALIAS', 'ALL', 'ALLOCATE', 'ALTER', 'ANALYSE', 'ANALYZE', 'AND', 'ANY', 'ARE', 'AS',
+ 'ASC', 'ASENSITIVE', 'ASSERTION', 'ASSIGNMENT', 'ASYMMETRIC', 'AT', 'ATOMIC',
+ 'AUTHORIZATION', 'AVG', 'BACKWARD', 'BEFORE', 'BEGIN', 'BETWEEN', 'BITVAR',
+ 'BIT_LENGTH', 'BOTH', 'BREADTH', 'BY', 'C', 'CACHE', 'CALL', 'CALLED', 'CARDINALITY',
+ 'CASCADE', 'CASCADED', 'CASE', 'CAST', 'CATALOG', 'CATALOG_NAME', 'CHAIN',
+ 'CHARACTERISTICS', 'CHARACTER_LENGTH', 'CHARACTER_SET_CATALOG',
+ 'CHARACTER_SET_NAME', 'CHARACTER_SET_SCHEMA', 'CHAR_LENGTH', 'CHECK',
+ 'CHECKED', 'CHECKPOINT', 'CLASS', 'CLASS_ORIGIN', 'CLOB', 'CLOSE', 'CLUSTER',
+ 'COALSECE', 'COBOL', 'COLLATE', 'COLLATION', 'COLLATION_CATALOG',
+ 'COLLATION_NAME', 'COLLATION_SCHEMA', 'COLUMN', 'COLUMN_NAME',
+ 'COMMAND_FUNCTION', 'COMMAND_FUNCTION_CODE', 'COMMENT', 'COMMIT',
+ 'COMMITTED', 'COMPLETION', 'CONDITION_NUMBER', 'CONNECT', 'CONNECTION',
+ 'CONNECTION_NAME', 'CONSTRAINT', 'CONSTRAINTS', 'CONSTRAINT_CATALOG',
+ 'CONSTRAINT_NAME', 'CONSTRAINT_SCHEMA', 'CONSTRUCTOR', 'CONTAINS',
+ 'CONTINUE', 'CONVERSION', 'CONVERT', 'COPY', 'CORRESPONTING', 'COUNT',
+ 'CREATE', 'CREATEDB', 'CREATEUSER', 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_DATE',
+ 'CURRENT_PATH', 'CURRENT_ROLE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP',
+ 'CURRENT_USER', 'CURSOR', 'CURSOR_NAME', 'CYCLE', 'DATA', 'DATABASE',
+ 'DATETIME_INTERVAL_CODE', 'DATETIME_INTERVAL_PRECISION', 'DAY',
+ 'DEALLOCATE', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE', 'DEFERRED',
+ 'DEFINED', 'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS', 'DEREF', 'DESC',
+ 'DESCRIBE', 'DESCRIPTOR', 'DESTROY', 'DESTRUCTOR', 'DETERMINISTIC',
+ 'DIAGNOSTICS', 'DICTIONARY', 'DISCONNECT', 'DISPATCH', 'DISTINCT', 'DO',
+ 'DOMAIN', 'DROP', 'DYNAMIC', 'DYNAMIC_FUNCTION', 'DYNAMIC_FUNCTION_CODE',
+ 'EACH', 'ELSE', 'ENCODING', 'ENCRYPTED', 'END', 'END-EXEC', 'EQUALS', 'ESCAPE', 'EVERY',
+ 'EXCEPTION', 'EXCEPT', 'EXCLUDING', 'EXCLUSIVE', 'EXEC', 'EXECUTE', 'EXISTING',
+ 'EXISTS', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FOR',
+ 'FORCE', 'FOREIGN', 'FORTRAN', 'FORWARD', 'FOUND', 'FREE', 'FREEZE', 'FROM', 'FULL',
+ 'FUNCTION', 'G', 'GENERAL', 'GENERATED', 'GET', 'GLOBAL', 'GO', 'GOTO', 'GRANT', 'GRANTED',
+ 'GROUP', 'GROUPING', 'HANDLER', 'HAVING', 'HIERARCHY', 'HOLD', 'HOST', 'IDENTITY',
+ 'IGNORE', 'ILIKE', 'IMMEDIATE', 'IMMUTABLE', 'IMPLEMENTATION', 'IMPLICIT', 'IN',
+ 'INCLUDING', 'INCREMENT', 'INDEX', 'INDITCATOR', 'INFIX', 'INHERITS', 'INITIALIZE',
+ 'INITIALLY', 'INNER', 'INOUT', 'INPUT', 'INSENSITIVE', 'INSERT', 'INSTANTIABLE',
+ 'INSTEAD', 'INTERSECT', 'INTO', 'INVOKER', 'IS', 'ISNULL', 'ISOLATION', 'ITERATE', 'JOIN',
+ 'KEY', 'KEY_MEMBER', 'KEY_TYPE', 'LANCOMPILER', 'LANGUAGE', 'LARGE', 'LAST',
+ 'LATERAL', 'LEADING', 'LEFT', 'LENGTH', 'LESS', 'LEVEL', 'LIKE', 'LIMIT', 'LISTEN', 'LOAD',
+ 'LOCAL', 'LOCALTIME', 'LOCALTIMESTAMP', 'LOCATION', 'LOCATOR', 'LOCK', 'LOWER',
+ 'MAP', 'MATCH', 'MAX', 'MAXVALUE', 'MESSAGE_LENGTH', 'MESSAGE_OCTET_LENGTH',
+ 'MESSAGE_TEXT', 'METHOD', 'MIN', 'MINUTE', 'MINVALUE', 'MOD', 'MODE', 'MODIFIES',
+ 'MODIFY', 'MONTH', 'MORE', 'MOVE', 'MUMPS', 'NAMES', 'NATIONAL', 'NATURAL', 'NCHAR',
+ 'NCLOB', 'NEW', 'NEXT', 'NO', 'NOCREATEDB', 'NOCREATEUSER', 'NONE', 'NOT', 'NOTHING',
+ 'NOTIFY', 'NOTNULL', 'NULL', 'NULLABLE', 'NULLIF', 'OBJECT', 'OCTET_LENGTH', 'OF', 'OFF',
+ 'OFFSET', 'OIDS', 'OLD', 'ON', 'ONLY', 'OPEN', 'OPERATION', 'OPERATOR', 'OPTION', 'OPTIONS',
+ 'OR', 'ORDER', 'ORDINALITY', 'OUT', 'OUTER', 'OUTPUT', 'OVERLAPS', 'OVERLAY', 'OVERRIDING',
+ 'OWNER', 'PAD', 'PARAMETER', 'PARAMETERS', 'PARAMETER_MODE', 'PARAMATER_NAME',
+ 'PARAMATER_ORDINAL_POSITION', 'PARAMETER_SPECIFIC_CATALOG',
+ 'PARAMETER_SPECIFIC_NAME', 'PARAMATER_SPECIFIC_SCHEMA', 'PARTIAL',
+ 'PASCAL', 'PENDANT', 'PLACING', 'PLI', 'POSITION', 'POSTFIX', 'PRECISION', 'PREFIX',
+ 'PREORDER', 'PREPARE', 'PRESERVE', 'PRIMARY', 'PRIOR', 'PRIVILEGES', 'PROCEDURAL',
+ 'PROCEDURE', 'PUBLIC', 'READ', 'READS', 'RECHECK', 'RECURSIVE', 'REF', 'REFERENCES',
+ 'REFERENCING', 'REINDEX', 'RELATIVE', 'RENAME', 'REPEATABLE', 'REPLACE', 'RESET',
+ 'RESTART', 'RESTRICT', 'RESULT', 'RETURN', 'RETURNED_LENGTH',
+ 'RETURNED_OCTET_LENGTH', 'RETURNED_SQLSTATE', 'RETURNS', 'REVOKE', 'RIGHT',
+ 'ROLE', 'ROLLBACK', 'ROLLUP', 'ROUTINE', 'ROUTINE_CATALOG', 'ROUTINE_NAME',
+ 'ROUTINE_SCHEMA', 'ROW', 'ROWS', 'ROW_COUNT', 'RULE', 'SAVE_POINT', 'SCALE', 'SCHEMA',
+ 'SCHEMA_NAME', 'SCOPE', 'SCROLL', 'SEARCH', 'SECOND', 'SECURITY', 'SELECT', 'SELF',
+ 'SENSITIVE', 'SERIALIZABLE', 'SERVER_NAME', 'SESSION', 'SESSION_USER', 'SET',
+ 'SETOF', 'SETS', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE', 'SIZE', 'SOME', 'SOURCE', 'SPACE',
+ 'SPECIFIC', 'SPECIFICTYPE', 'SPECIFIC_NAME', 'SQL', 'SQLCODE', 'SQLERROR',
+ 'SQLEXCEPTION', 'SQLSTATE', 'SQLWARNINIG', 'STABLE', 'START', 'STATE', 'STATEMENT',
+ 'STATIC', 'STATISTICS', 'STDIN', 'STDOUT', 'STORAGE', 'STRICT', 'STRUCTURE', 'STYPE',
+ 'SUBCLASS_ORIGIN', 'SUBLIST', 'SUBSTRING', 'SUM', 'SYMMETRIC', 'SYSID', 'SYSTEM',
+ 'SYSTEM_USER', 'TABLE', 'TABLE_NAME', 'TEMP', 'TEMPLATE', 'TEMPORARY', 'TERMINATE',
+ 'THAN', 'THEN', 'TIMESTAMP', 'TIMEZONE_HOUR', 'TIMEZONE_MINUTE', 'TO', 'TOAST',
+ 'TRAILING', 'TRANSATION', 'TRANSACTIONS_COMMITTED',
+ 'TRANSACTIONS_ROLLED_BACK', 'TRANSATION_ACTIVE', 'TRANSFORM',
+ 'TRANSFORMS', 'TRANSLATE', 'TRANSLATION', 'TREAT', 'TRIGGER', 'TRIGGER_CATALOG',
+ 'TRIGGER_NAME', 'TRIGGER_SCHEMA', 'TRIM', 'TRUE', 'TRUNCATE', 'TRUSTED', 'TYPE',
+ 'UNCOMMITTED', 'UNDER', 'UNENCRYPTED', 'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN',
+ 'UNNAMED', 'UNNEST', 'UNTIL', 'UPDATE', 'UPPER', 'USAGE', 'USER',
+ 'USER_DEFINED_TYPE_CATALOG', 'USER_DEFINED_TYPE_NAME',
+ 'USER_DEFINED_TYPE_SCHEMA', 'USING', 'VACUUM', 'VALID', 'VALIDATOR', 'VALUES',
+ 'VARIABLE', 'VERBOSE', 'VERSION', 'VIEW', 'VOLATILE', 'WHEN', 'WHENEVER', 'WHERE',
+ 'WITH', 'WITHOUT', 'WORK', 'WRITE', 'YEAR', 'ZONE'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'ARRAY', 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', 'CHARACTER', 'DATE',
+ 'DEC', 'DECIMAL', 'FLOAT', 'INT', 'INTEGER', 'INTERVAL', 'NUMBER', 'NUMERIC', 'REAL',
+ 'SERIAL', 'SMALLINT', 'VARCHAR', 'VARYING', 'INT8', 'SERIAL8', 'TEXT'), suffix=r'\b'),
Name.Builtin),
(r'[+*/<>=~!@#%^&|`?-]', Operator),
(r'[0-9]+', Number.Integer),
# TODO: Backslash escapes?
(r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
- (r'[;:()\[\],\.]', Punctuation)
+ (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL
+ (r'[a-z_][\w$]*', Name), # allow $s in strings for Oracle
+ (r'[;:()\[\],.]', Punctuation)
],
'multiline-comments': [
(r'/\*', Comment.Multiline, 'multiline-comments'),
(r'\*/', Comment.Multiline, '#pop'),
- (r'[^/\*]+', Comment.Multiline),
+ (r'[^/*]+', Comment.Multiline),
(r'[/*]', Comment.Multiline)
]
}
@@ -462,10 +471,9 @@ class MySqlLexer(RegexLexer):
(r'/\*', Comment.Multiline, 'multiline-comments'),
(r'[0-9]+', Number.Integer),
(r'[0-9]*\.[0-9]+(e[+-][0-9]+)', Number.Float),
- # TODO: add backslash escapes
- (r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Double),
- (r"`(``|[^`])*`", String.Symbol),
+ (r"'(\\\\|\\'|''|[^'])*'", String.Single),
+ (r'"(\\\\|\\"|""|[^"])*"', String.Double),
+ (r"`(\\\\|\\`|``|[^`])*`", String.Symbol),
(r'[+*/<>=~!@#%^&|`?-]', Operator),
(r'\b(tinyint|smallint|mediumint|int|integer|bigint|date|'
r'datetime|time|bit|bool|tinytext|mediumtext|longtext|text|'
@@ -504,16 +512,16 @@ class MySqlLexer(RegexLexer):
# TODO: this list is not complete
(r'\b(auto_increment|engine|charset|tables)\b', Keyword.Pseudo),
(r'(true|false|null)', Name.Constant),
- (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\()',
+ (r'([a-z_]\w*)(\s*)(\()',
bygroups(Name.Function, Text, Punctuation)),
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
- (r'@[A-Za-z0-9]*[._]*[A-Za-z0-9]*', Name.Variable),
- (r'[;:()\[\],\.]', Punctuation)
+ (r'[a-z_]\w*', Name),
+ (r'@[a-z0-9]*[._]*[a-z0-9]*', Name.Variable),
+ (r'[;:()\[\],.]', Punctuation)
],
'multiline-comments': [
(r'/\*', Comment.Multiline, 'multiline-comments'),
(r'\*/', Comment.Multiline, '#pop'),
- (r'[^/\*]+', Comment.Multiline),
+ (r'[^/*]+', Comment.Multiline),
(r'[/*]', Comment.Multiline)
]
}
@@ -523,7 +531,7 @@ class SqliteConsoleLexer(Lexer):
"""
Lexer for example sessions using sqlite3.
- *New in Pygments 0.11.*
+ .. versionadded:: 0.11
"""
name = 'sqlite3con'
@@ -557,3 +565,34 @@ class SqliteConsoleLexer(Lexer):
for item in do_insertions(insertions,
sql.get_tokens_unprocessed(curcode)):
yield item
+
+
+class RqlLexer(RegexLexer):
+ """
+ Lexer for Relation Query Language.
+
+ `RQL <http://www.logilab.org/project/rql>`_
+
+ .. versionadded:: 2.0
+ """
+ name = 'RQL'
+ aliases = ['rql']
+ filenames = ['*.rql']
+ mimetypes = ['text/x-rql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'(DELETE|SET|INSERT|UNION|DISTINCT|WITH|WHERE|BEING|OR'
+ r'|AND|NOT|GROUPBY|HAVING|ORDERBY|ASC|DESC|LIMIT|OFFSET'
+ r'|TODAY|NOW|TRUE|FALSE|NULL|EXISTS)\b', Keyword),
+ (r'[+*/<>=%-]', Operator),
+ (r'(Any|is|instance_of|CWEType|CWRelation)\b', Name.Builtin),
+ (r'[0-9]+', Number.Integer),
+ (r'[A-Z_]\w*\??', Name),
+ (r"'(''|[^'])*'", String.Single),
+ (r'"(""|[^"])*"', String.Single),
+ (r'[;:()\[\],.]', Punctuation)
+ ],
+ }
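A hedged sanity check (example code, not part of the patch) that the words()-based keyword rule and the new RqlLexer behave as the hunks above intend:

    from pygments.lexers.sql import SqlLexer, RqlLexer
    from pygments.token import Keyword, Name

    assert list(SqlLexer().get_tokens('SELECT 1;\n'))[0] == (Keyword, 'SELECT')
    # 'Any' is handled by the Name.Builtin rule, case-insensitively
    assert list(RqlLexer().get_tokens('Any X WHERE X is Person\n'))[0][0] is Name.Builtin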
diff --git a/pygments/lexers/tcl.py b/pygments/lexers/tcl.py
new file mode 100644
index 00000000..96feb7a8
--- /dev/null
+++ b/pygments/lexers/tcl.py
@@ -0,0 +1,145 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.tcl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Tcl and related languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number
+from pygments.util import shebang_matches
+
+__all__ = ['TclLexer']
+
+
+class TclLexer(RegexLexer):
+ """
+ For Tcl source code.
+
+ .. versionadded:: 0.10
+ """
+
+ keyword_cmds_re = words((
+ 'after', 'apply', 'array', 'break', 'catch', 'continue', 'elseif', 'else', 'error',
+ 'eval', 'expr', 'for', 'foreach', 'global', 'if', 'namespace', 'proc', 'rename', 'return',
+ 'set', 'switch', 'then', 'trace', 'unset', 'update', 'uplevel', 'upvar', 'variable',
+ 'vwait', 'while'), prefix=r'\b', suffix=r'\b')
+
+ builtin_cmds_re = words((
+ 'append', 'bgerror', 'binary', 'cd', 'chan', 'clock', 'close', 'concat', 'dde', 'dict',
+ 'encoding', 'eof', 'exec', 'exit', 'fblocked', 'fconfigure', 'fcopy', 'file',
+ 'fileevent', 'flush', 'format', 'gets', 'glob', 'history', 'http', 'incr', 'info', 'interp',
+ 'join', 'lappend', 'lassign', 'lindex', 'linsert', 'list', 'llength', 'load', 'loadTk',
+ 'lrange', 'lrepeat', 'lreplace', 'lreverse', 'lsearch', 'lset', 'lsort', 'mathfunc',
+ 'mathop', 'memory', 'msgcat', 'open', 'package', 'pid', 'pkg::create', 'pkg_mkIndex',
+ 'platform', 'platform::shell', 'puts', 'pwd', 're_syntax', 'read', 'refchan',
+ 'regexp', 'registry', 'regsub', 'scan', 'seek', 'socket', 'source', 'split', 'string',
+ 'subst', 'tell', 'time', 'tm', 'unknown', 'unload'), prefix=r'\b', suffix=r'\b')
+
+ name = 'Tcl'
+ aliases = ['tcl']
+ filenames = ['*.tcl', '*.rvt']
+ mimetypes = ['text/x-tcl', 'text/x-script.tcl', 'application/x-tcl']
+
+ def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""):
+ return [
+ (keyword_cmds_re, Keyword, 'params' + context),
+ (builtin_cmds_re, Name.Builtin, 'params' + context),
+ (r'([\w.-]+)', Name.Variable, 'params' + context),
+ (r'#', Comment, 'comment'),
+ ]
+
+ tokens = {
+ 'root': [
+ include('command'),
+ include('basic'),
+ include('data'),
+ (r'\}', Keyword), # HACK: somehow we miscounted our braces
+ ],
+ 'command': _gen_command_rules(keyword_cmds_re, builtin_cmds_re),
+ 'command-in-brace': _gen_command_rules(keyword_cmds_re,
+ builtin_cmds_re,
+ "-in-brace"),
+ 'command-in-bracket': _gen_command_rules(keyword_cmds_re,
+ builtin_cmds_re,
+ "-in-bracket"),
+ 'command-in-paren': _gen_command_rules(keyword_cmds_re,
+ builtin_cmds_re,
+ "-in-paren"),
+ 'basic': [
+ (r'\(', Keyword, 'paren'),
+ (r'\[', Keyword, 'bracket'),
+ (r'\{', Keyword, 'brace'),
+ (r'"', String.Double, 'string'),
+ (r'(eq|ne|in|ni)\b', Operator.Word),
+ (r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator),
+ ],
+ 'data': [
+ (r'\s+', Text),
+ (r'0x[a-fA-F0-9]+', Number.Hex),
+ (r'0[0-7]+', Number.Oct),
+ (r'\d+\.\d+', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'\$([\w.:-]+)', Name.Variable),
+ (r'([\w.:-]+)', Text),
+ ],
+ 'params': [
+ (r';', Keyword, '#pop'),
+ (r'\n', Text, '#pop'),
+ (r'(else|elseif|then)\b', Keyword),
+ include('basic'),
+ include('data'),
+ ],
+ 'params-in-brace': [
+ (r'\}', Keyword, ('#pop', '#pop')),
+ include('params')
+ ],
+ 'params-in-paren': [
+ (r'\)', Keyword, ('#pop', '#pop')),
+ include('params')
+ ],
+ 'params-in-bracket': [
+ (r'\]', Keyword, ('#pop', '#pop')),
+ include('params')
+ ],
+ 'string': [
+ (r'\[', String.Double, 'string-square'),
+ (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\])', String.Double),
+ (r'"', String.Double, '#pop')
+ ],
+ 'string-square': [
+ (r'\[', String.Double, 'string-square'),
+ (r'(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])', String.Double),
+ (r'\]', String.Double, '#pop')
+ ],
+ 'brace': [
+ (r'\}', Keyword, '#pop'),
+ include('command-in-brace'),
+ include('basic'),
+ include('data'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('command-in-paren'),
+ include('basic'),
+ include('data'),
+ ],
+ 'bracket': [
+ (r'\]', Keyword, '#pop'),
+ include('command-in-bracket'),
+ include('basic'),
+ include('data'),
+ ],
+ 'comment': [
+ (r'.*[^\\]\n', Comment, '#pop'),
+ (r'.*\\\n', Comment),
+ ],
+ }
+
+ def analyse_text(text):
+ return shebang_matches(text, r'(tcl)')
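A brief usage sketch (illustrative only) for the new module: keyword commands and builtin commands from the two word lists above come back with distinct token types.

    from pygments.lexers.tcl import TclLexer
    from pygments.token import Keyword, Name

    lexer = TclLexer()
    assert list(lexer.get_tokens('set greeting "hi"\n'))[0] == (Keyword, 'set')
    assert list(lexer.get_tokens('puts $greeting\n'))[0] == (Name.Builtin, 'puts')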
diff --git a/pygments/lexers/templates.py b/pygments/lexers/templates.py
index 63fc5f37..bfca0d38 100644
--- a/pygments/lexers/templates.py
+++ b/pygments/lexers/templates.py
@@ -5,21 +5,24 @@
Lexers for various template engines' markup.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexers.web import \
- PhpLexer, HtmlLexer, XmlLexer, JavascriptLexer, CssLexer, LassoLexer
-from pygments.lexers.agile import PythonLexer, PerlLexer
-from pygments.lexers.compiled import JavaLexer
-from pygments.lexers.jvm import TeaLangLexer
+from pygments.lexers.html import HtmlLexer, XmlLexer
+from pygments.lexers.javascript import JavascriptLexer, LassoLexer
+from pygments.lexers.css import CssLexer
+from pygments.lexers.php import PhpLexer
+from pygments.lexers.python import PythonLexer
+from pygments.lexers.perl import PerlLexer
+from pygments.lexers.jvm import JavaLexer, TeaLangLexer
+from pygments.lexers.data import YamlLexer
from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
- include, using, this
-from pygments.token import Error, Punctuation, \
- Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
+ include, using, this, default, combined
+from pygments.token import Error, Punctuation, Whitespace, \
+ Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
from pygments.util import html_doctype_matches, looks_like_xml
__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
@@ -36,9 +39,12 @@ __all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
- 'ColdfusionHtmlLexer', 'VelocityLexer', 'VelocityHtmlLexer',
- 'VelocityXmlLexer', 'SspLexer', 'TeaTemplateLexer', 'LassoHtmlLexer',
- 'LassoXmlLexer', 'LassoCssLexer', 'LassoJavascriptLexer']
+ 'ColdfusionHtmlLexer', 'ColdfusionCFCLexer', 'VelocityLexer',
+ 'VelocityHtmlLexer', 'VelocityXmlLexer', 'SspLexer',
+ 'TeaTemplateLexer', 'LassoHtmlLexer', 'LassoXmlLexer',
+ 'LassoCssLexer', 'LassoJavascriptLexer', 'HandlebarsLexer',
+ 'HandlebarsHtmlLexer', 'YamlJinjaLexer', 'LiquidLexer',
+ 'TwigLexer', 'TwigHtmlLexer']
class ErbLexer(Lexer):
@@ -59,7 +65,7 @@ class ErbLexer(Lexer):
_block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)
def __init__(self, **options):
- from pygments.lexers.agile import RubyLexer
+ from pygments.lexers.ruby import RubyLexer
self.ruby_lexer = RubyLexer(**options)
Lexer.__init__(self, **options)
@@ -102,7 +108,7 @@ class ErbLexer(Lexer):
data = tokens.pop()
r_idx = 0
for r_idx, r_token, r_value in \
- self.ruby_lexer.get_tokens_unprocessed(data):
+ self.ruby_lexer.get_tokens_unprocessed(data):
yield r_idx + idx, r_token, r_value
idx += len(data)
state = 2
@@ -115,7 +121,7 @@ class ErbLexer(Lexer):
yield idx, Comment.Preproc, tag[0]
r_idx = 0
for r_idx, r_token, r_value in \
- self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
+ self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
yield idx + 1 + r_idx, r_token, r_value
idx += len(tag)
state = 0
@@ -159,22 +165,23 @@ class SmartyLexer(RegexLexer):
(r'(\{php\})(.*?)(\{/php\})',
bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
Comment.Preproc)),
- (r'(\{)(/?[a-zA-Z_][a-zA-Z0-9_]*)(\s*)',
+ (r'(\{)(/?[a-zA-Z_]\w*)(\s*)',
bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
(r'\{', Comment.Preproc, 'smarty')
],
'smarty': [
(r'\s+', Text),
+ (r'\{', Comment.Preproc, '#push'),
(r'\}', Comment.Preproc, '#pop'),
- (r'#[a-zA-Z_][a-zA-Z0-9_]*#', Name.Variable),
- (r'\$[a-zA-Z_][a-zA-Z0-9_]*(\.[a-zA-Z0-9_]+)*', Name.Variable),
- (r'[~!%^&*()+=|\[\]:;,.<>/?{}@-]', Operator),
+ (r'#[a-zA-Z_]\w*#', Name.Variable),
+ (r'\$[a-zA-Z_]\w*(\.\w+)*', Name.Variable),
+ (r'[~!%^&*()+=|\[\]:;,.<>/?@-]', Operator),
(r'(true|false|null)\b', Keyword.Constant),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Attribute)
+ (r'[a-zA-Z_]\w*', Name.Attribute)
]
}
@@ -201,11 +208,11 @@ class VelocityLexer(RegexLexer):
name = 'Velocity'
aliases = ['velocity']
- filenames = ['*.vm','*.fhtml']
+ filenames = ['*.vm', '*.fhtml']
flags = re.MULTILINE | re.DOTALL
- identifier = r'[a-zA-Z_][a-zA-Z0-9_]*'
+ identifier = r'[a-zA-Z_]\w*'
tokens = {
'root': [
@@ -227,10 +234,10 @@ class VelocityLexer(RegexLexer):
(r'(\.)(' + identifier + r')',
bygroups(Punctuation, Name.Variable), '#push'),
(r'\}', Punctuation, '#pop'),
- (r'', Other, '#pop')
+ default('#pop')
],
'directiveparams': [
- (r'(&&|\|\||==?|!=?|[-<>+*%&\|\^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
+ (r'(&&|\|\||==?|!=?|[-<>+*%&|^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
Operator),
(r'\[', Operator, 'rangeoperator'),
(r'\b' + identifier + r'\b', Name.Function),
@@ -251,7 +258,9 @@ class VelocityLexer(RegexLexer):
(r"\b[0-9]+\b", Number),
(r'(true|false|null)\b', Keyword.Constant),
(r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop')
+ (r'\)', Punctuation, '#pop'),
+ (r'\[', Punctuation, '#push'),
+ (r'\]', Punctuation, '#pop'),
]
}
@@ -263,39 +272,39 @@ class VelocityLexer(RegexLexer):
rv += 0.15
if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text):
rv += 0.15
- if re.search(r'\$\{?[a-zA-Z_][a-zA-Z0-9_]*(\([^)]*\))?'
- r'(\.[a-zA-Z0-9_]+(\([^)]*\))?)*\}?', text):
+ if re.search(r'\$\{?[a-zA-Z_]\w*(\([^)]*\))?'
+ r'(\.\w+(\([^)]*\))?)*\}?', text):
rv += 0.01
return rv
class VelocityHtmlLexer(DelegatingLexer):
"""
- Subclass of the `VelocityLexer` that highlights unlexer data
+ Subclass of the `VelocityLexer` that highlights unlexed data
with the `HtmlLexer`.
"""
name = 'HTML+Velocity'
aliases = ['html+velocity']
- alias_filenames = ['*.html','*.fhtml']
+ alias_filenames = ['*.html', '*.fhtml']
mimetypes = ['text/html+velocity']
def __init__(self, **options):
super(VelocityHtmlLexer, self).__init__(HtmlLexer, VelocityLexer,
- **options)
+ **options)
class VelocityXmlLexer(DelegatingLexer):
"""
- Subclass of the `VelocityLexer` that highlights unlexer data
+ Subclass of the `VelocityLexer` that highlights unlexed data
with the `XmlLexer`.
"""
name = 'XML+Velocity'
aliases = ['xml+velocity']
- alias_filenames = ['*.xml','*.vm']
+ alias_filenames = ['*.xml', '*.vm']
mimetypes = ['application/xml+velocity']
def __init__(self, **options):
@@ -305,7 +314,7 @@ class VelocityXmlLexer(DelegatingLexer):
def analyse_text(text):
rv = VelocityLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
- rv += 0.5
+ rv += 0.4
return rv
@@ -343,25 +352,25 @@ class DjangoLexer(RegexLexer):
Text, Comment.Preproc, Text, Keyword, Text,
Comment.Preproc)),
# filter blocks
- (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
+ (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)',
bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
'block'),
- (r'(\{%)(-?\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
+ (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
bygroups(Comment.Preproc, Text, Keyword), 'block'),
(r'\{', Other)
],
'varnames': [
- (r'(\|)(\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
+ (r'(\|)(\s*)([a-zA-Z_]\w*)',
bygroups(Operator, Text, Name.Function)),
- (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_][a-zA-Z0-9_]*)',
+ (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)',
bygroups(Keyword, Text, Keyword, Text, Name.Function)),
(r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
(r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
Keyword),
(r'(loop|block|super|forloop)\b', Name.Builtin),
- (r'[a-zA-Z][a-zA-Z0-9_-]*', Name.Variable),
- (r'\.[a-zA-Z0-9_]+', Name.Variable),
+ (r'[a-zA-Z_][\w-]*', Name.Variable),
+ (r'\.\w+', Name.Variable),
(r':?"(\\\\|\\"|[^"])*"', String.Double),
(r":?'(\\\\|\\'|[^'])*'", String.Single),
(r'([{}()\[\]+\-*/,:~]|[><=]=?)', Operator),
@@ -397,7 +406,7 @@ class MyghtyLexer(RegexLexer):
Generic `myghty templates`_ lexer. Code that isn't Myghty
markup is yielded as `Token.Other`.
- *New in Pygments 0.6.*
+ .. versionadded:: 0.6
.. _myghty templates: http://www.myghty.org/
"""
@@ -442,10 +451,10 @@ class MyghtyLexer(RegexLexer):
class MyghtyHtmlLexer(DelegatingLexer):
"""
- Subclass of the `MyghtyLexer` that highlights unlexer data
+ Subclass of the `MyghtyLexer` that highlights unlexed data
with the `HtmlLexer`.
- *New in Pygments 0.6.*
+ .. versionadded:: 0.6
"""
name = 'HTML+Myghty'
@@ -459,10 +468,10 @@ class MyghtyHtmlLexer(DelegatingLexer):
class MyghtyXmlLexer(DelegatingLexer):
"""
- Subclass of the `MyghtyLexer` that highlights unlexer data
+ Subclass of the `MyghtyLexer` that highlights unlexed data
with the `XmlLexer`.
- *New in Pygments 0.6.*
+ .. versionadded:: 0.6
"""
name = 'XML+Myghty'
@@ -476,10 +485,10 @@ class MyghtyXmlLexer(DelegatingLexer):
class MyghtyJavascriptLexer(DelegatingLexer):
"""
- Subclass of the `MyghtyLexer` that highlights unlexer data
+ Subclass of the `MyghtyLexer` that highlights unlexed data
with the `JavascriptLexer`.
- *New in Pygments 0.6.*
+ .. versionadded:: 0.6
"""
name = 'JavaScript+Myghty'
@@ -495,10 +504,10 @@ class MyghtyJavascriptLexer(DelegatingLexer):
class MyghtyCssLexer(DelegatingLexer):
"""
- Subclass of the `MyghtyLexer` that highlights unlexer data
+ Subclass of the `MyghtyLexer` that highlights unlexed data
with the `CssLexer`.
- *New in Pygments 0.6.*
+ .. versionadded:: 0.6
"""
name = 'CSS+Myghty'
@@ -517,7 +526,7 @@ class MasonLexer(RegexLexer):
.. _mason templates: http://www.masonhq.com/
- *New in Pygments 1.4.*
+ .. versionadded:: 1.4
"""
name = 'Mason'
aliases = ['mason']
@@ -570,7 +579,7 @@ class MakoLexer(RegexLexer):
Generic `mako templates`_ lexer. Code that isn't Mako
markup is yielded as `Token.Other`.
- *New in Pygments 0.7.*
+ .. versionadded:: 0.7
.. _mako templates: http://www.makotemplates.org/
"""
@@ -589,11 +598,11 @@ class MakoLexer(RegexLexer):
(r'(\s*)(##[^\n]*)(\n|\Z)',
bygroups(Text, Comment.Preproc, Other)),
(r'(?s)<%doc>.*?</%doc>', Comment.Preproc),
- (r'(<%)([\w\.\:]+)',
+ (r'(<%)([\w.:]+)',
bygroups(Comment.Preproc, Name.Builtin), 'tag'),
- (r'(</%)([\w\.\:]+)(>)',
+ (r'(</%)([\w.:]+)(>)',
bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
- (r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'),
+ (r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
(r'(<%(?:!?))(.*?)(%>)(?s)',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
(r'(\$\{)(.*?)(\})',
@@ -638,7 +647,7 @@ class MakoHtmlLexer(DelegatingLexer):
Subclass of the `MakoLexer` that highlights unlexed data
with the `HtmlLexer`.
- *New in Pygments 0.7.*
+ .. versionadded:: 0.7
"""
name = 'HTML+Mako'
@@ -647,14 +656,15 @@ class MakoHtmlLexer(DelegatingLexer):
def __init__(self, **options):
super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
- **options)
+ **options)
+
class MakoXmlLexer(DelegatingLexer):
"""
- Subclass of the `MakoLexer` that highlights unlexer data
+ Subclass of the `MakoLexer` that highlights unlexed data
with the `XmlLexer`.
- *New in Pygments 0.7.*
+ .. versionadded:: 0.7
"""
name = 'XML+Mako'
@@ -663,14 +673,15 @@ class MakoXmlLexer(DelegatingLexer):
def __init__(self, **options):
super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
- **options)
+ **options)
+
class MakoJavascriptLexer(DelegatingLexer):
"""
- Subclass of the `MakoLexer` that highlights unlexer data
+ Subclass of the `MakoLexer` that highlights unlexed data
with the `JavascriptLexer`.
- *New in Pygments 0.7.*
+ .. versionadded:: 0.7
"""
name = 'JavaScript+Mako'
@@ -681,14 +692,15 @@ class MakoJavascriptLexer(DelegatingLexer):
def __init__(self, **options):
super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
- MakoLexer, **options)
+ MakoLexer, **options)
+
class MakoCssLexer(DelegatingLexer):
"""
- Subclass of the `MakoLexer` that highlights unlexer data
+ Subclass of the `MakoLexer` that highlights unlexed data
with the `CssLexer`.
- *New in Pygments 0.7.*
+ .. versionadded:: 0.7
"""
name = 'CSS+Mako'
@@ -697,7 +709,7 @@ class MakoCssLexer(DelegatingLexer):
def __init__(self, **options):
super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
- **options)
+ **options)
# Genshi and Cheetah lexers courtesy of Matt Good.
@@ -741,7 +753,7 @@ class CheetahLexer(RegexLexer):
(bygroups(Comment.Preproc, using(CheetahPythonLexer),
Comment.Preproc))),
# TODO support other Python syntax like $foo['bar']
- (r'(\$)([a-zA-Z_][a-zA-Z0-9_\.]*[a-zA-Z0-9_])',
+ (r'(\$)([a-zA-Z_][\w.]*\w)',
bygroups(Comment.Preproc, using(CheetahPythonLexer))),
(r'(\$\{!?)(.*?)(\})(?s)',
bygroups(Comment.Preproc, using(CheetahPythonLexer),
@@ -749,7 +761,7 @@ class CheetahLexer(RegexLexer):
(r'''(?sx)
(.+?) # anything, followed by:
(?:
- (?=[#][#a-zA-Z]*) | # an eval comment
+ (?=\#[#a-zA-Z]*) | # an eval comment
(?=\$[a-zA-Z_{]) | # a substitution
\Z # end of string
)
@@ -761,7 +773,7 @@ class CheetahLexer(RegexLexer):
class CheetahHtmlLexer(DelegatingLexer):
"""
- Subclass of the `CheetahLexer` that highlights unlexer data
+ Subclass of the `CheetahLexer` that highlights unlexed data
with the `HtmlLexer`.
"""
@@ -776,7 +788,7 @@ class CheetahHtmlLexer(DelegatingLexer):
class CheetahXmlLexer(DelegatingLexer):
"""
- Subclass of the `CheetahLexer` that highlights unlexer data
+ Subclass of the `CheetahLexer` that highlights unlexed data
with the `XmlLexer`.
"""
@@ -791,7 +803,7 @@ class CheetahXmlLexer(DelegatingLexer):
class CheetahJavascriptLexer(DelegatingLexer):
"""
- Subclass of the `CheetahLexer` that highlights unlexer data
+ Subclass of the `CheetahLexer` that highlights unlexed data
with the `JavascriptLexer`.
"""
@@ -822,11 +834,11 @@ class GenshiTextLexer(RegexLexer):
tokens = {
'root': [
- (r'[^#\$\s]+', Other),
+ (r'[^#$\s]+', Other),
(r'^(\s*)(##.*)$', bygroups(Text, Comment)),
(r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
include('variable'),
- (r'[#\$\s]', Other),
+ (r'[#$\s]', Other),
],
'directive': [
(r'\n', Text, '#pop'),
@@ -839,7 +851,7 @@ class GenshiTextLexer(RegexLexer):
'variable': [
(r'(?<!\$)(\$\{)(.+?)(\})',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- (r'(?<!\$)(\$)([a-zA-Z_][a-zA-Z0-9_\.]*)',
+ (r'(?<!\$)(\$)([a-zA-Z_][\w.]*)',
Name.Variable),
]
}
@@ -855,7 +867,7 @@ class GenshiMarkupLexer(RegexLexer):
tokens = {
'root': [
- (r'[^<\$]+', Other),
+ (r'[^<$]+', Other),
(r'(<\?python)(.*?)(\?>)',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
# yield style and script blocks as Other
@@ -863,11 +875,11 @@ class GenshiMarkupLexer(RegexLexer):
(r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
(r'<\s*[a-zA-Z0-9:]+', Name.Tag, 'tag'),
include('variable'),
- (r'[<\$]', Other),
+ (r'[<$]', Other),
],
'pytag': [
(r'\s+', Text),
- (r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'pyattr'),
+ (r'[\w:-]+\s*=', Name.Attribute, 'pyattr'),
(r'/?\s*>', Name.Tag, '#pop'),
],
'pyattr': [
@@ -877,8 +889,8 @@ class GenshiMarkupLexer(RegexLexer):
],
'tag': [
(r'\s+', Text),
- (r'py:[a-zA-Z0-9_-]+\s*=', Name.Attribute, 'pyattr'),
- (r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'attr'),
+ (r'py:[\w-]+\s*=', Name.Attribute, 'pyattr'),
+ (r'[\w:-]+\s*=', Name.Attribute, 'attr'),
(r'/?\s*>', Name.Tag, '#pop'),
],
'attr': [
@@ -903,7 +915,7 @@ class GenshiMarkupLexer(RegexLexer):
'variable': [
(r'(?<!\$)(\$\{)(.+?)(\})',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- (r'(?<!\$)(\$)([a-zA-Z_][a-zA-Z0-9_\.]*)',
+ (r'(?<!\$)(\$)([a-zA-Z_][\w\.]*)',
Name.Variable),
]
}
@@ -1110,7 +1122,7 @@ class HtmlPhpLexer(DelegatingLexer):
class XmlPhpLexer(DelegatingLexer):
"""
- Subclass of `PhpLexer` that higlights unhandled data with the `XmlLexer`.
+ Subclass of `PhpLexer` that highlights unhandled data with the `XmlLexer`.
"""
name = 'XML+PHP'
@@ -1168,7 +1180,7 @@ class JavascriptPhpLexer(DelegatingLexer):
class HtmlSmartyLexer(DelegatingLexer):
"""
- Subclass of the `SmartyLexer` that highighlights unlexed data with the
+ Subclass of the `SmartyLexer` that highlights unlexed data with the
`HtmlLexer`.
Nested Javascript and CSS is highlighted too.
@@ -1251,7 +1263,7 @@ class JavascriptSmartyLexer(DelegatingLexer):
class HtmlDjangoLexer(DelegatingLexer):
"""
- Subclass of the `DjangoLexer` that highighlights unlexed data with the
+ Subclass of the `DjangoLexer` that highlights unlexed data with the
`HtmlLexer`.
Nested Javascript and CSS is highlighted too.
@@ -1341,7 +1353,7 @@ class JspRootLexer(RegexLexer):
Base for the `JspLexer`. Yields `Token.Other` for area outside of
JSP tags.
- *New in Pygments 0.7.*
+ .. versionadded:: 0.7
"""
tokens = {
@@ -1365,7 +1377,7 @@ class JspLexer(DelegatingLexer):
"""
Lexer for Java Server Pages.
- *New in Pygments 0.7.*
+ .. versionadded:: 0.7
"""
name = 'Java Server Page'
aliases = ['jsp']
@@ -1388,7 +1400,7 @@ class EvoqueLexer(RegexLexer):
"""
For files using the Evoque templating system.
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'Evoque'
aliases = ['evoque']
@@ -1410,7 +1422,7 @@ class EvoqueLexer(RegexLexer):
String, Punctuation)),
# directives: evoque, overlay
# see doc for handling first name arg: /directives/evoque/
- #+ minor inconsistency: the "name" in e.g. $overlay{name=site_base}
+ # + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
# should be using(PythonLexer), not passed out as String
(r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+[^=,%}]+?)?'
r'(.*?)((?(4)%)\})',
@@ -1436,12 +1448,13 @@ class EvoqueLexer(RegexLexer):
],
}
+
class EvoqueHtmlLexer(DelegatingLexer):
"""
Subclass of the `EvoqueLexer` that highlights unlexed data with the
`HtmlLexer`.
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'HTML+Evoque'
aliases = ['html+evoque']
@@ -1452,12 +1465,13 @@ class EvoqueHtmlLexer(DelegatingLexer):
super(EvoqueHtmlLexer, self).__init__(HtmlLexer, EvoqueLexer,
**options)
+
class EvoqueXmlLexer(DelegatingLexer):
"""
Subclass of the `EvoqueLexer` that highlights unlexed data with the
`XmlLexer`.
- *New in Pygments 1.1.*
+ .. versionadded:: 1.1
"""
name = 'XML+Evoque'
aliases = ['xml+evoque']
@@ -1468,6 +1482,7 @@ class EvoqueXmlLexer(DelegatingLexer):
super(EvoqueXmlLexer, self).__init__(XmlLexer, EvoqueLexer,
**options)
+
class ColdfusionLexer(RegexLexer):
"""
Coldfusion statements
@@ -1476,26 +1491,33 @@ class ColdfusionLexer(RegexLexer):
aliases = ['cfs']
filenames = []
mimetypes = []
- flags = re.IGNORECASE | re.MULTILINE
+ flags = re.IGNORECASE
tokens = {
'root': [
- (r'//.*', Comment),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
(r'\+\+|--', Operator),
(r'[-+*/^&=!]', Operator),
- (r'<=|>=|<|>', Operator),
+ (r'<=|>=|<|>|==', Operator),
(r'mod\b', Operator),
(r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
(r'\|\||&&', Operator),
+ (r'\?', Operator),
(r'"', String.Double, 'string'),
# There is a special rule for allowing html in single quoted
# strings, evidently.
(r"'.*?'", String.Single),
(r'\d+', Number),
- (r'(if|else|len|var|case|default|break|switch)\b', Keyword),
- (r'([A-Za-z_$][A-Za-z0-9_.]*)(\s*)(\()',
+ (r'(if|else|len|var|xml|default|break|switch|component|property|function|do|'
+ r'try|catch|in|continue|for|return|while|required|any|array|binary|boolean|'
+ r'component|date|guid|numeric|query|string|struct|uuid|case)\b', Keyword),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(application|session|client|cookie|super|this|variables|arguments)\b',
+ Name.Constant),
+ (r'([a-z_$][\w.]*)(\s*)(\()',
bygroups(Name.Function, Text, Punctuation)),
- (r'[A-Za-z_$][A-Za-z0-9_.]*', Name.Variable),
+ (r'[a-z_$][\w.]*', Name.Variable),
(r'[()\[\]{};:,.\\]', Punctuation),
(r'\s+', Text),
],
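
The reworked `root` state above broadens CFScript coverage considerably: multi-line comments, `==` and `?`, a much longer keyword list, boolean constants, and the built-in scopes (`arguments`, `session`, ...) tokenized as `Name.Constant`. A small smoke test, illustrative only (the `cfs` alias belongs to `ColdfusionLexer`, defined earlier in this file):

    from pygments.lexers import get_lexer_by_name
    from pygments.token import Keyword, Name

    lexer = get_lexer_by_name('cfs')
    code = u"if (structKeyExists(arguments, 'id')) { return true; }"

    tokens = list(lexer.get_tokens(code))
    assert (Keyword, u'if') in tokens                 # from the keyword list
    assert (Name.Constant, u'arguments') in tokens    # built-in scope
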
@@ -1525,7 +1547,7 @@ class ColdfusionMarkupLexer(RegexLexer):
(r'<[^<>]*', Other),
],
'tags': [
- (r'(?s)<!---.*?--->', Comment.Multiline),
+ (r'<!---', Comment.Multiline, 'cfcomment'),
(r'(?s)<!--.*?-->', Comment),
(r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
(r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
@@ -1541,12 +1563,17 @@ class ColdfusionMarkupLexer(RegexLexer):
(r'[^#<]+', Other),
(r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
Punctuation)),
- #(r'<cfoutput.*?>', Name.Builtin, '#push'),
+ # (r'<cfoutput.*?>', Name.Builtin, '#push'),
(r'</cfoutput.*?>', Name.Builtin, '#pop'),
include('tags'),
(r'(?s)<[^<>]*', Other),
(r'#', Other),
],
+ 'cfcomment': [
+ (r'<!---', Comment.Multiline, '#push'),
+ (r'--->', Comment.Multiline, '#pop'),
+ (r'([^<-]|<(?!!---)|-(?!-->))+', Comment.Multiline),
+ ],
}
@@ -1556,7 +1583,7 @@ class ColdfusionHtmlLexer(DelegatingLexer):
"""
name = 'Coldfusion HTML'
aliases = ['cfm']
- filenames = ['*.cfm', '*.cfml', '*.cfc']
+ filenames = ['*.cfm', '*.cfml']
mimetypes = ['application/x-coldfusion']
def __init__(self, **options):
@@ -1564,11 +1591,27 @@ class ColdfusionHtmlLexer(DelegatingLexer):
**options)
+class ColdfusionCFCLexer(DelegatingLexer):
+ """
+ Coldfusion markup/script components
+
+ .. versionadded:: 2.0
+ """
+ name = 'Coldfusion CFC'
+ aliases = ['cfc']
+ filenames = ['*.cfc']
+ mimetypes = []
+
+ def __init__(self, **options):
+ super(ColdfusionCFCLexer, self).__init__(ColdfusionHtmlLexer, ColdfusionLexer,
+ **options)
+
+
class SspLexer(DelegatingLexer):
"""
Lexer for Scalate Server Pages.
- *New in Pygments 1.4.*
+ .. versionadded:: 1.4
"""
name = 'Scalate Server Page'
aliases = ['ssp']
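
`*.cfc` moves from `ColdfusionHtmlLexer.filenames` to the new `ColdfusionCFCLexer`, which layers the CFScript lexer on top of `ColdfusionHtmlLexer` so script-style components are handled. Assuming the lexer mapping is regenerated so the new class is registered, filename lookup should then resolve as follows (file names are hypothetical):

    from pygments.lexers import get_lexer_for_filename

    print(get_lexer_for_filename('Widget.cfc'))   # expected: the new Coldfusion CFC lexer
    print(get_lexer_for_filename('index.cfm'))    # still the Coldfusion HTML lexer
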
@@ -1594,7 +1637,7 @@ class TeaTemplateRootLexer(RegexLexer):
Base for the `TeaTemplateLexer`. Yields `Token.Other` for area outside of
code blocks.
- *New in Pygments 1.5.*
+ .. versionadded:: 1.5
"""
tokens = {
@@ -1602,20 +1645,20 @@ class TeaTemplateRootLexer(RegexLexer):
(r'<%\S?', Keyword, 'sec'),
(r'[^<]+', Other),
(r'<', Other),
- ],
+ ],
'sec': [
(r'%>', Keyword, '#pop'),
# note: '\w\W' != '.' without DOTALL.
(r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
- ],
- }
+ ],
+ }
class TeaTemplateLexer(DelegatingLexer):
"""
Lexer for `Tea Templates <http://teatrove.org/>`_.
- *New in Pygments 1.5.*
+ .. versionadded:: 1.5
"""
name = 'Tea'
aliases = ['tea']
@@ -1642,7 +1685,7 @@ class LassoHtmlLexer(DelegatingLexer):
Nested JavaScript and CSS is also highlighted.
- *New in Pygments 1.6.*
+ .. versionadded:: 1.6
"""
name = 'HTML+Lasso'
@@ -1658,9 +1701,7 @@ class LassoHtmlLexer(DelegatingLexer):
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.01
- if re.search(r'<\w+>', text, re.I):
- rv += 0.2
- if html_doctype_matches(text):
+ if html_doctype_matches(text): # same as HTML lexer
rv += 0.5
return rv
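
With the extra `<\w+>` bonus gone, `analyse_text` here is just the Lasso score plus the shared doctype check, so ordinary HTML no longer edges toward HTML+Lasso during lexer guessing. For example (illustrative, not part of the diff):

    from pygments.lexers import guess_lexer

    html = u'<!DOCTYPE html>\n<html><body><p>plain markup</p></body></html>'
    # Plain markup should now be claimed by the HTML lexer rather than HTML+Lasso.
    print(guess_lexer(html))
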
@@ -1670,7 +1711,7 @@ class LassoXmlLexer(DelegatingLexer):
Subclass of the `LassoLexer` which highlights unhandled data with the
`XmlLexer`.
- *New in Pygments 1.6.*
+ .. versionadded:: 1.6
"""
name = 'XML+Lasso'
@@ -1694,7 +1735,7 @@ class LassoCssLexer(DelegatingLexer):
Subclass of the `LassoLexer` which highlights unhandled data with the
`CssLexer`.
- *New in Pygments 1.6.*
+ .. versionadded:: 1.6
"""
name = 'CSS+Lasso'
@@ -1720,7 +1761,7 @@ class LassoJavascriptLexer(DelegatingLexer):
Subclass of the `LassoLexer` which highlights unhandled data with the
`JavascriptLexer`.
- *New in Pygments 1.6.*
+ .. versionadded:: 1.6
"""
name = 'JavaScript+Lasso'
@@ -1740,3 +1781,394 @@ class LassoJavascriptLexer(DelegatingLexer):
if 'function' in text:
rv += 0.2
return rv
+
+
+class HandlebarsLexer(RegexLexer):
+ """
+ Generic `handlebars <http://handlebarsjs.com/>`_ template lexer.
+
+ Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
+ Everything else is left for a delegating lexer.
+
+ .. versionadded:: 2.0
+ """
+
+ name = "Handlebars"
+ aliases = ['handlebars']
+
+ tokens = {
+ 'root': [
+ (r'[^{]+', Other),
+
+ (r'\{\{!.*\}\}', Comment),
+
+ (r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),
+ (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
+ ],
+
+ 'tag': [
+ (r'\s+', Text),
+ (r'\}\}\}', Comment.Special, '#pop'),
+ (r'\}\}', Comment.Preproc, '#pop'),
+
+ # Handlebars
+ (r'([#/]*)(each|if|unless|else|with|log|in)', bygroups(Keyword,
+ Keyword)),
+
+ # General {{#block}}
+ (r'([#/])([\w-]+)', bygroups(Name.Function, Name.Function)),
+
+ # {{opt=something}}
+ (r'([\w-]+)(=)', bygroups(Name.Attribute, Operator)),
+
+ # borrowed from DjangoLexer
+ (r':?"(\\\\|\\"|[^"])*"', String.Double),
+ (r":?'(\\\\|\\'|[^'])*'", String.Single),
+ (r'[a-zA-Z][\w-]*', Name.Variable),
+ (r'\.[\w-]+', Name.Variable),
+ (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
+ r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ ]
+ }
+
+
+class HandlebarsHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `HandlebarsLexer` that highlights unlexed data with the
+ `HtmlLexer`.
+
+ .. versionadded:: 2.0
+ """
+
+ name = "HTML+Handlebars"
+ aliases = ["html+handlebars"]
+ filenames = ['*.handlebars', '*.hbs']
+ mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']
+
+ def __init__(self, **options):
+ super(HandlebarsHtmlLexer, self).__init__(HtmlLexer, HandlebarsLexer, **options)
+
+
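
`HandlebarsLexer` highlights only the `{{ ... }}` regions and emits everything else as `Other`, which is what lets `HandlebarsHtmlLexer` hand the remainder to `HtmlLexer`. A usage sketch, assuming the `html+handlebars` alias is registered in the lexer mapping:

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter

    template = u'<ul>{{#each items}}<li>{{name}}</li>{{/each}}</ul>'
    print(highlight(template, get_lexer_by_name('html+handlebars'), HtmlFormatter()))
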
+class YamlJinjaLexer(DelegatingLexer):
+ """
+ Subclass of the `DjangoLexer` that highlights unlexed data with the
+ `YamlLexer`.
+
+ Commonly used in Saltstack salt states.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'YAML+Jinja'
+ aliases = ['yaml+jinja', 'salt', 'sls']
+ filenames = ['*.sls']
+ mimetypes = ['text/x-yaml+jinja', 'text/x-sls']
+
+ def __init__(self, **options):
+ super(YamlJinjaLexer, self).__init__(YamlLexer, DjangoLexer, **options)
+
+
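
`YamlJinjaLexer` is pure delegation: `DjangoLexer` claims the `{% ... %}` and `{{ ... }}` spans and leaves the rest as `Other`, which `YamlLexer` then tokenizes. With the `sls` alias this covers Salt state files, e.g. (sketch; assumes the new aliases are registered):

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    sls = u"{% set pkg = 'nginx' %}\n{{ pkg }}:\n  pkg.installed\n"
    print(highlight(sls, get_lexer_by_name('sls'), TerminalFormatter()))
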
+class LiquidLexer(RegexLexer):
+ """
+ Lexer for `Liquid templates
+ <http://www.rubydoc.info/github/Shopify/liquid>`_.
+
+ .. versionadded:: 2.0
+ """
+ name = 'liquid'
+ aliases = ['liquid']
+ filenames = ['*.liquid']
+
+ tokens = {
+ 'root': [
+ (r'[^{]+', Text),
+ # tags and block tags
+ (r'(\{%)(\s*)', bygroups(Punctuation, Whitespace), 'tag-or-block'),
+ # output tags
+ (r'(\{\{)(\s*)([^\s}]+)',
+ bygroups(Punctuation, Whitespace, using(this, state = 'generic')),
+ 'output'),
+ (r'\{', Text)
+ ],
+
+ 'tag-or-block': [
+ # builtin logic blocks
+ (r'(if|unless|elsif|case)(?=\s+)', Keyword.Reserved, 'condition'),
+ (r'(when)(\s+)', bygroups(Keyword.Reserved, Whitespace),
+ combined('end-of-block', 'whitespace', 'generic')),
+ (r'(else)(\s*)(%\})',
+ bygroups(Keyword.Reserved, Whitespace, Punctuation), '#pop'),
+
+ # other builtin blocks
+ (r'(capture)(\s+)([^\s%]+)(\s*)(%\})',
+ bygroups(Name.Tag, Whitespace, using(this, state = 'variable'),
+ Whitespace, Punctuation), '#pop'),
+ (r'(comment)(\s*)(%\})',
+ bygroups(Name.Tag, Whitespace, Punctuation), 'comment'),
+ (r'(raw)(\s*)(%\})',
+ bygroups(Name.Tag, Whitespace, Punctuation), 'raw'),
+
+ # end of block
+ (r'(end(case|unless|if))(\s*)(%\})',
+ bygroups(Keyword.Reserved, None, Whitespace, Punctuation), '#pop'),
+ (r'(end([^\s%]+))(\s*)(%\})',
+ bygroups(Name.Tag, None, Whitespace, Punctuation), '#pop'),
+
+ # builtin tags (assign and include are handled together with usual tags)
+ (r'(cycle)(\s+)(?:([^\s:]*)(:))?(\s*)',
+ bygroups(Name.Tag, Whitespace,
+ using(this, state='generic'), Punctuation, Whitespace),
+ 'variable-tag-markup'),
+
+ # other tags or blocks
+ (r'([^\s%]+)(\s*)', bygroups(Name.Tag, Whitespace), 'tag-markup')
+ ],
+
+ 'output': [
+ include('whitespace'),
+ (r'\}\}', Punctuation, '#pop'), # end of output
+
+ (r'\|', Punctuation, 'filters')
+ ],
+
+ 'filters': [
+ include('whitespace'),
+ (r'\}\}', Punctuation, ('#pop', '#pop')), # end of filters and output
+
+ (r'([^\s|:]+)(:?)(\s*)',
+ bygroups(Name.Function, Punctuation, Whitespace), 'filter-markup')
+ ],
+
+ 'filter-markup': [
+ (r'\|', Punctuation, '#pop'),
+ include('end-of-tag'),
+ include('default-param-markup')
+ ],
+
+ 'condition': [
+ include('end-of-block'),
+ include('whitespace'),
+
+ (r'([^\s=!><]+)(\s*)([=!><]=?)(\s*)(\S+)(\s*)(%\})',
+ bygroups(using(this, state = 'generic'), Whitespace, Operator,
+ Whitespace, using(this, state = 'generic'), Whitespace,
+ Punctuation)),
+ (r'\b!', Operator),
+ (r'\bnot\b', Operator.Word),
+ (r'([\w.\'"]+)(\s+)(contains)(\s+)([\w.\'"]+)',
+ bygroups(using(this, state = 'generic'), Whitespace, Operator.Word,
+ Whitespace, using(this, state = 'generic'))),
+
+ include('generic'),
+ include('whitespace')
+ ],
+
+ 'generic-value': [
+ include('generic'),
+ include('end-at-whitespace')
+ ],
+
+ 'operator': [
+ (r'(\s*)((=|!|>|<)=?)(\s*)',
+ bygroups(Whitespace, Operator, None, Whitespace), '#pop'),
+ (r'(\s*)(\bcontains\b)(\s*)',
+ bygroups(Whitespace, Operator.Word, Whitespace), '#pop'),
+ ],
+
+ 'end-of-tag': [
+ (r'\}\}', Punctuation, '#pop')
+ ],
+
+ 'end-of-block': [
+ (r'%\}', Punctuation, ('#pop', '#pop'))
+ ],
+
+ 'end-at-whitespace': [
+ (r'\s+', Whitespace, '#pop')
+ ],
+
+ # states for unknown markup
+ 'param-markup': [
+ include('whitespace'),
+ # params with colons or equals
+ (r'([^\s=:]+)(\s*)(=|:)',
+ bygroups(Name.Attribute, Whitespace, Operator)),
+ # explicit variables
+ (r'(\{\{)(\s*)([^\s}])(\s*)(\}\})',
+ bygroups(Punctuation, Whitespace, using(this, state = 'variable'),
+ Whitespace, Punctuation)),
+
+ include('string'),
+ include('number'),
+ include('keyword'),
+ (r',', Punctuation)
+ ],
+
+ 'default-param-markup': [
+ include('param-markup'),
+ (r'.', Text) # fallback for switches / variables / un-quoted strings / ...
+ ],
+
+ 'variable-param-markup': [
+ include('param-markup'),
+ include('variable'),
+ (r'.', Text) # fallback
+ ],
+
+ 'tag-markup': [
+ (r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
+ include('default-param-markup')
+ ],
+
+ 'variable-tag-markup': [
+ (r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
+ include('variable-param-markup')
+ ],
+
+ # states for different values types
+ 'keyword': [
+ (r'\b(false|true)\b', Keyword.Constant)
+ ],
+
+ 'variable': [
+ (r'[a-zA-Z_]\w*', Name.Variable),
+ (r'(?<=\w)\.(?=\w)', Punctuation)
+ ],
+
+ 'string': [
+ (r"'[^']*'", String.Single),
+ (r'"[^"]*"', String.Double)
+ ],
+
+ 'number': [
+ (r'\d+\.\d+', Number.Float),
+ (r'\d+', Number.Integer)
+ ],
+
+ 'generic': [ # decides for variable, string, keyword or number
+ include('keyword'),
+ include('string'),
+ include('number'),
+ include('variable')
+ ],
+
+ 'whitespace': [
+ (r'[ \t]+', Whitespace)
+ ],
+
+ # states for builtin blocks
+ 'comment': [
+ (r'(\{%)(\s*)(endcomment)(\s*)(%\})',
+ bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
+ Punctuation), ('#pop', '#pop')),
+ (r'.', Comment)
+ ],
+
+ 'raw': [
+ (r'[^{]+', Text),
+ (r'(\{%)(\s*)(endraw)(\s*)(%\})',
+ bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
+ Punctuation), '#pop'),
+ (r'\{', Text)
+ ],
+ }
+
+
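
Unlike its delegating neighbors, `LiquidLexer` is standalone: text outside tags is plain `Text`, and dedicated states handle filters, conditions and the `comment`/`raw` blocks. A minimal sketch, assuming the `liquid` alias is registered:

    from pygments.lexers import get_lexer_by_name

    source = u"{% if product.available %}On sale: {{ product.title }}{% endif %}"
    for ttype, value in get_lexer_by_name('liquid').get_tokens(source):
        if value.strip():
            print(ttype, repr(value))
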
+class TwigLexer(RegexLexer):
+ """
+ `Twig <http://twig.sensiolabs.org/>`_ template lexer.
+
+ It just highlights Twig code between the preprocessor directives;
+ other data is left untouched by the lexer.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Twig'
+ aliases = ['twig']
+ mimetypes = ['application/x-twig']
+
+ flags = re.M | re.S
+
+ # Note that a backslash is included in the following two patterns
+ # PHP uses a backslash as a namespace separator
+ _ident_char = r'[\\\w-]|[^\x00-\x7f]'
+ _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
+ _ident_end = r'(?:' + _ident_char + ')*'
+ _ident_inner = _ident_begin + _ident_end
+
+ tokens = {
+ 'root': [
+ (r'[^{]+', Other),
+ (r'\{\{', Comment.Preproc, 'var'),
+ # twig comments
+ (r'\{\#.*?\#\}', Comment),
+ # raw twig blocks
+ (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
+ r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
+ bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
+ Other, Comment.Preproc, Text, Keyword, Text,
+ Comment.Preproc)),
+ (r'(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)'
+ r'(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})',
+ bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
+ Other, Comment.Preproc, Text, Keyword, Text,
+ Comment.Preproc)),
+ # filter blocks
+ (r'(\{%%)(-?\s*)(filter)(\s+)(%s)' % _ident_inner,
+ bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
+ 'tag'),
+ (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
+ bygroups(Comment.Preproc, Text, Keyword), 'tag'),
+ (r'\{', Other),
+ ],
+ 'varnames': [
+ (r'(\|)(\s*)(%s)' % _ident_inner,
+ bygroups(Operator, Text, Name.Function)),
+ (r'(is)(\s+)(not)?(\s*)(%s)' % _ident_inner,
+ bygroups(Keyword, Text, Keyword, Text, Name.Function)),
+ (r'(?i)(true|false|none|null)\b', Keyword.Pseudo),
+ (r'(in|not|and|b-and|or|b-or|b-xor|is'
+ r'|if|elseif|else|import'
+ r'|constant|defined|divisibleby|empty|even|iterable|odd|sameas'
+ r'|matches|starts\s+with|ends\s+with)\b',
+ Keyword),
+ (r'(loop|block|parent)\b', Name.Builtin),
+ (_ident_inner, Name.Variable),
+ (r'\.' + _ident_inner, Name.Variable),
+ (r'\.[0-9]+', Number),
+ (r':?"(\\\\|\\"|[^"])*"', String.Double),
+ (r":?'(\\\\|\\'|[^'])*'", String.Single),
+ (r'([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)', Operator),
+ (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
+ r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ ],
+ 'var': [
+ (r'\s+', Text),
+ (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
+ include('varnames')
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
+ include('varnames'),
+ (r'.', Punctuation),
+ ],
+ }
+
+
+class TwigHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `TwigLexer` that highlights unlexed data with the
+ `HtmlLexer`.
+
+ .. versionadded:: 2.0
+ """
+
+ name = "HTML+Twig"
+ aliases = ["html+twig"]
+ filenames = ['*.twig']
+ mimetypes = ['text/html+twig']
+
+ def __init__(self, **options):
+ super(TwigHtmlLexer, self).__init__(HtmlLexer, TwigLexer, **options)
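
As with the Handlebars pair, `TwigLexer` emits non-template data as `Other` and `TwigHtmlLexer` delegates it to `HtmlLexer`. A usage sketch, assuming the `html+twig` alias is registered:

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter

    page = u'{% for user in users %}<p>{{ user.name|e }}</p>{% endfor %}'
    print(highlight(page, get_lexer_by_name('html+twig'), HtmlFormatter()))
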
diff --git a/pygments/lexers/testing.py b/pygments/lexers/testing.py
new file mode 100644
index 00000000..55f4c054
--- /dev/null
+++ b/pygments/lexers/testing.py
@@ -0,0 +1,131 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.testing
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for testing languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Comment, Keyword, Name, String
+
+__all__ = ['GherkinLexer']
+
+
+class GherkinLexer(RegexLexer):
+ """
+ For `Gherkin <http://github.com/aslakhellesoy/gherkin/>`_ syntax.
+
+ .. versionadded:: 1.2
+ """
+ name = 'Gherkin'
+ aliases = ['cucumber', 'gherkin']
+ filenames = ['*.feature']
+ mimetypes = ['text/x-gherkin']
+
+ feature_keywords = u'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
+ feature_element_keywords = u'^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
+ examples_keywords = u'^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
+ step_keywords = u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假如|但是|但し|並且|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )'
+
+ tokens = {
+ 'comments': [
+ (r'^\s*#.*$', Comment),
+ ],
+ 'feature_elements': [
+ (step_keywords, Keyword, "step_content_stack"),
+ include('comments'),
+ (r"(\s|.)", Name.Function),
+ ],
+ 'feature_elements_on_stack': [
+ (step_keywords, Keyword, "#pop:2"),
+ include('comments'),
+ (r"(\s|.)", Name.Function),
+ ],
+ 'examples_table': [
+ (r"\s+\|", Keyword, 'examples_table_header'),
+ include('comments'),
+ (r"(\s|.)", Name.Function),
+ ],
+ 'examples_table_header': [
+ (r"\s+\|\s*$", Keyword, "#pop:2"),
+ include('comments'),
+ (r"\\\|", Name.Variable),
+ (r"\s*\|", Keyword),
+ (r"[^|]", Name.Variable),
+ ],
+ 'scenario_sections_on_stack': [
+ (feature_element_keywords,
+ bygroups(Name.Function, Keyword, Keyword, Name.Function),
+ "feature_elements_on_stack"),
+ ],
+ 'narrative': [
+ include('scenario_sections_on_stack'),
+ include('comments'),
+ (r"(\s|.)", Name.Function),
+ ],
+ 'table_vars': [
+ (r'(<[^>]+>)', Name.Variable),
+ ],
+ 'numbers': [
+ (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String),
+ ],
+ 'string': [
+ include('table_vars'),
+ (r'(\s|.)', String),
+ ],
+ 'py_string': [
+ (r'"""', Keyword, "#pop"),
+ include('string'),
+ ],
+ 'step_content_root': [
+ (r"$", Keyword, "#pop"),
+ include('step_content'),
+ ],
+ 'step_content_stack': [
+ (r"$", Keyword, "#pop:2"),
+ include('step_content'),
+ ],
+ 'step_content': [
+ (r'"', Name.Function, "double_string"),
+ include('table_vars'),
+ include('numbers'),
+ include('comments'),
+ (r'(\s|.)', Name.Function),
+ ],
+ 'table_content': [
+ (r"\s+\|\s*$", Keyword, "#pop"),
+ include('comments'),
+ (r"\\\|", String),
+ (r"\s*\|", Keyword),
+ include('string'),
+ ],
+ 'double_string': [
+ (r'"', Name.Function, "#pop"),
+ include('string'),
+ ],
+ 'root': [
+ (r'\n', Name.Function),
+ include('comments'),
+ (r'"""', Keyword, "py_string"),
+ (r'\s+\|', Keyword, 'table_content'),
+ (r'"', Name.Function, "double_string"),
+ include('table_vars'),
+ include('numbers'),
+ (r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)),
+ (step_keywords, bygroups(Name.Function, Keyword),
+ 'step_content_root'),
+ (feature_keywords, bygroups(Keyword, Keyword, Name.Function),
+ 'narrative'),
+ (feature_element_keywords,
+ bygroups(Name.Function, Keyword, Keyword, Name.Function),
+ 'feature_elements'),
+ (examples_keywords,
+ bygroups(Name.Function, Keyword, Keyword, Name.Function),
+ 'examples_table'),
+ (r'(\s|.)', Name.Function),
+ ]
+ }
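
The new `pygments.lexers.testing` module carries over the pre-existing `GherkinLexer` (note the 1.2 version annotation); its step keywords span dozens of spoken languages, and `*.feature` files resolve to it by filename as well as via the `gherkin`/`cucumber` aliases. For example (the feature text is illustrative):

    from pygments import highlight
    from pygments.lexers import get_lexer_for_filename
    from pygments.formatters import TerminalFormatter

    feature = (u"Feature: Highlighting\n"
               u"  Scenario: Lex a feature file\n"
               u"    Given a .feature file\n"
               u"    When it is run through pygmentize\n"
               u"    Then the step keywords are highlighted\n")
    print(highlight(feature, get_lexer_for_filename('example.feature'),
                    TerminalFormatter()))
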
diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py
index de9979cf..4bec5ec8 100644
--- a/pygments/lexers/text.py
+++ b/pygments/lexers/text.py
@@ -5,1889 +5,21 @@
Lexers for non-source code file types.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
-from bisect import bisect
-
-from pygments.lexer import Lexer, LexerContext, RegexLexer, ExtendedRegexLexer, \
- bygroups, include, using, this, do_insertions
-from pygments.token import Punctuation, Text, Comment, Keyword, Name, String, \
- Generic, Operator, Number, Whitespace, Literal
-from pygments.util import get_bool_opt, ClassNotFound
-from pygments.lexers.other import BashLexer
-
-__all__ = ['IniLexer', 'PropertiesLexer', 'SourcesListLexer', 'BaseMakefileLexer',
- 'MakefileLexer', 'DiffLexer', 'IrcLogsLexer', 'TexLexer',
- 'GroffLexer', 'ApacheConfLexer', 'BBCodeLexer', 'MoinWikiLexer',
- 'RstLexer', 'VimLexer', 'GettextLexer', 'SquidConfLexer',
- 'DebianControlLexer', 'DarcsPatchLexer', 'YamlLexer',
- 'LighttpdConfLexer', 'NginxConfLexer', 'CMakeLexer', 'HttpLexer',
- 'PyPyLogLexer', 'RegeditLexer', 'HxmlLexer', 'EbnfLexer']
-
-
-class IniLexer(RegexLexer):
- """
- Lexer for configuration files in INI style.
- """
-
- name = 'INI'
- aliases = ['ini', 'cfg', 'dosini']
- filenames = ['*.ini', '*.cfg']
- mimetypes = ['text/x-ini']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'[;#].*', Comment.Single),
- (r'\[.*?\]$', Keyword),
- (r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)',
- bygroups(Name.Attribute, Text, Operator, Text, String))
- ]
- }
-
- def analyse_text(text):
- npos = text.find('\n')
- if npos < 3:
- return False
- return text[0] == '[' and text[npos-1] == ']'
-
-
-class RegeditLexer(RegexLexer):
- """
- Lexer for `Windows Registry
- <http://en.wikipedia.org/wiki/Windows_Registry#.REG_files>`_ files produced
- by regedit.
-
- *New in Pygments 1.6.*
- """
-
- name = 'reg'
- aliases = ['registry']
- filenames = ['*.reg']
- mimetypes = ['text/x-windows-registry']
-
- tokens = {
- 'root': [
- (r'Windows Registry Editor.*', Text),
- (r'\s+', Text),
- (r'[;#].*', Comment.Single),
- (r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$',
- bygroups(Keyword, Operator, Name.Builtin, Keyword)),
- # String keys, which obey somewhat normal escaping
- (r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)',
- bygroups(Name.Attribute, Text, Operator, Text),
- 'value'),
- # Bare keys (includes @)
- (r'(.*?)([ \t]*)(=)([ \t]*)',
- bygroups(Name.Attribute, Text, Operator, Text),
- 'value'),
- ],
- 'value': [
- (r'-', Operator, '#pop'), # delete value
- (r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)',
- bygroups(Name.Variable, Punctuation, Number), '#pop'),
- # As far as I know, .reg files do not support line continuation.
- (r'.*', String, '#pop'),
- ]
- }
-
- def analyse_text(text):
- return text.startswith('Windows Registry Editor')
-
-
-class PropertiesLexer(RegexLexer):
- """
- Lexer for configuration files in Java's properties format.
-
- *New in Pygments 1.4.*
- """
-
- name = 'Properties'
- aliases = ['properties', 'jproperties']
- filenames = ['*.properties']
- mimetypes = ['text/x-java-properties']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'(?:[;#]|//).*$', Comment),
- (r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
- bygroups(Name.Attribute, Text, Operator, Text, String)),
- ],
- }
-
-
-class SourcesListLexer(RegexLexer):
- """
- Lexer that highlights debian sources.list files.
-
- *New in Pygments 0.7.*
- """
-
- name = 'Debian Sourcelist'
- aliases = ['sourceslist', 'sources.list', 'debsources']
- filenames = ['sources.list']
- mimetype = ['application/x-debian-sourceslist']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#.*?$', Comment),
- (r'^(deb(?:-src)?)(\s+)',
- bygroups(Keyword, Text), 'distribution')
- ],
- 'distribution': [
- (r'#.*?$', Comment, '#pop'),
- (r'\$\(ARCH\)', Name.Variable),
- (r'[^\s$[]+', String),
- (r'\[', String.Other, 'escaped-distribution'),
- (r'\$', String),
- (r'\s+', Text, 'components')
- ],
- 'escaped-distribution': [
- (r'\]', String.Other, '#pop'),
- (r'\$\(ARCH\)', Name.Variable),
- (r'[^\]$]+', String.Other),
- (r'\$', String.Other)
- ],
- 'components': [
- (r'#.*?$', Comment, '#pop:2'),
- (r'$', Text, '#pop:2'),
- (r'\s+', Text),
- (r'\S+', Keyword.Pseudo),
- ]
- }
-
- def analyse_text(text):
- for line in text.split('\n'):
- line = line.strip()
- if not (line.startswith('#') or line.startswith('deb ') or
- line.startswith('deb-src ') or not line):
- return False
- return True
-
-
-class MakefileLexer(Lexer):
- """
- Lexer for BSD and GNU make extensions (lenient enough to handle both in
- the same file even).
-
- *Rewritten in Pygments 0.10.*
- """
-
- name = 'Makefile'
- aliases = ['make', 'makefile', 'mf', 'bsdmake']
- filenames = ['*.mak', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile']
- mimetypes = ['text/x-makefile']
-
- r_special = re.compile(r'^(?:'
- # BSD Make
- r'\.\s*(include|undef|error|warning|if|else|elif|endif|for|endfor)|'
- # GNU Make
- r'\s*(ifeq|ifneq|ifdef|ifndef|else|endif|-?include|define|endef|:))(?=\s)')
- r_comment = re.compile(r'^\s*@?#')
-
- def get_tokens_unprocessed(self, text):
- ins = []
- lines = text.splitlines(True)
- done = ''
- lex = BaseMakefileLexer(**self.options)
- backslashflag = False
- for line in lines:
- if self.r_special.match(line) or backslashflag:
- ins.append((len(done), [(0, Comment.Preproc, line)]))
- backslashflag = line.strip().endswith('\\')
- elif self.r_comment.match(line):
- ins.append((len(done), [(0, Comment, line)]))
- else:
- done += line
- for item in do_insertions(ins, lex.get_tokens_unprocessed(done)):
- yield item
-
-
-class BaseMakefileLexer(RegexLexer):
- """
- Lexer for simple Makefiles (no preprocessing).
-
- *New in Pygments 0.10.*
- """
-
- name = 'Base Makefile'
- aliases = ['basemake']
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)),
- (r'\$\((?:.*\\\n|.*\n)+', using(BashLexer)),
- (r'\s+', Text),
- (r'#.*?\n', Comment),
- (r'(export)(\s+)(?=[a-zA-Z0-9_${}\t -]+\n)',
- bygroups(Keyword, Text), 'export'),
- (r'export\s+', Keyword),
- # assignment
- (r'([a-zA-Z0-9_${}.-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
- bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
- # strings
- (r'(?s)"(\\\\|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\.|[^'\\])*'", String.Single),
- # targets
- (r'([^\n:]+)(:+)([ \t]*)', bygroups(Name.Function, Operator, Text),
- 'block-header'),
- # TODO: add paren handling (grr)
- ],
- 'export': [
- (r'[a-zA-Z0-9_${}-]+', Name.Variable),
- (r'\n', Text, '#pop'),
- (r'\s+', Text),
- ],
- 'block-header': [
- (r'[^,\\\n#]+', Number),
- (r',', Punctuation),
- (r'#.*?\n', Comment),
- (r'\\\n', Text), # line continuation
- (r'\\.', Text),
- (r'(?:[\t ]+.*\n|\n)+', using(BashLexer), '#pop'),
- ],
- }
-
-
-class DiffLexer(RegexLexer):
- """
- Lexer for unified or context-style diffs or patches.
- """
-
- name = 'Diff'
- aliases = ['diff', 'udiff']
- filenames = ['*.diff', '*.patch']
- mimetypes = ['text/x-diff', 'text/x-patch']
-
- tokens = {
- 'root': [
- (r' .*\n', Text),
- (r'\+.*\n', Generic.Inserted),
- (r'-.*\n', Generic.Deleted),
- (r'!.*\n', Generic.Strong),
- (r'@.*\n', Generic.Subheading),
- (r'([Ii]ndex|diff).*\n', Generic.Heading),
- (r'=.*\n', Generic.Heading),
- (r'.*\n', Text),
- ]
- }
-
- def analyse_text(text):
- if text[:7] == 'Index: ':
- return True
- if text[:5] == 'diff ':
- return True
- if text[:4] == '--- ':
- return 0.9
-
-
-DPATCH_KEYWORDS = ['hunk', 'addfile', 'adddir', 'rmfile', 'rmdir', 'move',
- 'replace']
-
-class DarcsPatchLexer(RegexLexer):
- """
- DarcsPatchLexer is a lexer for the various versions of the darcs patch
- format. Examples of this format are derived by commands such as
- ``darcs annotate --patch`` and ``darcs send``.
-
- *New in Pygments 0.10.*
- """
- name = 'Darcs Patch'
- aliases = ['dpatch']
- filenames = ['*.dpatch', '*.darcspatch']
-
- tokens = {
- 'root': [
- (r'<', Operator),
- (r'>', Operator),
- (r'{', Operator),
- (r'}', Operator),
- (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)(\])',
- bygroups(Operator, Keyword, Name, Text, Name, Operator,
- Literal.Date, Text, Operator)),
- (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)',
- bygroups(Operator, Keyword, Name, Text, Name, Operator,
- Literal.Date, Text), 'comment'),
- (r'New patches:', Generic.Heading),
- (r'Context:', Generic.Heading),
- (r'Patch bundle hash:', Generic.Heading),
- (r'(\s*)(%s)(.*\n)' % '|'.join(DPATCH_KEYWORDS),
- bygroups(Text, Keyword, Text)),
- (r'\+', Generic.Inserted, "insert"),
- (r'-', Generic.Deleted, "delete"),
- (r'.*\n', Text),
- ],
- 'comment': [
- (r'[^\]].*\n', Comment),
- (r'\]', Operator, "#pop"),
- ],
- 'specialText': [ # darcs add [_CODE_] special operators for clarity
- (r'\n', Text, "#pop"), # line-based
- (r'\[_[^_]*_]', Operator),
- ],
- 'insert': [
- include('specialText'),
- (r'\[', Generic.Inserted),
- (r'[^\n\[]+', Generic.Inserted),
- ],
- 'delete': [
- include('specialText'),
- (r'\[', Generic.Deleted),
- (r'[^\n\[]+', Generic.Deleted),
- ],
- }
-
-
-class IrcLogsLexer(RegexLexer):
- """
- Lexer for IRC logs in *irssi*, *xchat* or *weechat* style.
- """
-
- name = 'IRC logs'
- aliases = ['irc']
- filenames = ['*.weechatlog']
- mimetypes = ['text/x-irclog']
-
- flags = re.VERBOSE | re.MULTILINE
- timestamp = r"""
- (
- # irssi / xchat and others
- (?: \[|\()? # Opening bracket or paren for the timestamp
- (?: # Timestamp
- (?: (?:\d{1,4} [-/]?)+ # Date as - or /-separated groups of digits
- [T ])? # Date/time separator: T or space
- (?: \d?\d [:.]?)+ # Time as :/.-separated groups of 1 or 2 digits
- )
- (?: \]|\))?\s+ # Closing bracket or paren for the timestamp
- |
- # weechat
- \d{4}\s\w{3}\s\d{2}\s # Date
- \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
- |
- # xchat
- \w{3}\s\d{2}\s # Date
- \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
- )?
- """
- tokens = {
- 'root': [
- # log start/end
- (r'^\*\*\*\*(.*)\*\*\*\*$', Comment),
- # hack
- ("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
- # normal msgs
- ("^" + timestamp + r"""
- (\s*<.*?>\s*) # Nick """,
- bygroups(Comment.Preproc, Name.Tag), 'msg'),
- # /me msgs
- ("^" + timestamp + r"""
- (\s*[*]\s+) # Star
- (\S+\s+.*?\n) # Nick + rest of message """,
- bygroups(Comment.Preproc, Keyword, Generic.Inserted)),
- # join/part msgs
- ("^" + timestamp + r"""
- (\s*(?:\*{3}|<?-[!@=P]?->?)\s*) # Star(s) or symbols
- (\S+\s+) # Nick + Space
- (.*?\n) # Rest of message """,
- bygroups(Comment.Preproc, Keyword, String, Comment)),
- (r"^.*?\n", Text),
- ],
- 'msg': [
- (r"\S+:(?!//)", Name.Attribute), # Prefix
- (r".*\n", Text, '#pop'),
- ],
- }
-
-
-class BBCodeLexer(RegexLexer):
- """
- A lexer that highlights BBCode(-like) syntax.
-
- *New in Pygments 0.6.*
- """
-
- name = 'BBCode'
- aliases = ['bbcode']
- mimetypes = ['text/x-bbcode']
-
- tokens = {
- 'root': [
- (r'[^[]+', Text),
- # tag/end tag begin
- (r'\[/?\w+', Keyword, 'tag'),
- # stray bracket
- (r'\[', Text),
- ],
- 'tag': [
- (r'\s+', Text),
- # attribute with value
- (r'(\w+)(=)("?[^\s"\]]+"?)',
- bygroups(Name.Attribute, Operator, String)),
- # tag argument (a la [color=green])
- (r'(=)("?[^\s"\]]+"?)',
- bygroups(Operator, String)),
- # tag end
- (r'\]', Keyword, '#pop'),
- ],
- }
-
-
-class TexLexer(RegexLexer):
- """
- Lexer for the TeX and LaTeX typesetting languages.
- """
-
- name = 'TeX'
- aliases = ['tex', 'latex']
- filenames = ['*.tex', '*.aux', '*.toc']
- mimetypes = ['text/x-tex', 'text/x-latex']
-
- tokens = {
- 'general': [
- (r'%.*?\n', Comment),
- (r'[{}]', Name.Builtin),
- (r'[&_^]', Name.Builtin),
- ],
- 'root': [
- (r'\\\[', String.Backtick, 'displaymath'),
- (r'\\\(', String, 'inlinemath'),
- (r'\$\$', String.Backtick, 'displaymath'),
- (r'\$', String, 'inlinemath'),
- (r'\\([a-zA-Z]+|.)', Keyword, 'command'),
- include('general'),
- (r'[^\\$%&_^{}]+', Text),
- ],
- 'math': [
- (r'\\([a-zA-Z]+|.)', Name.Variable),
- include('general'),
- (r'[0-9]+', Number),
- (r'[-=!+*/()\[\]]', Operator),
- (r'[^=!+*/()\[\]\\$%&_^{}0-9-]+', Name.Builtin),
- ],
- 'inlinemath': [
- (r'\\\)', String, '#pop'),
- (r'\$', String, '#pop'),
- include('math'),
- ],
- 'displaymath': [
- (r'\\\]', String, '#pop'),
- (r'\$\$', String, '#pop'),
- (r'\$', Name.Builtin),
- include('math'),
- ],
- 'command': [
- (r'\[.*?\]', Name.Attribute),
- (r'\*', Keyword),
- (r'', Text, '#pop'),
- ],
- }
-
- def analyse_text(text):
- for start in ("\\documentclass", "\\input", "\\documentstyle",
- "\\relax"):
- if text[:len(start)] == start:
- return True
-
-
-class GroffLexer(RegexLexer):
- """
- Lexer for the (g)roff typesetting language, supporting groff
- extensions. Mainly useful for highlighting manpage sources.
-
- *New in Pygments 0.6.*
- """
-
- name = 'Groff'
- aliases = ['groff', 'nroff', 'man']
- filenames = ['*.[1234567]', '*.man']
- mimetypes = ['application/x-troff', 'text/troff']
-
- tokens = {
- 'root': [
- (r'(\.)(\w+)', bygroups(Text, Keyword), 'request'),
- (r'\.', Punctuation, 'request'),
- # Regular characters, slurp till we find a backslash or newline
- (r'[^\\\n]*', Text, 'textline'),
- ],
- 'textline': [
- include('escapes'),
- (r'[^\\\n]+', Text),
- (r'\n', Text, '#pop'),
- ],
- 'escapes': [
- # groff has many ways to write escapes.
- (r'\\"[^\n]*', Comment),
- (r'\\[fn]\w', String.Escape),
- (r'\\\(.{2}', String.Escape),
- (r'\\.\[.*\]', String.Escape),
- (r'\\.', String.Escape),
- (r'\\\n', Text, 'request'),
- ],
- 'request': [
- (r'\n', Text, '#pop'),
- include('escapes'),
- (r'"[^\n"]+"', String.Double),
- (r'\d+', Number),
- (r'\S+', String),
- (r'\s+', Text),
- ],
- }
-
- def analyse_text(text):
- if text[:1] != '.':
- return False
- if text[:3] == '.\\"':
- return True
- if text[:4] == '.TH ':
- return True
- if text[1:3].isalnum() and text[3].isspace():
- return 0.9
-
-
-class ApacheConfLexer(RegexLexer):
- """
- Lexer for configuration files following the Apache config file
- format.
-
- *New in Pygments 0.6.*
- """
-
- name = 'ApacheConf'
- aliases = ['apacheconf', 'aconf', 'apache']
- filenames = ['.htaccess', 'apache.conf', 'apache2.conf']
- mimetypes = ['text/x-apacheconf']
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'(#.*?)$', Comment),
- (r'(<[^\s>]+)(?:(\s+)(.*?))?(>)',
- bygroups(Name.Tag, Text, String, Name.Tag)),
- (r'([a-zA-Z][a-zA-Z0-9_]*)(\s+)',
- bygroups(Name.Builtin, Text), 'value'),
- (r'\.+', Text),
- ],
- 'value': [
- (r'$', Text, '#pop'),
- (r'[^\S\n]+', Text),
- (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
- (r'\d+', Number),
- (r'/([a-zA-Z0-9][a-zA-Z0-9_./-]+)', String.Other),
- (r'(on|off|none|any|all|double|email|dns|min|minimal|'
- r'os|productonly|full|emerg|alert|crit|error|warn|'
- r'notice|info|debug|registry|script|inetd|standalone|'
- r'user|group)\b', Keyword),
- (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
- (r'[^\s"]+', Text)
- ]
- }
-
-
-class MoinWikiLexer(RegexLexer):
- """
- For MoinMoin (and Trac) Wiki markup.
-
- *New in Pygments 0.7.*
- """
-
- name = 'MoinMoin/Trac Wiki markup'
- aliases = ['trac-wiki', 'moin']
- filenames = []
- mimetypes = ['text/x-trac-wiki']
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'^#.*$', Comment),
- (r'(!)(\S+)', bygroups(Keyword, Text)), # Ignore-next
- # Titles
- (r'^(=+)([^=]+)(=+)(\s*#.+)?$',
- bygroups(Generic.Heading, using(this), Generic.Heading, String)),
- # Literal code blocks, with optional shebang
- (r'({{{)(\n#!.+)?', bygroups(Name.Builtin, Name.Namespace), 'codeblock'),
- (r'(\'\'\'?|\|\||`|__|~~|\^|,,|::)', Comment), # Formatting
- # Lists
- (r'^( +)([.*-])( )', bygroups(Text, Name.Builtin, Text)),
- (r'^( +)([a-z]{1,5}\.)( )', bygroups(Text, Name.Builtin, Text)),
- # Other Formatting
- (r'\[\[\w+.*?\]\]', Keyword), # Macro
- (r'(\[[^\s\]]+)(\s+[^\]]+?)?(\])',
- bygroups(Keyword, String, Keyword)), # Link
- (r'^----+$', Keyword), # Horizontal rules
- (r'[^\n\'\[{!_~^,|]+', Text),
- (r'\n', Text),
- (r'.', Text),
- ],
- 'codeblock': [
- (r'}}}', Name.Builtin, '#pop'),
- # these blocks are allowed to be nested in Trac, but not MoinMoin
- (r'{{{', Text, '#push'),
- (r'[^{}]+', Comment.Preproc), # slurp boring text
- (r'.', Comment.Preproc), # allow loose { or }
- ],
- }
-
-
-class RstLexer(RegexLexer):
- """
- For `reStructuredText <http://docutils.sf.net/rst.html>`_ markup.
-
- *New in Pygments 0.7.*
-
- Additional options accepted:
-
- `handlecodeblocks`
- Highlight the contents of ``.. sourcecode:: langauge`` and
- ``.. code:: language`` directives with a lexer for the given
- language (default: ``True``). *New in Pygments 0.8.*
- """
- name = 'reStructuredText'
- aliases = ['rst', 'rest', 'restructuredtext']
- filenames = ['*.rst', '*.rest']
- mimetypes = ["text/x-rst", "text/prs.fallenstein.rst"]
- flags = re.MULTILINE
-
- def _handle_sourcecode(self, match):
- from pygments.lexers import get_lexer_by_name
-
- # section header
- yield match.start(1), Punctuation, match.group(1)
- yield match.start(2), Text, match.group(2)
- yield match.start(3), Operator.Word, match.group(3)
- yield match.start(4), Punctuation, match.group(4)
- yield match.start(5), Text, match.group(5)
- yield match.start(6), Keyword, match.group(6)
- yield match.start(7), Text, match.group(7)
-
- # lookup lexer if wanted and existing
- lexer = None
- if self.handlecodeblocks:
- try:
- lexer = get_lexer_by_name(match.group(6).strip())
- except ClassNotFound:
- pass
- indention = match.group(8)
- indention_size = len(indention)
- code = (indention + match.group(9) + match.group(10) + match.group(11))
-
- # no lexer for this language. handle it like it was a code block
- if lexer is None:
- yield match.start(8), String, code
- return
-
- # highlight the lines with the lexer.
- ins = []
- codelines = code.splitlines(True)
- code = ''
- for line in codelines:
- if len(line) > indention_size:
- ins.append((len(code), [(0, Text, line[:indention_size])]))
- code += line[indention_size:]
- else:
- code += line
- for item in do_insertions(ins, lexer.get_tokens_unprocessed(code)):
- yield item
-
- # from docutils.parsers.rst.states
- closers = u'\'")]}>\u2019\u201d\xbb!?'
- unicode_delimiters = u'\u2010\u2011\u2012\u2013\u2014\u00a0'
- end_string_suffix = (r'((?=$)|(?=[-/:.,; \n\x00%s%s]))'
- % (re.escape(unicode_delimiters),
- re.escape(closers)))
-
- tokens = {
- 'root': [
- # Heading with overline
- (r'^(=+|-+|`+|:+|\.+|\'+|"+|~+|\^+|_+|\*+|\++|#+)([ \t]*\n)'
- r'(.+)(\n)(\1)(\n)',
- bygroups(Generic.Heading, Text, Generic.Heading,
- Text, Generic.Heading, Text)),
- # Plain heading
- (r'^(\S.*)(\n)(={3,}|-{3,}|`{3,}|:{3,}|\.{3,}|\'{3,}|"{3,}|'
- r'~{3,}|\^{3,}|_{3,}|\*{3,}|\+{3,}|#{3,})(\n)',
- bygroups(Generic.Heading, Text, Generic.Heading, Text)),
- # Bulleted lists
- (r'^(\s*)([-*+])( .+\n(?:\1 .+\n)*)',
- bygroups(Text, Number, using(this, state='inline'))),
- # Numbered lists
- (r'^(\s*)([0-9#ivxlcmIVXLCM]+\.)( .+\n(?:\1 .+\n)*)',
- bygroups(Text, Number, using(this, state='inline'))),
- (r'^(\s*)(\(?[0-9#ivxlcmIVXLCM]+\))( .+\n(?:\1 .+\n)*)',
- bygroups(Text, Number, using(this, state='inline'))),
- # Numbered, but keep words at BOL from becoming lists
- (r'^(\s*)([A-Z]+\.)( .+\n(?:\1 .+\n)+)',
- bygroups(Text, Number, using(this, state='inline'))),
- (r'^(\s*)(\(?[A-Za-z]+\))( .+\n(?:\1 .+\n)+)',
- bygroups(Text, Number, using(this, state='inline'))),
- # Line blocks
- (r'^(\s*)(\|)( .+\n(?:\| .+\n)*)',
- bygroups(Text, Operator, using(this, state='inline'))),
- # Sourcecode directives
- (r'^( *\.\.)(\s*)((?:source)?code)(::)([ \t]*)([^\n]+)'
- r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*|)\n)+)',
- _handle_sourcecode),
- # A directive
- (r'^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
- bygroups(Punctuation, Text, Operator.Word, Punctuation, Text,
- using(this, state='inline'))),
- # A reference target
- (r'^( *\.\.)(\s*)(_(?:[^:\\]|\\.)+:)(.*?)$',
- bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
- # A footnote/citation target
- (r'^( *\.\.)(\s*)(\[.+\])(.*?)$',
- bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
- # A substitution def
- (r'^( *\.\.)(\s*)(\|.+\|)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
- bygroups(Punctuation, Text, Name.Tag, Text, Operator.Word,
- Punctuation, Text, using(this, state='inline'))),
- # Comments
- (r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc),
- # Field list
- (r'^( *)(:[a-zA-Z-]+:)(\s*)$', bygroups(Text, Name.Class, Text)),
- (r'^( *)(:.*?:)([ \t]+)(.*?)$',
- bygroups(Text, Name.Class, Text, Name.Function)),
- # Definition list
- (r'^([^ ].*(?<!::)\n)((?:(?: +.*)\n)+)',
- bygroups(using(this, state='inline'), using(this, state='inline'))),
- # Code blocks
- (r'(::)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\3.*|)\n)+)',
- bygroups(String.Escape, Text, String, String, Text, String)),
- include('inline'),
- ],
- 'inline': [
- (r'\\.', Text), # escape
- (r'``', String, 'literal'), # code
- (r'(`.+?)(<.+?>)(`__?)', # reference with inline target
- bygroups(String, String.Interpol, String)),
- (r'`.+?`__?', String), # reference
- (r'(`.+?`)(:[a-zA-Z0-9:-]+?:)?',
- bygroups(Name.Variable, Name.Attribute)), # role
- (r'(:[a-zA-Z0-9:-]+?:)(`.+?`)',
- bygroups(Name.Attribute, Name.Variable)), # role (content first)
- (r'\*\*.+?\*\*', Generic.Strong), # Strong emphasis
- (r'\*.+?\*', Generic.Emph), # Emphasis
- (r'\[.*?\]_', String), # Footnote or citation
- (r'<.+?>', Name.Tag), # Hyperlink
- (r'[^\\\n\[*`:]+', Text),
- (r'.', Text),
- ],
- 'literal': [
- (r'[^`]+', String),
- (r'``' + end_string_suffix, String, '#pop'),
- (r'`', String),
- ]
- }
-
- def __init__(self, **options):
- self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
- RegexLexer.__init__(self, **options)
-
- def analyse_text(text):
- if text[:2] == '..' and text[2:3] != '.':
- return 0.3
- p1 = text.find("\n")
- p2 = text.find("\n", p1 + 1)
- if (p2 > -1 and # has two lines
- p1 * 2 + 1 == p2 and # they are the same length
- text[p1+1] in '-=' and # the next line both starts and ends with
- text[p1+1] == text[p2-1]): # ...a sufficiently high header
- return 0.5
-
-
-class VimLexer(RegexLexer):
- """
- Lexer for VimL script files.
-
- *New in Pygments 0.8.*
- """
- name = 'VimL'
- aliases = ['vim']
- filenames = ['*.vim', '.vimrc', '.exrc', '.gvimrc',
- '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc']
- mimetypes = ['text/x-vim']
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- (r'^\s*".*', Comment),
-
- (r'[ \t]+', Text),
- # TODO: regexes can have other delims
- (r'/(\\\\|\\/|[^\n/])*/', String.Regex),
- (r'"(\\\\|\\"|[^\n"])*"', String.Double),
- (r"'(\\\\|\\'|[^\n'])*'", String.Single),
-
- # Who decided that doublequote was a good comment character??
- (r'(?<=\s)"[^\-:.%#=*].*', Comment),
- (r'-?\d+', Number),
- (r'#[0-9a-f]{6}', Number.Hex),
- (r'^:', Punctuation),
- (r'[()<>+=!|,~-]', Punctuation), # Inexact list. Looks decent.
- (r'\b(let|if|else|endif|elseif|fun|function|endfunction)\b',
- Keyword),
- (r'\b(NONE|bold|italic|underline|dark|light)\b', Name.Builtin),
- (r'\b\w+\b', Name.Other), # These are postprocessed below
- (r'.', Text),
- ],
- }
- def __init__(self, **options):
- from pygments.lexers._vimbuiltins import command, option, auto
- self._cmd = command
- self._opt = option
- self._aut = auto
-
- RegexLexer.__init__(self, **options)
-
- def is_in(self, w, mapping):
- r"""
- It's kind of difficult to decide if something might be a keyword
- in VimL because it allows you to abbreviate them. In fact,
- 'ab[breviate]' is a good example. :ab, :abbre, or :abbreviate are
- valid ways to call it so rather than making really awful regexps
- like::
-
- \bab(?:b(?:r(?:e(?:v(?:i(?:a(?:t(?:e)?)?)?)?)?)?)?)?\b
-
- we match `\b\w+\b` and then call is_in() on those tokens. See
- `scripts/get_vimkw.py` for how the lists are extracted.
- """
- p = bisect(mapping, (w,))
- if p > 0:
- if mapping[p-1][0] == w[:len(mapping[p-1][0])] and \
- mapping[p-1][1][:len(w)] == w: return True
- if p < len(mapping):
- return mapping[p][0] == w[:len(mapping[p][0])] and \
- mapping[p][1][:len(w)] == w
- return False
-
- def get_tokens_unprocessed(self, text):
- # TODO: builtins are only subsequent tokens on lines
- # and 'keywords' only happen at the beginning except
- # for :au ones
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name.Other:
- if self.is_in(value, self._cmd):
- yield index, Keyword, value
- elif self.is_in(value, self._opt) or \
- self.is_in(value, self._aut):
- yield index, Name.Builtin, value
- else:
- yield index, Text, value
- else:
- yield index, token, value
-
-
-class GettextLexer(RegexLexer):
- """
- Lexer for Gettext catalog files.
-
- *New in Pygments 0.9.*
- """
- name = 'Gettext Catalog'
- aliases = ['pot', 'po']
- filenames = ['*.pot', '*.po']
- mimetypes = ['application/x-gettext', 'text/x-gettext', 'text/gettext']
-
- tokens = {
- 'root': [
- (r'^#,\s.*?$', Keyword.Type),
- (r'^#:\s.*?$', Keyword.Declaration),
- #(r'^#$', Comment),
- (r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single),
- (r'^(")([A-Za-z-]+:)(.*")$',
- bygroups(String, Name.Property, String)),
- (r'^".*"$', String),
- (r'^(msgid|msgid_plural|msgstr)(\s+)(".*")$',
- bygroups(Name.Variable, Text, String)),
- (r'^(msgstr\[)(\d)(\])(\s+)(".*")$',
- bygroups(Name.Variable, Number.Integer, Name.Variable, Text, String)),
- ]
- }
-
-
-class SquidConfLexer(RegexLexer):
- """
- Lexer for `squid <http://www.squid-cache.org/>`_ configuration files.
-
- *New in Pygments 0.9.*
- """
-
- name = 'SquidConf'
- aliases = ['squidconf', 'squid.conf', 'squid']
- filenames = ['squid.conf']
- mimetypes = ['text/x-squidconf']
- flags = re.IGNORECASE
-
- keywords = [
- "access_log", "acl", "always_direct", "announce_host",
- "announce_period", "announce_port", "announce_to", "anonymize_headers",
- "append_domain", "as_whois_server", "auth_param_basic",
- "authenticate_children", "authenticate_program", "authenticate_ttl",
- "broken_posts", "buffered_logs", "cache_access_log", "cache_announce",
- "cache_dir", "cache_dns_program", "cache_effective_group",
- "cache_effective_user", "cache_host", "cache_host_acl",
- "cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
- "cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
- "cache_peer_access", "cahce_replacement_policy", "cache_stoplist",
- "cache_stoplist_pattern", "cache_store_log", "cache_swap",
- "cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
- "client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
- "dead_peer_timeout", "debug_options", "delay_access", "delay_class",
- "delay_initial_bucket_level", "delay_parameters", "delay_pools",
- "deny_info", "dns_children", "dns_defnames", "dns_nameservers",
- "dns_testnames", "emulate_httpd_log", "err_html_text",
- "fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port",
- "fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width",
- "ftp_passive", "ftp_user", "half_closed_clients", "header_access",
- "header_replace", "hierarchy_stoplist", "high_response_time_warning",
- "high_page_fault_warning", "hosts_file", "htcp_port", "http_access",
- "http_anonymizer", "httpd_accel", "httpd_accel_host",
- "httpd_accel_port", "httpd_accel_uses_host_header",
- "httpd_accel_with_proxy", "http_port", "http_reply_access",
- "icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout",
- "ident_lookup", "ident_lookup_access", "ident_timeout",
- "incoming_http_average", "incoming_icp_average", "inside_firewall",
- "ipcache_high", "ipcache_low", "ipcache_size", "local_domain",
- "local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries",
- "log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries",
- "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
- "mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
- "memory_pools_limit", "memory_replacement_policy", "mime_table",
- "min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops",
- "minimum_object_size", "minimum_retry_timeout", "miss_access",
- "negative_dns_ttl", "negative_ttl", "neighbor_timeout",
- "neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period",
- "netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy",
- "pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl",
- "prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp",
- "quick_abort", "quick_abort", "quick_abort_max", "quick_abort_min",
- "quick_abort_pct", "range_offset_limit", "read_timeout",
- "redirect_children", "redirect_program",
- "redirect_rewrites_host_header", "reference_age", "reference_age",
- "refresh_pattern", "reload_into_ims", "request_body_max_size",
- "request_size", "request_timeout", "shutdown_lifetime",
- "single_parent_bypass", "siteselect_timeout", "snmp_access",
- "snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy",
- "store_avg_object_size", "store_objects_per_bucket",
- "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
- "tcp_incoming_address", "tcp_outgoing_address", "tcp_recv_bufsize",
- "test_reachability", "udp_hit_obj", "udp_hit_obj_size",
- "udp_incoming_address", "udp_outgoing_address", "unique_hostname",
- "unlinkd_program", "uri_whitespace", "useragent_log",
- "visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port",
- ]
-
- opts = [
- "proxy-only", "weight", "ttl", "no-query", "default", "round-robin",
- "multicast-responder", "on", "off", "all", "deny", "allow", "via",
- "parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2",
- "credentialsttl", "none", "disable", "offline_toggle", "diskd",
- ]
-
- actions = [
- "shutdown", "info", "parameter", "server_list", "client_list",
- r'squid\.conf',
- ]
-
- actions_stats = [
- "objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns",
- "redirector", "io", "reply_headers", "filedescriptors", "netdb",
- ]
-
- actions_log = ["status", "enable", "disable", "clear"]
-
- acls = [
- "url_regex", "urlpath_regex", "referer_regex", "port", "proto",
- "req_mime_type", "rep_mime_type", "method", "browser", "user", "src",
- "dst", "time", "dstdomain", "ident", "snmp_community",
- ]
-
- ip_re = (
- r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|'
- r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|'
- r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|'
- r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}'
- r'(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|'
- r'(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|'
- r'[1-9]?\d)){3}))'
- )
-
- def makelistre(list):
- return r'\b(?:' + '|'.join(list) + r')\b'
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'#', Comment, 'comment'),
- (makelistre(keywords), Keyword),
- (makelistre(opts), Name.Constant),
- # Actions
- (makelistre(actions), String),
- (r'stats/'+makelistre(actions), String),
- (r'log/'+makelistre(actions)+r'=', String),
- (makelistre(acls), Keyword),
- (ip_re + r'(?:/(?:' + ip_re + r'|\b\d+\b))?', Number.Float),
- (r'(?:\b\d+\b(?:-\b\d+|%)?)', Number),
- (r'\S+', Text),
- ],
- 'comment': [
- (r'\s*TAG:.*', String.Escape, '#pop'),
- (r'.*', Comment, '#pop'),
- ],
- }
-
-
-class DebianControlLexer(RegexLexer):
- """
- Lexer for Debian ``control`` files and ``apt-cache show <pkg>`` outputs.
-
- *New in Pygments 0.9.*
- """
- name = 'Debian Control file'
- aliases = ['control', 'debcontrol']
- filenames = ['control']
-
- tokens = {
- 'root': [
- (r'^(Description)', Keyword, 'description'),
- (r'^(Maintainer)(:\s*)', bygroups(Keyword, Text), 'maintainer'),
- (r'^((Build-)?Depends)', Keyword, 'depends'),
- (r'^((?:Python-)?Version)(:\s*)(\S+)$',
- bygroups(Keyword, Text, Number)),
- (r'^((?:Installed-)?Size)(:\s*)(\S+)$',
- bygroups(Keyword, Text, Number)),
- (r'^(MD5Sum|SHA1|SHA256)(:\s*)(\S+)$',
- bygroups(Keyword, Text, Number)),
- (r'^([a-zA-Z\-0-9\.]*?)(:\s*)(.*?)$',
- bygroups(Keyword, Whitespace, String)),
- ],
- 'maintainer': [
- (r'<[^>]+>', Generic.Strong),
- (r'<[^>]+>$', Generic.Strong, '#pop'),
- (r',\n?', Text),
- (r'.', Text),
- ],
- 'description': [
- (r'(.*)(Homepage)(: )(\S+)',
- bygroups(Text, String, Name, Name.Class)),
- (r':.*\n', Generic.Strong),
- (r' .*\n', Text),
- ('', Text, '#pop'),
- ],
- 'depends': [
- (r':\s*', Text),
- (r'(\$)(\{)(\w+\s*:\s*\w+)', bygroups(Operator, Text, Name.Entity)),
- (r'\(', Text, 'depend_vers'),
- (r',', Text),
- (r'\|', Operator),
- (r'[\s]+', Text),
- (r'[}\)]\s*$', Text, '#pop'),
- (r'}', Text),
- (r'[^,]$', Name.Function, '#pop'),
- (r'([\+\.a-zA-Z0-9-])(\s*)', bygroups(Name.Function, Text)),
- (r'\[.*?\]', Name.Entity),
- ],
- 'depend_vers': [
- (r'\),', Text, '#pop'),
- (r'\)[^,]', Text, '#pop:2'),
- (r'([><=]+)(\s*)([^\)]+)', bygroups(Operator, Text, Number))
- ]
- }
-
-
-class YamlLexerContext(LexerContext):
- """Indentation context for the YAML lexer."""
-
- def __init__(self, *args, **kwds):
- super(YamlLexerContext, self).__init__(*args, **kwds)
- self.indent_stack = []
- self.indent = -1
- self.next_indent = 0
- self.block_scalar_indent = None
-
-
-class YamlLexer(ExtendedRegexLexer):
- """
- Lexer for `YAML <http://yaml.org/>`_, a human-friendly data serialization
- language.
-
- *New in Pygments 0.11.*
- """
-
- name = 'YAML'
- aliases = ['yaml']
- filenames = ['*.yaml', '*.yml']
- mimetypes = ['text/x-yaml']
-
-
- def something(token_class):
- """Do not produce empty tokens."""
- def callback(lexer, match, context):
- text = match.group()
- if not text:
- return
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def reset_indent(token_class):
- """Reset the indentation levels."""
- def callback(lexer, match, context):
- text = match.group()
- context.indent_stack = []
- context.indent = -1
- context.next_indent = 0
- context.block_scalar_indent = None
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def save_indent(token_class, start=False):
- """Save a possible indentation level."""
- def callback(lexer, match, context):
- text = match.group()
- extra = ''
- if start:
- context.next_indent = len(text)
- if context.next_indent < context.indent:
- while context.next_indent < context.indent:
- context.indent = context.indent_stack.pop()
- if context.next_indent > context.indent:
- extra = text[context.indent:]
- text = text[:context.indent]
- else:
- context.next_indent += len(text)
- if text:
- yield match.start(), token_class, text
- if extra:
- yield match.start()+len(text), token_class.Error, extra
- context.pos = match.end()
- return callback
-
- def set_indent(token_class, implicit=False):
- """Set the previously saved indentation level."""
- def callback(lexer, match, context):
- text = match.group()
- if context.indent < context.next_indent:
- context.indent_stack.append(context.indent)
- context.indent = context.next_indent
- if not implicit:
- context.next_indent += len(text)
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def set_block_scalar_indent(token_class):
- """Set an explicit indentation level for a block scalar."""
- def callback(lexer, match, context):
- text = match.group()
- context.block_scalar_indent = None
- if not text:
- return
- increment = match.group(1)
- if increment:
- current_indent = max(context.indent, 0)
- increment = int(increment)
- context.block_scalar_indent = current_indent + increment
- if text:
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def parse_block_scalar_empty_line(indent_token_class, content_token_class):
- """Process an empty line in a block scalar."""
- def callback(lexer, match, context):
- text = match.group()
- if (context.block_scalar_indent is None or
- len(text) <= context.block_scalar_indent):
- if text:
- yield match.start(), indent_token_class, text
- else:
- indentation = text[:context.block_scalar_indent]
- content = text[context.block_scalar_indent:]
- yield match.start(), indent_token_class, indentation
- yield (match.start()+context.block_scalar_indent,
- content_token_class, content)
- context.pos = match.end()
- return callback
-
- def parse_block_scalar_indent(token_class):
- """Process indentation spaces in a block scalar."""
- def callback(lexer, match, context):
- text = match.group()
- if context.block_scalar_indent is None:
- if len(text) <= max(context.indent, 0):
- context.stack.pop()
- context.stack.pop()
- return
- context.block_scalar_indent = len(text)
- else:
- if len(text) < context.block_scalar_indent:
- context.stack.pop()
- context.stack.pop()
- return
- if text:
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def parse_plain_scalar_indent(token_class):
- """Process indentation spaces in a plain scalar."""
- def callback(lexer, match, context):
- text = match.group()
- if len(text) <= context.indent:
- context.stack.pop()
- context.stack.pop()
- return
- if text:
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
-
-
- tokens = {
- # the root rules
- 'root': [
- # ignored whitespaces
- (r'[ ]+(?=#|$)', Text),
- # line breaks
- (r'\n+', Text),
- # a comment
- (r'#[^\n]*', Comment.Single),
- # the '%YAML' directive
- (r'^%YAML(?=[ ]|$)', reset_indent(Name.Tag), 'yaml-directive'),
- # the %TAG directive
- (r'^%TAG(?=[ ]|$)', reset_indent(Name.Tag), 'tag-directive'),
- # document start and document end indicators
- (r'^(?:---|\.\.\.)(?=[ ]|$)', reset_indent(Name.Namespace),
- 'block-line'),
- # indentation spaces
- (r'[ ]*(?![ \t\n\r\f\v]|$)', save_indent(Text, start=True),
- ('block-line', 'indentation')),
- ],
-
- # trailing whitespaces after directives or a block scalar indicator
- 'ignored-line': [
- # ignored whitespaces
- (r'[ ]+(?=#|$)', Text),
- # a comment
- (r'#[^\n]*', Comment.Single),
- # line break
- (r'\n', Text, '#pop:2'),
- ],
-
- # the %YAML directive
- 'yaml-directive': [
- # the version number
- (r'([ ]+)([0-9]+\.[0-9]+)',
- bygroups(Text, Number), 'ignored-line'),
- ],
-
-        # the %TAG directive
- 'tag-directive': [
- # a tag handle and the corresponding prefix
- (r'([ ]+)(!|![0-9A-Za-z_-]*!)'
- r'([ ]+)(!|!?[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)',
- bygroups(Text, Keyword.Type, Text, Keyword.Type),
- 'ignored-line'),
- ],
-
- # block scalar indicators and indentation spaces
- 'indentation': [
- # trailing whitespaces are ignored
- (r'[ ]*$', something(Text), '#pop:2'),
-            # whitespaces preceding block collection indicators
- (r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text)),
- # block collection indicators
- (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)),
-            # the beginning of a block line
- (r'[ ]*', save_indent(Text), '#pop'),
- ],
-
- # an indented line in the block context
- 'block-line': [
- # the line end
- (r'[ ]*(?=#|$)', something(Text), '#pop'),
- # whitespaces separating tokens
- (r'[ ]+', Text),
- # tags, anchors and aliases,
- include('descriptors'),
- # block collections and scalars
- include('block-nodes'),
- # flow collections and quoted scalars
- include('flow-nodes'),
- # a plain scalar
- (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`-]|[?:-][^ \t\n\r\f\v])',
- something(Name.Variable),
- 'plain-scalar-in-block-context'),
- ],
-
- # tags, anchors, aliases
- 'descriptors' : [
- # a full-form tag
- (r'!<[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+>', Keyword.Type),
- # a tag in the form '!', '!suffix' or '!handle!suffix'
- (r'!(?:[0-9A-Za-z_-]+)?'
- r'(?:![0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)?', Keyword.Type),
- # an anchor
- (r'&[0-9A-Za-z_-]+', Name.Label),
- # an alias
- (r'\*[0-9A-Za-z_-]+', Name.Variable),
- ],
-
- # block collections and scalars
- 'block-nodes': [
- # implicit key
- (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)),
- # literal and folded scalars
- (r'[|>]', Punctuation.Indicator,
- ('block-scalar-content', 'block-scalar-header')),
- ],
-
- # flow collections and quoted scalars
- 'flow-nodes': [
- # a flow sequence
- (r'\[', Punctuation.Indicator, 'flow-sequence'),
- # a flow mapping
- (r'\{', Punctuation.Indicator, 'flow-mapping'),
- # a single-quoted scalar
- (r'\'', String, 'single-quoted-scalar'),
- # a double-quoted scalar
- (r'\"', String, 'double-quoted-scalar'),
- ],
-
- # the content of a flow collection
- 'flow-collection': [
- # whitespaces
- (r'[ ]+', Text),
- # line breaks
- (r'\n+', Text),
- # a comment
- (r'#[^\n]*', Comment.Single),
- # simple indicators
- (r'[?:,]', Punctuation.Indicator),
- # tags, anchors and aliases
- include('descriptors'),
- # nested collections and quoted scalars
- include('flow-nodes'),
- # a plain scalar
- (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`])',
- something(Name.Variable),
- 'plain-scalar-in-flow-context'),
- ],
-
- # a flow sequence indicated by '[' and ']'
- 'flow-sequence': [
- # include flow collection rules
- include('flow-collection'),
- # the closing indicator
- (r'\]', Punctuation.Indicator, '#pop'),
- ],
-
- # a flow mapping indicated by '{' and '}'
- 'flow-mapping': [
- # include flow collection rules
- include('flow-collection'),
- # the closing indicator
- (r'\}', Punctuation.Indicator, '#pop'),
- ],
-
- # block scalar lines
- 'block-scalar-content': [
- # line break
- (r'\n', Text),
- # empty line
- (r'^[ ]+$',
- parse_block_scalar_empty_line(Text, Name.Constant)),
- # indentation spaces (we may leave the state here)
- (r'^[ ]*', parse_block_scalar_indent(Text)),
- # line content
- (r'[^\n\r\f\v]+', Name.Constant),
- ],
-
-        # the header of a literal or folded scalar
- 'block-scalar-header': [
- # indentation indicator followed by chomping flag
- (r'([1-9])?[+-]?(?=[ ]|$)',
- set_block_scalar_indent(Punctuation.Indicator),
- 'ignored-line'),
- # chomping flag followed by indentation indicator
- (r'[+-]?([1-9])?(?=[ ]|$)',
- set_block_scalar_indent(Punctuation.Indicator),
- 'ignored-line'),
- ],
-
- # ignored and regular whitespaces in quoted scalars
- 'quoted-scalar-whitespaces': [
- # leading and trailing whitespaces are ignored
- (r'^[ ]+', Text),
- (r'[ ]+$', Text),
- # line breaks are ignored
- (r'\n+', Text),
- # other whitespaces are a part of the value
- (r'[ ]+', Name.Variable),
- ],
-
- # single-quoted scalars
- 'single-quoted-scalar': [
- # include whitespace and line break rules
- include('quoted-scalar-whitespaces'),
- # escaping of the quote character
- (r'\'\'', String.Escape),
- # regular non-whitespace characters
- (r'[^ \t\n\r\f\v\']+', String),
- # the closing quote
- (r'\'', String, '#pop'),
- ],
-
- # double-quoted scalars
- 'double-quoted-scalar': [
- # include whitespace and line break rules
- include('quoted-scalar-whitespaces'),
- # escaping of special characters
- (r'\\[0abt\tn\nvfre "\\N_LP]', String),
- # escape codes
- (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})',
- String.Escape),
- # regular non-whitespace characters
- (r'[^ \t\n\r\f\v\"\\]+', String),
- # the closing quote
- (r'"', String, '#pop'),
- ],
-
- # the beginning of a new line while scanning a plain scalar
- 'plain-scalar-in-block-context-new-line': [
- # empty lines
- (r'^[ ]+$', Text),
- # line breaks
- (r'\n+', Text),
- # document start and document end indicators
- (r'^(?=---|\.\.\.)', something(Name.Namespace), '#pop:3'),
- # indentation spaces (we may leave the block line state here)
- (r'^[ ]*', parse_plain_scalar_indent(Text), '#pop'),
- ],
-
- # a plain scalar in the block context
- 'plain-scalar-in-block-context': [
- # the scalar ends with the ':' indicator
- (r'[ ]*(?=:[ ]|:$)', something(Text), '#pop'),
- # the scalar ends with whitespaces followed by a comment
- (r'[ ]+(?=#)', Text, '#pop'),
- # trailing whitespaces are ignored
- (r'[ ]+$', Text),
- # line breaks are ignored
- (r'\n+', Text, 'plain-scalar-in-block-context-new-line'),
- # other whitespaces are a part of the value
- (r'[ ]+', Literal.Scalar.Plain),
- # regular non-whitespace characters
- (r'(?::(?![ \t\n\r\f\v])|[^ \t\n\r\f\v:])+', Literal.Scalar.Plain),
- ],
-
-        # a plain scalar in the flow context
- 'plain-scalar-in-flow-context': [
- # the scalar ends with an indicator character
- (r'[ ]*(?=[,:?\[\]{}])', something(Text), '#pop'),
- # the scalar ends with a comment
- (r'[ ]+(?=#)', Text, '#pop'),
- # leading and trailing whitespaces are ignored
- (r'^[ ]+', Text),
- (r'[ ]+$', Text),
- # line breaks are ignored
- (r'\n+', Text),
- # other whitespaces are a part of the value
- (r'[ ]+', Name.Variable),
- # regular non-whitespace characters
- (r'[^ \t\n\r\f\v,:?\[\]{}]+', Name.Variable),
- ],
-
- }
-
- def get_tokens_unprocessed(self, text=None, context=None):
- if context is None:
- context = YamlLexerContext(text, 0)
- return super(YamlLexer, self).get_tokens_unprocessed(text, context)
-
-
-class LighttpdConfLexer(RegexLexer):
- """
- Lexer for `Lighttpd <http://lighttpd.net/>`_ configuration files.
-
- *New in Pygments 0.11.*
- """
- name = 'Lighttpd configuration file'
- aliases = ['lighty', 'lighttpd']
- filenames = []
- mimetypes = ['text/x-lighttpd-conf']
-
- tokens = {
- 'root': [
- (r'#.*\n', Comment.Single),
- (r'/\S*', Name), # pathname
- (r'[a-zA-Z._-]+', Keyword),
- (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
- (r'[0-9]+', Number),
- (r'=>|=~|\+=|==|=|\+', Operator),
- (r'\$[A-Z]+', Name.Builtin),
- (r'[(){}\[\],]', Punctuation),
- (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
- (r'\s+', Text),
- ],
-
- }
-
-
-class NginxConfLexer(RegexLexer):
- """
- Lexer for `Nginx <http://nginx.net/>`_ configuration files.
-
- *New in Pygments 0.11.*
- """
- name = 'Nginx configuration file'
- aliases = ['nginx']
- filenames = []
- mimetypes = ['text/x-nginx-conf']
-
- tokens = {
- 'root': [
- (r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Text, Name)),
- (r'[^\s;#]+', Keyword, 'stmt'),
- include('base'),
- ],
- 'block': [
- (r'}', Punctuation, '#pop:2'),
- (r'[^\s;#]+', Keyword.Namespace, 'stmt'),
- include('base'),
- ],
- 'stmt': [
- (r'{', Punctuation, 'block'),
- (r';', Punctuation, '#pop'),
- include('base'),
- ],
- 'base': [
- (r'#.*\n', Comment.Single),
- (r'on|off', Name.Constant),
- (r'\$[^\s;#()]+', Name.Variable),
- (r'([a-z0-9.-]+)(:)([0-9]+)',
- bygroups(Name, Punctuation, Number.Integer)),
- (r'[a-z-]+/[a-z-+]+', String), # mimetype
- #(r'[a-zA-Z._-]+', Keyword),
- (r'[0-9]+[km]?\b', Number.Integer),
- (r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Text, String.Regex)),
- (r'[:=~]', Punctuation),
- (r'[^\s;#{}$]+', String), # catch all
- (r'/[^\s;#]*', Name), # pathname
- (r'\s+', Text),
- (r'[$;]', Text), # leftover characters
- ],
- }
-
-
-class CMakeLexer(RegexLexer):
- """
- Lexer for `CMake <http://cmake.org/Wiki/CMake>`_ files.
-
- *New in Pygments 1.2.*
- """
- name = 'CMake'
- aliases = ['cmake']
- filenames = ['*.cmake', 'CMakeLists.txt']
- mimetypes = ['text/x-cmake']
-
- tokens = {
- 'root': [
- #(r'(ADD_CUSTOM_COMMAND|ADD_CUSTOM_TARGET|ADD_DEFINITIONS|'
- # r'ADD_DEPENDENCIES|ADD_EXECUTABLE|ADD_LIBRARY|ADD_SUBDIRECTORY|'
- # r'ADD_TEST|AUX_SOURCE_DIRECTORY|BUILD_COMMAND|BUILD_NAME|'
- # r'CMAKE_MINIMUM_REQUIRED|CONFIGURE_FILE|CREATE_TEST_SOURCELIST|'
- # r'ELSE|ELSEIF|ENABLE_LANGUAGE|ENABLE_TESTING|ENDFOREACH|'
- # r'ENDFUNCTION|ENDIF|ENDMACRO|ENDWHILE|EXEC_PROGRAM|'
- # r'EXECUTE_PROCESS|EXPORT_LIBRARY_DEPENDENCIES|FILE|FIND_FILE|'
- # r'FIND_LIBRARY|FIND_PACKAGE|FIND_PATH|FIND_PROGRAM|FLTK_WRAP_UI|'
- # r'FOREACH|FUNCTION|GET_CMAKE_PROPERTY|GET_DIRECTORY_PROPERTY|'
- # r'GET_FILENAME_COMPONENT|GET_SOURCE_FILE_PROPERTY|'
- # r'GET_TARGET_PROPERTY|GET_TEST_PROPERTY|IF|INCLUDE|'
- # r'INCLUDE_DIRECTORIES|INCLUDE_EXTERNAL_MSPROJECT|'
- # r'INCLUDE_REGULAR_EXPRESSION|INSTALL|INSTALL_FILES|'
- # r'INSTALL_PROGRAMS|INSTALL_TARGETS|LINK_DIRECTORIES|'
- # r'LINK_LIBRARIES|LIST|LOAD_CACHE|LOAD_COMMAND|MACRO|'
- # r'MAKE_DIRECTORY|MARK_AS_ADVANCED|MATH|MESSAGE|OPTION|'
- # r'OUTPUT_REQUIRED_FILES|PROJECT|QT_WRAP_CPP|QT_WRAP_UI|REMOVE|'
- # r'REMOVE_DEFINITIONS|SEPARATE_ARGUMENTS|SET|'
- # r'SET_DIRECTORY_PROPERTIES|SET_SOURCE_FILES_PROPERTIES|'
- # r'SET_TARGET_PROPERTIES|SET_TESTS_PROPERTIES|SITE_NAME|'
- # r'SOURCE_GROUP|STRING|SUBDIR_DEPENDS|SUBDIRS|'
- # r'TARGET_LINK_LIBRARIES|TRY_COMPILE|TRY_RUN|UNSET|'
- # r'USE_MANGLED_MESA|UTILITY_SOURCE|VARIABLE_REQUIRES|'
- # r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|'
- # r'VTK_WRAP_TCL|WHILE|WRITE_FILE|'
- # r'COUNTARGS)\b', Name.Builtin, 'args'),
- (r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
- Punctuation), 'args'),
- include('keywords'),
- include('ws')
- ],
- 'args': [
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
- (r'(\${)(.+?)(})', bygroups(Operator, Name.Variable, Operator)),
- (r'(?s)".*?"', String.Double),
- (r'\\\S+', String),
- (r'[^\)$"# \t\n]+', String),
- (r'\n', Text), # explicitly legal
- include('keywords'),
- include('ws')
- ],
- 'string': [
-
- ],
- 'keywords': [
- (r'\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|'
- r'MSVC70|MSVC71|MSVC80|MSVC90)\b', Keyword),
- ],
- 'ws': [
- (r'[ \t]+', Text),
- (r'#.+\n', Comment),
- ]
- }
-
-
-class HttpLexer(RegexLexer):
- """
- Lexer for HTTP sessions.
-
- *New in Pygments 1.5.*
- """
-
- name = 'HTTP'
- aliases = ['http']
-
- flags = re.DOTALL
-
- def header_callback(self, match):
- if match.group(1).lower() == 'content-type':
- content_type = match.group(5).strip()
- if ';' in content_type:
- content_type = content_type[:content_type.find(';')].strip()
- self.content_type = content_type
- yield match.start(1), Name.Attribute, match.group(1)
- yield match.start(2), Text, match.group(2)
- yield match.start(3), Operator, match.group(3)
- yield match.start(4), Text, match.group(4)
- yield match.start(5), Literal, match.group(5)
- yield match.start(6), Text, match.group(6)
-
- def continuous_header_callback(self, match):
- yield match.start(1), Text, match.group(1)
- yield match.start(2), Literal, match.group(2)
- yield match.start(3), Text, match.group(3)
-
- def content_callback(self, match):
- content_type = getattr(self, 'content_type', None)
- content = match.group()
- offset = match.start()
- if content_type:
- from pygments.lexers import get_lexer_for_mimetype
- try:
- lexer = get_lexer_for_mimetype(content_type)
- except ClassNotFound:
- pass
- else:
- for idx, token, value in lexer.get_tokens_unprocessed(content):
- yield offset + idx, token, value
- return
- yield offset, Text, content
-
- tokens = {
- 'root': [
- (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)'
- r'(HTTP)(/)(1\.[01])(\r?\n|$)',
- bygroups(Name.Function, Text, Name.Namespace, Text,
- Keyword.Reserved, Operator, Number, Text),
- 'headers'),
- (r'(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)',
- bygroups(Keyword.Reserved, Operator, Number, Text, Number,
- Text, Name.Exception, Text),
- 'headers'),
- ],
- 'headers': [
- (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback),
- (r'([\t ]+)([^\r\n]+)(\r?\n|$)', continuous_header_callback),
- (r'\r?\n', Text, 'content')
- ],
- 'content': [
- (r'.+', content_callback)
- ]
- }
-
-
-class PyPyLogLexer(RegexLexer):
- """
- Lexer for PyPy log files.
-
- *New in Pygments 1.5.*
- """
- name = "PyPy Log"
- aliases = ["pypylog", "pypy"]
- filenames = ["*.pypylog"]
- mimetypes = ['application/x-pypylog']
-
- tokens = {
- "root": [
- (r"\[\w+\] {jit-log-.*?$", Keyword, "jit-log"),
- (r"\[\w+\] {jit-backend-counts$", Keyword, "jit-backend-counts"),
- include("extra-stuff"),
- ],
- "jit-log": [
- (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"),
- (r"^\+\d+: ", Comment),
- (r"--end of the loop--", Comment),
- (r"[ifp]\d+", Name),
- (r"ptr\d+", Name),
- (r"(\()(\w+(?:\.\w+)?)(\))",
- bygroups(Punctuation, Name.Builtin, Punctuation)),
- (r"[\[\]=,()]", Punctuation),
- (r"(\d+\.\d+|inf|-inf)", Number.Float),
- (r"-?\d+", Number.Integer),
- (r"'.*'", String),
- (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name),
- (r"<.*?>+", Name.Builtin),
- (r"(label|debug_merge_point|jump|finish)", Name.Class),
- (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|"
- r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|"
- r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|"
- r"int_is_true|"
- r"uint_floordiv|uint_ge|uint_lt|"
- r"float_add|float_sub|float_mul|float_truediv|float_neg|"
- r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|"
- r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|"
- r"cast_int_to_float|cast_float_to_int|"
- r"force_token|quasiimmut_field|same_as|virtual_ref_finish|"
- r"virtual_ref|mark_opaque_ptr|"
- r"call_may_force|call_assembler|call_loopinvariant|"
- r"call_release_gil|call_pure|call|"
- r"new_with_vtable|new_array|newstr|newunicode|new|"
- r"arraylen_gc|"
- r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|"
- r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|"
- r"getfield_gc|getinteriorfield_gc|setinteriorfield_gc|"
- r"getfield_raw|setfield_gc|setfield_raw|"
- r"strgetitem|strsetitem|strlen|copystrcontent|"
- r"unicodegetitem|unicodesetitem|unicodelen|"
- r"guard_true|guard_false|guard_value|guard_isnull|"
- r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|"
- r"guard_not_forced|guard_no_exception|guard_not_invalidated)",
- Name.Builtin),
- include("extra-stuff"),
- ],
- "jit-backend-counts": [
- (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"),
- (r":", Punctuation),
- (r"\d+", Number),
- include("extra-stuff"),
- ],
- "extra-stuff": [
- (r"\s+", Text),
- (r"#.*?$", Comment),
- ],
- }
-
-
-class HxmlLexer(RegexLexer):
- """
- Lexer for `haXe build <http://haxe.org/doc/compiler>`_ files.
-
- *New in Pygments 1.6.*
- """
- name = 'Hxml'
- aliases = ['haxeml', 'hxml']
- filenames = ['*.hxml']
-
- tokens = {
- 'root': [
-            # Separator
- (r'(--)(next)', bygroups(Punctuation, Generic.Heading)),
- # Compiler switches with one dash
- (r'(-)(prompt|debug|v)', bygroups(Punctuation, Keyword.Keyword)),
-            # Compiler switches with two dashes
- (r'(--)(neko-source|flash-strict|flash-use-stage|no-opt|no-traces|'
- r'no-inline|times|no-output)', bygroups(Punctuation, Keyword)),
- # Targets and other options that take an argument
- (r'(-)(cpp|js|neko|x|as3|swf9?|swf-lib|php|xml|main|lib|D|resource|'
- r'cp|cmd)( +)(.+)',
- bygroups(Punctuation, Keyword, Whitespace, String)),
- # Options that take only numerical arguments
-            (r'(-)(swf-version)( +)(\d+)',
-             bygroups(Punctuation, Keyword, Whitespace, Number.Integer)),
-            # An option that defines the size, the fps and the background
-            # color of a Flash movie
- (r'(-)(swf-header)( +)(\d+)(:)(\d+)(:)(\d+)(:)([A-Fa-f0-9]{6})',
- bygroups(Punctuation, Keyword, Whitespace, Number.Integer,
- Punctuation, Number.Integer, Punctuation, Number.Integer,
- Punctuation, Number.Hex)),
-            # options with two dashes that take arguments
- (r'(--)(js-namespace|php-front|php-lib|remap|gen-hx-classes)( +)'
- r'(.+)', bygroups(Punctuation, Keyword, Whitespace, String)),
- # Single line comment, multiline ones are not allowed.
- (r'#.*', Comment.Single)
- ]
- }
-
-
-class EbnfLexer(RegexLexer):
- """
- Lexer for `ISO/IEC 14977 EBNF
- <http://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form>`_
- grammars.
-
- *New in Pygments 1.7.*
- """
-
- name = 'EBNF'
- aliases = ['ebnf']
- filenames = ['*.ebnf']
- mimetypes = ['text/x-ebnf']
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comment_start'),
- include('identifier'),
- (r'=', Operator, 'production'),
- ],
- 'production': [
- include('whitespace'),
- include('comment_start'),
- include('identifier'),
- (r'"[^"]*"', String.Double),
- (r"'[^']*'", String.Single),
- (r'(\?[^?]*\?)', Name.Entity),
- (r'[\[\]{}(),|]', Punctuation),
- (r'-', Operator),
- (r';', Punctuation, '#pop'),
- ],
- 'whitespace': [
- (r'\s+', Text),
- ],
- 'comment_start': [
- (r'\(\*', Comment.Multiline, 'comment'),
- ],
- 'comment': [
- (r'[^*)]', Comment.Multiline),
- include('comment_start'),
- (r'\*\)', Comment.Multiline, '#pop'),
- (r'[*)]', Comment.Multiline),
- ],
- 'identifier': [
- (r'([a-zA-Z][a-zA-Z0-9 \-]*)', Keyword),
- ],
- }
+from pygments.lexers.configs import ApacheConfLexer, NginxConfLexer, \
+ SquidConfLexer, LighttpdConfLexer, IniLexer, RegeditLexer, PropertiesLexer
+from pygments.lexers.console import PyPyLogLexer
+from pygments.lexers.textedit import VimLexer
+from pygments.lexers.markup import BBCodeLexer, MoinWikiLexer, RstLexer, \
+ TexLexer, GroffLexer
+from pygments.lexers.installers import DebianControlLexer, SourcesListLexer
+from pygments.lexers.make import MakefileLexer, BaseMakefileLexer, CMakeLexer
+from pygments.lexers.haxe import HxmlLexer
+from pygments.lexers.diff import DiffLexer, DarcsPatchLexer
+from pygments.lexers.data import YamlLexer
+from pygments.lexers.textfmts import IrcLogsLexer, GettextLexer, HttpLexer
+
+__all__ = []
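
The rewritten text.py above is now only a backwards-compatibility shim: it re-exports the lexers from the modules they moved to. A minimal sketch of what that preserves for downstream code (the YamlLexer import is just one example):

from pygments.lexers import get_lexer_by_name
from pygments.lexers.data import YamlLexer as NewPathYamlLexer
from pygments.lexers.text import YamlLexer as OldPathYamlLexer

# Both import paths resolve to the same class after the split, so code
# written against the old module layout keeps working.
assert OldPathYamlLexer is NewPathYamlLexer

# Alias-based lookup is unaffected as well.
lexer = get_lexer_by_name('yaml')
print(type(lexer).__module__)   # expected: pygments.lexers.data
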
diff --git a/pygments/lexers/textedit.py b/pygments/lexers/textedit.py
new file mode 100644
index 00000000..89417216
--- /dev/null
+++ b/pygments/lexers/textedit.py
@@ -0,0 +1,169 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.textedit
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for languages related to text processing.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+from bisect import bisect
+
+from pygments.lexer import RegexLexer, include, default, bygroups, using, this
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+from pygments.lexers.python import PythonLexer
+
+__all__ = ['AwkLexer', 'VimLexer']
+
+
+class AwkLexer(RegexLexer):
+ """
+ For Awk scripts.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Awk'
+ aliases = ['awk', 'gawk', 'mawk', 'nawk']
+ filenames = ['*.awk']
+ mimetypes = ['application/x-awk']
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'#.*$', Comment.Single)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'\B', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|\|\||&&|in\b|\$|!?~|'
+ r'(\*\*|[-<>+*%\^/!=|])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(break|continue|do|while|exit|for|if|else|'
+ r'return)\b', Keyword, 'slashstartsregex'),
+ (r'function\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|'
+ r'length|match|split|sprintf|sub|substr|tolower|toupper|close|'
+ r'fflush|getline|next|nextfile|print|printf|strftime|systime|'
+ r'delete|system)\b', Keyword.Reserved),
+ (r'(ARGC|ARGIND|ARGV|BEGIN|CONVFMT|ENVIRON|END|ERRNO|FIELDWIDTHS|'
+             r'FILENAME|FNR|FS|IGNORECASE|NF|NR|OFMT|OFS|ORS|RLENGTH|RS|'
+ r'RSTART|RT|SUBSEP)\b', Name.Builtin),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
+
+
+class VimLexer(RegexLexer):
+ """
+ Lexer for VimL script files.
+
+ .. versionadded:: 0.8
+ """
+ name = 'VimL'
+ aliases = ['vim']
+ filenames = ['*.vim', '.vimrc', '.exrc', '.gvimrc',
+ '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc']
+ mimetypes = ['text/x-vim']
+ flags = re.MULTILINE
+
+ _python = r'py(?:t(?:h(?:o(?:n)?)?)?)?'
+
+ tokens = {
+ 'root': [
+ (r'^([ \t:]*)(' + _python + r')([ \t]*)(<<)([ \t]*)(.*)((?:\n|.)*)(\6)',
+ bygroups(using(this), Keyword, Text, Operator, Text, Text,
+ using(PythonLexer), Text)),
+ (r'^([ \t:]*)(' + _python + r')([ \t])(.*)',
+ bygroups(using(this), Keyword, Text, using(PythonLexer))),
+
+ (r'^\s*".*', Comment),
+
+ (r'[ \t]+', Text),
+ # TODO: regexes can have other delims
+ (r'/(\\\\|\\/|[^\n/])*/', String.Regex),
+ (r'"(\\\\|\\"|[^\n"])*"', String.Double),
+ (r"'(''|[^\n'])*'", String.Single),
+
+ # Who decided that doublequote was a good comment character??
+ (r'(?<=\s)"[^\-:.%#=*].*', Comment),
+ (r'-?\d+', Number),
+ (r'#[0-9a-f]{6}', Number.Hex),
+ (r'^:', Punctuation),
+ (r'[()<>+=!|,~-]', Punctuation), # Inexact list. Looks decent.
+ (r'\b(let|if|else|endif|elseif|fun|function|endfunction)\b',
+ Keyword),
+ (r'\b(NONE|bold|italic|underline|dark|light)\b', Name.Builtin),
+ (r'\b\w+\b', Name.Other), # These are postprocessed below
+ (r'.', Text),
+ ],
+ }
+
+ def __init__(self, **options):
+ from pygments.lexers._vim_builtins import command, option, auto
+ self._cmd = command
+ self._opt = option
+ self._aut = auto
+
+ RegexLexer.__init__(self, **options)
+
+ def is_in(self, w, mapping):
+ r"""
+ It's kind of difficult to decide if something might be a keyword
+ in VimL because it allows you to abbreviate them. In fact,
+ 'ab[breviate]' is a good example. :ab, :abbre, or :abbreviate are
+        valid ways to call it, so rather than making really awful regexps
+ like::
+
+ \bab(?:b(?:r(?:e(?:v(?:i(?:a(?:t(?:e)?)?)?)?)?)?)?)?\b
+
+ we match `\b\w+\b` and then call is_in() on those tokens. See
+ `scripts/get_vimkw.py` for how the lists are extracted.
+ """
+ p = bisect(mapping, (w,))
+ if p > 0:
+ if mapping[p-1][0] == w[:len(mapping[p-1][0])] and \
+ mapping[p-1][1][:len(w)] == w:
+ return True
+ if p < len(mapping):
+ return mapping[p][0] == w[:len(mapping[p][0])] and \
+ mapping[p][1][:len(w)] == w
+ return False
+
+ def get_tokens_unprocessed(self, text):
+ # TODO: builtins are only subsequent tokens on lines
+ # and 'keywords' only happen at the beginning except
+ # for :au ones
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name.Other:
+ if self.is_in(value, self._cmd):
+ yield index, Keyword, value
+ elif self.is_in(value, self._opt) or \
+ self.is_in(value, self._aut):
+ yield index, Name.Builtin, value
+ else:
+ yield index, Text, value
+ else:
+ yield index, token, value
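
The is_in() docstring above describes matching abbreviated Vim commands against a sorted list of (abbreviation, full name) pairs with bisect. A rough standalone sketch of that check, using a made-up three-entry mapping instead of the real tables in pygments.lexers._vim_builtins:

from bisect import bisect

# (minimal abbreviation, full command name), kept sorted -- hypothetical data
mapping = [('ab', 'abbreviate'), ('echo', 'echo'), ('f', 'file')]

def matches(word, mapping):
    # A word counts as a command if it starts with the minimal abbreviation
    # and is itself a prefix of the full command name.
    p = bisect(mapping, (word,))
    for abbrev, full in mapping[p - 1:p] + mapping[p:p + 1]:
        if word.startswith(abbrev) and full.startswith(word):
            return True
    return False

print(matches('abbre', mapping))    # True  -- same as :abbre
print(matches('abbrevx', mapping))  # False -- not a prefix of 'abbreviate'
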
diff --git a/pygments/lexers/textfmts.py b/pygments/lexers/textfmts.py
new file mode 100644
index 00000000..43b16f8c
--- /dev/null
+++ b/pygments/lexers/textfmts.py
@@ -0,0 +1,292 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.textfmts
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various text formats.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Generic, Literal
+from pygments.util import ClassNotFound
+
+__all__ = ['IrcLogsLexer', 'TodotxtLexer', 'HttpLexer', 'GettextLexer']
+
+
+class IrcLogsLexer(RegexLexer):
+ """
+ Lexer for IRC logs in *irssi*, *xchat* or *weechat* style.
+ """
+
+ name = 'IRC logs'
+ aliases = ['irc']
+ filenames = ['*.weechatlog']
+ mimetypes = ['text/x-irclog']
+
+ flags = re.VERBOSE | re.MULTILINE
+ timestamp = r"""
+ (
+ # irssi / xchat and others
+ (?: \[|\()? # Opening bracket or paren for the timestamp
+ (?: # Timestamp
+ (?: (?:\d{1,4} [-/])* # Date as - or /-separated groups of digits
+ (?:\d{1,4})
+ [T ])? # Date/time separator: T or space
+ (?: \d?\d [:.])* # Time as :/.-separated groups of 1 or 2 digits
+ (?: \d?\d)
+ )
+ (?: \]|\))?\s+ # Closing bracket or paren for the timestamp
+ |
+ # weechat
+ \d{4}\s\w{3}\s\d{2}\s # Date
+ \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
+ |
+ # xchat
+ \w{3}\s\d{2}\s # Date
+ \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
+ )?
+ """
+ tokens = {
+ 'root': [
+ # log start/end
+ (r'^\*\*\*\*(.*)\*\*\*\*$', Comment),
+ # hack
+ ("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
+ # normal msgs
+ ("^" + timestamp + r"""
+ (\s*<.*?>\s*) # Nick """,
+ bygroups(Comment.Preproc, Name.Tag), 'msg'),
+ # /me msgs
+ ("^" + timestamp + r"""
+ (\s*[*]\s+) # Star
+ (\S+\s+.*?\n) # Nick + rest of message """,
+ bygroups(Comment.Preproc, Keyword, Generic.Inserted)),
+ # join/part msgs
+ ("^" + timestamp + r"""
+ (\s*(?:\*{3}|<?-[!@=P]?->?)\s*) # Star(s) or symbols
+ (\S+\s+) # Nick + Space
+ (.*?\n) # Rest of message """,
+ bygroups(Comment.Preproc, Keyword, String, Comment)),
+ (r"^.*?\n", Text),
+ ],
+ 'msg': [
+ (r"\S+:(?!//)", Name.Attribute), # Prefix
+ (r".*\n", Text, '#pop'),
+ ],
+ }
+
+
+class GettextLexer(RegexLexer):
+ """
+ Lexer for Gettext catalog files.
+
+ .. versionadded:: 0.9
+ """
+ name = 'Gettext Catalog'
+ aliases = ['pot', 'po']
+ filenames = ['*.pot', '*.po']
+ mimetypes = ['application/x-gettext', 'text/x-gettext', 'text/gettext']
+
+ tokens = {
+ 'root': [
+ (r'^#,\s.*?$', Keyword.Type),
+ (r'^#:\s.*?$', Keyword.Declaration),
+ # (r'^#$', Comment),
+ (r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single),
+ (r'^(")([A-Za-z-]+:)(.*")$',
+ bygroups(String, Name.Property, String)),
+ (r'^".*"$', String),
+ (r'^(msgid|msgid_plural|msgstr|msgctxt)(\s+)(".*")$',
+ bygroups(Name.Variable, Text, String)),
+ (r'^(msgstr\[)(\d)(\])(\s+)(".*")$',
+ bygroups(Name.Variable, Number.Integer, Name.Variable, Text, String)),
+ ]
+ }
+
+
+class HttpLexer(RegexLexer):
+ """
+ Lexer for HTTP sessions.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'HTTP'
+ aliases = ['http']
+
+ flags = re.DOTALL
+
+ def header_callback(self, match):
+ if match.group(1).lower() == 'content-type':
+ content_type = match.group(5).strip()
+ if ';' in content_type:
+ content_type = content_type[:content_type.find(';')].strip()
+ self.content_type = content_type
+ yield match.start(1), Name.Attribute, match.group(1)
+ yield match.start(2), Text, match.group(2)
+ yield match.start(3), Operator, match.group(3)
+ yield match.start(4), Text, match.group(4)
+ yield match.start(5), Literal, match.group(5)
+ yield match.start(6), Text, match.group(6)
+
+ def continuous_header_callback(self, match):
+ yield match.start(1), Text, match.group(1)
+ yield match.start(2), Literal, match.group(2)
+ yield match.start(3), Text, match.group(3)
+
+ def content_callback(self, match):
+ content_type = getattr(self, 'content_type', None)
+ content = match.group()
+ offset = match.start()
+ if content_type:
+ from pygments.lexers import get_lexer_for_mimetype
+ possible_lexer_mimetypes = [content_type]
+ if '+' in content_type:
+ # application/calendar+xml can be treated as application/xml
+ # if there's not a better match.
+ general_type = re.sub(r'^(.*)/.*\+(.*)$', r'\1/\2',
+ content_type)
+ possible_lexer_mimetypes.append(general_type)
+
+ for i in possible_lexer_mimetypes:
+ try:
+ lexer = get_lexer_for_mimetype(i)
+ except ClassNotFound:
+ pass
+ else:
+ for idx, token, value in lexer.get_tokens_unprocessed(content):
+ yield offset + idx, token, value
+ return
+ yield offset, Text, content
+
+ tokens = {
+ 'root': [
+ (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)'
+ r'(HTTP)(/)(1\.[01])(\r?\n|\Z)',
+ bygroups(Name.Function, Text, Name.Namespace, Text,
+ Keyword.Reserved, Operator, Number, Text),
+ 'headers'),
+ (r'(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)',
+ bygroups(Keyword.Reserved, Operator, Number, Text, Number,
+ Text, Name.Exception, Text),
+ 'headers'),
+ ],
+ 'headers': [
+ (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)', header_callback),
+ (r'([\t ]+)([^\r\n]+)(\r?\n|\Z)', continuous_header_callback),
+ (r'\r?\n', Text, 'content')
+ ],
+ 'content': [
+ (r'.+', content_callback)
+ ]
+ }
+
+ def analyse_text(text):
+ return text.startswith(('GET /', 'POST /', 'PUT /', 'DELETE /', 'HEAD /',
+ 'OPTIONS /', 'TRACE /', 'PATCH /'))
+
+
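
content_callback above hands the message body to whichever lexer is registered for the Content-Type header (with a fallback from e.g. application/calendar+xml to application/xml). A small usage sketch, assuming the standard highlight() API and an invented response:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.textfmts import HttpLexer

response = (
    'HTTP/1.1 200 OK\r\n'
    'Content-Type: application/json\r\n'
    '\r\n'
    '{"status": "ok", "count": 2}'
)
# The request/status line and headers are lexed by HttpLexer itself; the JSON
# body is handed off to the lexer registered for application/json.
print(highlight(response, HttpLexer(), TerminalFormatter()))
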
+class TodotxtLexer(RegexLexer):
+ """
+ Lexer for `Todo.txt <http://todotxt.com/>`_ todo list format.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Todotxt'
+ aliases = ['todotxt']
+ # *.todotxt is not a standard extension for Todo.txt files; including it
+ # makes testing easier, and also makes autodetecting file type easier.
+ filenames = ['todo.txt', '*.todotxt']
+ mimetypes = ['text/x-todo']
+
+    # Aliases mapping standard token types to Todo.txt format concepts
+ CompleteTaskText = Operator # Chosen to de-emphasize complete tasks
+ IncompleteTaskText = Text # Incomplete tasks should look like plain text
+
+ # Priority should have most emphasis to indicate importance of tasks
+ Priority = Generic.Heading
+ # Dates should have next most emphasis because time is important
+ Date = Generic.Subheading
+
+ # Project and context should have equal weight, and be in different colors
+ Project = Generic.Error
+ Context = String
+
+ # If tag functionality is added, it should have the same weight as Project
+ # and Context, and a different color. Generic.Traceback would work well.
+
+ # Regex patterns for building up rules; dates, priorities, projects, and
+ # contexts are all atomic
+ # TODO: Make date regex more ISO 8601 compliant
+ date_regex = r'\d{4,}-\d{2}-\d{2}'
+ priority_regex = r'\([A-Z]\)'
+ project_regex = r'\+\S+'
+ context_regex = r'@\S+'
+
+ # Compound regex expressions
+ complete_one_date_regex = r'(x )(' + date_regex + r')'
+ complete_two_date_regex = (complete_one_date_regex + r'( )(' +
+ date_regex + r')')
+ priority_date_regex = r'(' + priority_regex + r')( )(' + date_regex + r')'
+
+ tokens = {
+ # Should parse starting at beginning of line; each line is a task
+ 'root': [
+ # Complete task entry points: two total:
+ # 1. Complete task with two dates
+ (complete_two_date_regex, bygroups(CompleteTaskText, Date,
+ CompleteTaskText, Date),
+ 'complete'),
+ # 2. Complete task with one date
+ (complete_one_date_regex, bygroups(CompleteTaskText, Date),
+ 'complete'),
+
+ # Incomplete task entry points: six total:
+ # 1. Priority plus date
+ (priority_date_regex, bygroups(Priority, IncompleteTaskText, Date),
+ 'incomplete'),
+ # 2. Priority only
+ (priority_regex, Priority, 'incomplete'),
+ # 3. Leading date
+ (date_regex, Date, 'incomplete'),
+ # 4. Leading context
+ (context_regex, Context, 'incomplete'),
+ # 5. Leading project
+ (project_regex, Project, 'incomplete'),
+ # 6. Non-whitespace catch-all
+            (r'\S+', IncompleteTaskText, 'incomplete'),
+ ],
+
+ # Parse a complete task
+ 'complete': [
+ # Newline indicates end of task, should return to root
+ (r'\s*\n', CompleteTaskText, '#pop'),
+ # Tokenize contexts and projects
+ (context_regex, Context),
+ (project_regex, Project),
+ # Tokenize non-whitespace text
+            (r'\S+', CompleteTaskText),
+            # Tokenize whitespace not containing a newline
+            (r'\s+', CompleteTaskText),
+ ],
+
+ # Parse an incomplete task
+ 'incomplete': [
+ # Newline indicates end of task, should return to root
+ (r'\s*\n', IncompleteTaskText, '#pop'),
+ # Tokenize contexts and projects
+ (context_regex, Context),
+ (project_regex, Project),
+ # Tokenize non-whitespace text
+            (r'\S+', IncompleteTaskText),
+            # Tokenize whitespace not containing a newline
+            (r'\s+', IncompleteTaskText),
+ ],
+ }
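
TodotxtLexer assembles its rules from the regex fragments above and maps Todo.txt concepts onto aliased standard token types. A quick way to inspect which alias the parts of a line receive (the task line is invented):

from pygments.lexers.textfmts import TodotxtLexer

line = '(A) 2015-08-08 Release Pygments 2.1 +pygments @home\n'
for token_type, value in TodotxtLexer().get_tokens(line):
    if value.strip():
        print(token_type, repr(value))
# Expected (roughly): the '(A)' priority as Generic.Heading, the date as
# Generic.Subheading, '+pygments' as Generic.Error and '@home' as String.
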
diff --git a/pygments/lexers/theorem.py b/pygments/lexers/theorem.py
new file mode 100644
index 00000000..47fdc8b6
--- /dev/null
+++ b/pygments/lexers/theorem.py
@@ -0,0 +1,452 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.theorem
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for theorem-proving languages.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['CoqLexer', 'IsabelleLexer', 'LeanLexer']
+
+
+class CoqLexer(RegexLexer):
+ """
+ For the `Coq <http://coq.inria.fr/>`_ theorem prover.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Coq'
+ aliases = ['coq']
+ filenames = ['*.v']
+ mimetypes = ['text/x-coq']
+
+ keywords1 = (
+ # Vernacular commands
+ 'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
+ 'Variables', 'Parameter', 'Parameters', 'Axiom', 'Hypothesis',
+ 'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
+ 'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Let', 'Ltac',
+ 'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
+ 'Arguments', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex',
+ 'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
+ 'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary',
+ 'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
+ 'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
+ 'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
+ 'outside', 'Check',
+ )
+ keywords2 = (
+ # Gallina
+ 'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
+ 'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else',
+ 'for', 'of', 'nosimpl', 'with', 'as',
+ )
+ keywords3 = (
+ # Sorts
+ 'Type', 'Prop',
+ )
+ keywords4 = (
+ # Tactics
+ 'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
+ 'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
+ 'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
+ 'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
+ 'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
+ 'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
+ 'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
+ 'split', 'left', 'right', 'autorewrite', 'tauto',
+ )
+ keywords5 = (
+ # Terminators
+ 'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega',
+ 'assumption', 'solve', 'contradiction', 'discriminate',
+ )
+ keywords6 = (
+ # Control
+ 'do', 'last', 'first', 'try', 'idtac', 'repeat',
+ )
+ # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
+ # 'downto', 'else', 'end', 'exception', 'external', 'false',
+ # 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
+ # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
+ # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
+ # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
+ # 'type', 'val', 'virtual', 'when', 'while', 'with'
+ keyopts = (
+ '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', r'-\.',
+ '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', '<-',
+ '<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
+ r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
+ r'/\\', r'\\/',
+ u'Π', u'λ',
+ )
+ operators = r'[!$%&*+\./:<=>?@^|~-]'
+ word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or')
+ prefix_syms = r'[!?~]'
+ infix_syms = r'[=<>@^|&+\*/$%-]'
+ primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list',
+ 'array')
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
+ (r'\(\*', Comment, 'comment'),
+ (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+ (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ (words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keywords5, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
+ (words(keywords6, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
+ (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
+ (r'\b([A-Z][\w\']*)', Name.Class),
+ (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
+ (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
+ (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
+ (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+
+ (r"[^\W\d][\w']*", Name),
+
+ (r'\d[\d_]*', Number.Integer),
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+ (r'0[oO][0-7][0-7_]*', Number.Oct),
+ (r'0[bB][01][01_]*', Number.Bin),
+            (r'-?\d[\d_]*(\.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
+
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
+ String.Char),
+ (r"'.'", String.Char),
+ (r"'", Keyword), # a stray quote is another syntax element
+
+ (r'"', String.Double, 'string'),
+
+ (r'[~?][a-z][\w\']*:', Name.Variable),
+ ],
+ 'comment': [
+ (r'[^(*)]+', Comment),
+ (r'\(\*', Comment, '#push'),
+ (r'\*\)', Comment, '#pop'),
+ (r'[(*)]', Comment),
+ ],
+ 'string': [
+ (r'[^"]+', String.Double),
+ (r'""', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'dotted': [
+ (r'\s+', Text),
+ (r'\.', Punctuation),
+ (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+ (r'[A-Z][\w\']*', Name.Class, '#pop'),
+ (r'[a-z][a-z0-9_\']*', Name, '#pop'),
+ default('#pop')
+ ],
+ }
+
+ def analyse_text(text):
+ if text.startswith('(*'):
+ return True
+
+
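
The theorem-prover lexers rely on the words() helper from pygments.lexer for their long keyword lists. Roughly speaking (a sketch of the intent, not the exact implementation, which builds an optimized regex), words(keywords3, prefix=r'\b', suffix=r'\b') behaves like a single word-bounded alternation:

import re

keywords3 = ('Type', 'Prop')

# Approximately what the words() rule matches; the real helper additionally
# optimizes the alternation before compiling it.
pattern = re.compile(r'\b(?:%s)\b' % '|'.join(map(re.escape, keywords3)))

print(bool(pattern.match('Prop')))      # True
print(bool(pattern.match('Property')))  # False -- the \b suffix prevents it
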
+class IsabelleLexer(RegexLexer):
+ """
+ For the `Isabelle <http://isabelle.in.tum.de/>`_ proof assistant.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Isabelle'
+ aliases = ['isabelle']
+ filenames = ['*.thy']
+ mimetypes = ['text/x-isabelle']
+
+ keyword_minor = (
+ 'and', 'assumes', 'attach', 'avoids', 'binder', 'checking',
+ 'class_instance', 'class_relation', 'code_module', 'congs',
+ 'constant', 'constrains', 'datatypes', 'defines', 'file', 'fixes',
+ 'for', 'functions', 'hints', 'identifier', 'if', 'imports', 'in',
+ 'includes', 'infix', 'infixl', 'infixr', 'is', 'keywords', 'lazy',
+ 'module_name', 'monos', 'morphisms', 'no_discs_sels', 'notes',
+ 'obtains', 'open', 'output', 'overloaded', 'parametric', 'permissive',
+ 'pervasive', 'rep_compat', 'shows', 'structure', 'type_class',
+ 'type_constructor', 'unchecked', 'unsafe', 'where',
+ )
+
+ keyword_diag = (
+ 'ML_command', 'ML_val', 'class_deps', 'code_deps', 'code_thms',
+ 'display_drafts', 'find_consts', 'find_theorems', 'find_unused_assms',
+ 'full_prf', 'help', 'locale_deps', 'nitpick', 'pr', 'prf',
+ 'print_abbrevs', 'print_antiquotations', 'print_attributes',
+ 'print_binds', 'print_bnfs', 'print_bundles',
+ 'print_case_translations', 'print_cases', 'print_claset',
+ 'print_classes', 'print_codeproc', 'print_codesetup',
+ 'print_coercions', 'print_commands', 'print_context',
+ 'print_defn_rules', 'print_dependencies', 'print_facts',
+ 'print_induct_rules', 'print_inductives', 'print_interps',
+ 'print_locale', 'print_locales', 'print_methods', 'print_options',
+ 'print_orders', 'print_quot_maps', 'print_quotconsts',
+ 'print_quotients', 'print_quotientsQ3', 'print_quotmapsQ3',
+ 'print_rules', 'print_simpset', 'print_state', 'print_statement',
+ 'print_syntax', 'print_theorems', 'print_theory', 'print_trans_rules',
+ 'prop', 'pwd', 'quickcheck', 'refute', 'sledgehammer', 'smt_status',
+ 'solve_direct', 'spark_status', 'term', 'thm', 'thm_deps', 'thy_deps',
+ 'try', 'try0', 'typ', 'unused_thms', 'value', 'values', 'welcome',
+ 'print_ML_antiquotations', 'print_term_bindings', 'values_prolog',
+ )
+
+ keyword_thy = ('theory', 'begin', 'end')
+
+ keyword_section = ('header', 'chapter')
+
+ keyword_subsection = (
+ 'section', 'subsection', 'subsubsection', 'sect', 'subsect',
+ 'subsubsect',
+ )
+
+ keyword_theory_decl = (
+ 'ML', 'ML_file', 'abbreviation', 'adhoc_overloading', 'arities',
+ 'atom_decl', 'attribute_setup', 'axiomatization', 'bundle',
+ 'case_of_simps', 'class', 'classes', 'classrel', 'codatatype',
+ 'code_abort', 'code_class', 'code_const', 'code_datatype',
+ 'code_identifier', 'code_include', 'code_instance', 'code_modulename',
+ 'code_monad', 'code_printing', 'code_reflect', 'code_reserved',
+ 'code_type', 'coinductive', 'coinductive_set', 'consts', 'context',
+ 'datatype', 'datatype_new', 'datatype_new_compat', 'declaration',
+ 'declare', 'default_sort', 'defer_recdef', 'definition', 'defs',
+ 'domain', 'domain_isomorphism', 'domaindef', 'equivariance',
+ 'export_code', 'extract', 'extract_type', 'fixrec', 'fun',
+ 'fun_cases', 'hide_class', 'hide_const', 'hide_fact', 'hide_type',
+ 'import_const_map', 'import_file', 'import_tptp', 'import_type_map',
+ 'inductive', 'inductive_set', 'instantiation', 'judgment', 'lemmas',
+ 'lifting_forget', 'lifting_update', 'local_setup', 'locale',
+ 'method_setup', 'nitpick_params', 'no_adhoc_overloading',
+ 'no_notation', 'no_syntax', 'no_translations', 'no_type_notation',
+ 'nominal_datatype', 'nonterminal', 'notation', 'notepad', 'oracle',
+ 'overloading', 'parse_ast_translation', 'parse_translation',
+ 'partial_function', 'primcorec', 'primrec', 'primrec_new',
+ 'print_ast_translation', 'print_translation', 'quickcheck_generator',
+ 'quickcheck_params', 'realizability', 'realizers', 'recdef', 'record',
+ 'refute_params', 'setup', 'setup_lifting', 'simproc_setup',
+ 'simps_of_case', 'sledgehammer_params', 'spark_end', 'spark_open',
+ 'spark_open_siv', 'spark_open_vcg', 'spark_proof_functions',
+ 'spark_types', 'statespace', 'syntax', 'syntax_declaration', 'text',
+ 'text_raw', 'theorems', 'translations', 'type_notation',
+ 'type_synonym', 'typed_print_translation', 'typedecl', 'hoarestate',
+ 'install_C_file', 'install_C_types', 'wpc_setup', 'c_defs', 'c_types',
+ 'memsafe', 'SML_export', 'SML_file', 'SML_import', 'approximate',
+ 'bnf_axiomatization', 'cartouche', 'datatype_compat',
+ 'free_constructors', 'functor', 'nominal_function',
+ 'nominal_termination', 'permanent_interpretation',
+ 'binds', 'defining', 'smt2_status', 'term_cartouche',
+ 'boogie_file', 'text_cartouche',
+ )
+
+ keyword_theory_script = ('inductive_cases', 'inductive_simps')
+
+ keyword_theory_goal = (
+ 'ax_specification', 'bnf', 'code_pred', 'corollary', 'cpodef',
+ 'crunch', 'crunch_ignore',
+ 'enriched_type', 'function', 'instance', 'interpretation', 'lemma',
+ 'lift_definition', 'nominal_inductive', 'nominal_inductive2',
+ 'nominal_primrec', 'pcpodef', 'primcorecursive',
+ 'quotient_definition', 'quotient_type', 'recdef_tc', 'rep_datatype',
+ 'schematic_corollary', 'schematic_lemma', 'schematic_theorem',
+ 'spark_vc', 'specification', 'subclass', 'sublocale', 'termination',
+ 'theorem', 'typedef', 'wrap_free_constructors',
+ )
+
+ keyword_qed = ('by', 'done', 'qed')
+ keyword_abandon_proof = ('sorry', 'oops')
+
+ keyword_proof_goal = ('have', 'hence', 'interpret')
+
+ keyword_proof_block = ('next', 'proof')
+
+ keyword_proof_chain = (
+ 'finally', 'from', 'then', 'ultimately', 'with',
+ )
+
+ keyword_proof_decl = (
+ 'ML_prf', 'also', 'include', 'including', 'let', 'moreover', 'note',
+ 'txt', 'txt_raw', 'unfolding', 'using', 'write',
+ )
+
+ keyword_proof_asm = ('assume', 'case', 'def', 'fix', 'presume')
+
+ keyword_proof_asm_goal = ('guess', 'obtain', 'show', 'thus')
+
+ keyword_proof_script = (
+ 'apply', 'apply_end', 'apply_trace', 'back', 'defer', 'prefer',
+ )
+
+ operators = (
+ '::', ':', '(', ')', '[', ']', '_', '=', ',', '|',
+ '+', '-', '!', '?',
+ )
+
+ proof_operators = ('{', '}', '.', '..')
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'\(\*', Comment, 'comment'),
+ (r'\{\*', Comment, 'text'),
+
+ (words(operators), Operator),
+ (words(proof_operators), Operator.Word),
+
+ (words(keyword_minor, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
+
+ (words(keyword_diag, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+
+ (words(keyword_thy, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_theory_decl, prefix=r'\b', suffix=r'\b'), Keyword),
+
+ (words(keyword_section, prefix=r'\b', suffix=r'\b'), Generic.Heading),
+ (words(keyword_subsection, prefix=r'\b', suffix=r'\b'), Generic.Subheading),
+
+ (words(keyword_theory_goal, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+ (words(keyword_theory_script, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+
+ (words(keyword_abandon_proof, prefix=r'\b', suffix=r'\b'), Generic.Error),
+
+ (words(keyword_qed, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_goal, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_block, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_decl, prefix=r'\b', suffix=r'\b'), Keyword),
+
+ (words(keyword_proof_chain, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_asm, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_asm_goal, prefix=r'\b', suffix=r'\b'), Keyword),
+
+ (words(keyword_proof_script, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
+
+ (r'\\<\w*>', Text.Symbol),
+
+ (r"[^\W\d][.\w']*", Name),
+ (r"\?[^\W\d][.\w']*", Name),
+ (r"'[^\W\d][.\w']*", Name.Type),
+
+ (r'\d[\d_]*', Name), # display numbers as name
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+ (r'0[oO][0-7][0-7_]*', Number.Oct),
+ (r'0[bB][01][01_]*', Number.Bin),
+
+ (r'"', String, 'string'),
+ (r'`', String.Other, 'fact'),
+ ],
+ 'comment': [
+ (r'[^(*)]+', Comment),
+ (r'\(\*', Comment, '#push'),
+ (r'\*\)', Comment, '#pop'),
+ (r'[(*)]', Comment),
+ ],
+ 'text': [
+ (r'[^*}]+', Comment),
+ (r'\*\}', Comment, '#pop'),
+ (r'\*', Comment),
+ (r'\}', Comment),
+ ],
+ 'string': [
+ (r'[^"\\]+', String),
+ (r'\\<\w*>', String.Symbol),
+ (r'\\"', String),
+ (r'\\', String),
+ (r'"', String, '#pop'),
+ ],
+ 'fact': [
+ (r'[^`\\]+', String.Other),
+ (r'\\<\w*>', String.Symbol),
+ (r'\\`', String.Other),
+ (r'\\', String.Other),
+ (r'`', String.Other, '#pop'),
+ ],
+ }
+
+
+class LeanLexer(RegexLexer):
+ """
+ For the `Lean <https://github.com/leanprover/lean>`_
+ theorem prover.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Lean'
+ aliases = ['lean']
+ filenames = ['*.lean']
+ mimetypes = ['text/x-lean']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ keywords1 = ('import', 'abbreviation', 'opaque_hint', 'tactic_hint', 'definition', 'renaming',
+ 'inline', 'hiding', 'exposing', 'parameter', 'parameters', 'conjecture',
+ 'hypothesis', 'lemma', 'corollary', 'variable', 'variables', 'print', 'theorem',
+ 'axiom', 'inductive', 'structure', 'universe', 'alias', 'help',
+ 'options', 'precedence', 'postfix', 'prefix', 'calc_trans', 'calc_subst', 'calc_refl',
+ 'infix', 'infixl', 'infixr', 'notation', 'eval', 'check', 'exit', 'coercion', 'end',
+ 'private', 'using', 'namespace', 'including', 'instance', 'section', 'context',
+ 'protected', 'expose', 'export', 'set_option', 'add_rewrite', 'extends',
+                 'open', 'example', 'constant', 'constants', 'opaque', 'reducible', 'irreducible'
+ )
+
+ keywords2 = (
+ 'forall', 'fun', 'Pi', 'obtain', 'from', 'have', 'show', 'assume', 'take',
+ 'let', 'if', 'else', 'then', 'by', 'in', 'with', 'begin', 'proof', 'qed', 'calc', 'match'
+ )
+
+ keywords3 = (
+ # Sorts
+ 'Type', 'Prop',
+ )
+
+ operators = (
+ '!=', '#', '&', '&&', '*', '+', '-', '/', '@', '!', '`',
+ '-.', '->', '.', '..', '...', '::', ':>', ';', ';;', '<',
+ '<-', '=', '==', '>', '_', '`', '|', '||', '~', '=>', '<=', '>=',
+ '/\\', '\\/', u'∀', u'Π', u'λ', u'↔', u'∧', u'∨', u'≠', u'≤', u'≥',
+ u'¬', u'⁻¹', u'⬝', u'▸', u'→', u'∃', u'ℕ', u'ℤ', u'≈', u'×', u'⌞', u'⌟', u'≡',
+ u'⟨', u'⟩'
+ )
+
+ punctuation = ('(', ')', ':', '{', '}', '[', ']', u'⦃', u'⦄', ':=', ',')
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'/-', Comment, 'comment'),
+ (r'--.*?$', Comment.Single),
+ (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+ (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ (words(operators), Name.Builtin.Pseudo),
+ (words(punctuation), Operator),
+ (u"[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]"
+ u"[A-Za-z_'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079"
+ u"\u207f-\u2089\u2090-\u209c\u2100-\u214f0-9]*", Name),
+ (r'\d+', Number.Integer),
+ (r'"', String.Double, 'string'),
+ (r'[~?][a-z][\w\']*:', Name.Variable)
+ ],
+ 'comment': [
+ # Multiline Comments
+ (r'[^/-]', Comment.Multiline),
+ (r'/-', Comment.Multiline, '#push'),
+ (r'-/', Comment.Multiline, '#pop'),
+ (r'[/-]', Comment.Multiline)
+ ],
+ 'string': [
+ (r'[^\\"]+', String.Double),
+ (r'\\[n"\\]', String.Escape),
+ ('"', String.Double, '#pop'),
+ ],
+ }
diff --git a/pygments/lexers/urbi.py b/pygments/lexers/urbi.py
new file mode 100644
index 00000000..558a21fb
--- /dev/null
+++ b/pygments/lexers/urbi.py
@@ -0,0 +1,133 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.urbi
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for UrbiScript language.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import ExtendedRegexLexer, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['UrbiscriptLexer']
+
+
+class UrbiscriptLexer(ExtendedRegexLexer):
+ """
+ For UrbiScript source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'UrbiScript'
+ aliases = ['urbiscript']
+ filenames = ['*.u']
+ mimetypes = ['application/x-urbiscript']
+
+ flags = re.DOTALL
+
+ # TODO
+ # - handle Experimental and deprecated tags with specific tokens
+ # - handle Angles and Durations with specific tokens
+
+ def blob_callback(lexer, match, ctx):
+ text_before_blob = match.group(1)
+ blob_start = match.group(2)
+ blob_size_str = match.group(3)
+ blob_size = int(blob_size_str)
+ yield match.start(), String, text_before_blob
+ ctx.pos += len(text_before_blob)
+
+        # if the blob size doesn't match the blob format (example: "\B(2)(aaa)"),
+        # yield the blob as a string
+ if ctx.text[match.end() + blob_size] != ")":
+ result = "\\B(" + blob_size_str + ")("
+ yield match.start(), String, result
+ ctx.pos += len(result)
+ return
+
+        # if blob is well formatted, yield as Escape
+ blob_text = blob_start + ctx.text[match.end():match.end()+blob_size] + ")"
+ yield match.start(), String.Escape, blob_text
+ ctx.pos = match.end() + blob_size + 1 # +1 is the ending ")"
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ # comments
+ (r'//.*?\n', Comment),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'(every|for|loop|while)(?:;|&|\||,)', Keyword),
+ (words((
+ 'assert', 'at', 'break', 'case', 'catch', 'closure', 'compl',
+ 'continue', 'default', 'else', 'enum', 'every', 'external',
+ 'finally', 'for', 'freezeif', 'if', 'new', 'onleave', 'return',
+ 'stopif', 'switch', 'this', 'throw', 'timeout', 'try',
+ 'waituntil', 'whenever', 'while'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'asm', 'auto', 'bool', 'char', 'const_cast', 'delete', 'double',
+ 'dynamic_cast', 'explicit', 'export', 'extern', 'float', 'friend',
+ 'goto', 'inline', 'int', 'long', 'mutable', 'namespace', 'register',
+ 'reinterpret_cast', 'short', 'signed', 'sizeof', 'static_cast',
+ 'struct', 'template', 'typedef', 'typeid', 'typename', 'union',
+ 'unsigned', 'using', 'virtual', 'volatile', 'wchar_t'), suffix=r'\b'),
+ Keyword.Reserved),
+            # deprecated keywords, use a meaningful token when available
+            (r'(emit|foreach|internal|loopn|static)\b', Keyword),
+            # ignored keywords, use a meaningful token when available
+ (r'(private|protected|public)\b', Keyword),
+ (r'(var|do|const|function|class)\b', Keyword.Declaration),
+ (r'(true|false|nil|void)\b', Keyword.Constant),
+ (words((
+ 'Barrier', 'Binary', 'Boolean', 'CallMessage', 'Channel', 'Code',
+ 'Comparable', 'Container', 'Control', 'Date', 'Dictionary', 'Directory',
+ 'Duration', 'Enumeration', 'Event', 'Exception', 'Executable', 'File',
+ 'Finalizable', 'Float', 'FormatInfo', 'Formatter', 'Global', 'Group',
+ 'Hash', 'InputStream', 'IoService', 'Job', 'Kernel', 'Lazy', 'List',
+ 'Loadable', 'Lobby', 'Location', 'Logger', 'Math', 'Mutex', 'nil',
+ 'Object', 'Orderable', 'OutputStream', 'Pair', 'Path', 'Pattern',
+ 'Position', 'Primitive', 'Process', 'Profile', 'PseudoLazy', 'PubSub',
+ 'RangeIterable', 'Regexp', 'Semaphore', 'Server', 'Singleton', 'Socket',
+ 'StackFrame', 'Stream', 'String', 'System', 'Tag', 'Timeout',
+ 'Traceable', 'TrajectoryGenerator', 'Triplet', 'Tuple', 'UObject',
+ 'UValue', 'UVar'), suffix=r'\b'),
+ Name.Builtin),
+ (r'(?:this)\b', Name.Builtin.Pseudo),
+ # don't match single | and &
+ (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator),
+ (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b',
+ Operator.Word),
+ (r'[{}\[\]()]+', Punctuation),
+ (r'(?:;|\||,|&|\?|!)+', Punctuation),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ # Float, Integer, Angle and Duration
+ (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?'
+ r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float),
+ # handle binary blob in strings
+ (r'"', String.Double, "string.double"),
+ (r"'", String.Single, "string.single"),
+ ],
+ 'string.double': [
+ (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback),
+ (r'(\\\\|\\"|[^"])*?"', String.Double, '#pop'),
+ ],
+ 'string.single': [
+ (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback),
+ (r"(\\\\|\\'|[^'])*?'", String.Single, '#pop'),
+ ],
+ # from http://pygments.org/docs/lexerdevelopment/#changing-states
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ]
+ }
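
The blob_callback above is the reason this lexer is an ExtendedRegexLexer rather than a plain RegexLexer: UrbiScript binary blobs such as "\B(3)(abc)" carry an explicit byte count, so the callback reads the declared size, checks that the character right after the payload is the closing ")", and then either emits the whole blob as String.Escape or falls back to ordinary String tokens, advancing ctx.pos by hand in both cases. A quick way to exercise that path once the patch is applied (a sketch, not part of the change):

    # Tokenize a tiny UrbiScript snippet containing a well-formed binary blob.
    from pygments.lexers.urbi import UrbiscriptLexer

    code = 'var data = "header \\B(3)(abc) trailer";\n'
    for tok, val in UrbiscriptLexer().get_tokens(code):
        print(tok, repr(val))

    # The well-formed blob is emitted as a single Token.Literal.String.Escape
    # value '\\B(3)(abc)'; a declared size that does not match the payload
    # (e.g. "\\B(2)(abc)") falls back to plain String tokens instead.
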
diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py
index 142fef57..59fbf2fc 100644
--- a/pygments/lexers/web.py
+++ b/pygments/lexers/web.py
@@ -3,4043 +3,22 @@
pygments.lexers.web
~~~~~~~~~~~~~~~~~~~
- Lexers for web-related languages and markup.
+ Just export previously exported lexers.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
-import copy
-
-from pygments.lexer import RegexLexer, ExtendedRegexLexer, bygroups, using, \
- include, this
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Other, Punctuation, Literal
-from pygments.util import get_bool_opt, get_list_opt, looks_like_xml, \
- html_doctype_matches, unirange
-from pygments.lexers.agile import RubyLexer
-from pygments.lexers.compiled import ScalaLexer
-
-
-__all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'JsonLexer', 'CssLexer',
- 'PhpLexer', 'ActionScriptLexer', 'XsltLexer', 'ActionScript3Lexer',
- 'MxmlLexer', 'HaxeLexer', 'HamlLexer', 'SassLexer', 'ScssLexer',
- 'ObjectiveJLexer', 'CoffeeScriptLexer', 'LiveScriptLexer',
- 'DuelLexer', 'ScamlLexer', 'JadeLexer', 'XQueryLexer',
- 'DtdLexer', 'DartLexer', 'LassoLexer', 'QmlLexer', 'TypeScriptLexer']
-
-
-class JavascriptLexer(RegexLexer):
- """
- For JavaScript source code.
- """
-
- name = 'JavaScript'
- aliases = ['js', 'javascript']
- filenames = ['*.js', ]
- mimetypes = ['application/javascript', 'application/x-javascript',
- 'text/x-javascript', 'text/javascript', ]
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'<!--', Comment),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- (r'', Text, '#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|void|yield|'
- r'this)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
- r'extends|final|float|goto|implements|import|int|interface|long|native|'
- r'package|private|protected|public|short|static|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Reserved),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
- r'window)\b', Name.Builtin),
- (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ]
- }
-
-
-class JsonLexer(RegexLexer):
- """
- For JSON data structures.
-
- *New in Pygments 1.5.*
- """
-
- name = 'JSON'
- aliases = ['json']
- filenames = ['*.json']
- mimetypes = [ 'application/json', ]
-
- # integer part of a number
- int_part = r'-?(0|[1-9]\d*)'
-
- # fractional part of a number
- frac_part = r'\.\d+'
-
- # exponential part of a number
- exp_part = r'[eE](\+|-)?\d+'
-
-
- flags = re.DOTALL
- tokens = {
- 'whitespace': [
- (r'\s+', Text),
- ],
-
- # represents a simple terminal value
- 'simplevalue': [
- (r'(true|false|null)\b', Keyword.Constant),
- (('%(int_part)s(%(frac_part)s%(exp_part)s|'
- '%(exp_part)s|%(frac_part)s)') % vars(),
- Number.Float),
- (int_part, Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- ],
-
-
- # the right hand side of an object, after the attribute name
- 'objectattribute': [
- include('value'),
- (r':', Punctuation),
- # comma terminates the attribute but expects more
- (r',', Punctuation, '#pop'),
- # a closing bracket terminates the entire object, so pop twice
- (r'}', Punctuation, ('#pop', '#pop')),
- ],
-
- # a json object - { attr, attr, ... }
- 'objectvalue': [
- include('whitespace'),
- (r'"(\\\\|\\"|[^"])*"', Name.Tag, 'objectattribute'),
- (r'}', Punctuation, '#pop'),
- ],
-
- # json array - [ value, value, ... }
- 'arrayvalue': [
- include('whitespace'),
- include('value'),
- (r',', Punctuation),
- (r']', Punctuation, '#pop'),
- ],
-
- # a json value - either a simple value or a complex value (object or array)
- 'value': [
- include('whitespace'),
- include('simplevalue'),
- (r'{', Punctuation, 'objectvalue'),
- (r'\[', Punctuation, 'arrayvalue'),
- ],
-
-
- # the root of a json document whould be a value
- 'root': [
- include('value'),
- ],
-
- }
-
+from pygments.lexers.html import HtmlLexer, DtdLexer, XmlLexer, XsltLexer, \
+ HamlLexer, ScamlLexer, JadeLexer
+from pygments.lexers.css import CssLexer, SassLexer, ScssLexer
+from pygments.lexers.javascript import JavascriptLexer, LiveScriptLexer, \
+ DartLexer, TypeScriptLexer, LassoLexer, ObjectiveJLexer, CoffeeScriptLexer
+from pygments.lexers.actionscript import ActionScriptLexer, \
+ ActionScript3Lexer, MxmlLexer
+from pygments.lexers.php import PhpLexer
+from pygments.lexers.webmisc import DuelLexer, XQueryLexer, SlimLexer, QmlLexer
+from pygments.lexers.data import JsonLexer
JSONLexer = JsonLexer # for backwards compatibility with Pygments 1.5
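
With the lexer bodies moved out, web.py is now only a compatibility shim: everything it used to define is re-imported from the new split modules (html, css, javascript, actionscript, php, webmisc, data), and the old JSONLexer alias is kept. A short sanity check, assuming the patch is applied:

    # Old and new import paths resolve to the very same classes.
    from pygments.lexers.web import JavascriptLexer as from_web
    from pygments.lexers.javascript import JavascriptLexer as from_javascript
    assert from_web is from_javascript

    from pygments.lexers.web import JSONLexer, JsonLexer
    assert JSONLexer is JsonLexer  # alias kept for code written against Pygments 1.5
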
-
-class ActionScriptLexer(RegexLexer):
- """
- For ActionScript source code.
-
- *New in Pygments 0.9.*
- """
-
- name = 'ActionScript'
- aliases = ['as', 'actionscript']
- filenames = ['*.as']
- mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
- 'text/actionscript3']
-
- flags = re.DOTALL
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex),
- (r'[~\^\*!%&<>\|+=:;,/?\\-]+', Operator),
- (r'[{}\[\]();.]+', Punctuation),
- (r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
- r'throw|try|catch|var|with|new|typeof|arguments|instanceof|this|'
- r'switch)\b', Keyword),
- (r'(class|public|final|internal|native|override|private|protected|'
- r'static|import|extends|implements|interface|intrinsic|return|super|'
- r'dynamic|function|const|get|namespace|package|set)\b',
- Keyword.Declaration),
- (r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
- Keyword.Constant),
- (r'(Accessibility|AccessibilityProperties|ActionScriptVersion|'
- r'ActivityEvent|AntiAliasType|ApplicationDomain|AsBroadcaster|Array|'
- r'AsyncErrorEvent|AVM1Movie|BevelFilter|Bitmap|BitmapData|'
- r'BitmapDataChannel|BitmapFilter|BitmapFilterQuality|BitmapFilterType|'
- r'BlendMode|BlurFilter|Boolean|ByteArray|Camera|Capabilities|CapsStyle|'
- r'Class|Color|ColorMatrixFilter|ColorTransform|ContextMenu|'
- r'ContextMenuBuiltInItems|ContextMenuEvent|ContextMenuItem|'
- r'ConvultionFilter|CSMSettings|DataEvent|Date|DefinitionError|'
- r'DeleteObjectSample|Dictionary|DisplacmentMapFilter|DisplayObject|'
- r'DisplacmentMapFilterMode|DisplayObjectContainer|DropShadowFilter|'
- r'Endian|EOFError|Error|ErrorEvent|EvalError|Event|EventDispatcher|'
- r'EventPhase|ExternalInterface|FileFilter|FileReference|'
- r'FileReferenceList|FocusDirection|FocusEvent|Font|FontStyle|FontType|'
- r'FrameLabel|FullScreenEvent|Function|GlowFilter|GradientBevelFilter|'
- r'GradientGlowFilter|GradientType|Graphics|GridFitType|HTTPStatusEvent|'
- r'IBitmapDrawable|ID3Info|IDataInput|IDataOutput|IDynamicPropertyOutput'
- r'IDynamicPropertyWriter|IEventDispatcher|IExternalizable|'
- r'IllegalOperationError|IME|IMEConversionMode|IMEEvent|int|'
- r'InteractiveObject|InterpolationMethod|InvalidSWFError|InvokeEvent|'
- r'IOError|IOErrorEvent|JointStyle|Key|Keyboard|KeyboardEvent|KeyLocation|'
- r'LineScaleMode|Loader|LoaderContext|LoaderInfo|LoadVars|LocalConnection|'
- r'Locale|Math|Matrix|MemoryError|Microphone|MorphShape|Mouse|MouseEvent|'
- r'MovieClip|MovieClipLoader|Namespace|NetConnection|NetStatusEvent|'
- r'NetStream|NewObjectSample|Number|Object|ObjectEncoding|PixelSnapping|'
- r'Point|PrintJob|PrintJobOptions|PrintJobOrientation|ProgressEvent|Proxy|'
- r'QName|RangeError|Rectangle|ReferenceError|RegExp|Responder|Sample|Scene|'
- r'ScriptTimeoutError|Security|SecurityDomain|SecurityError|'
- r'SecurityErrorEvent|SecurityPanel|Selection|Shape|SharedObject|'
- r'SharedObjectFlushStatus|SimpleButton|Socket|Sound|SoundChannel|'
- r'SoundLoaderContext|SoundMixer|SoundTransform|SpreadMethod|Sprite|'
- r'StackFrame|StackOverflowError|Stage|StageAlign|StageDisplayState|'
- r'StageQuality|StageScaleMode|StaticText|StatusEvent|String|StyleSheet|'
- r'SWFVersion|SyncEvent|SyntaxError|System|TextColorType|TextField|'
- r'TextFieldAutoSize|TextFieldType|TextFormat|TextFormatAlign|'
- r'TextLineMetrics|TextRenderer|TextSnapshot|Timer|TimerEvent|Transform|'
- r'TypeError|uint|URIError|URLLoader|URLLoaderDataFormat|URLRequest|'
- r'URLRequestHeader|URLRequestMethod|URLStream|URLVariabeles|VerifyError|'
- r'Video|XML|XMLDocument|XMLList|XMLNode|XMLNodeType|XMLSocket|XMLUI)\b',
- Name.Builtin),
- (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
- r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
- r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
- r'unescape)\b',Name.Function),
- (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ]
- }
-
-
-class ActionScript3Lexer(RegexLexer):
- """
- For ActionScript 3 source code.
-
- *New in Pygments 0.11.*
- """
-
- name = 'ActionScript 3'
- aliases = ['as3', 'actionscript3']
- filenames = ['*.as']
- mimetypes = ['application/x-actionscript', 'text/x-actionscript',
- 'text/actionscript']
-
- identifier = r'[$a-zA-Z_][a-zA-Z0-9_]*'
- typeidentifier = identifier + '(?:\.<\w+>)?'
-
- flags = re.DOTALL | re.MULTILINE
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'(function\s+)(' + identifier + r')(\s*)(\()',
- bygroups(Keyword.Declaration, Name.Function, Text, Operator),
- 'funcparams'),
- (r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' +
- typeidentifier + r')',
- bygroups(Keyword.Declaration, Text, Name, Text, Punctuation, Text,
- Keyword.Type)),
- (r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)',
- bygroups(Keyword, Text, Name.Namespace, Text)),
- (r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()',
- bygroups(Keyword, Text, Keyword.Type, Text, Operator)),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex),
- (r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)),
- (r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
- r'throw|try|catch|with|new|typeof|arguments|instanceof|this|'
- r'switch|import|include|as|is)\b',
- Keyword),
- (r'(class|public|final|internal|native|override|private|protected|'
- r'static|import|extends|implements|interface|intrinsic|return|super|'
- r'dynamic|function|const|get|namespace|package|set)\b',
- Keyword.Declaration),
- (r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b',
- Keyword.Constant),
- (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
- r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
- r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
- r'unescape)\b', Name.Function),
- (identifier, Name),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'[~\^\*!%&<>\|+=:;,/?\\{}\[\]().-]+', Operator),
- ],
- 'funcparams': [
- (r'\s+', Text),
- (r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' +
- typeidentifier + r'|\*)(\s*)',
- bygroups(Text, Punctuation, Name, Text, Operator, Text,
- Keyword.Type, Text), 'defval'),
- (r'\)', Operator, 'type')
- ],
- 'type': [
- (r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)',
- bygroups(Text, Operator, Text, Keyword.Type), '#pop:2'),
- (r'\s*', Text, '#pop:2')
- ],
- 'defval': [
- (r'(=)(\s*)([^(),]+)(\s*)(,?)',
- bygroups(Operator, Text, using(this), Text, Operator), '#pop'),
- (r',?', Operator, '#pop')
- ]
- }
-
- def analyse_text(text):
- if re.match(r'\w+\s*:\s*\w', text):
- return 0.3
- return 0
-
-
-class CssLexer(RegexLexer):
- """
- For CSS (Cascading Style Sheets).
- """
-
- name = 'CSS'
- aliases = ['css']
- filenames = ['*.css']
- mimetypes = ['text/css']
-
- tokens = {
- 'root': [
- include('basics'),
- ],
- 'basics': [
- (r'\s+', Text),
- (r'/\*(?:.|\n)*?\*/', Comment),
- (r'{', Punctuation, 'content'),
- (r'\:[a-zA-Z0-9_-]+', Name.Decorator),
- (r'\.[a-zA-Z0-9_-]+', Name.Class),
- (r'\#[a-zA-Z0-9_-]+', Name.Function),
- (r'@[a-zA-Z0-9_-]+', Keyword, 'atrule'),
- (r'[a-zA-Z0-9_-]+', Name.Tag),
- (r'[~\^\*!%&\[\]\(\)<>\|+=@:;,./?-]', Operator),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single)
- ],
- 'atrule': [
- (r'{', Punctuation, 'atcontent'),
- (r';', Punctuation, '#pop'),
- include('basics'),
- ],
- 'atcontent': [
- include('basics'),
- (r'}', Punctuation, '#pop:2'),
- ],
- 'content': [
- (r'\s+', Text),
- (r'}', Punctuation, '#pop'),
- (r'url\(.*?\)', String.Other),
- (r'^@.*?$', Comment.Preproc),
- (r'(azimuth|background-attachment|background-color|'
- r'background-image|background-position|background-repeat|'
- r'background|border-bottom-color|border-bottom-style|'
- r'border-bottom-width|border-left-color|border-left-style|'
- r'border-left-width|border-right|border-right-color|'
- r'border-right-style|border-right-width|border-top-color|'
- r'border-top-style|border-top-width|border-bottom|'
- r'border-collapse|border-left|border-width|border-color|'
- r'border-spacing|border-style|border-top|border|caption-side|'
- r'clear|clip|color|content|counter-increment|counter-reset|'
- r'cue-after|cue-before|cue|cursor|direction|display|'
- r'elevation|empty-cells|float|font-family|font-size|'
- r'font-size-adjust|font-stretch|font-style|font-variant|'
- r'font-weight|font|height|letter-spacing|line-height|'
- r'list-style-type|list-style-image|list-style-position|'
- r'list-style|margin-bottom|margin-left|margin-right|'
- r'margin-top|margin|marker-offset|marks|max-height|max-width|'
- r'min-height|min-width|opacity|orphans|outline|outline-color|'
- r'outline-style|outline-width|overflow(?:-x|-y)?|padding-bottom|'
- r'padding-left|padding-right|padding-top|padding|page|'
- r'page-break-after|page-break-before|page-break-inside|'
- r'pause-after|pause-before|pause|pitch|pitch-range|'
- r'play-during|position|quotes|richness|right|size|'
- r'speak-header|speak-numeral|speak-punctuation|speak|'
- r'speech-rate|stress|table-layout|text-align|text-decoration|'
- r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
- r'vertical-align|visibility|voice-family|volume|white-space|'
- r'widows|width|word-spacing|z-index|bottom|left|'
- r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
- r'behind|below|bidi-override|blink|block|bold|bolder|both|'
- r'capitalize|center-left|center-right|center|circle|'
- r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
- r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
- r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
- r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
- r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
- r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
- r'inherit|inline-table|inline|inset|inside|invert|italic|'
- r'justify|katakana-iroha|katakana|landscape|larger|large|'
- r'left-side|leftwards|level|lighter|line-through|list-item|'
- r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
- r'lower|low|medium|message-box|middle|mix|monospace|'
- r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
- r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
- r'open-quote|outset|outside|overline|pointer|portrait|px|'
- r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
- r'rightwards|s-resize|sans-serif|scroll|se-resize|'
- r'semi-condensed|semi-expanded|separate|serif|show|silent|'
- r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
- r'spell-out|square|static|status-bar|super|sw-resize|'
- r'table-caption|table-cell|table-column|table-column-group|'
- r'table-footer-group|table-header-group|table-row|'
- r'table-row-group|text|text-bottom|text-top|thick|thin|'
- r'transparent|ultra-condensed|ultra-expanded|underline|'
- r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
- r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
- r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Keyword),
- (r'(indigo|gold|firebrick|indianred|yellow|darkolivegreen|'
- r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
- r'mediumslateblue|black|springgreen|crimson|lightsalmon|brown|'
- r'turquoise|olivedrab|cyan|silver|skyblue|gray|darkturquoise|'
- r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|teal|'
- r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
- r'violet|navy|orchid|blue|ghostwhite|honeydew|cornflowerblue|'
- r'darkblue|darkkhaki|mediumpurple|cornsilk|red|bisque|slategray|'
- r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
- r'gainsboro|mediumturquoise|floralwhite|coral|purple|lightgrey|'
- r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
- r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
- r'lightcoral|orangered|navajowhite|lime|palegreen|burlywood|'
- r'seashell|mediumspringgreen|fuchsia|papayawhip|blanchedalmond|'
- r'peru|aquamarine|white|darkslategray|ivory|dodgerblue|'
- r'lemonchiffon|chocolate|orange|forestgreen|slateblue|olive|'
- r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
- r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
- r'plum|aqua|darkgoldenrod|maroon|sandybrown|magenta|tan|'
- r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
- r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
- r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
- r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
- r'lightyellow|lavenderblush|linen|mediumaquamarine|green|'
- r'blueviolet|peachpuff)\b', Name.Builtin),
- (r'\!important', Comment.Preproc),
- (r'/\*(?:.|\n)*?\*/', Comment),
- (r'\#[a-zA-Z0-9]{1,6}', Number),
- (r'[\.-]?[0-9]*[\.]?[0-9]+(em|px|\%|pt|pc|in|mm|cm|ex|s)\b', Number),
- (r'-?[0-9]+', Number),
- (r'[~\^\*!%&<>\|+=@:,./?-]+', Operator),
- (r'[\[\]();]+', Punctuation),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name)
- ]
- }
-
-
-class ObjectiveJLexer(RegexLexer):
- """
- For Objective-J source code with preprocessor directives.
-
- *New in Pygments 1.3.*
- """
-
- name = 'Objective-J'
- aliases = ['objective-j', 'objectivej', 'obj-j', 'objj']
- filenames = ['*.j']
- mimetypes = ['text/x-objective-j']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)*'
-
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'root': [
- include('whitespace'),
-
- # function definition
- (r'^(' + _ws + r'[\+-]' + _ws + r')([\(a-zA-Z_].*?[^\(])(' + _ws + '{)',
- bygroups(using(this), using(this, state='function_signature'),
- using(this))),
-
- # class definition
- (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
- 'classname'),
- (r'(@class|@protocol)(\s*)', bygroups(Keyword, Text),
- 'forward_classname'),
- (r'(\s*)(@end)(\s*)', bygroups(Text, Keyword, Text)),
-
- include('statements'),
- ('[{\(\)}]', Punctuation),
- (';', Punctuation),
- ],
- 'whitespace': [
- (r'(@import)(\s+)("(?:\\\\|\\"|[^"])*")',
- bygroups(Comment.Preproc, Text, String.Double)),
- (r'(@import)(\s+)(<(?:\\\\|\\>|[^>])*>)',
- bygroups(Comment.Preproc, Text, String.Double)),
- (r'(#(?:include|import))(\s+)("(?:\\\\|\\"|[^"])*")',
- bygroups(Comment.Preproc, Text, String.Double)),
- (r'(#(?:include|import))(\s+)(<(?:\\\\|\\>|[^>])*>)',
- bygroups(Comment.Preproc, Text, String.Double)),
-
- (r'#if\s+0', Comment.Preproc, 'if0'),
- (r'#', Comment.Preproc, 'macro'),
-
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'<!--', Comment),
- ],
- 'slashstartsregex': [
- include('whitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- (r'', Text, '#pop'),
- ],
- 'badregex': [
- (r'\n', Text, '#pop'),
- ],
- 'statements': [
- (r'(L|@)?"', String, 'string'),
- (r"(L|@)?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
- String.Char),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'0[0-7]+[Ll]?', Number.Oct),
- (r'\d+[Ll]?', Number.Integer),
-
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
-
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?',
- Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
-
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|'
- r'else|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
- r'prototype|__proto__)\b', Keyword, 'slashstartsregex'),
-
- (r'(var|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
-
- (r'(@selector|@private|@protected|@public|@encode|'
- r'@synchronized|@try|@throw|@catch|@finally|@end|@property|'
- r'@synthesize|@dynamic|@for|@accessors|new)\b', Keyword),
-
- (r'(int|long|float|short|double|char|unsigned|signed|void|'
- r'id|BOOL|bool|boolean|IBOutlet|IBAction|SEL|@outlet|@action)\b',
- Keyword.Type),
-
- (r'(self|super)\b', Name.Builtin),
-
- (r'(TRUE|YES|FALSE|NO|Nil|nil|NULL)\b', Keyword.Constant),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(ABS|ASIN|ACOS|ATAN|ATAN2|SIN|COS|TAN|EXP|POW|CEIL|FLOOR|ROUND|'
- r'MIN|MAX|RAND|SQRT|E|LN2|LN10|LOG2E|LOG10E|PI|PI2|PI_2|SQRT1_2|'
- r'SQRT2)\b', Keyword.Constant),
-
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
- r'window)\b', Name.Builtin),
-
- (r'([$a-zA-Z_][a-zA-Z0-9_]*)(' + _ws + r')(?=\()',
- bygroups(Name.Function, using(this))),
-
- (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name),
- ],
- 'classname' : [
- # interface definition that inherits
- (r'([a-zA-Z_][a-zA-Z0-9_]*)(' + _ws + r':' + _ws +
- r')([a-zA-Z_][a-zA-Z0-9_]*)?',
- bygroups(Name.Class, using(this), Name.Class), '#pop'),
- # interface definition for a category
- (r'([a-zA-Z_][a-zA-Z0-9_]*)(' + _ws + r'\()([a-zA-Z_][a-zA-Z0-9_]*)(\))',
- bygroups(Name.Class, using(this), Name.Label, Text), '#pop'),
- # simple interface / implementation
- (r'([a-zA-Z_][a-zA-Z0-9_]*)', Name.Class, '#pop'),
- ],
- 'forward_classname' : [
- (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*,\s*)',
- bygroups(Name.Class, Text), '#push'),
- (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*;?)',
- bygroups(Name.Class, Text), '#pop'),
- ],
- 'function_signature': [
- include('whitespace'),
-
- # start of a selector w/ parameters
- (r'(\(' + _ws + r')' # open paren
- r'([a-zA-Z_][a-zA-Z0-9_]+)' # return type
- r'(' + _ws + r'\)' + _ws + r')' # close paren
- r'([$a-zA-Z_][a-zA-Z0-9_]+' + _ws + r':)', # function name
- bygroups(using(this), Keyword.Type, using(this),
- Name.Function), 'function_parameters'),
-
- # no-param function
- (r'(\(' + _ws + r')' # open paren
- r'([a-zA-Z_][a-zA-Z0-9_]+)' # return type
- r'(' + _ws + r'\)' + _ws + r')' # close paren
- r'([$a-zA-Z_][a-zA-Z0-9_]+)', # function name
- bygroups(using(this), Keyword.Type, using(this),
- Name.Function), "#pop"),
-
- # no return type given, start of a selector w/ parameters
- (r'([$a-zA-Z_][a-zA-Z0-9_]+' + _ws + r':)', # function name
- bygroups (Name.Function), 'function_parameters'),
-
- # no return type given, no-param function
- (r'([$a-zA-Z_][a-zA-Z0-9_]+)', # function name
- bygroups(Name.Function), "#pop"),
-
- ('', Text, '#pop'),
- ],
- 'function_parameters': [
- include('whitespace'),
-
- # parameters
- (r'(\(' + _ws + ')' # open paren
- r'([^\)]+)' # type
- r'(' + _ws + r'\)' + _ws + r')' # close paren
- r'([$a-zA-Z_][a-zA-Z0-9_]+)', # param name
- bygroups(using(this), Keyword.Type, using(this), Text)),
-
- # one piece of a selector name
- (r'([$a-zA-Z_][a-zA-Z0-9_]+' + _ws + r':)', # function name
- Name.Function),
-
- # smallest possible selector piece
- (r'(:)', Name.Function),
-
- # var args
- (r'(,' + _ws + r'\.\.\.)', using(this)),
-
- # param name
- (r'([$a-zA-Z_][a-zA-Z0-9_]+)', Text),
- ],
- 'expression' : [
- (r'([$a-zA-Z_][a-zA-Z0-9_]*)(\()', bygroups(Name.Function,
- Punctuation)),
- (r'(\))', Punctuation, "#pop"),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ]
- }
-
- def analyse_text(text):
- if re.search('^\s*@import\s+[<"]', text, re.MULTILINE):
- # special directive found in most Objective-J files
- return True
- return False
-
-
-class HtmlLexer(RegexLexer):
- """
- For HTML 4 and XHTML 1 markup. Nested JavaScript and CSS is highlighted
- by the appropriate lexer.
- """
-
- name = 'HTML'
- aliases = ['html']
- filenames = ['*.html', '*.htm', '*.xhtml', '*.xslt']
- mimetypes = ['text/html', 'application/xhtml+xml']
-
- flags = re.IGNORECASE | re.DOTALL
- tokens = {
- 'root': [
- ('[^<&]+', Text),
- (r'&\S*?;', Name.Entity),
- (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
- ('<!--', Comment, 'comment'),
- (r'<\?.*?\?>', Comment.Preproc),
- ('<![^>]*>', Comment.Preproc),
- (r'<\s*script\s*', Name.Tag, ('script-content', 'tag')),
- (r'<\s*style\s*', Name.Tag, ('style-content', 'tag')),
- # note: this allows tag names not used in HTML like <x:with-dash>,
- # this is to support yet-unknown template engines and the like
- (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
- (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
- ],
- 'comment': [
- ('[^-]+', Comment),
- ('-->', Comment, '#pop'),
- ('-', Comment),
- ],
- 'tag': [
- (r'\s+', Text),
- (r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'attr'),
- (r'[a-zA-Z0-9_:-]+', Name.Attribute),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'script-content': [
- (r'<\s*/\s*script\s*>', Name.Tag, '#pop'),
- (r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)),
- ],
- 'style-content': [
- (r'<\s*/\s*style\s*>', Name.Tag, '#pop'),
- (r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)),
- ],
- 'attr': [
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if html_doctype_matches(text):
- return 0.5
-
-
-class PhpLexer(RegexLexer):
- """
- For `PHP <http://www.php.net/>`_ source code.
- For PHP embedded in HTML, use the `HtmlPhpLexer`.
-
- Additional options accepted:
-
- `startinline`
- If given and ``True`` the lexer starts highlighting with
- php code (i.e.: no starting ``<?php`` required). The default
- is ``False``.
- `funcnamehighlighting`
- If given and ``True``, highlight builtin function names
- (default: ``True``).
- `disabledmodules`
- If given, must be a list of module names whose function names
- should not be highlighted. By default all modules are highlighted
- except the special ``'unknown'`` module that includes functions
- that are known to php but are undocumented.
-
- To get a list of allowed modules have a look into the
- `_phpbuiltins` module:
-
- .. sourcecode:: pycon
-
- >>> from pygments.lexers._phpbuiltins import MODULES
- >>> MODULES.keys()
- ['PHP Options/Info', 'Zip', 'dba', ...]
-
- In fact the names of those modules match the module names from
- the php documentation.
- """
-
- name = 'PHP'
- aliases = ['php', 'php3', 'php4', 'php5']
- filenames = ['*.php', '*.php[345]', '*.inc']
- mimetypes = ['text/x-php']
-
- flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
- tokens = {
- 'root': [
- (r'<\?(php)?', Comment.Preproc, 'php'),
- (r'[^<]+', Other),
- (r'<', Other)
- ],
- 'php': [
- (r'\?>', Comment.Preproc, '#pop'),
- (r'<<<(\'?)([a-zA-Z_][a-zA-Z0-9_]*)\1\n.*?\n\2\;?\n', String),
- (r'\s+', Text),
- (r'#.*?\n', Comment.Single),
- (r'//.*?\n', Comment.Single),
- # put the empty comment here, it is otherwise seen as
- # the start of a docstring
- (r'/\*\*/', Comment.Multiline),
- (r'/\*\*.*?\*/', String.Doc),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'(->|::)(\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
- bygroups(Operator, Text, Name.Attribute)),
- (r'[~!%^&*+=|:.<>/?@-]+', Operator),
- (r'[\[\]{}();,]+', Punctuation),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(function)(\s*)(?=\()', bygroups(Keyword, Text)),
- (r'(function)(\s+)(&?)(\s*)',
- bygroups(Keyword, Text, Operator, Text), 'functionname'),
- (r'(const)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
- bygroups(Keyword, Text, Name.Constant)),
- (r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
- r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
- r'FALSE|print|for|require|continue|foreach|require_once|'
- r'declare|return|default|static|do|switch|die|stdClass|'
- r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
- r'virtual|endfor|include_once|while|endforeach|global|__FILE__|'
- r'endif|list|__LINE__|endswitch|new|__sleep|endwhile|not|'
- r'array|__wakeup|E_ALL|NULL|final|php_user_filter|interface|'
- r'implements|public|private|protected|abstract|clone|try|'
- r'catch|throw|this|use|namespace|trait)\b', Keyword),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'\$\{\$+[a-zA-Z_][a-zA-Z0-9_]*\}', Name.Variable),
- (r'\$+[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),
- (r'[\\a-zA-Z_][\\a-zA-Z0-9_]*', Name.Other),
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'0[0-7]+', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
- (r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
- (r'"', String.Double, 'string'),
- ],
- 'classname': [
- (r'[a-zA-Z_][\\a-zA-Z0-9_]*', Name.Class, '#pop')
- ],
- 'functionname': [
- (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'[^{$"\\]+', String.Double),
- (r'\\([nrt\"$\\]|[0-7]{1,3}|x[0-9A-Fa-f]{1,2})', String.Escape),
- (r'\$[a-zA-Z_][a-zA-Z0-9_]*(\[\S+\]|->[a-zA-Z_][a-zA-Z0-9_]*)?',
- String.Interpol),
- (r'(\{\$\{)(.*?)(\}\})',
- bygroups(String.Interpol, using(this, _startinline=True),
- String.Interpol)),
- (r'(\{)(\$.*?)(\})',
- bygroups(String.Interpol, using(this, _startinline=True),
- String.Interpol)),
- (r'(\$\{)(\S+)(\})',
- bygroups(String.Interpol, Name.Variable, String.Interpol)),
- (r'[${\\]+', String.Double)
- ],
- }
-
- def __init__(self, **options):
- self.funcnamehighlighting = get_bool_opt(
- options, 'funcnamehighlighting', True)
- self.disabledmodules = get_list_opt(
- options, 'disabledmodules', ['unknown'])
- self.startinline = get_bool_opt(options, 'startinline', False)
-
- # private option argument for the lexer itself
- if '_startinline' in options:
- self.startinline = options.pop('_startinline')
-
- # collect activated functions in a set
- self._functions = set()
- if self.funcnamehighlighting:
- from pygments.lexers._phpbuiltins import MODULES
- for key, value in MODULES.iteritems():
- if key not in self.disabledmodules:
- self._functions.update(value)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- if self.startinline:
- stack.append('php')
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name.Other:
- if value in self._functions:
- yield index, Name.Builtin, value
- continue
- yield index, token, value
-
- def analyse_text(text):
- rv = 0.0
- if re.search(r'<\?(?!xml)', text):
- rv += 0.3
- if '?>' in text:
- rv += 0.1
- return rv
-
-
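
The PhpLexer removed from this module continues to live in pygments.lexers.php (re-exported above), together with the options its docstring describes; startinline in particular lets a bare PHP snippet be highlighted without an opening <?php tag. A minimal sketch, assuming the patch is applied (NullFormatter simply writes the token text back out without markup):

    from pygments import highlight
    from pygments.lexers.php import PhpLexer
    from pygments.formatters import NullFormatter

    # startinline=True: the lexer starts in PHP mode, no leading <?php needed.
    print(highlight('echo strlen("abc");', PhpLexer(startinline=True), NullFormatter()))
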
-class DtdLexer(RegexLexer):
- """
- A lexer for DTDs (Document Type Definitions).
-
- *New in Pygments 1.5.*
- """
-
- flags = re.MULTILINE | re.DOTALL
-
- name = 'DTD'
- aliases = ['dtd']
- filenames = ['*.dtd']
- mimetypes = ['application/xml-dtd']
-
- tokens = {
- 'root': [
- include('common'),
-
- (r'(<!ELEMENT)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Tag), 'element'),
- (r'(<!ATTLIST)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Tag), 'attlist'),
- (r'(<!ENTITY)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Entity), 'entity'),
- (r'(<!NOTATION)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Tag), 'notation'),
- (r'(<!\[)([^\[\s]+)(\s*)(\[)', # conditional sections
- bygroups(Keyword, Name.Entity, Text, Keyword)),
-
- (r'(<!DOCTYPE)(\s+)([^>\s]+)',
- bygroups(Keyword, Text, Name.Tag)),
- (r'PUBLIC|SYSTEM', Keyword.Constant),
- (r'[\[\]>]', Keyword),
- ],
-
- 'common': [
- (r'\s+', Text),
- (r'(%|&)[^;]*;', Name.Entity),
- ('<!--', Comment, 'comment'),
- (r'[(|)*,?+]', Operator),
- (r'"[^"]*"', String.Double),
- (r'\'[^\']*\'', String.Single),
- ],
-
- 'comment': [
- ('[^-]+', Comment),
- ('-->', Comment, '#pop'),
- ('-', Comment),
- ],
-
- 'element': [
- include('common'),
- (r'EMPTY|ANY|#PCDATA', Keyword.Constant),
- (r'[^>\s\|()?+*,]+', Name.Tag),
- (r'>', Keyword, '#pop'),
- ],
-
- 'attlist': [
- include('common'),
- (r'CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION',
- Keyword.Constant),
- (r'#REQUIRED|#IMPLIED|#FIXED', Keyword.Constant),
- (r'xml:space|xml:lang', Keyword.Reserved),
- (r'[^>\s\|()?+*,]+', Name.Attribute),
- (r'>', Keyword, '#pop'),
- ],
-
- 'entity': [
- include('common'),
- (r'SYSTEM|PUBLIC|NDATA', Keyword.Constant),
- (r'[^>\s\|()?+*,]+', Name.Entity),
- (r'>', Keyword, '#pop'),
- ],
-
- 'notation': [
- include('common'),
- (r'SYSTEM|PUBLIC', Keyword.Constant),
- (r'[^>\s\|()?+*,]+', Name.Attribute),
- (r'>', Keyword, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if not looks_like_xml(text) and \
- ('<!ELEMENT' in text or '<!ATTLIST' in text or '<!ENTITY' in text):
- return 0.8
-
-class XmlLexer(RegexLexer):
- """
- Generic lexer for XML (eXtensible Markup Language).
- """
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- name = 'XML'
- aliases = ['xml']
- filenames = ['*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd',
- '*.wsdl', '*.wsf']
- mimetypes = ['text/xml', 'application/xml', 'image/svg+xml',
- 'application/rss+xml', 'application/atom+xml']
-
- tokens = {
- 'root': [
- ('[^<&]+', Text),
- (r'&\S*?;', Name.Entity),
- (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
- ('<!--', Comment, 'comment'),
- (r'<\?.*?\?>', Comment.Preproc),
- ('<![^>]*>', Comment.Preproc),
- (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
- (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
- ],
- 'comment': [
- ('[^-]+', Comment),
- ('-->', Comment, '#pop'),
- ('-', Comment),
- ],
- 'tag': [
- (r'\s+', Text),
- (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'attr': [
- ('\s+', Text),
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if looks_like_xml(text):
- return 0.5
-
-
-class XsltLexer(XmlLexer):
- '''
- A lexer for XSLT.
-
- *New in Pygments 0.10.*
- '''
-
- name = 'XSLT'
- aliases = ['xslt']
- filenames = ['*.xsl', '*.xslt', '*.xpl'] # xpl is XProc
- mimetypes = ['application/xsl+xml', 'application/xslt+xml']
-
- EXTRA_KEYWORDS = set([
- 'apply-imports', 'apply-templates', 'attribute',
- 'attribute-set', 'call-template', 'choose', 'comment',
- 'copy', 'copy-of', 'decimal-format', 'element', 'fallback',
- 'for-each', 'if', 'import', 'include', 'key', 'message',
- 'namespace-alias', 'number', 'otherwise', 'output', 'param',
- 'preserve-space', 'processing-instruction', 'sort',
- 'strip-space', 'stylesheet', 'template', 'text', 'transform',
- 'value-of', 'variable', 'when', 'with-param'
- ])
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in XmlLexer.get_tokens_unprocessed(self, text):
- m = re.match('</?xsl:([^>]*)/?>?', value)
-
- if token is Name.Tag and m and m.group(1) in self.EXTRA_KEYWORDS:
- yield index, Keyword, value
- else:
- yield index, token, value
-
- def analyse_text(text):
- if looks_like_xml(text) and '<xsl' in text:
- return 0.8
-
-
-class MxmlLexer(RegexLexer):
- """
- For MXML markup.
- Nested AS3 in <script> tags is highlighted by the appropriate lexer.
-
- *New in Pygments 1.1.*
- """
- flags = re.MULTILINE | re.DOTALL
- name = 'MXML'
- aliases = ['mxml']
- filenames = ['*.mxml']
- mimetimes = ['text/xml', 'application/xml']
-
- tokens = {
- 'root': [
- ('[^<&]+', Text),
- (r'&\S*?;', Name.Entity),
- (r'(\<\!\[CDATA\[)(.*?)(\]\]\>)',
- bygroups(String, using(ActionScript3Lexer), String)),
- ('<!--', Comment, 'comment'),
- (r'<\?.*?\?>', Comment.Preproc),
- ('<![^>]*>', Comment.Preproc),
- (r'<\s*[a-zA-Z0-9:._-]+', Name.Tag, 'tag'),
- (r'<\s*/\s*[a-zA-Z0-9:._-]+\s*>', Name.Tag),
- ],
- 'comment': [
- ('[^-]+', Comment),
- ('-->', Comment, '#pop'),
- ('-', Comment),
- ],
- 'tag': [
- (r'\s+', Text),
- (r'[a-zA-Z0-9_.:-]+\s*=', Name.Attribute, 'attr'),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'attr': [
- ('\s+', Text),
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
-
-
-class HaxeLexer(ExtendedRegexLexer):
- """
- For Haxe source code (http://haxe.org/).
-
- *New in Pygments 1.3.*
- """
-
- name = 'Haxe'
- aliases = ['hx', 'Haxe', 'haxe', 'haXe', 'hxsl']
- filenames = ['*.hx', '*.hxsl']
- mimetypes = ['text/haxe', 'text/x-haxe', 'text/x-hx']
-
- # keywords extracted from lexer.mll in the haxe compiler source
- keyword = (r'(?:function|class|static|var|if|else|while|do|for|'
- r'break|return|continue|extends|implements|import|'
- r'switch|case|default|public|private|try|untyped|'
- r'catch|new|this|throw|extern|enum|in|interface|'
- r'cast|override|dynamic|typedef|package|'
- r'inline|using|null|true|false|abstract)\b')
-
- # idtype in lexer.mll
- typeid = r'_*[A-Z][_a-zA-Z0-9]*'
-
- # combined ident and dollar and idtype
- ident = r'(?:_*[a-z][_a-zA-Z0-9]*|_+[0-9][_a-zA-Z0-9]*|' + typeid + \
- '|_+|\$[_a-zA-Z0-9]+)'
-
- binop = (r'(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|'
- r'!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|'
- r'/|\-|=>|=)')
-
- # ident except keywords
- ident_no_keyword = r'(?!' + keyword + ')' + ident
-
- flags = re.DOTALL | re.MULTILINE
-
- preproc_stack = []
-
- def preproc_callback(self, match, ctx):
- proc = match.group(2)
-
- if proc == 'if':
- # store the current stack
- self.preproc_stack.append(ctx.stack[:])
- elif proc in ['else', 'elseif']:
- # restore the stack back to right before #if
- if self.preproc_stack: ctx.stack = self.preproc_stack[-1][:]
- elif proc == 'end':
- # remove the saved stack of previous #if
- if self.preproc_stack: self.preproc_stack.pop()
-
- # #if and #elseif should follow by an expr
- if proc in ['if', 'elseif']:
- ctx.stack.append('preproc-expr')
-
- # #error can be optionally follow by the error msg
- if proc in ['error']:
- ctx.stack.append('preproc-error')
-
- yield match.start(), Comment.Preproc, '#' + proc
- ctx.pos = match.end()
-
-
- tokens = {
- 'root': [
- include('spaces'),
- include('meta'),
- (r'(?:package)\b', Keyword.Namespace, ('semicolon', 'package')),
- (r'(?:import)\b', Keyword.Namespace, ('semicolon', 'import')),
- (r'(?:using)\b', Keyword.Namespace, ('semicolon', 'using')),
- (r'(?:extern|private)\b', Keyword.Declaration),
- (r'(?:abstract)\b', Keyword.Declaration, 'abstract'),
- (r'(?:class|interface)\b', Keyword.Declaration, 'class'),
- (r'(?:enum)\b', Keyword.Declaration, 'enum'),
- (r'(?:typedef)\b', Keyword.Declaration, 'typedef'),
-
- # top-level expression
- # although it is not supported in haxe, but it is common to write
- # expression in web pages the positive lookahead here is to prevent
- # an infinite loop at the EOF
- (r'(?=.)', Text, 'expr-statement'),
- ],
-
- # space/tab/comment/preproc
- 'spaces': [
- (r'\s+', Text),
- (r'//[^\n\r]*', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'(#)(if|elseif|else|end|error)\b', preproc_callback),
- ],
-
- 'string-single-interpol': [
- (r'\$\{', String.Interpol, ('string-interpol-close', 'expr')),
- (r'\$\$', String.Escape),
- (r'\$(?=' + ident + ')', String.Interpol, 'ident'),
- include('string-single'),
- ],
-
- 'string-single': [
- (r"'", String.Single, '#pop'),
- (r'\\.', String.Escape),
- (r'.', String.Single),
- ],
-
- 'string-double': [
- (r'"', String.Double, '#pop'),
- (r'\\.', String.Escape),
- (r'.', String.Double),
- ],
-
- 'string-interpol-close': [
- (r'\$'+ident, String.Interpol),
- (r'\}', String.Interpol, '#pop'),
- ],
-
- 'package': [
- include('spaces'),
- (ident, Name.Namespace),
- (r'\.', Punctuation, 'import-ident'),
- (r'', Text, '#pop'),
- ],
-
- 'import': [
- include('spaces'),
- (ident, Name.Namespace),
- (r'\*', Keyword), # wildcard import
- (r'\.', Punctuation, 'import-ident'),
- (r'in', Keyword.Namespace, 'ident'),
- (r'', Text, '#pop'),
- ],
-
- 'import-ident': [
- include('spaces'),
- (r'\*', Keyword, '#pop'), # wildcard import
- (ident, Name.Namespace, '#pop'),
- ],
-
- 'using': [
- include('spaces'),
- (ident, Name.Namespace),
- (r'\.', Punctuation, 'import-ident'),
- (r'', Text, '#pop'),
- ],
-
- 'preproc-error': [
- (r'\s+', Comment.Preproc),
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
- (r'', Text, '#pop'),
- ],
-
- 'preproc-expr': [
- (r'\s+', Comment.Preproc),
- (r'\!', Comment.Preproc),
- (r'\(', Comment.Preproc, ('#pop', 'preproc-parenthesis')),
-
- (ident, Comment.Preproc, '#pop'),
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
- ],
-
- 'preproc-parenthesis': [
- (r'\s+', Comment.Preproc),
- (r'\)', Comment.Preproc, '#pop'),
- ('', Text, 'preproc-expr-in-parenthesis'),
- ],
-
- 'preproc-expr-chain': [
- (r'\s+', Comment.Preproc),
- (binop, Comment.Preproc, ('#pop', 'preproc-expr-in-parenthesis')),
- (r'', Text, '#pop'),
- ],
-
- # same as 'preproc-expr' but able to chain 'preproc-expr-chain'
- 'preproc-expr-in-parenthesis': [
- (r'\s+', Comment.Preproc),
- (r'\!', Comment.Preproc),
- (r'\(', Comment.Preproc,
- ('#pop', 'preproc-expr-chain', 'preproc-parenthesis')),
-
- (ident, Comment.Preproc, ('#pop', 'preproc-expr-chain')),
- (r"'", String.Single,
- ('#pop', 'preproc-expr-chain', 'string-single')),
- (r'"', String.Double,
- ('#pop', 'preproc-expr-chain', 'string-double')),
- ],
-
- 'abstract' : [
- include('spaces'),
- (r'', Text, ('#pop', 'abstract-body', 'abstract-relation',
- 'abstract-opaque', 'type-param-constraint', 'type-name')),
- ],
-
- 'abstract-body' : [
- include('spaces'),
- (r'\{', Punctuation, ('#pop', 'class-body')),
- ],
-
- 'abstract-opaque' : [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'parenthesis-close', 'type')),
- (r'', Text, '#pop'),
- ],
-
- 'abstract-relation': [
- include('spaces'),
- (r'(?:to|from)', Keyword.Declaration, 'type'),
- (r',', Punctuation),
- (r'', Text, '#pop'),
- ],
-
- 'meta': [
- include('spaces'),
- (r'@', Name.Decorator, ('meta-body', 'meta-ident', 'meta-colon')),
- ],
-
- # optional colon
- 'meta-colon': [
- include('spaces'),
- (r':', Name.Decorator, '#pop'),
- (r'', Text, '#pop'),
- ],
-
- # same as 'ident' but set token as Name.Decorator instead of Name
- 'meta-ident': [
- include('spaces'),
- (ident, Name.Decorator, '#pop'),
- ],
-
- 'meta-body': [
- include('spaces'),
- (r'\(', Name.Decorator, ('#pop', 'meta-call')),
- (r'', Text, '#pop'),
- ],
-
- 'meta-call': [
- include('spaces'),
- (r'\)', Name.Decorator, '#pop'),
- (r'', Text, ('#pop', 'meta-call-sep', 'expr')),
- ],
-
- 'meta-call-sep': [
- include('spaces'),
- (r'\)', Name.Decorator, '#pop'),
- (r',', Punctuation, ('#pop', 'meta-call')),
- ],
-
- 'typedef': [
- include('spaces'),
- (r'', Text, ('#pop', 'typedef-body', 'type-param-constraint',
- 'type-name')),
- ],
-
- 'typedef-body': [
- include('spaces'),
- (r'=', Operator, ('#pop', 'optional-semicolon', 'type')),
- ],
-
- 'enum': [
- include('spaces'),
- (r'', Text, ('#pop', 'enum-body', 'bracket-open',
- 'type-param-constraint', 'type-name')),
- ],
-
- 'enum-body': [
- include('spaces'),
- include('meta'),
- (r'\}', Punctuation, '#pop'),
- (ident_no_keyword, Name, ('enum-member', 'type-param-constraint')),
- ],
-
- 'enum-member': [
- include('spaces'),
- (r'\(', Punctuation,
- ('#pop', 'semicolon', 'flag', 'function-param')),
- (r'', Punctuation, ('#pop', 'semicolon', 'flag')),
- ],
-
- 'class': [
- include('spaces'),
- (r'', Text, ('#pop', 'class-body', 'bracket-open', 'extends',
- 'type-param-constraint', 'type-name')),
- ],
-
- 'extends': [
- include('spaces'),
- (r'(?:extends|implements)\b', Keyword.Declaration, 'type'),
- (r',', Punctuation), # the comma is made optional here, since haxe2
- # requires the comma but haxe3 does not allow it
- (r'', Text, '#pop'),
- ],
-
- 'bracket-open': [
- include('spaces'),
- (r'\{', Punctuation, '#pop'),
- ],
-
- 'bracket-close': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- ],
-
- 'class-body': [
- include('spaces'),
- include('meta'),
- (r'\}', Punctuation, '#pop'),
- (r'(?:static|public|private|override|dynamic|inline|macro)\b',
- Keyword.Declaration),
- (r'', Text, 'class-member'),
- ],
-
- 'class-member': [
- include('spaces'),
- (r'(var)\b', Keyword.Declaration,
- ('#pop', 'optional-semicolon', 'prop')),
- (r'(function)\b', Keyword.Declaration,
- ('#pop', 'optional-semicolon', 'class-method')),
- ],
-
- # local function, anonymous or not
- 'function-local': [
- include('spaces'),
- (r'(' + ident_no_keyword + ')?', Name.Function,
- ('#pop', 'expr', 'flag', 'function-param',
- 'parenthesis-open', 'type-param-constraint')),
- ],
-
- 'optional-expr': [
- include('spaces'),
- include('expr'),
- (r'', Text, '#pop'),
- ],
-
- 'class-method': [
- include('spaces'),
- (ident, Name.Function, ('#pop', 'optional-expr', 'flag',
- 'function-param', 'parenthesis-open',
- 'type-param-constraint')),
- ],
-
- # function arguments
- 'function-param': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r'\?', Punctuation),
- (ident_no_keyword, Name,
- ('#pop', 'function-param-sep', 'assign', 'flag')),
- ],
-
- 'function-param-sep': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'function-param')),
- ],
-
- # class property
- # eg. var prop(default, null):String;
- 'prop': [
- include('spaces'),
- (ident_no_keyword, Name, ('#pop', 'assign', 'flag', 'prop-get-set')),
- ],
-
- 'prop-get-set': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'parenthesis-close',
- 'prop-get-set-opt', 'comma', 'prop-get-set-opt')),
- (r'', Text, '#pop'),
- ],
-
- 'prop-get-set-opt': [
- include('spaces'),
- (r'(?:default|null|never|dynamic|get|set)\b', Keyword, '#pop'),
- (ident_no_keyword, Text, '#pop'), #custom getter/setter
- ],
-
- 'expr-statement': [
- include('spaces'),
- # makes semicolon optional here, just to avoid checking the last
- # one is bracket or not.
- (r'', Text, ('#pop', 'optional-semicolon', 'expr')),
- ],
-
- 'expr': [
- include('spaces'),
- (r'@', Name.Decorator, ('#pop', 'optional-expr', 'meta-body',
- 'meta-ident', 'meta-colon')),
- (r'(?:\+\+|\-\-|~(?!/)|!|\-)', Operator),
- (r'\(', Punctuation, ('#pop', 'expr-chain', 'parenthesis')),
- (r'(?:inline)\b', Keyword.Declaration),
- (r'(?:function)\b', Keyword.Declaration, ('#pop', 'expr-chain',
- 'function-local')),
- (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket')),
- (r'(?:true|false|null)\b', Keyword.Constant, ('#pop', 'expr-chain')),
- (r'(?:this)\b', Keyword, ('#pop', 'expr-chain')),
- (r'(?:cast)\b', Keyword, ('#pop', 'expr-chain', 'cast')),
- (r'(?:try)\b', Keyword, ('#pop', 'catch', 'expr')),
- (r'(?:var)\b', Keyword.Declaration, ('#pop', 'var')),
- (r'(?:new)\b', Keyword, ('#pop', 'expr-chain', 'new')),
- (r'(?:switch)\b', Keyword, ('#pop', 'switch')),
- (r'(?:if)\b', Keyword, ('#pop', 'if')),
- (r'(?:do)\b', Keyword, ('#pop', 'do')),
- (r'(?:while)\b', Keyword, ('#pop', 'while')),
- (r'(?:for)\b', Keyword, ('#pop', 'for')),
- (r'(?:untyped|throw)\b', Keyword),
- (r'(?:return)\b', Keyword, ('#pop', 'optional-expr')),
- (r'(?:macro)\b', Keyword, ('#pop', 'macro')),
- (r'(?:continue|break)\b', Keyword, '#pop'),
- (r'(?:\$\s*[a-z]\b|\$(?!'+ident+'))', Name, ('#pop', 'dollar')),
- (ident_no_keyword, Name, ('#pop', 'expr-chain')),
-
- # Float
- (r'\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+[eE][\+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+\.[0-9]*[eE][\+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, ('#pop', 'expr-chain')),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'expr-chain')),
- (r'[0-9]+', Number.Integer, ('#pop', 'expr-chain')),
-
- # String
- (r"'", String.Single, ('#pop', 'expr-chain', 'string-single-interpol')),
- (r'"', String.Double, ('#pop', 'expr-chain', 'string-double')),
-
- # EReg
- (r'~/(\\\\|\\/|[^/\n])*/[gimsu]*', String.Regex, ('#pop', 'expr-chain')),
-
- # Array
- (r'\[', Punctuation, ('#pop', 'expr-chain', 'array-decl')),
- ],
-
- 'expr-chain': [
- include('spaces'),
- (r'(?:\+\+|\-\-)', Operator),
- (binop, Operator, ('#pop', 'expr')),
- (r'(?:in)\b', Keyword, ('#pop', 'expr')),
- (r'\?', Operator, ('#pop', 'expr', 'ternary', 'expr')),
- (r'(\.)(' + ident_no_keyword + ')', bygroups(Punctuation, Name)),
- (r'\[', Punctuation, 'array-access'),
- (r'\(', Punctuation, 'call'),
- (r'', Text, '#pop'),
- ],
-
- # macro reification
- 'macro': [
- include('spaces'),
- (r':', Punctuation, ('#pop', 'type')),
- (r'', Text, ('#pop', 'expr')),
- ],
-
- # cast can be written as "cast expr" or "cast(expr, type)"
- 'cast': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'parenthesis-close',
- 'cast-type', 'expr')),
- (r'', Text, ('#pop', 'expr')),
- ],
-
- # optionally give a type as the 2nd argument of cast()
- 'cast-type': [
- include('spaces'),
- (r',', Punctuation, ('#pop', 'type')),
- (r'', Text, '#pop'),
- ],
-
- 'catch': [
- include('spaces'),
- (r'(?:catch)\b', Keyword, ('expr', 'function-param',
- 'parenthesis-open')),
- (r'', Text, '#pop'),
- ],
-
- # do-while loop
- 'do': [
- include('spaces'),
- (r'', Punctuation, ('#pop', 'do-while', 'expr')),
- ],
-
- # the while after do
- 'do-while': [
- include('spaces'),
- (r'(?:while)\b', Keyword, ('#pop', 'parenthesis',
- 'parenthesis-open')),
- ],
-
- 'while': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
- ],
-
- 'for': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
- ],
-
- 'if': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'else', 'optional-semicolon', 'expr',
- 'parenthesis')),
- ],
-
- 'else': [
- include('spaces'),
- (r'(?:else)\b', Keyword, ('#pop', 'expr')),
- (r'', Text, '#pop'),
- ],
-
- 'switch': [
- include('spaces'),
- (r'', Text, ('#pop', 'switch-body', 'bracket-open', 'expr')),
- ],
-
- 'switch-body': [
- include('spaces'),
- (r'(?:case|default)\b', Keyword, ('case-block', 'case')),
- (r'\}', Punctuation, '#pop'),
- ],
-
- 'case': [
- include('spaces'),
- (r':', Punctuation, '#pop'),
- (r'', Text, ('#pop', 'case-sep', 'case-guard', 'expr')),
- ],
-
- 'case-sep': [
- include('spaces'),
- (r':', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'case')),
- ],
-
- 'case-guard': [
- include('spaces'),
- (r'(?:if)\b', Keyword, ('#pop', 'parenthesis', 'parenthesis-open')),
- (r'', Text, '#pop'),
- ],
-
- # optional multiple expr under a case
- 'case-block': [
- include('spaces'),
- (r'(?!(?:case|default)\b|\})', Keyword, 'expr-statement'),
- (r'', Text, '#pop'),
- ],
-
- 'new': [
- include('spaces'),
- (r'', Text, ('#pop', 'call', 'parenthesis-open', 'type')),
- ],
-
- 'array-decl': [
- include('spaces'),
- (r'\]', Punctuation, '#pop'),
- (r'', Text, ('#pop', 'array-decl-sep', 'expr')),
- ],
-
- 'array-decl-sep': [
- include('spaces'),
- (r'\]', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'array-decl')),
- ],
-
- 'array-access': [
- include('spaces'),
- (r'', Text, ('#pop', 'array-access-close', 'expr')),
- ],
-
- 'array-access-close': [
- include('spaces'),
- (r'\]', Punctuation, '#pop'),
- ],
-
- 'comma': [
- include('spaces'),
- (r',', Punctuation, '#pop'),
- ],
-
- 'colon': [
- include('spaces'),
- (r':', Punctuation, '#pop'),
- ],
-
- 'semicolon': [
- include('spaces'),
- (r';', Punctuation, '#pop'),
- ],
-
- 'optional-semicolon': [
- include('spaces'),
- (r';', Punctuation, '#pop'),
- (r'', Text, '#pop'),
- ],
-
- # identity that CAN be a Haxe keyword
- 'ident': [
- include('spaces'),
- (ident, Name, '#pop'),
- ],
-
- 'dollar': [
- include('spaces'),
- (r'\{', Keyword, ('#pop', 'bracket-close', 'expr')),
- (r'', Text, ('#pop', 'expr-chain')),
- ],
-
- 'type-name': [
- include('spaces'),
- (typeid, Name, '#pop'),
- ],
-
- 'type-full-name': [
- include('spaces'),
- (r'\.', Punctuation, 'ident'),
- (r'', Text, '#pop'),
- ],
-
- 'type': [
- include('spaces'),
- (r'\?', Punctuation),
- (ident, Name, ('#pop', 'type-check', 'type-full-name')),
- (r'\{', Punctuation, ('#pop', 'type-check', 'type-struct')),
- (r'\(', Punctuation, ('#pop', 'type-check', 'type-parenthesis')),
- ],
-
- 'type-parenthesis': [
- include('spaces'),
- (r'', Text, ('#pop', 'parenthesis-close', 'type')),
- ],
-
- 'type-check': [
- include('spaces'),
- (r'->', Punctuation, ('#pop', 'type')),
- (r'<(?!=)', Punctuation, 'type-param'),
- (r'', Text, '#pop'),
- ],
-
- 'type-struct': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r'\?', Punctuation),
- (r'>', Punctuation, ('comma', 'type')),
- (ident_no_keyword, Name, ('#pop', 'type-struct-sep', 'type', 'colon')),
- include('class-body'),
- ],
-
- 'type-struct-sep': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'type-struct')),
- ],
-
- # type-param can be a normal type or a constant literal...
- 'type-param-type': [
- # Float
- (r'\.[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+[eE][\+\-]?[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+\.[0-9]*[eE][\+\-]?[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+\.[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, '#pop'),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex, '#pop'),
- (r'[0-9]+', Number.Integer, '#pop'),
-
- # String
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
-
- # EReg
- (r'~/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex, '#pop'),
-
- # Array
- (r'\[', Operator, ('#pop', 'array-decl')),
-
- include('type'),
- ],
-
- # type-param part of a type
- # i.e. the <A,B> part in Map<A,B>
- 'type-param': [
- include('spaces'),
- (r'', Text, ('#pop', 'type-param-sep', 'type-param-type')),
- ],
-
- 'type-param-sep': [
- include('spaces'),
- (r'>', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'type-param')),
- ],
-
- # optional type-param that may include constraint
- # i.e. <T:Constraint, T2:(ConstraintA,ConstraintB)>
- 'type-param-constraint': [
- include('spaces'),
- (r'<(?!=)', Punctuation, ('#pop', 'type-param-constraint-sep',
- 'type-param-constraint-flag', 'type-name')),
- (r'', Text, '#pop'),
- ],
-
- 'type-param-constraint-sep': [
- include('spaces'),
- (r'>', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'type-param-constraint-sep',
- 'type-param-constraint-flag', 'type-name')),
- ],
-
- # the optional constraint inside type-param
- 'type-param-constraint-flag': [
- include('spaces'),
- (r':', Punctuation, ('#pop', 'type-param-constraint-flag-type')),
- (r'', Text, '#pop'),
- ],
-
- 'type-param-constraint-flag-type': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'type-param-constraint-flag-type-sep',
- 'type')),
- (r'', Text, ('#pop', 'type')),
- ],
-
- 'type-param-constraint-flag-type-sep': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation, 'type'),
- ],
-
- # a parenthesized expr that contains exactly one expr
- 'parenthesis': [
- include('spaces'),
- (r'', Text, ('#pop', 'parenthesis-close', 'expr')),
- ],
-
- 'parenthesis-open': [
- include('spaces'),
- (r'\(', Punctuation, '#pop'),
- ],
-
- 'parenthesis-close': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- ],
-
- 'var': [
- include('spaces'),
- (ident_no_keyword, Text, ('#pop', 'var-sep', 'assign', 'flag')),
- ],
-
- # optionally more var declarations
- 'var-sep': [
- include('spaces'),
- (r',', Punctuation, ('#pop', 'var')),
- (r'', Text, '#pop'),
- ],
-
- # optional assignment
- 'assign': [
- include('spaces'),
- (r'=', Operator, ('#pop', 'expr')),
- (r'', Text, '#pop'),
- ],
-
- # optional type flag
- 'flag': [
- include('spaces'),
- (r':', Punctuation, ('#pop', 'type')),
- (r'', Text, '#pop'),
- ],
-
- # colon as part of a ternary operator (?:)
- 'ternary': [
- include('spaces'),
- (r':', Operator, '#pop'),
- ],
-
- # function call
- 'call': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r'', Text, ('#pop', 'call-sep', 'expr')),
- ],
-
- # after a call param
- 'call-sep': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'call')),
- ],
-
- # bracket can be block or object
- 'bracket': [
- include('spaces'),
- (r'(?!(?:\$\s*[a-z]\b|\$(?!'+ident+')))' + ident_no_keyword, Name,
- ('#pop', 'bracket-check')),
- (r"'", String.Single, ('#pop', 'bracket-check', 'string-single')),
- (r'"', String.Double, ('#pop', 'bracket-check', 'string-double')),
- (r'', Text, ('#pop', 'block')),
- ],
-
- 'bracket-check': [
- include('spaces'),
- (r':', Punctuation, ('#pop', 'object-sep', 'expr')), #is object
- (r'', Text, ('#pop', 'block', 'optional-semicolon', 'expr-chain')), #is block
- ],
-
- # code block
- 'block': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r'', Text, 'expr-statement'),
- ],
-
- # key-value pairs in an object
- 'object': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r'', Text, ('#pop', 'object-sep', 'expr', 'colon', 'ident-or-string'))
- ],
-
- # a key of an object
- 'ident-or-string': [
- include('spaces'),
- (ident_no_keyword, Name, '#pop'),
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
- ],
-
- # after a key-value pair in object
- 'object-sep': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'object')),
- ],
-
-
-
- }
-
- def analyse_text(text):
- if re.match(r'\w+\s*:\s*\w', text): return 0.3
-
-
-def _indentation(lexer, match, ctx):
- indentation = match.group(0)
- yield match.start(), Text, indentation
- ctx.last_indentation = indentation
- ctx.pos = match.end()
-
- if hasattr(ctx, 'block_state') and ctx.block_state and \
- indentation.startswith(ctx.block_indentation) and \
- indentation != ctx.block_indentation:
- ctx.stack.append(ctx.block_state)
- else:
- ctx.block_state = None
- ctx.block_indentation = None
- ctx.stack.append('content')
-
-def _starts_block(token, state):
- def callback(lexer, match, ctx):
- yield match.start(), token, match.group(0)
-
- if hasattr(ctx, 'last_indentation'):
- ctx.block_indentation = ctx.last_indentation
- else:
- ctx.block_indentation = ''
-
- ctx.block_state = state
- ctx.pos = match.end()
-
- return callback
-
-
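The two module-level helpers above implement the indentation-driven block handling shared by the Haml-family lexers that follow: `_indentation` records the indentation of each line and re-enters the pending block state whenever a line is indented more deeply than the line that opened the block, while `_starts_block` builds a callback that remembers which state to enter and at which indentation it started. A minimal sketch of the resulting behaviour, assuming only a standard Pygments installation (the sample Haml text is illustrative):

    from pygments.token import Comment
    from pygments.lexers import HamlLexer

    haml_src = (
        "-# silent comment\n"
        "  this indented line is swallowed by the comment block\n"
        "%p visible\n"
    )

    # The '-#' rule uses _starts_block(Comment.Preproc, 'haml-comment-block');
    # _indentation then re-enters that state for the deeper-indented second
    # line, so its text comes out as Comment.Preproc rather than as markup.
    for token, value in HamlLexer().get_tokens(haml_src):
        if token in Comment.Preproc:
            print(repr(value))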
-class HamlLexer(ExtendedRegexLexer):
- """
- For Haml markup.
-
- *New in Pygments 1.3.*
- """
-
- name = 'Haml'
- aliases = ['haml', 'HAML']
- filenames = ['*.haml']
- mimetypes = ['text/x-haml']
-
- flags = re.IGNORECASE
- # Haml can include " |\n" anywhere,
- # which is ignored and used to wrap long lines.
- # To accommodate this, use this custom faux dot instead.
- _dot = r'(?: \|\n(?=.* \|)|.)'
-
- # In certain places, a comma at the end of the line
- # allows line wrapping as well.
- _comma_dot = r'(?:,\s*\n|' + _dot + ')'
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[a-z0-9_:-]+', Name.Class, 'tag'),
- (r'\#[a-z0-9_:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'[&!]?==', Punctuation, 'plain'),
- (r'([&!]?[=~])(' + _comma_dot + r'*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- 'root'),
- (r'', Text, 'plain'),
- ],
-
- 'content': [
- include('css'),
- (r'%[a-z0-9_:-]+', Name.Tag, 'tag'),
- (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
- (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
- bygroups(Comment, Comment.Special, Comment),
- '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
- '#pop'),
- (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
- 'haml-comment-block'), '#pop'),
- (r'(-)(' + _comma_dot + r'*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- '#pop'),
- (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
- '#pop'),
- include('eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
- (r'\{(,\n|' + _dot + ')*?\}', using(RubyLexer)),
- (r'\[' + _dot + '*?\]', using(RubyLexer)),
- (r'\(', Text, 'html-attributes'),
- (r'/[ \t]*\n', Punctuation, '#pop:2'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
- (r'(#\{)(' + _dot + '*?)(\})',
- bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'\s+', Text),
- (r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
- (r'[a-z0-9_:-]+', Name.Attribute),
- (r'\)', Text, '#pop'),
- ],
-
- 'html-attribute-value': [
- (r'[ \t]+', Text),
- (r'[a-z0-9_]+', Name.Variable, '#pop'),
- (r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
- (r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
- (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
- (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
- ],
-
- 'html-comment-block': [
- (_dot + '+', Comment),
- (r'\n', Text, 'root'),
- ],
-
- 'haml-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
-
- 'filter-block': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
- (r'(#\{)(' + _dot + '*?)(\})',
- bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
- }
-
-
-common_sass_tokens = {
- 'value': [
- (r'[ \t]+', Text),
- (r'[!$][\w-]+', Name.Variable),
- (r'url\(', String.Other, 'string-url'),
- (r'[a-z_-][\w-]*(?=\()', Name.Function),
- (r'(azimuth|background-attachment|background-color|'
- r'background-image|background-position|background-repeat|'
- r'background|border-bottom-color|border-bottom-style|'
- r'border-bottom-width|border-left-color|border-left-style|'
- r'border-left-width|border-right|border-right-color|'
- r'border-right-style|border-right-width|border-top-color|'
- r'border-top-style|border-top-width|border-bottom|'
- r'border-collapse|border-left|border-width|border-color|'
- r'border-spacing|border-style|border-top|border|caption-side|'
- r'clear|clip|color|content|counter-increment|counter-reset|'
- r'cue-after|cue-before|cue|cursor|direction|display|'
- r'elevation|empty-cells|float|font-family|font-size|'
- r'font-size-adjust|font-stretch|font-style|font-variant|'
- r'font-weight|font|height|letter-spacing|line-height|'
- r'list-style-type|list-style-image|list-style-position|'
- r'list-style|margin-bottom|margin-left|margin-right|'
- r'margin-top|margin|marker-offset|marks|max-height|max-width|'
- r'min-height|min-width|opacity|orphans|outline|outline-color|'
- r'outline-style|outline-width|overflow|padding-bottom|'
- r'padding-left|padding-right|padding-top|padding|page|'
- r'page-break-after|page-break-before|page-break-inside|'
- r'pause-after|pause-before|pause|pitch|pitch-range|'
- r'play-during|position|quotes|richness|right|size|'
- r'speak-header|speak-numeral|speak-punctuation|speak|'
- r'speech-rate|stress|table-layout|text-align|text-decoration|'
- r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
- r'vertical-align|visibility|voice-family|volume|white-space|'
- r'widows|width|word-spacing|z-index|bottom|left|'
- r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
- r'behind|below|bidi-override|blink|block|bold|bolder|both|'
- r'capitalize|center-left|center-right|center|circle|'
- r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
- r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
- r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
- r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
- r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
- r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
- r'inherit|inline-table|inline|inset|inside|invert|italic|'
- r'justify|katakana-iroha|katakana|landscape|larger|large|'
- r'left-side|leftwards|level|lighter|line-through|list-item|'
- r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
- r'lower|low|medium|message-box|middle|mix|monospace|'
- r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
- r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
- r'open-quote|outset|outside|overline|pointer|portrait|px|'
- r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
- r'rightwards|s-resize|sans-serif|scroll|se-resize|'
- r'semi-condensed|semi-expanded|separate|serif|show|silent|'
- r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
- r'spell-out|square|static|status-bar|super|sw-resize|'
- r'table-caption|table-cell|table-column|table-column-group|'
- r'table-footer-group|table-header-group|table-row|'
- r'table-row-group|text|text-bottom|text-top|thick|thin|'
- r'transparent|ultra-condensed|ultra-expanded|underline|'
- r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
- r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
- r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Name.Constant),
- (r'(indigo|gold|firebrick|indianred|darkolivegreen|'
- r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
- r'mediumslateblue|springgreen|crimson|lightsalmon|brown|'
- r'turquoise|olivedrab|cyan|skyblue|darkturquoise|'
- r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|'
- r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
- r'violet|orchid|ghostwhite|honeydew|cornflowerblue|'
- r'darkblue|darkkhaki|mediumpurple|cornsilk|bisque|slategray|'
- r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
- r'gainsboro|mediumturquoise|floralwhite|coral|lightgrey|'
- r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
- r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
- r'lightcoral|orangered|navajowhite|palegreen|burlywood|'
- r'seashell|mediumspringgreen|papayawhip|blanchedalmond|'
- r'peru|aquamarine|darkslategray|ivory|dodgerblue|'
- r'lemonchiffon|chocolate|orange|forestgreen|slateblue|'
- r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
- r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
- r'plum|darkgoldenrod|sandybrown|magenta|tan|'
- r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
- r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
- r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
- r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
- r'lightyellow|lavenderblush|linen|mediumaquamarine|'
- r'blueviolet|peachpuff)\b', Name.Entity),
- (r'(black|silver|gray|white|maroon|red|purple|fuchsia|green|'
- r'lime|olive|yellow|navy|blue|teal|aqua)\b', Name.Builtin),
- (r'\!(important|default)', Name.Exception),
- (r'(true|false)', Name.Pseudo),
- (r'(and|or|not)', Operator.Word),
- (r'/\*', Comment.Multiline, 'inline-comment'),
- (r'//[^\n]*', Comment.Single),
- (r'\#[a-z0-9]{1,6}', Number.Hex),
- (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
- (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
- (r'#{', String.Interpol, 'interpolation'),
- (r'[~\^\*!&%<>\|+=@:,./?-]+', Operator),
- (r'[\[\]()]+', Punctuation),
- (r'"', String.Double, 'string-double'),
- (r"'", String.Single, 'string-single'),
- (r'[a-z_-][\w-]*', Name),
- ],
-
- 'interpolation': [
- (r'\}', String.Interpol, '#pop'),
- include('value'),
- ],
-
- 'selector': [
- (r'[ \t]+', Text),
- (r'\:', Name.Decorator, 'pseudo-class'),
- (r'\.', Name.Class, 'class'),
- (r'\#', Name.Namespace, 'id'),
- (r'[a-zA-Z0-9_-]+', Name.Tag),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'&', Keyword),
- (r'[~\^\*!&\[\]\(\)<>\|+=@:;,./?-]', Operator),
- (r'"', String.Double, 'string-double'),
- (r"'", String.Single, 'string-single'),
- ],
-
- 'string-double': [
- (r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'"', String.Double, '#pop'),
- ],
-
- 'string-single': [
- (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Double),
- (r'#\{', String.Interpol, 'interpolation'),
- (r"'", String.Double, '#pop'),
- ],
-
- 'string-url': [
- (r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'\)', String.Other, '#pop'),
- ],
-
- 'pseudo-class': [
- (r'[\w-]+', Name.Decorator),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'', Text, '#pop'),
- ],
-
- 'class': [
- (r'[\w-]+', Name.Class),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'', Text, '#pop'),
- ],
-
- 'id': [
- (r'[\w-]+', Name.Namespace),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'', Text, '#pop'),
- ],
-
- 'for': [
- (r'(from|to|through)', Operator.Word),
- include('value'),
- ],
-}
-
-class SassLexer(ExtendedRegexLexer):
- """
- For Sass stylesheets.
-
- *New in Pygments 1.3.*
- """
-
- name = 'Sass'
- aliases = ['sass', 'SASS']
- filenames = ['*.sass']
- mimetypes = ['text/x-sass']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'content': [
- (r'//[^\n]*', _starts_block(Comment.Single, 'single-comment'),
- 'root'),
- (r'/\*[^\n]*', _starts_block(Comment.Multiline, 'multi-comment'),
- 'root'),
- (r'@import', Keyword, 'import'),
- (r'@for', Keyword, 'for'),
- (r'@(debug|warn|if|while)', Keyword, 'value'),
- (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
- (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
- (r'@extend', Keyword, 'selector'),
- (r'@[a-z0-9_-]+', Keyword, 'selector'),
- (r'=[\w-]+', Name.Function, 'value'),
- (r'\+[\w-]+', Name.Decorator, 'value'),
- (r'([!$][\w-]\w*)([ \t]*(?:(?:\|\|)?=|:))',
- bygroups(Name.Variable, Operator), 'value'),
- (r':', Name.Attribute, 'old-style-attr'),
- (r'(?=.+?[=:]([^a-z]|$))', Name.Attribute, 'new-style-attr'),
- (r'', Text, 'selector'),
- ],
-
- 'single-comment': [
- (r'.+', Comment.Single),
- (r'\n', Text, 'root'),
- ],
-
- 'multi-comment': [
- (r'.+', Comment.Multiline),
- (r'\n', Text, 'root'),
- ],
-
- 'import': [
- (r'[ \t]+', Text),
- (r'\S+', String),
- (r'\n', Text, 'root'),
- ],
-
- 'old-style-attr': [
- (r'[^\s:="\[]+', Name.Attribute),
- (r'#{', String.Interpol, 'interpolation'),
- (r'[ \t]*=', Operator, 'value'),
- (r'', Text, 'value'),
- ],
-
- 'new-style-attr': [
- (r'[^\s:="\[]+', Name.Attribute),
- (r'#{', String.Interpol, 'interpolation'),
- (r'[ \t]*[=:]', Operator, 'value'),
- ],
-
- 'inline-comment': [
- (r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment.Multiline),
- (r'#\{', String.Interpol, 'interpolation'),
- (r"\*/", Comment, '#pop'),
- ],
- }
- for group, common in common_sass_tokens.iteritems():
- tokens[group] = copy.copy(common)
- tokens['value'].append((r'\n', Text, 'root'))
- tokens['selector'].append((r'\n', Text, 'root'))
-
-
-class ScssLexer(RegexLexer):
- """
- For SCSS stylesheets.
- """
-
- name = 'SCSS'
- aliases = ['scss']
- filenames = ['*.scss']
- mimetypes = ['text/x-scss']
-
- flags = re.IGNORECASE | re.DOTALL
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@import', Keyword, 'value'),
- (r'@for', Keyword, 'for'),
- (r'@(debug|warn|if|while)', Keyword, 'value'),
- (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
- (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
- (r'@extend', Keyword, 'selector'),
- (r'@[a-z0-9_-]+', Keyword, 'selector'),
- (r'(\$[\w-]*\w)([ \t]*:)', bygroups(Name.Variable, Operator), 'value'),
- (r'(?=[^;{}][;}])', Name.Attribute, 'attr'),
- (r'(?=[^;{}:]+:[^a-z])', Name.Attribute, 'attr'),
- (r'', Text, 'selector'),
- ],
-
- 'attr': [
- (r'[^\s:="\[]+', Name.Attribute),
- (r'#{', String.Interpol, 'interpolation'),
- (r'[ \t]*:', Operator, 'value'),
- ],
-
- 'inline-comment': [
- (r"(\\#|#(?=[^{])|\*(?=[^/])|[^#*])+", Comment.Multiline),
- (r'#\{', String.Interpol, 'interpolation'),
- (r"\*/", Comment, '#pop'),
- ],
- }
- for group, common in common_sass_tokens.iteritems():
- tokens[group] = copy.copy(common)
- tokens['value'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
- tokens['selector'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
-
-
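Both Sass lexers above share `common_sass_tokens`: each copies the common state lists and then appends only the rules that differ between the indentation-based Sass syntax (a newline ends a value) and the brace-based SCSS syntax (`;` or `}` ends one). A standalone sketch of the same merge pattern, written for Python 3 with illustrative placeholder rules rather than real token tuples:

    import copy

    # Stand-in for common_sass_tokens above.
    common = {'value': [('shared-value-rule',)],
              'selector': [('shared-selector-rule',)]}

    sass_tokens, scss_tokens = {}, {}
    for group, rules in common.items():
        sass_tokens[group] = copy.copy(rules)   # shallow copies keep extensions per-lexer
        scss_tokens[group] = copy.copy(rules)

    # Indentation-based Sass returns to 'root' at the newline...
    sass_tokens['value'].append(('newline-to-root',))
    # ...while brace-based SCSS returns to 'root' at ';' or '}'.
    scss_tokens['value'].append(('semicolon-or-brace-to-root',))

    assert common['value'] == [('shared-value-rule',)]  # shared table stays untouched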
-class CoffeeScriptLexer(RegexLexer):
- """
- For `CoffeeScript`_ source code.
-
- .. _CoffeeScript: http://coffeescript.org
-
- *New in Pygments 1.3.*
- """
-
- name = 'CoffeeScript'
- aliases = ['coffee-script', 'coffeescript', 'coffee']
- filenames = ['*.coffee']
- mimetypes = ['text/coffeescript']
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'###[^#].*?###', Comment.Multiline),
- (r'#(?!##[^#]).*?\n', Comment.Single),
- ],
- 'multilineregex': [
- (r'[^/#]+', String.Regex),
- (r'///([gim]+\b|\B)', String.Regex, '#pop'),
- (r'#{', String.Interpol, 'interpoling_string'),
- (r'[/#]', String.Regex),
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'///', String.Regex, ('#pop', 'multilineregex')),
- (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'', Text, '#pop'),
- ],
- 'root': [
- # this next expr leads to infinite loops root -> slashstartsregex
- #(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
- r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
- r'=(?!>)|-(?!>)|[<>+*`%&\|\^/])=?',
- Operator, 'slashstartsregex'),
- (r'(?:\([^()]+\))?\s*[=-]>', Name.Function),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(?<![\.\$])(for|own|in|of|while|until|'
- r'loop|break|return|continue|'
- r'switch|when|then|if|unless|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
- r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
- (r'(?<![\.\$])(true|false|yes|no|on|off|null|'
- r'NaN|Infinity|undefined)\b',
- Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
- Name.Builtin),
- (r'[$a-zA-Z_][a-zA-Z0-9_\.:\$]*\s*[:=]\s', Name.Variable,
- 'slashstartsregex'),
- (r'@[$a-zA-Z_][a-zA-Z0-9_\.:\$]*\s*[:=]\s', Name.Variable.Instance,
- 'slashstartsregex'),
- (r'@', Name.Other, 'slashstartsregex'),
- (r'@?[$a-zA-Z_][a-zA-Z0-9_\$]*', Name.Other, 'slashstartsregex'),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- ('"""', String, 'tdqs'),
- ("'''", String, 'tsqs'),
- ('"', String, 'dqs'),
- ("'", String, 'sqs'),
- ],
- 'strings': [
- (r'[^#\\\'"]+', String),
- # note that all CoffeeScript strings are multi-line.
- # hashmarks, quotes and backslashes must be parsed one at a time
- ],
- 'interpoling_string' : [
- (r'}', String.Interpol, "#pop"),
- include('root')
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\.|\'', String), # double-quoted strings don't need ' escapes
- (r'#{', String.Interpol, "interpoling_string"),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r'#|\\.|"', String), # single quoted strings don't need " escapses
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- (r'\\.|\'|"', String), # no need to escape quotes in triple-string
- (r'#{', String.Interpol, "interpoling_string"),
- include('strings'),
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
- include('strings')
- ],
- }
-
-
-class LiveScriptLexer(RegexLexer):
- """
- For `LiveScript`_ source code.
-
- .. _LiveScript: http://gkz.github.com/LiveScript/
-
- *New in Pygments 1.6.*
- """
-
- name = 'LiveScript'
- aliases = ['live-script', 'livescript']
- filenames = ['*.ls']
- mimetypes = ['text/livescript']
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'#.*?\n', Comment.Single),
- ],
- 'multilineregex': [
- include('commentsandwhitespace'),
- (r'//([gim]+\b|\B)', String.Regex, '#pop'),
- (r'/', String.Regex),
- (r'[^/#]+', String.Regex)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'//', String.Regex, ('#pop', 'multilineregex')),
- (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'', Text, '#pop'),
- ],
- 'root': [
- # this next expr leads to infinite loops root -> slashstartsregex
- #(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'(?:\([^()]+\))?[ ]*[~-]{1,2}>|'
- r'(?:\(?[^()\n]+\)?)?[ ]*<[~-]{1,2}', Name.Function),
- (r'\+\+|&&|(?<![\.\$])\b(?:and|x?or|is|isnt|not)\b|\?|:|=|'
- r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
- r'~(?!\~?>)|-(?!\-?>)|<(?!\[)|(?<!\])>|'
- r'[+*`%&\|\^/])=?',
- Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(?<![\.\$])(for|own|in|of|while|until|loop|break|'
- r'return|continue|switch|when|then|if|unless|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
- r'extends|this|class|by|const|var|to|til)\b', Keyword,
- 'slashstartsregex'),
- (r'(?<![\.\$])(true|false|yes|no|on|off|'
- r'null|NaN|Infinity|undefined|void)\b',
- Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
- Name.Builtin),
- (r'[$a-zA-Z_][a-zA-Z0-9_\.\-:\$]*\s*[:=]\s', Name.Variable,
- 'slashstartsregex'),
- (r'@[$a-zA-Z_][a-zA-Z0-9_\.\-:\$]*\s*[:=]\s', Name.Variable.Instance,
- 'slashstartsregex'),
- (r'@', Name.Other, 'slashstartsregex'),
- (r'@?[$a-zA-Z_][a-zA-Z0-9_\-]*', Name.Other, 'slashstartsregex'),
- (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?(?:[a-zA-Z_]+)?', Number.Float),
- (r'[0-9]+(~[0-9a-z]+)?(?:[a-zA-Z_]+)?', Number.Integer),
- ('"""', String, 'tdqs'),
- ("'''", String, 'tsqs'),
- ('"', String, 'dqs'),
- ("'", String, 'sqs'),
- (r'\\[\w$-]+', String),
- (r'<\[.*\]>', String),
- ],
- 'strings': [
- (r'[^#\\\'"]+', String),
- # note that all LiveScript strings are multi-line.
- # hashmarks, quotes and backslashes must be parsed one at a time
- ],
- 'interpoling_string' : [
- (r'}', String.Interpol, "#pop"),
- include('root')
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\.|\'', String), # double-quoted strings don't need ' escapes
- (r'#{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r'#|\\.|"', String), # single quoted strings don't need " escapses
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- (r'\\.|\'|"', String), # no need to escape quotes in triple-string
- (r'#{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings'),
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
- include('strings')
- ],
- }
-
-
-class DuelLexer(RegexLexer):
- """
- Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
- See http://duelengine.org/.
- See http://jsonml.org/jbst/.
-
- *New in Pygments 1.4.*
- """
-
- name = 'Duel'
- aliases = ['duel', 'Duel Engine', 'Duel View', 'JBST', 'jbst', 'JsonML+BST']
- filenames = ['*.duel','*.jbst']
- mimetypes = ['text/x-duel','text/x-jbst']
-
- flags = re.DOTALL
-
- tokens = {
- 'root': [
- (r'(<%[@=#!:]?)(.*?)(%>)',
- bygroups(Name.Tag, using(JavascriptLexer), Name.Tag)),
- (r'(<%\$)(.*?)(:)(.*?)(%>)',
- bygroups(Name.Tag, Name.Function, Punctuation, String, Name.Tag)),
- (r'(<%--)(.*?)(--%>)',
- bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
- (r'(<script.*?>)(.*?)(</script>)',
- bygroups(using(HtmlLexer),
- using(JavascriptLexer), using(HtmlLexer))),
- (r'(.+?)(?=<)', using(HtmlLexer)),
- (r'.+', using(HtmlLexer)),
- ],
- }
-
-
-class ScamlLexer(ExtendedRegexLexer):
- """
- For `Scaml markup <http://scalate.fusesource.org/>`_. Scaml is Haml for Scala.
-
- *New in Pygments 1.4.*
- """
-
- name = 'Scaml'
- aliases = ['scaml', 'SCAML']
- filenames = ['*.scaml']
- mimetypes = ['text/x-scaml']
-
- flags = re.IGNORECASE
- # Scaml does not yet support the " |\n" notation to
- # wrap long lines. Once it does, use the custom faux
- # dot instead.
- # _dot = r'(?: \|\n(?=.* \|)|.)'
- _dot = r'.'
-
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[a-z0-9_:-]+', Name.Class, 'tag'),
- (r'\#[a-z0-9_:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'[&!]?==', Punctuation, 'plain'),
- (r'([&!]?[=~])(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)),
- 'root'),
- (r'', Text, 'plain'),
- ],
-
- 'content': [
- include('css'),
- (r'%[a-z0-9_:-]+', Name.Tag, 'tag'),
- (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
- (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
- bygroups(Comment, Comment.Special, Comment),
- '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
- '#pop'),
- (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
- 'scaml-comment-block'), '#pop'),
- (r'(-@\s*)(import)?(' + _dot + r'*\n)',
- bygroups(Punctuation, Keyword, using(ScalaLexer)),
- '#pop'),
- (r'(-)(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)),
- '#pop'),
- (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
- '#pop'),
- include('eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
- (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
- (r'\[' + _dot + '*?\]', using(ScalaLexer)),
- (r'\(', Text, 'html-attributes'),
- (r'/[ \t]*\n', Punctuation, '#pop:2'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
- (r'(#\{)(' + _dot + '*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'\s+', Text),
- (r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
- (r'[a-z0-9_:-]+', Name.Attribute),
- (r'\)', Text, '#pop'),
- ],
-
- 'html-attribute-value': [
- (r'[ \t]+', Text),
- (r'[a-z0-9_]+', Name.Variable, '#pop'),
- (r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
- (r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
- (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
- (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
- ],
-
- 'html-comment-block': [
- (_dot + '+', Comment),
- (r'\n', Text, 'root'),
- ],
-
- 'scaml-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
-
- 'filter-block': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
- (r'(#\{)(' + _dot + '*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
- }
-
-
-class JadeLexer(ExtendedRegexLexer):
- """
- For Jade markup.
- Jade is a variant of Scaml, see:
- http://scalate.fusesource.org/documentation/scaml-reference.html
-
- *New in Pygments 1.4.*
- """
-
- name = 'Jade'
- aliases = ['jade', 'JADE']
- filenames = ['*.jade']
- mimetypes = ['text/x-jade']
-
- flags = re.IGNORECASE
- _dot = r'.'
-
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[a-z0-9_:-]+', Name.Class, 'tag'),
- (r'\#[a-z0-9_:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'[&!]?==', Punctuation, 'plain'),
- (r'([&!]?[=~])(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)), 'root'),
- (r'', Text, 'plain'),
- ],
-
- 'content': [
- include('css'),
- (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
- (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
- bygroups(Comment, Comment.Special, Comment),
- '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
- '#pop'),
- (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
- 'scaml-comment-block'), '#pop'),
- (r'(-@\s*)(import)?(' + _dot + r'*\n)',
- bygroups(Punctuation, Keyword, using(ScalaLexer)),
- '#pop'),
- (r'(-)(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)),
- '#pop'),
- (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
- '#pop'),
- (r'[a-z0-9_:-]+', Name.Tag, 'tag'),
- (r'\|', Text, 'eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
- (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
- (r'\[' + _dot + '*?\]', using(ScalaLexer)),
- (r'\(', Text, 'html-attributes'),
- (r'/[ \t]*\n', Punctuation, '#pop:2'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
- (r'(#\{)(' + _dot + '*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'\s+', Text),
- (r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
- (r'[a-z0-9_:-]+', Name.Attribute),
- (r'\)', Text, '#pop'),
- ],
-
- 'html-attribute-value': [
- (r'[ \t]+', Text),
- (r'[a-z0-9_]+', Name.Variable, '#pop'),
- (r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
- (r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
- (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
- (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
- ],
-
- 'html-comment-block': [
- (_dot + '+', Comment),
- (r'\n', Text, 'root'),
- ],
-
- 'scaml-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
-
- 'filter-block': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
- (r'(#\{)(' + _dot + '*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
- }
-
-
-class XQueryLexer(ExtendedRegexLexer):
- """
- An XQuery lexer, parsing a stream and outputting the tokens needed to
- highlight XQuery code.
-
- *New in Pygments 1.4.*
- """
- name = 'XQuery'
- aliases = ['xquery', 'xqy', 'xq', 'xql', 'xqm']
- filenames = ['*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm']
- mimetypes = ['text/xquery', 'application/xquery']
-
- xquery_parse_state = []
-
- # FIX UNICODE LATER
- #ncnamestartchar = (
- # ur"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|"
- # ur"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|"
- # ur"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|"
- # ur"[\u10000-\uEFFFF]"
- #)
- ncnamestartchar = r"(?:[A-Z]|_|[a-z])"
- # FIX UNICODE LATER
- #ncnamechar = ncnamestartchar + (ur"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|"
- # ur"[\u203F-\u2040]")
- ncnamechar = r"(?:" + ncnamestartchar + r"|-|\.|[0-9])"
- ncname = "(?:%s+%s*)" % (ncnamestartchar, ncnamechar)
- pitarget_namestartchar = r"(?:[A-KN-WY-Z]|_|:|[a-kn-wy-z])"
- pitarget_namechar = r"(?:" + pitarget_namestartchar + r"|-|\.|[0-9])"
- pitarget = "%s+%s*" % (pitarget_namestartchar, pitarget_namechar)
- prefixedname = "%s:%s" % (ncname, ncname)
- unprefixedname = ncname
- qname = "(?:%s|%s)" % (prefixedname, unprefixedname)
-
- entityref = r'(?:&(?:lt|gt|amp|quot|apos|nbsp);)'
- charref = r'(?:&#[0-9]+;|&#x[0-9a-fA-F]+;)'
-
- stringdouble = r'(?:"(?:' + entityref + r'|' + charref + r'|""|[^&"])*")'
- stringsingle = r"(?:'(?:" + entityref + r"|" + charref + r"|''|[^&'])*')"
-
- # FIX UNICODE LATER
- #elementcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
- # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
- elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]'
- #quotattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|'
- # ur'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
- quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]'
- #aposattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
- # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
- aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_`\|~]'
-
-
- # CHAR elements - fix the above elementcontentchar, quotattrcontentchar,
- # aposattrcontentchar
- #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
-
- flags = re.DOTALL | re.MULTILINE | re.UNICODE
-
- def punctuation_root_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- # transition to root always - don't pop off stack
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def operator_root_callback(lexer, match, ctx):
- yield match.start(), Operator, match.group(1)
- # transition to root always - don't pop off stack
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def popstate_tag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- ctx.stack.append(lexer.xquery_parse_state.pop())
- ctx.pos = match.end()
-
- def popstate_xmlcomment_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append(lexer.xquery_parse_state.pop())
- ctx.pos = match.end()
-
- def popstate_kindtest_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- next_state = lexer.xquery_parse_state.pop()
- if next_state == 'occurrenceindicator':
- if re.match("[?*+]+", match.group(2)):
- yield match.start(), Punctuation, match.group(2)
- ctx.stack.append('operator')
- ctx.pos = match.end()
- else:
- ctx.stack.append('operator')
- ctx.pos = match.end(1)
- else:
- ctx.stack.append(next_state)
- ctx.pos = match.end(1)
-
- def popstate_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- # if we have run out of our state stack, pop whatever is on the pygments
- # state stack
- if len(lexer.xquery_parse_state) == 0:
- ctx.stack.pop()
- elif len(ctx.stack) > 1:
- ctx.stack.append(lexer.xquery_parse_state.pop())
- else:
- # not sure this is ever needed, but just in case, default back to root
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def pushstate_element_content_starttag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- lexer.xquery_parse_state.append('element_content')
- ctx.stack.append('start_tag')
- ctx.pos = match.end()
-
- def pushstate_cdata_section_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('cdata_section')
- lexer.xquery_parse_state.append(ctx.state.pop)
- ctx.pos = match.end()
-
- def pushstate_starttag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- lexer.xquery_parse_state.append(ctx.state.pop)
- ctx.stack.append('start_tag')
- ctx.pos = match.end()
-
- def pushstate_operator_order_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_root_validate(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_root_validate_withmode(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Keyword, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_processing_instruction_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('processing_instruction')
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_element_content_processing_instruction_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('processing_instruction')
- lexer.xquery_parse_state.append('element_content')
- ctx.pos = match.end()
-
- def pushstate_element_content_cdata_section_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('cdata_section')
- lexer.xquery_parse_state.append('element_content')
- ctx.pos = match.end()
-
- def pushstate_operator_cdata_section_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('cdata_section')
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_element_content_xmlcomment_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('xml_comment')
- lexer.xquery_parse_state.append('element_content')
- ctx.pos = match.end()
-
- def pushstate_operator_xmlcomment_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('xml_comment')
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_kindtest_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('kindtest')
- ctx.stack.append('kindtest')
- ctx.pos = match.end()
-
- def pushstate_operator_kindtestforpi_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.stack.append('kindtestforpi')
- ctx.pos = match.end()
-
- def pushstate_operator_kindtest_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.stack.append('kindtest')
- ctx.pos = match.end()
-
- def pushstate_occurrenceindicator_kindtest_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('occurrenceindicator')
- ctx.stack.append('kindtest')
- ctx.pos = match.end()
-
- def pushstate_operator_starttag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- lexer.xquery_parse_state.append('operator')
- ctx.stack.append('start_tag')
- ctx.pos = match.end()
-
- def pushstate_operator_root_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- lexer.xquery_parse_state.append('operator')
- ctx.stack = ['root']#.append('root')
- ctx.pos = match.end()
-
- def pushstate_operator_root_construct_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def pushstate_root_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- cur_state = ctx.stack.pop()
- lexer.xquery_parse_state.append(cur_state)
- ctx.stack = ['root']#.append('root')
- ctx.pos = match.end()
-
- def pushstate_operator_attribute_callback(lexer, match, ctx):
- yield match.start(), Name.Attribute, match.group(1)
- ctx.stack.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- tokens = {
- 'comment': [
- # xquery comments
- (r'(:\))', Comment, '#pop'),
- (r'(\(:)', Comment, '#push'),
- (r'[^:)]', Comment),
- (r'([^:)]|:|\))', Comment),
- ],
- 'whitespace': [
- (r'\s+', Text),
- ],
- 'operator': [
- include('whitespace'),
- (r'(\})', popstate_callback),
- (r'\(:', Comment, 'comment'),
-
- (r'(\{)', pushstate_root_callback),
- (r'then|else|external|at|div|except', Keyword, 'root'),
- (r'order by', Keyword, 'root'),
- (r'is|mod|order\s+by|stable\s+order\s+by', Keyword, 'root'),
- (r'and|or', Operator.Word, 'root'),
- (r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)',
- Operator.Word, 'root'),
- (r'return|satisfies|to|union|where|preserve\s+strip',
- Keyword, 'root'),
- (r'(>=|>>|>|<=|<<|<|-|\*|!=|\+|\||:=|=)',
- operator_root_callback),
- (r'(::|;|\[|//|/|,)',
- punctuation_root_callback),
- (r'(castable|cast)(\s+)(as)\b',
- bygroups(Keyword, Text, Keyword), 'singletype'),
- (r'(instance)(\s+)(of)\b',
- bygroups(Keyword, Text, Keyword), 'itemtype'),
- (r'(treat)(\s+)(as)\b',
- bygroups(Keyword, Text, Keyword), 'itemtype'),
- (r'(case|as)\b', Keyword, 'itemtype'),
- (r'(\))(\s*)(as)',
- bygroups(Punctuation, Text, Keyword), 'itemtype'),
- (r'\$', Name.Variable, 'varname'),
- (r'(for|let)(\s+)(\$)',
- bygroups(Keyword, Text, Name.Variable), 'varname'),
- #(r'\)|\?|\]', Punctuation, '#push'),
- (r'\)|\?|\]', Punctuation),
- (r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)),
- (r'ascending|descending|default', Keyword, '#push'),
- (r'external', Keyword),
- (r'collation', Keyword, 'uritooperator'),
- # finally catch all string literals and stay in operator state
- (stringdouble, String.Double),
- (stringsingle, String.Single),
-
- (r'(catch)(\s*)', bygroups(Keyword, Text), 'root'),
- ],
- 'uritooperator': [
- (stringdouble, String.Double, '#pop'),
- (stringsingle, String.Single, '#pop'),
- ],
- 'namespacedecl': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'(at)(\s+)('+stringdouble+')', bygroups(Keyword, Text, String.Double)),
- (r"(at)(\s+)("+stringsingle+')', bygroups(Keyword, Text, String.Single)),
- (stringdouble, String.Double),
- (stringsingle, String.Single),
- (r',', Punctuation),
- (r'=', Operator),
- (r';', Punctuation, 'root'),
- (ncname, Name.Namespace),
- ],
- 'namespacekeyword': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (stringdouble, String.Double, 'namespacedecl'),
- (stringsingle, String.Single, 'namespacedecl'),
- (r'inherit|no-inherit', Keyword, 'root'),
- (r'namespace', Keyword, 'namespacedecl'),
- (r'(default)(\s+)(element)', bygroups(Keyword, Text, Keyword)),
- (r'preserve|no-preserve', Keyword),
- (r',', Punctuation),
- ],
- 'varname': [
- (r'\(:', Comment, 'comment'),
- (qname, Name.Variable, 'operator'),
- ],
- 'singletype': [
- (r'\(:', Comment, 'comment'),
- (ncname + r'(:\*)', Name.Variable, 'operator'),
- (qname, Name.Variable, 'operator'),
- ],
- 'itemtype': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'\$', Punctuation, 'varname'),
- (r'(void)(\s*)(\()(\s*)(\))',
- bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'operator'),
- (r'(element|attribute|schema-element|schema-attribute|comment|text|'
- r'node|binary|document-node|empty-sequence)(\s*)(\()',
- pushstate_occurrenceindicator_kindtest_callback),
- # MarkLogic-specific type?
- (r'(processing-instruction)(\s*)(\()',
- bygroups(Keyword, Text, Punctuation),
- ('occurrenceindicator', 'kindtestforpi')),
- (r'(item)(\s*)(\()(\s*)(\))(?=[*+?])',
- bygroups(Keyword, Text, Punctuation, Text, Punctuation),
- 'occurrenceindicator'),
- (r'\(\#', Punctuation, 'pragma'),
- (r';', Punctuation, '#pop'),
- (r'then|else', Keyword, '#pop'),
- (r'(at)(\s+)(' + stringdouble + ')',
- bygroups(Keyword, Text, String.Double), 'namespacedecl'),
- (r'(at)(\s+)(' + stringsingle + ')',
- bygroups(Keyword, Text, String.Single), 'namespacedecl'),
- (r'except|intersect|in|is|return|satisfies|to|union|where',
- Keyword, 'root'),
- (r'and|div|eq|ge|gt|le|lt|ne|idiv|mod|or', Operator.Word, 'root'),
- (r':=|=|,|>=|>>|>|\[|\(|<=|<<|<|-|!=|\|', Operator, 'root'),
- (r'external|at', Keyword, 'root'),
- (r'(stable)(\s+)(order)(\s+)(by)',
- bygroups(Keyword, Text, Keyword, Text, Keyword), 'root'),
- (r'(castable|cast)(\s+)(as)',
- bygroups(Keyword, Text, Keyword), 'singletype'),
- (r'(treat)(\s+)(as)', bygroups(Keyword, Text, Keyword)),
- (r'(instance)(\s+)(of)', bygroups(Keyword, Text, Keyword)),
- (r'case|as', Keyword, 'itemtype'),
- (r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
- (ncname + r':\*', Keyword.Type, 'operator'),
- (qname, Keyword.Type, 'occurrenceindicator'),
- ],
- 'kindtest': [
- (r'\(:', Comment, 'comment'),
- (r'{', Punctuation, 'root'),
- (r'(\))([*+?]?)', popstate_kindtest_callback),
- (r'\*', Name, 'closekindtest'),
- (qname, Name, 'closekindtest'),
- (r'(element|schema-element)(\s*)(\()', pushstate_kindtest_callback),
- ],
- 'kindtestforpi': [
- (r'\(:', Comment, 'comment'),
- (r'\)', Punctuation, '#pop'),
- (ncname, Name.Variable),
- (stringdouble, String.Double),
- (stringsingle, String.Single),
- ],
- 'closekindtest': [
- (r'\(:', Comment, 'comment'),
- (r'(\))', popstate_callback),
- (r',', Punctuation),
- (r'(\{)', pushstate_operator_root_callback),
- (r'\?', Punctuation),
- ],
- 'xml_comment': [
- (r'(-->)', popstate_xmlcomment_callback),
- (r'[^-]{1,2}', Literal),
- (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
- unirange(0x10000, 0x10ffff), Literal),
- ],
- 'processing_instruction': [
- (r'\s+', Text, 'processing_instruction_content'),
- (r'\?>', String.Doc, '#pop'),
- (pitarget, Name),
- ],
- 'processing_instruction_content': [
- (r'\?>', String.Doc, '#pop'),
- (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
- unirange(0x10000, 0x10ffff), Literal),
- ],
- 'cdata_section': [
- (r']]>', String.Doc, '#pop'),
- (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
- unirange(0x10000, 0x10ffff), Literal),
- ],
- 'start_tag': [
- include('whitespace'),
- (r'(/>)', popstate_tag_callback),
- (r'>', Name.Tag, 'element_content'),
- (r'"', Punctuation, 'quot_attribute_content'),
- (r"'", Punctuation, 'apos_attribute_content'),
- (r'=', Operator),
- (qname, Name.Tag),
- ],
- 'quot_attribute_content': [
- (r'"', Punctuation, 'start_tag'),
- (r'(\{)', pushstate_root_callback),
- (r'""', Name.Attribute),
- (quotattrcontentchar, Name.Attribute),
- (entityref, Name.Attribute),
- (charref, Name.Attribute),
- (r'\{\{|\}\}', Name.Attribute),
- ],
- 'apos_attribute_content': [
- (r"'", Punctuation, 'start_tag'),
- (r'\{', Punctuation, 'root'),
- (r"''", Name.Attribute),
- (aposattrcontentchar, Name.Attribute),
- (entityref, Name.Attribute),
- (charref, Name.Attribute),
- (r'\{\{|\}\}', Name.Attribute),
- ],
- 'element_content': [
- (r'</', Name.Tag, 'end_tag'),
- (r'(\{)', pushstate_root_callback),
- (r'(<!--)', pushstate_element_content_xmlcomment_callback),
- (r'(<\?)', pushstate_element_content_processing_instruction_callback),
- (r'(<!\[CDATA\[)', pushstate_element_content_cdata_section_callback),
- (r'(<)', pushstate_element_content_starttag_callback),
- (elementcontentchar, Literal),
- (entityref, Literal),
- (charref, Literal),
- (r'\{\{|\}\}', Literal),
- ],
- 'end_tag': [
- include('whitespace'),
- (r'(>)', popstate_tag_callback),
- (qname, Name.Tag),
- ],
- 'xmlspace_decl': [
- (r'\(:', Comment, 'comment'),
- (r'preserve|strip', Keyword, '#pop'),
- ],
- 'declareordering': [
- (r'\(:', Comment, 'comment'),
- include('whitespace'),
- (r'ordered|unordered', Keyword, '#pop'),
- ],
- 'xqueryversion': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (stringdouble, String.Double),
- (stringsingle, String.Single),
- (r'encoding', Keyword),
- (r';', Punctuation, '#pop'),
- ],
- 'pragma': [
- (qname, Name.Variable, 'pragmacontents'),
- ],
- 'pragmacontents': [
- (r'#\)', Punctuation, 'operator'),
- (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
- unirange(0x10000, 0x10ffff), Literal),
- (r'(\s+)', Text),
- ],
- 'occurrenceindicator': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'\*|\?|\+', Operator, 'operator'),
- (r':=', Operator, 'root'),
- (r'', Text, 'operator'),
- ],
- 'option': [
- include('whitespace'),
- (qname, Name.Variable, '#pop'),
- ],
- 'qname_braren': [
- include('whitespace'),
- (r'(\{)', pushstate_operator_root_callback),
- (r'(\()', Punctuation, 'root'),
- ],
- 'element_qname': [
- (qname, Name.Variable, 'root'),
- ],
- 'attribute_qname': [
- (qname, Name.Variable, 'root'),
- ],
- 'root': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
-
- # handle operator state
- # order on numbers matters - handle most complex first
- (r'\d+(\.\d*)?[eE][\+\-]?\d+', Number.Double, 'operator'),
- (r'(\.\d+)[eE][\+\-]?\d+', Number.Double, 'operator'),
- (r'(\.\d+|\d+\.\d*)', Number, 'operator'),
- (r'(\d+)', Number.Integer, 'operator'),
- (r'(\.\.|\.|\))', Punctuation, 'operator'),
- (r'(declare)(\s+)(construction)',
- bygroups(Keyword, Text, Keyword), 'operator'),
- (r'(declare)(\s+)(default)(\s+)(order)',
- bygroups(Keyword, Text, Keyword, Text, Keyword), 'operator'),
- (ncname + ':\*', Name, 'operator'),
- ('\*:'+ncname, Name.Tag, 'operator'),
- ('\*', Name.Tag, 'operator'),
- (stringdouble, String.Double, 'operator'),
- (stringsingle, String.Single, 'operator'),
-
- (r'(\})', popstate_callback),
-
- #NAMESPACE DECL
- (r'(declare)(\s+)(default)(\s+)(collation)',
- bygroups(Keyword, Text, Keyword, Text, Keyword)),
- (r'(module|declare)(\s+)(namespace)',
- bygroups(Keyword, Text, Keyword), 'namespacedecl'),
- (r'(declare)(\s+)(base-uri)',
- bygroups(Keyword, Text, Keyword), 'namespacedecl'),
-
- #NAMESPACE KEYWORD
- (r'(declare)(\s+)(default)(\s+)(element|function)',
- bygroups(Keyword, Text, Keyword, Text, Keyword), 'namespacekeyword'),
- (r'(import)(\s+)(schema|module)',
- bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'),
- (r'(declare)(\s+)(copy-namespaces)',
- bygroups(Keyword, Text, Keyword), 'namespacekeyword'),
-
- #VARNAMEs
- (r'(for|let|some|every)(\s+)(\$)',
- bygroups(Keyword, Text, Name.Variable), 'varname'),
- (r'\$', Name.Variable, 'varname'),
- (r'(declare)(\s+)(variable)(\s+)(\$)',
- bygroups(Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
-
- #ITEMTYPE
- (r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
-
- (r'(element|attribute|schema-element|schema-attribute|comment|'
- r'text|node|document-node|empty-sequence)(\s+)(\()',
- pushstate_operator_kindtest_callback),
-
- (r'(processing-instruction)(\s+)(\()',
- pushstate_operator_kindtestforpi_callback),
-
- (r'(<!--)', pushstate_operator_xmlcomment_callback),
-
- (r'(<\?)', pushstate_operator_processing_instruction_callback),
-
- (r'(<!\[CDATA\[)', pushstate_operator_cdata_section_callback),
-
- # (r'</', Name.Tag, 'end_tag'),
- (r'(<)', pushstate_operator_starttag_callback),
-
- (r'(declare)(\s+)(boundary-space)',
- bygroups(Keyword, Text, Keyword), 'xmlspace_decl'),
-
- (r'(validate)(\s+)(lax|strict)',
- pushstate_operator_root_validate_withmode),
- (r'(validate)(\s*)(\{)', pushstate_operator_root_validate),
- (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
- (r'(element|attribute)(\s*)(\{)',
- pushstate_operator_root_construct_callback),
-
- (r'(document|text|processing-instruction|comment)(\s*)(\{)',
- pushstate_operator_root_construct_callback),
- #ATTRIBUTE
- (r'(attribute)(\s+)(?=' + qname + r')',
- bygroups(Keyword, Text), 'attribute_qname'),
- #ELEMENT
- (r'(element)(\s+)(?=' +qname+ r')',
- bygroups(Keyword, Text), 'element_qname'),
- #PROCESSING_INSTRUCTION
- (r'(processing-instruction)(\s+)(' + ncname + r')(\s*)(\{)',
- bygroups(Keyword, Text, Name.Variable, Text, Punctuation),
- 'operator'),
-
- (r'(declare|define)(\s+)(function)',
- bygroups(Keyword, Text, Keyword)),
-
- (r'(\{)', pushstate_operator_root_callback),
-
- (r'(unordered|ordered)(\s*)(\{)',
- pushstate_operator_order_callback),
-
- (r'(declare)(\s+)(ordering)',
- bygroups(Keyword, Text, Keyword), 'declareordering'),
-
- (r'(xquery)(\s+)(version)',
- bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'xqueryversion'),
-
- (r'(\(#)', Punctuation, 'pragma'),
-
- # sometimes return can occur in root state
- (r'return', Keyword),
-
- (r'(declare)(\s+)(option)', bygroups(Keyword, Text, Keyword),
- 'option'),
-
- #URI LITERALS - single and double quoted
- (r'(at)(\s+)('+stringdouble+')', String.Double, 'namespacedecl'),
- (r'(at)(\s+)('+stringsingle+')', String.Single, 'namespacedecl'),
-
- (r'(ancestor-or-self|ancestor|attribute|child|descendant-or-self)(::)',
- bygroups(Keyword, Punctuation)),
- (r'(descendant|following-sibling|following|parent|preceding-sibling'
- r'|preceding|self)(::)', bygroups(Keyword, Punctuation)),
-
- (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
-
- (r'then|else', Keyword),
-
- # ML specific
- (r'(try)(\s*)', bygroups(Keyword, Text), 'root'),
- (r'(catch)(\s*)(\()(\$)',
- bygroups(Keyword, Text, Punctuation, Name.Variable), 'varname'),
-
- (r'(@'+qname+')', Name.Attribute),
- (r'(@'+ncname+')', Name.Attribute),
- (r'@\*:'+ncname, Name.Attribute),
- (r'(@)', Name.Attribute),
-
- (r'//|/|\+|-|;|,|\(|\)', Punctuation),
-
- # STANDALONE QNAMES
- (qname + r'(?=\s*{)', Name.Tag, 'qname_braren'),
- (qname + r'(?=\s*\([^:])', Name.Function, 'qname_braren'),
- (qname, Name.Tag, 'operator'),
- ]
- }
-
-
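The XQuery lexer above keeps a private `xquery_parse_state` stack alongside the regular Pygments context stack so that `{ ... }` enclosed expressions inside direct XML constructors can return to the state they interrupted: `pushstate_root_callback` saves the current state when a `{` is seen, and `popstate_callback` restores it at the matching `}`. A small usage sketch using only the standard Pygments API (the query text is illustrative only):

    from pygments.lexers import XQueryLexer

    # The '{ $x + 1 }' enclosed expression inside the element constructor is
    # what exercises the xquery_parse_state stack described above.
    query = 'for $x in (1, 2, 3) return <item>{ $x + 1 }</item>'

    for token, value in XQueryLexer().get_tokens(query):
        print(token, repr(value))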
-class DartLexer(RegexLexer):
- """
- For `Dart <http://dartlang.org/>`_ source code.
-
- *New in Pygments 1.5.*
- """
-
- name = 'Dart'
- aliases = ['dart']
- filenames = ['*.dart']
- mimetypes = ['text/x-dart']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- include('string_literal'),
- (r'#!(.*?)$', Comment.Preproc),
- (r'\b(import|export)\b', Keyword, 'import_decl'),
- (r'\b(library|source|part of|part)\b', Keyword),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'\b(class)\b(\s+)',
- bygroups(Keyword.Declaration, Text), 'class'),
- (r'\b(assert|break|case|catch|continue|default|do|else|finally|for|'
- r'if|in|is|new|return|super|switch|this|throw|try|while)\b',
- Keyword),
- (r'\b(abstract|const|extends|factory|final|get|implements|'
- r'native|operator|set|static|typedef|var)\b', Keyword.Declaration),
- (r'\b(bool|double|Dynamic|int|num|Object|String|void)\b', Keyword.Type),
- (r'\b(false|null|true)\b', Keyword.Constant),
- (r'[~!%^&*+=|?:<>/-]|as', Operator),
- (r'[a-zA-Z_$][a-zA-Z0-9_]*:', Name.Label),
- (r'[a-zA-Z_$][a-zA-Z0-9_]*', Name),
- (r'[(){}\[\],.;]', Punctuation),
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- # DIGIT+ (‘.’ DIGIT*)? EXPONENT?
- (r'\d+(\.\d*)?([eE][+-]?\d+)?', Number),
- (r'\.\d+([eE][+-]?\d+)?', Number), # ‘.’ DIGIT+ EXPONENT?
- (r'\n', Text)
- # pseudo-keyword negate intentionally left out
- ],
- 'class': [
- (r'[a-zA-Z_$][a-zA-Z0-9_]*', Name.Class, '#pop')
- ],
- 'import_decl': [
- include('string_literal'),
- (r'\s+', Text),
- (r'\b(as|show|hide)\b', Keyword),
- (r'[a-zA-Z_$][a-zA-Z0-9_]*', Name),
- (r'\,', Punctuation),
- (r'\;', Punctuation, '#pop')
- ],
- 'string_literal': [
- # Raw strings.
- (r'r"""([\s|\S]*?)"""', String.Double),
- (r"r'''([\s|\S]*?)'''", String.Single),
- (r'r"(.*?)"', String.Double),
- (r"r'(.*?)'", String.Single),
- # Normal Strings.
- (r'"""', String.Double, 'string_double_multiline'),
- (r"'''", String.Single, 'string_single_multiline'),
- (r'"', String.Double, 'string_double'),
- (r"'", String.Single, 'string_single')
- ],
- 'string_common': [
- (r"\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z\'\"$\\])",
- String.Escape),
- (r'(\$)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(String.Interpol, Name)),
- (r'(\$\{)(.*?)(\})',
- bygroups(String.Interpol, using(this), String.Interpol))
- ],
- 'string_double': [
- (r'"', String.Double, '#pop'),
- (r'[^\"$\\\n]+', String.Double),
- include('string_common'),
- (r'\$+', String.Double)
- ],
- 'string_double_multiline': [
- (r'"""', String.Double, '#pop'),
- (r'[^\"$\\]+', String.Double),
- include('string_common'),
- (r'(\$|\")+', String.Double)
- ],
- 'string_single': [
- (r"'", String.Single, '#pop'),
- (r"[^\'$\\\n]+", String.Single),
- include('string_common'),
- (r'\$+', String.Single)
- ],
- 'string_single_multiline': [
- (r"'''", String.Single, '#pop'),
- (r'[^\'$\\]+', String.Single),
- include('string_common'),
- (r'(\$|\')+', String.Single)
- ]
- }
-
-
-class TypeScriptLexer(RegexLexer):
- """
- For `TypeScript <http://www.python.org>`_ source code.
-
- *New in Pygments 1.6.*
- """
-
- name = 'TypeScript'
- aliases = ['ts']
- filenames = ['*.ts']
- mimetypes = ['text/x-typescript']
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'<!--', Comment),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- (r'', Text, '#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
- r'this)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
- r'extends|final|float|goto|implements|import|int|interface|long|native|'
- r'package|private|protected|public|short|static|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Reserved),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
- r'window)\b', Name.Builtin),
- # Match stuff like: module name {...}
- (r'\b(module)(\s*)(\s*[a-zA-Z0-9_?.$][\w?.$]*)(\s*)',
- bygroups(Keyword.Reserved, Text, Name.Other, Text), 'slashstartsregex'),
- # Match variable type keywords
- (r'\b(string|bool|number)\b', Keyword.Type),
- # Match stuff like: constructor
- (r'\b(constructor|declare|interface|as|AS)\b', Keyword.Reserved),
- # Match stuff like: super(argument, list)
- (r'(super)(\s*)(\([a-zA-Z0-9,_?.$\s]+\s*\))',
- bygroups(Keyword.Reserved, Text), 'slashstartsregex'),
- # Match stuff like: function() {...}
- (r'([a-zA-Z_?.$][\w?.$]*)\(\) \{', Name.Other, 'slashstartsregex'),
- # Match stuff like: (function: return type)
- (r'([a-zA-Z0-9_?.$][\w?.$]*)(\s*:\s*)([a-zA-Z0-9_?.$][\w?.$]*)',
- bygroups(Name.Other, Text, Keyword.Type)),
- (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ]
- }
-
-
-class LassoLexer(RegexLexer):
- """
- For `Lasso <http://www.lassosoft.com/>`_ source code, covering both Lasso 9
- syntax and LassoScript for Lasso 8.6 and earlier. For Lasso embedded in
- HTML, use the `LassoHtmlLexer`.
-
- Additional options accepted:
-
- `builtinshighlighting`
- If given and ``True``, highlight builtin types, traits, methods, and
- members (default: ``True``).
- `requiredelimiters`
- If given and ``True``, only highlight code between delimiters as Lasso
- (default: ``False``).
-
- *New in Pygments 1.6.*
- """
-
- name = 'Lasso'
- aliases = ['lasso', 'lassoscript']
- filenames = ['*.lasso', '*.lasso[89]']
- alias_filenames = ['*.incl', '*.inc', '*.las']
- mimetypes = ['text/x-lasso']
- flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
-
- tokens = {
- 'root': [
- (r'^#!.+lasso9\b', Comment.Preproc, 'lasso'),
- (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
- (r'\[noprocess\]', Comment.Preproc, ('delimiters', 'noprocess')),
- (r'\[', Comment.Preproc, ('delimiters', 'squarebrackets')),
- (r'<\?(LassoScript|lasso|=)', Comment.Preproc,
- ('delimiters', 'anglebrackets')),
- (r'<', Other, 'delimiters'),
- (r'\s+', Other),
- (r'', Other, ('delimiters', 'lassofile')),
- ],
- 'delimiters': [
- (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
- (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
- (r'\[', Comment.Preproc, 'squarebrackets'),
- (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
- (r'<', Other),
- (r'[^[<]+', Other),
- ],
- 'nosquarebrackets': [
- (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
- (r'<', Other),
- (r'[^<]+', Other),
- ],
- 'noprocess': [
- (r'\[/noprocess\]', Comment.Preproc, '#pop'),
- (r'\[', Other),
- (r'[^[]', Other),
- ],
- 'squarebrackets': [
- (r'\]', Comment.Preproc, '#pop'),
- include('lasso'),
- ],
- 'anglebrackets': [
- (r'\?>', Comment.Preproc, '#pop'),
- include('lasso'),
- ],
- 'lassofile': [
- (r'\]', Comment.Preproc, '#pop'),
- (r'\?>', Comment.Preproc, '#pop'),
- include('lasso'),
- ],
- 'whitespacecomments': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*\*!.*?\*/', String.Doc),
- (r'/\*.*?\*/', Comment.Multiline),
- ],
- 'lasso': [
- # whitespace/comments
- include('whitespacecomments'),
-
- # literals
- (r'\d*\.\d+(e[+-]?\d+)?', Number.Float),
- (r'0x[\da-f]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'([+-]?)(infinity|NaN)\b', bygroups(Operator, Number)),
- (r"'", String.Single, 'singlestring'),
- (r'"', String.Double, 'doublestring'),
- (r'`[^`]*`', String.Backtick),
-
- # names
- (r'\$[a-z_][\w.]*', Name.Variable),
- (r'#([a-z_][\w.]*|\d+)', Name.Variable.Instance),
- (r"(\.)('[a-z_][\w.]*')",
- bygroups(Name.Builtin.Pseudo, Name.Variable.Class)),
- (r"(self)(\s*->\s*)('[a-z_][\w.]*')",
- bygroups(Name.Builtin.Pseudo, Operator, Name.Variable.Class)),
- (r'(\.\.?)([a-z_][\w.]*)',
- bygroups(Name.Builtin.Pseudo, Name.Other.Member)),
- (r'(->\\?\s*|&\s*)([a-z_][\w.]*)',
- bygroups(Operator, Name.Other.Member)),
- (r'(self|inherited|global|void)\b', Name.Builtin.Pseudo),
- (r'-[a-z_][\w.]*', Name.Attribute),
- (r'(::\s*)([a-z_][\w.]*)', bygroups(Punctuation, Name.Label)),
- (r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|'
- r'Error_DatabaseConnectionUnavailable|Error_DatabaseTimeout|'
- r'Error_DeleteError|Error_FieldRestriction|Error_FileNotFound|'
- r'Error_InvalidDatabase|Error_InvalidPassword|'
- r'Error_InvalidUsername|Error_ModuleNotFound|'
- r'Error_NoError|Error_NoPermission|Error_OutOfMemory|'
- r'Error_ReqColumnMissing|Error_ReqFieldMissing|'
- r'Error_RequiredColumnMissing|Error_RequiredFieldMissing|'
- r'Error_UpdateError)\b', Name.Exception),
-
- # definitions
- (r'(define)(\s+)([a-z_][\w.]*)(\s*=>\s*)(type|trait|thread)\b',
- bygroups(Keyword.Declaration, Text, Name.Class, Operator, Keyword)),
- (r'(define)(\s+)([a-z_][\w.]*)(\s*->\s*)([a-z_][\w.]*=?|[-+*/%<>]|==)',
- bygroups(Keyword.Declaration, Text, Name.Class, Operator,
- Name.Function), 'signature'),
- (r'(define)(\s+)([a-z_][\w.]*)',
- bygroups(Keyword.Declaration, Text, Name.Function), 'signature'),
- (r'(public|protected|private|provide)(\s+)(([a-z_][\w.]*=?|'
- r'[-+*/%<>]|==)(?=\s*\())', bygroups(Keyword, Text, Name.Function),
- 'signature'),
- (r'(public|protected|private)(\s+)([a-z_][\w.]*)',
- bygroups(Keyword, Text, Name.Function)),
-
- # keywords
- (r'(true|false|none|minimal|full|all)\b', Keyword.Constant),
- (r'(local|var|variable|data(?=\s))\b', Keyword.Declaration),
- (r'(array|date|decimal|duration|integer|map|pair|string|tag|xml|'
- r'null|list|queue|set|stack|staticarray)\b', Keyword.Type),
- (r'([a-z_][\w.]*)(\s+)(in)\b', bygroups(Name, Text, Keyword)),
- (r'(let|into)(\s+)([a-z_][\w.]*)', bygroups(Keyword, Text, Name)),
- (r'require\b', Keyword, 'requiresection'),
- (r'(/?)(Namespace_Using)\b', bygroups(Punctuation, Keyword.Namespace)),
- (r'(/?)(Cache|Database_Names|Database_SchemaNames|'
- r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|'
- r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|'
- r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|'
- r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|'
- r'Link_FirstRecord|Link_LastGroup|Link_LastRecord|Link_NextGroup|'
- r'Link_NextRecord|Link_PrevGroup|Link_PrevRecord|Log|Loop|'
- r'NoProcess|Output_None|Portal|Private|Protect|Records|Referer|'
- r'Referrer|Repeating|ResultSet|Rows|Search_Args|Search_Arguments|'
- r'Select|Sort_Args|Sort_Arguments|Thread_Atomic|Value_List|While|'
- r'Abort|Case|Else|If_Empty|If_False|If_Null|If_True|Loop_Abort|'
- r'Loop_Continue|Loop_Count|Params|Params_Up|Return|Return_Value|'
- r'Run_Children|SOAP_DefineTag|SOAP_LastRequest|SOAP_LastResponse|'
- r'Tag_Name|ascending|average|by|define|descending|do|equals|'
- r'frozen|group|handle_failure|import|in|into|join|let|match|max|'
- r'min|on|order|parent|protected|provide|public|require|skip|'
- r'split_thread|sum|take|thread|to|trait|type|where|with|yield)\b',
- bygroups(Punctuation, Keyword)),
-
- # other
- (r',', Punctuation, 'commamember'),
- (r'(and|or|not)\b', Operator.Word),
- (r'([a-z_][\w.]*)(\s*::\s*)?([a-z_][\w.]*)?(\s*=(?!=))',
- bygroups(Name, Punctuation, Name.Label, Operator)),
- (r'(/?)([\w.]+)', bygroups(Punctuation, Name.Other)),
- (r'(=)(bw|ew|cn|lte?|gte?|n?eq|ft|n?rx)\b',
- bygroups(Operator, Operator.Word)),
- (r':=|[-+*/%=<>&|!?\\]+', Operator),
- (r'[{}():;,@^]', Punctuation),
- ],
- 'singlestring': [
- (r"'", String.Single, '#pop'),
- (r"[^'\\]+", String.Single),
- include('escape'),
- (r"\\+", String.Single),
- ],
- 'doublestring': [
- (r'"', String.Double, '#pop'),
- (r'[^"\\]+', String.Double),
- include('escape'),
- (r'\\+', String.Double),
- ],
- 'escape': [
- (r'\\(U[\da-f]{8}|u[\da-f]{4}|x[\da-f]{1,2}|[0-7]{1,3}|:[^:]+:|'
- r'[abefnrtv?\"\'\\]|$)', String.Escape),
- ],
- 'signature': [
- (r'=>', Operator, '#pop'),
- (r'\)', Punctuation, '#pop'),
- (r'[(,]', Punctuation, 'parameter'),
- include('lasso'),
- ],
- 'parameter': [
- (r'\)', Punctuation, '#pop'),
- (r'-?[a-z_][\w.]*', Name.Attribute, '#pop'),
- (r'\.\.\.', Name.Builtin.Pseudo),
- include('lasso'),
- ],
- 'requiresection': [
- (r'(([a-z_][\w.]*=?|[-+*/%<>]|==)(?=\s*\())', Name, 'requiresignature'),
- (r'(([a-z_][\w.]*=?|[-+*/%<>]|==)(?=(\s*::\s*[\w.]+)?\s*,))', Name),
- (r'[a-z_][\w.]*=?|[-+*/%<>]|==', Name, '#pop'),
- (r'(::\s*)([a-z_][\w.]*)', bygroups(Punctuation, Name.Label)),
- (r',', Punctuation),
- include('whitespacecomments'),
- ],
- 'requiresignature': [
- (r'(\)(?=(\s*::\s*[\w.]+)?\s*,))', Punctuation, '#pop'),
- (r'\)', Punctuation, '#pop:2'),
- (r'-?[a-z_][\w.]*', Name.Attribute),
- (r'(::\s*)([a-z_][\w.]*)', bygroups(Punctuation, Name.Label)),
- (r'\.\.\.', Name.Builtin.Pseudo),
- (r'[(,]', Punctuation),
- include('whitespacecomments'),
- ],
- 'commamember': [
- (r'(([a-z_][\w.]*=?|[-+*/%<>]|==)'
- r'(?=\s*(\(([^()]*\([^()]*\))*[^)]*\)\s*)?(::[\w.\s]+)?=>))',
- Name.Function, 'signature'),
- include('whitespacecomments'),
- (r'', Text, '#pop'),
- ],
- }
-
- def __init__(self, **options):
- self.builtinshighlighting = get_bool_opt(
- options, 'builtinshighlighting', True)
- self.requiredelimiters = get_bool_opt(
- options, 'requiredelimiters', False)
-
- self._builtins = set()
- self._members = set()
- if self.builtinshighlighting:
- from pygments.lexers._lassobuiltins import BUILTINS, MEMBERS
- for key, value in BUILTINS.iteritems():
- self._builtins.update(value)
- for key, value in MEMBERS.iteritems():
- self._members.update(value)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- if self.requiredelimiters:
- stack.append('delimiters')
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text, stack):
- if (token is Name.Other and value.lower() in self._builtins or
- token is Name.Other.Member and value.lower() in self._members):
- yield index, Name.Builtin, value
- continue
- yield index, token, value
-
- def analyse_text(text):
- rv = 0.0
- if 'bin/lasso9' in text:
- rv += 0.8
- if re.search(r'<\?(=|lasso)|\A\[', text, re.I):
- rv += 0.4
- if re.search(r'local\(', text, re.I):
- rv += 0.4
- if '?>' in text:
- rv += 0.1
- return rv
-
-
-class QmlLexer(RegexLexer):
- """
- For QML files. See http://doc.qt.digia.com/4.7/qdeclarativeintroduction.html.
-
- *New in Pygments 1.6.*
- """
-
- # QML is based on javascript, so much of this is taken from the
- # JavascriptLexer above.
-
- name = 'QML'
- aliases = ['qml', 'Qt Meta Language', 'Qt modeling Language']
- filenames = ['*.qml',]
- mimetypes = [ 'application/x-qml',]
-
-
- # pasted from JavascriptLexer, with some additions
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'<!--', Comment),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- (r'', Text, '#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root' : [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
-
- # QML insertions
- (r'\bid\s*:\s*[A-Za-z][_A-Za-z.0-9]*',Keyword.Declaration,
- 'slashstartsregex'),
- (r'\b[A-Za-z][_A-Za-z.0-9]*\s*:',Keyword, 'slashstartsregex'),
-
- # the rest from JavascriptLexer
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
- r'this)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
- r'extends|final|float|goto|implements|import|int|interface|long|native|'
- r'package|private|protected|public|short|static|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Reserved),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
- r'window)\b', Name.Builtin),
- (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ]
- }
+__all__ = []
diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py
new file mode 100644
index 00000000..08b6c969
--- /dev/null
+++ b/pygments/lexers/webmisc.py
@@ -0,0 +1,920 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.webmisc
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for misc. web stuff.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
+ default, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal
+from pygments.util import unirange
+
+from pygments.lexers.css import _indentation, _starts_block
+from pygments.lexers.html import HtmlLexer
+from pygments.lexers.javascript import JavascriptLexer
+from pygments.lexers.ruby import RubyLexer
+
+__all__ = ['DuelLexer', 'SlimLexer', 'XQueryLexer', 'QmlLexer', 'CirruLexer']
+
+
+class DuelLexer(RegexLexer):
+ """
+ Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
+ See http://duelengine.org/.
+ See http://jsonml.org/jbst/.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Duel'
+ aliases = ['duel', 'jbst', 'jsonml+bst']
+ filenames = ['*.duel', '*.jbst']
+ mimetypes = ['text/x-duel', 'text/x-jbst']
+
+ flags = re.DOTALL
+
+ tokens = {
+ 'root': [
+ (r'(<%[@=#!:]?)(.*?)(%>)',
+ bygroups(Name.Tag, using(JavascriptLexer), Name.Tag)),
+ (r'(<%\$)(.*?)(:)(.*?)(%>)',
+ bygroups(Name.Tag, Name.Function, Punctuation, String, Name.Tag)),
+ (r'(<%--)(.*?)(--%>)',
+ bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
+ (r'(<script.*?>)(.*?)(</script>)',
+ bygroups(using(HtmlLexer),
+ using(JavascriptLexer), using(HtmlLexer))),
+ (r'(.+?)(?=<)', using(HtmlLexer)),
+ (r'.+', using(HtmlLexer)),
+ ],
+ }
+
+
+class XQueryLexer(ExtendedRegexLexer):
+ """
+ An XQuery lexer, parsing a stream and outputting the tokens needed to
+ highlight XQuery code.
+
+ .. versionadded:: 1.4
+ """
+ name = 'XQuery'
+ aliases = ['xquery', 'xqy', 'xq', 'xql', 'xqm']
+ filenames = ['*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm']
+ mimetypes = ['text/xquery', 'application/xquery']
+
+ xquery_parse_state = []
+
+ # FIX UNICODE LATER
+ # ncnamestartchar = (
+ # ur"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|"
+ # ur"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|"
+ # ur"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|"
+ # ur"[\u10000-\uEFFFF]"
+ # )
+ ncnamestartchar = r"(?:[A-Z]|_|[a-z])"
+ # FIX UNICODE LATER
+ # ncnamechar = ncnamestartchar + (ur"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|"
+ # ur"[\u203F-\u2040]")
+ ncnamechar = r"(?:" + ncnamestartchar + r"|-|\.|[0-9])"
+ ncname = "(?:%s+%s*)" % (ncnamestartchar, ncnamechar)
+ pitarget_namestartchar = r"(?:[A-KN-WYZ]|_|:|[a-kn-wyz])"
+ pitarget_namechar = r"(?:" + pitarget_namestartchar + r"|-|\.|[0-9])"
+ pitarget = "%s+%s*" % (pitarget_namestartchar, pitarget_namechar)
+ prefixedname = "%s:%s" % (ncname, ncname)
+ unprefixedname = ncname
+ qname = "(?:%s|%s)" % (prefixedname, unprefixedname)
+
+ entityref = r'(?:&(?:lt|gt|amp|quot|apos|nbsp);)'
+ charref = r'(?:&#[0-9]+;|&#x[0-9a-fA-F]+;)'
+
+ stringdouble = r'(?:"(?:' + entityref + r'|' + charref + r'|""|[^&"])*")'
+ stringsingle = r"(?:'(?:" + entityref + r"|" + charref + r"|''|[^&'])*')"
+
+ # FIX UNICODE LATER
+ # elementcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
+ # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+ elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%()*+,\-./:;=?@\[\\\]^_\'`|~]'
+ # quotattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|'
+ # ur'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+ quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%()*+,\-./:;=?@\[\\\]^_\'`|~]'
+ # aposattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
+ # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+ aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%()*+,\-./:;=?@\[\\\]^_`|~]'
+
+ # CHAR elements - fix the above elementcontentchar, quotattrcontentchar,
+ # aposattrcontentchar
+ # x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
+
+ flags = re.DOTALL | re.MULTILINE | re.UNICODE
+
+ def punctuation_root_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ # transition to root always - don't pop off stack
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def operator_root_callback(lexer, match, ctx):
+ yield match.start(), Operator, match.group(1)
+ # transition to root always - don't pop off stack
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def popstate_tag_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ ctx.stack.append(lexer.xquery_parse_state.pop())
+ ctx.pos = match.end()
+
+ def popstate_xmlcomment_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append(lexer.xquery_parse_state.pop())
+ ctx.pos = match.end()
+
+ def popstate_kindtest_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ next_state = lexer.xquery_parse_state.pop()
+ if next_state == 'occurrenceindicator':
+ if re.match("[?*+]+", match.group(2)):
+ yield match.start(), Punctuation, match.group(2)
+ ctx.stack.append('operator')
+ ctx.pos = match.end()
+ else:
+ ctx.stack.append('operator')
+ ctx.pos = match.end(1)
+ else:
+ ctx.stack.append(next_state)
+ ctx.pos = match.end(1)
+
+ def popstate_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ # if we have run out of our state stack, pop whatever is on the pygments
+ # state stack
+ if len(lexer.xquery_parse_state) == 0:
+ ctx.stack.pop()
+ elif len(ctx.stack) > 1:
+ ctx.stack.append(lexer.xquery_parse_state.pop())
+ else:
+ # I don't know if I'll need this, but just in case, default back to root
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def pushstate_element_content_starttag_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ lexer.xquery_parse_state.append('element_content')
+ ctx.stack.append('start_tag')
+ ctx.pos = match.end()
+
+ def pushstate_cdata_section_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('cdata_section')
+ lexer.xquery_parse_state.append(ctx.state.pop)
+ ctx.pos = match.end()
+
+ def pushstate_starttag_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ lexer.xquery_parse_state.append(ctx.state.pop)
+ ctx.stack.append('start_tag')
+ ctx.pos = match.end()
+
+ def pushstate_operator_order_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ ctx.stack = ['root']
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_root_validate(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ ctx.stack = ['root']
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_root_validate_withmode(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Keyword, match.group(3)
+ ctx.stack = ['root']
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_processing_instruction_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('processing_instruction')
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_element_content_processing_instruction_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('processing_instruction')
+ lexer.xquery_parse_state.append('element_content')
+ ctx.pos = match.end()
+
+ def pushstate_element_content_cdata_section_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('cdata_section')
+ lexer.xquery_parse_state.append('element_content')
+ ctx.pos = match.end()
+
+ def pushstate_operator_cdata_section_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('cdata_section')
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_element_content_xmlcomment_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('xml_comment')
+ lexer.xquery_parse_state.append('element_content')
+ ctx.pos = match.end()
+
+ def pushstate_operator_xmlcomment_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('xml_comment')
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_kindtest_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('kindtest')
+ ctx.stack.append('kindtest')
+ ctx.pos = match.end()
+
+ def pushstate_operator_kindtestforpi_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack.append('kindtestforpi')
+ ctx.pos = match.end()
+
+ def pushstate_operator_kindtest_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack.append('kindtest')
+ ctx.pos = match.end()
+
+ def pushstate_occurrenceindicator_kindtest_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('occurrenceindicator')
+ ctx.stack.append('kindtest')
+ ctx.pos = match.end()
+
+ def pushstate_operator_starttag_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack.append('start_tag')
+ ctx.pos = match.end()
+
+ def pushstate_operator_root_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def pushstate_operator_root_construct_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def pushstate_root_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ cur_state = ctx.stack.pop()
+ lexer.xquery_parse_state.append(cur_state)
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def pushstate_operator_attribute_callback(lexer, match, ctx):
+ yield match.start(), Name.Attribute, match.group(1)
+ ctx.stack.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ tokens = {
+ 'comment': [
+ # xquery comments
+ (r'(:\))', Comment, '#pop'),
+ (r'(\(:)', Comment, '#push'),
+ (r'[^:)]', Comment),
+ (r'([^:)]|:|\))', Comment),
+ ],
+ 'whitespace': [
+ (r'\s+', Text),
+ ],
+ 'operator': [
+ include('whitespace'),
+ (r'(\})', popstate_callback),
+ (r'\(:', Comment, 'comment'),
+
+ (r'(\{)', pushstate_root_callback),
+ (r'then|else|external|at|div|except', Keyword, 'root'),
+ (r'order by', Keyword, 'root'),
+ (r'is|mod|order\s+by|stable\s+order\s+by', Keyword, 'root'),
+ (r'and|or', Operator.Word, 'root'),
+ (r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)',
+ Operator.Word, 'root'),
+ (r'return|satisfies|to|union|where|preserve\s+strip',
+ Keyword, 'root'),
+ (r'(>=|>>|>|<=|<<|<|-|\*|!=|\+|\|\||\||:=|=)',
+ operator_root_callback),
+ (r'(::|;|\[|//|/|,)',
+ punctuation_root_callback),
+ (r'(castable|cast)(\s+)(as)\b',
+ bygroups(Keyword, Text, Keyword), 'singletype'),
+ (r'(instance)(\s+)(of)\b',
+ bygroups(Keyword, Text, Keyword), 'itemtype'),
+ (r'(treat)(\s+)(as)\b',
+ bygroups(Keyword, Text, Keyword), 'itemtype'),
+ (r'(case|as)\b', Keyword, 'itemtype'),
+ (r'(\))(\s*)(as)',
+ bygroups(Punctuation, Text, Keyword), 'itemtype'),
+ (r'\$', Name.Variable, 'varname'),
+ (r'(for|let)(\s+)(\$)',
+ bygroups(Keyword, Text, Name.Variable), 'varname'),
+ # (r'\)|\?|\]', Punctuation, '#push'),
+ (r'\)|\?|\]', Punctuation),
+ (r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)),
+ (r'ascending|descending|default', Keyword, '#push'),
+ (r'external', Keyword),
+ (r'collation', Keyword, 'uritooperator'),
+ # finally catch all string literals and stay in operator state
+ (stringdouble, String.Double),
+ (stringsingle, String.Single),
+
+ (r'(catch)(\s*)', bygroups(Keyword, Text), 'root'),
+ ],
+ 'uritooperator': [
+ (stringdouble, String.Double, '#pop'),
+ (stringsingle, String.Single, '#pop'),
+ ],
+ 'namespacedecl': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (r'(at)(\s+)('+stringdouble+')', bygroups(Keyword, Text, String.Double)),
+ (r"(at)(\s+)("+stringsingle+')', bygroups(Keyword, Text, String.Single)),
+ (stringdouble, String.Double),
+ (stringsingle, String.Single),
+ (r',', Punctuation),
+ (r'=', Operator),
+ (r';', Punctuation, 'root'),
+ (ncname, Name.Namespace),
+ ],
+ 'namespacekeyword': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (stringdouble, String.Double, 'namespacedecl'),
+ (stringsingle, String.Single, 'namespacedecl'),
+ (r'inherit|no-inherit', Keyword, 'root'),
+ (r'namespace', Keyword, 'namespacedecl'),
+ (r'(default)(\s+)(element)', bygroups(Keyword, Text, Keyword)),
+ (r'preserve|no-preserve', Keyword),
+ (r',', Punctuation),
+ ],
+ 'varname': [
+ (r'\(:', Comment, 'comment'),
+ (qname, Name.Variable, 'operator'),
+ ],
+ 'singletype': [
+ (r'\(:', Comment, 'comment'),
+ (ncname + r'(:\*)', Name.Variable, 'operator'),
+ (qname, Name.Variable, 'operator'),
+ ],
+ 'itemtype': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (r'\$', Punctuation, 'varname'),
+ (r'(void)(\s*)(\()(\s*)(\))',
+ bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'operator'),
+ (r'(element|attribute|schema-element|schema-attribute|comment|text|'
+ r'node|binary|document-node|empty-sequence)(\s*)(\()',
+ pushstate_occurrenceindicator_kindtest_callback),
+ # Marklogic specific type?
+ (r'(processing-instruction)(\s*)(\()',
+ bygroups(Keyword, Text, Punctuation),
+ ('occurrenceindicator', 'kindtestforpi')),
+ (r'(item)(\s*)(\()(\s*)(\))(?=[*+?])',
+ bygroups(Keyword, Text, Punctuation, Text, Punctuation),
+ 'occurrenceindicator'),
+ (r'\(\#', Punctuation, 'pragma'),
+ (r';', Punctuation, '#pop'),
+ (r'then|else', Keyword, '#pop'),
+ (r'(at)(\s+)(' + stringdouble + ')',
+ bygroups(Keyword, Text, String.Double), 'namespacedecl'),
+ (r'(at)(\s+)(' + stringsingle + ')',
+ bygroups(Keyword, Text, String.Single), 'namespacedecl'),
+ (r'except|intersect|in|is|return|satisfies|to|union|where',
+ Keyword, 'root'),
+ (r'and|div|eq|ge|gt|le|lt|ne|idiv|mod|or', Operator.Word, 'root'),
+ (r':=|=|,|>=|>>|>|\[|\(|<=|<<|<|-|!=|\|\||\|', Operator, 'root'),
+ (r'external|at', Keyword, 'root'),
+ (r'(stable)(\s+)(order)(\s+)(by)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword), 'root'),
+ (r'(castable|cast)(\s+)(as)',
+ bygroups(Keyword, Text, Keyword), 'singletype'),
+ (r'(treat)(\s+)(as)', bygroups(Keyword, Text, Keyword)),
+ (r'(instance)(\s+)(of)', bygroups(Keyword, Text, Keyword)),
+ (r'case|as', Keyword, 'itemtype'),
+ (r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
+ (ncname + r':\*', Keyword.Type, 'operator'),
+ (qname, Keyword.Type, 'occurrenceindicator'),
+ ],
+ 'kindtest': [
+ (r'\(:', Comment, 'comment'),
+ (r'\{', Punctuation, 'root'),
+ (r'(\))([*+?]?)', popstate_kindtest_callback),
+ (r'\*', Name, 'closekindtest'),
+ (qname, Name, 'closekindtest'),
+ (r'(element|schema-element)(\s*)(\()', pushstate_kindtest_callback),
+ ],
+ 'kindtestforpi': [
+ (r'\(:', Comment, 'comment'),
+ (r'\)', Punctuation, '#pop'),
+ (ncname, Name.Variable),
+ (stringdouble, String.Double),
+ (stringsingle, String.Single),
+ ],
+ 'closekindtest': [
+ (r'\(:', Comment, 'comment'),
+ (r'(\))', popstate_callback),
+ (r',', Punctuation),
+ (r'(\{)', pushstate_operator_root_callback),
+ (r'\?', Punctuation),
+ ],
+ 'xml_comment': [
+ (r'(-->)', popstate_xmlcomment_callback),
+ (r'[^-]{1,2}', Literal),
+ (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+ unirange(0x10000, 0x10ffff), Literal),
+ ],
+ 'processing_instruction': [
+ (r'\s+', Text, 'processing_instruction_content'),
+ (r'\?>', String.Doc, '#pop'),
+ (pitarget, Name),
+ ],
+ 'processing_instruction_content': [
+ (r'\?>', String.Doc, '#pop'),
+ (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+ unirange(0x10000, 0x10ffff), Literal),
+ ],
+ 'cdata_section': [
+ (r']]>', String.Doc, '#pop'),
+ (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+ unirange(0x10000, 0x10ffff), Literal),
+ ],
+ 'start_tag': [
+ include('whitespace'),
+ (r'(/>)', popstate_tag_callback),
+ (r'>', Name.Tag, 'element_content'),
+ (r'"', Punctuation, 'quot_attribute_content'),
+ (r"'", Punctuation, 'apos_attribute_content'),
+ (r'=', Operator),
+ (qname, Name.Tag),
+ ],
+ 'quot_attribute_content': [
+ (r'"', Punctuation, 'start_tag'),
+ (r'(\{)', pushstate_root_callback),
+ (r'""', Name.Attribute),
+ (quotattrcontentchar, Name.Attribute),
+ (entityref, Name.Attribute),
+ (charref, Name.Attribute),
+ (r'\{\{|\}\}', Name.Attribute),
+ ],
+ 'apos_attribute_content': [
+ (r"'", Punctuation, 'start_tag'),
+ (r'\{', Punctuation, 'root'),
+ (r"''", Name.Attribute),
+ (aposattrcontentchar, Name.Attribute),
+ (entityref, Name.Attribute),
+ (charref, Name.Attribute),
+ (r'\{\{|\}\}', Name.Attribute),
+ ],
+ 'element_content': [
+ (r'</', Name.Tag, 'end_tag'),
+ (r'(\{)', pushstate_root_callback),
+ (r'(<!--)', pushstate_element_content_xmlcomment_callback),
+ (r'(<\?)', pushstate_element_content_processing_instruction_callback),
+ (r'(<!\[CDATA\[)', pushstate_element_content_cdata_section_callback),
+ (r'(<)', pushstate_element_content_starttag_callback),
+ (elementcontentchar, Literal),
+ (entityref, Literal),
+ (charref, Literal),
+ (r'\{\{|\}\}', Literal),
+ ],
+ 'end_tag': [
+ include('whitespace'),
+ (r'(>)', popstate_tag_callback),
+ (qname, Name.Tag),
+ ],
+ 'xmlspace_decl': [
+ (r'\(:', Comment, 'comment'),
+ (r'preserve|strip', Keyword, '#pop'),
+ ],
+ 'declareordering': [
+ (r'\(:', Comment, 'comment'),
+ include('whitespace'),
+ (r'ordered|unordered', Keyword, '#pop'),
+ ],
+ 'xqueryversion': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (stringdouble, String.Double),
+ (stringsingle, String.Single),
+ (r'encoding', Keyword),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'pragma': [
+ (qname, Name.Variable, 'pragmacontents'),
+ ],
+ 'pragmacontents': [
+ (r'#\)', Punctuation, 'operator'),
+ (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+ unirange(0x10000, 0x10ffff), Literal),
+ (r'(\s+)', Text),
+ ],
+ 'occurrenceindicator': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (r'\*|\?|\+', Operator, 'operator'),
+ (r':=', Operator, 'root'),
+ default('operator'),
+ ],
+ 'option': [
+ include('whitespace'),
+ (qname, Name.Variable, '#pop'),
+ ],
+ 'qname_braren': [
+ include('whitespace'),
+ (r'(\{)', pushstate_operator_root_callback),
+ (r'(\()', Punctuation, 'root'),
+ ],
+ 'element_qname': [
+ (qname, Name.Variable, 'root'),
+ ],
+ 'attribute_qname': [
+ (qname, Name.Variable, 'root'),
+ ],
+ 'root': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+
+ # handle operator state
+ # order on numbers matters - handle most complex first
+ (r'\d+(\.\d*)?[eE][+-]?\d+', Number.Float, 'operator'),
+ (r'(\.\d+)[eE][+-]?\d+', Number.Float, 'operator'),
+ (r'(\.\d+|\d+\.\d*)', Number.Float, 'operator'),
+ (r'(\d+)', Number.Integer, 'operator'),
+ (r'(\.\.|\.|\))', Punctuation, 'operator'),
+ (r'(declare)(\s+)(construction)',
+ bygroups(Keyword, Text, Keyword), 'operator'),
+ (r'(declare)(\s+)(default)(\s+)(order)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword), 'operator'),
+ (ncname + ':\*', Name, 'operator'),
+ ('\*:'+ncname, Name.Tag, 'operator'),
+ ('\*', Name.Tag, 'operator'),
+ (stringdouble, String.Double, 'operator'),
+ (stringsingle, String.Single, 'operator'),
+
+ (r'(\})', popstate_callback),
+
+ # NAMESPACE DECL
+ (r'(declare)(\s+)(default)(\s+)(collation)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword)),
+ (r'(module|declare)(\s+)(namespace)',
+ bygroups(Keyword, Text, Keyword), 'namespacedecl'),
+ (r'(declare)(\s+)(base-uri)',
+ bygroups(Keyword, Text, Keyword), 'namespacedecl'),
+
+ # NAMESPACE KEYWORD
+ (r'(declare)(\s+)(default)(\s+)(element|function)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword), 'namespacekeyword'),
+ (r'(import)(\s+)(schema|module)',
+ bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'),
+ (r'(declare)(\s+)(copy-namespaces)',
+ bygroups(Keyword, Text, Keyword), 'namespacekeyword'),
+
+ # VARNAMEs
+ (r'(for|let|some|every)(\s+)(\$)',
+ bygroups(Keyword, Text, Name.Variable), 'varname'),
+ (r'\$', Name.Variable, 'varname'),
+ (r'(declare)(\s+)(variable)(\s+)(\$)',
+ bygroups(Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
+
+ # ITEMTYPE
+ (r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
+
+ (r'(element|attribute|schema-element|schema-attribute|comment|'
+ r'text|node|document-node|empty-sequence)(\s+)(\()',
+ pushstate_operator_kindtest_callback),
+
+ (r'(processing-instruction)(\s+)(\()',
+ pushstate_operator_kindtestforpi_callback),
+
+ (r'(<!--)', pushstate_operator_xmlcomment_callback),
+
+ (r'(<\?)', pushstate_operator_processing_instruction_callback),
+
+ (r'(<!\[CDATA\[)', pushstate_operator_cdata_section_callback),
+
+ # (r'</', Name.Tag, 'end_tag'),
+ (r'(<)', pushstate_operator_starttag_callback),
+
+ (r'(declare)(\s+)(boundary-space)',
+ bygroups(Keyword, Text, Keyword), 'xmlspace_decl'),
+
+ (r'(validate)(\s+)(lax|strict)',
+ pushstate_operator_root_validate_withmode),
+ (r'(validate)(\s*)(\{)', pushstate_operator_root_validate),
+ (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
+ (r'(element|attribute)(\s*)(\{)',
+ pushstate_operator_root_construct_callback),
+
+ (r'(document|text|processing-instruction|comment)(\s*)(\{)',
+ pushstate_operator_root_construct_callback),
+ # ATTRIBUTE
+ (r'(attribute)(\s+)(?=' + qname + r')',
+ bygroups(Keyword, Text), 'attribute_qname'),
+ # ELEMENT
+ (r'(element)(\s+)(?=' + qname + r')',
+ bygroups(Keyword, Text), 'element_qname'),
+ # PROCESSING_INSTRUCTION
+ (r'(processing-instruction)(\s+)(' + ncname + r')(\s*)(\{)',
+ bygroups(Keyword, Text, Name.Variable, Text, Punctuation),
+ 'operator'),
+
+ (r'(declare|define)(\s+)(function)',
+ bygroups(Keyword, Text, Keyword)),
+
+ (r'(\{)', pushstate_operator_root_callback),
+
+ (r'(unordered|ordered)(\s*)(\{)',
+ pushstate_operator_order_callback),
+
+ (r'(declare)(\s+)(ordering)',
+ bygroups(Keyword, Text, Keyword), 'declareordering'),
+
+ (r'(xquery)(\s+)(version)',
+ bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'xqueryversion'),
+
+ (r'(\(#)', Punctuation, 'pragma'),
+
+ # sometimes return can occur in root state
+ (r'return', Keyword),
+
+ (r'(declare)(\s+)(option)', bygroups(Keyword, Text, Keyword),
+ 'option'),
+
+ # URI LITERALS - single and double quoted
+ (r'(at)(\s+)('+stringdouble+')', String.Double, 'namespacedecl'),
+ (r'(at)(\s+)('+stringsingle+')', String.Single, 'namespacedecl'),
+
+ (r'(ancestor-or-self|ancestor|attribute|child|descendant-or-self)(::)',
+ bygroups(Keyword, Punctuation)),
+ (r'(descendant|following-sibling|following|parent|preceding-sibling'
+ r'|preceding|self)(::)', bygroups(Keyword, Punctuation)),
+
+ (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
+
+ (r'then|else', Keyword),
+
+ # ML specific
+ (r'(try)(\s*)', bygroups(Keyword, Text), 'root'),
+ (r'(catch)(\s*)(\()(\$)',
+ bygroups(Keyword, Text, Punctuation, Name.Variable), 'varname'),
+
+ (r'(@'+qname+')', Name.Attribute),
+ (r'(@'+ncname+')', Name.Attribute),
+ (r'@\*:'+ncname, Name.Attribute),
+ (r'(@)', Name.Attribute),
+
+ (r'//|/|\+|-|;|,|\(|\)', Punctuation),
+
+ # STANDALONE QNAMES
+ (qname + r'(?=\s*\{)', Name.Tag, 'qname_braren'),
+ (qname + r'(?=\s*\([^:])', Name.Function, 'qname_braren'),
+ (qname, Name.Tag, 'operator'),
+ ]
+ }
+
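Editor's note: the lexers collected in this new module are ordinary Pygments lexers and go through the standard highlighting API. A minimal usage sketch (the XQuery snippet is an arbitrary example, not taken from this commit):

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.webmisc import XQueryLexer

    # arbitrary XQuery source; ExtendedRegexLexer manages its custom
    # xquery_parse_state stack internally, callers just pass text
    code = 'for $x in (1, 2, 3) return $x * 2'
    print(highlight(code, XQueryLexer(), TerminalFormatter()))
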
+
+class QmlLexer(RegexLexer):
+ """
+ For QML files. See http://doc.qt.digia.com/4.7/qdeclarativeintroduction.html.
+
+ .. versionadded:: 1.6
+ """
+
+ # QML is based on JavaScript, so much of this is taken from the
+ # JavascriptLexer in pygments.lexers.javascript.
+
+ name = 'QML'
+ aliases = ['qml']
+ filenames = ['*.qml']
+ mimetypes = ['application/x-qml']
+
+ # pasted from JavascriptLexer, with some additions
+ flags = re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'<!--', Comment),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+
+ # QML insertions
+ (r'\bid\s*:\s*[A-Za-z][\w.]*', Keyword.Declaration,
+ 'slashstartsregex'),
+ (r'\b[A-Za-z][\w.]*\s*:', Keyword, 'slashstartsregex'),
+
+ # the rest from JavascriptLexer
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
+ r'this)\b', Keyword, 'slashstartsregex'),
+ (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
+ r'extends|final|float|goto|implements|import|int|interface|long|native|'
+ r'package|private|protected|public|short|static|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Reserved),
+ (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
+ r'window)\b', Name.Builtin),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
+
+
+class CirruLexer(RegexLexer):
+ """
+ Syntax rules of Cirru can be found at:
+ http://cirru.org/
+
+ * using ``()`` to mark up blocks, limited to a single line
+ * using ``""`` to mark up strings, allowing ``\`` to escape
+ * using ``$`` as a shorthand for ``()`` until the indentation ends or a ``)``
+ * using indentation to create nesting
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Cirru'
+ aliases = ['cirru']
+ filenames = ['*.cirru']
+ mimetypes = ['text/x-cirru']
+ flags = re.MULTILINE
+
+ tokens = {
+ 'string': [
+ (r'[^"\\\n]', String),
+ (r'\\', String.Escape, 'escape'),
+ (r'"', String, '#pop'),
+ ],
+ 'escape': [
+ (r'.', String.Escape, '#pop'),
+ ],
+ 'function': [
+ (r'[^\s"()]+', Name.Function, '#pop'),
+ (r'\)', Operator, '#pop'),
+ (r'(?=\n)', Text, '#pop'),
+ (r'\(', Operator, '#push'),
+ (r'"', String, ('#pop', 'string')),
+ (r'[ ]+', Text.Whitespace),
+ (r'\,', Operator, '#pop'),
+ ],
+ 'line': [
+ (r'\$', Operator, 'function'),
+ (r'\(', Operator, 'function'),
+ (r'\)', Operator),
+ (r'\n', Text, '#pop'),
+ (r'"', String, 'string'),
+ (r'[ ]+', Text.Whitespace),
+ (r'[+-]?[\d.]+\b', Number),
+ (r'[^\s"()]+', Name.Variable)
+ ],
+ 'root': [
+ (r'^\n+', Text.Whitespace),
+ default(('line', 'function')),
+ ]
+ }
+
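Editor's note: following the rules listed in the CirruLexer docstring, a short token-stream sketch (the Cirru snippet is invented purely for illustration):

    from pygments.lexers.webmisc import CirruLexer

    # made-up snippet using the documented (), "" and $ forms
    sample = 'print (add 1 2)\nprint $ add 3 4\n'
    for token, value in CirruLexer().get_tokens(sample):
        print(token, repr(value))
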
+
+class SlimLexer(ExtendedRegexLexer):
+ """
+ For Slim markup.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Slim'
+ aliases = ['slim']
+ filenames = ['*.slim']
+ mimetypes = ['text/x-slim']
+
+ flags = re.IGNORECASE
+ _dot = r'(?: \|\n(?=.* \|)|.)'
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'css': [
+ (r'\.[\w:-]+', Name.Class, 'tag'),
+ (r'\#[\w:-]+', Name.Function, 'tag'),
+ ],
+
+ 'eval-or-plain': [
+ (r'([ \t]*==?)(.*\n)',
+ bygroups(Punctuation, using(RubyLexer)),
+ 'root'),
+ (r'[ \t]+[\w:-]+(?==)', Name.Attribute, 'html-attributes'),
+ default('plain'),
+ ],
+
+ 'content': [
+ include('css'),
+ (r'[\w:-]+:[ \t]*\n', Text, 'plain'),
+ (r'(-)(.*\n)',
+ bygroups(Punctuation, using(RubyLexer)),
+ '#pop'),
+ (r'\|' + _dot + r'*\n', _starts_block(Text, 'plain'), '#pop'),
+ (r'/' + _dot + r'*\n', _starts_block(Comment.Preproc, 'slim-comment-block'), '#pop'),
+ (r'[\w:-]+', Name.Tag, 'tag'),
+ include('eval-or-plain'),
+ ],
+
+ 'tag': [
+ include('css'),
+ (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+ (r'[ \t]+\n', Punctuation, '#pop:2'),
+ include('eval-or-plain'),
+ ],
+
+ 'plain': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+ (r'(#\{)(.*?)(\})',
+ bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'html-attributes': [
+ (r'=', Punctuation),
+ (r'"[^"]+"', using(RubyLexer), 'tag'),
+ (r'\'[^\']+\'', using(RubyLexer), 'tag'),
+ (r'\w+', Text, 'tag'),
+ ],
+
+ 'slim-comment-block': [
+ (_dot + '+', Comment.Preproc),
+ (r'\n', Text, 'root'),
+ ],
+ }
diff --git a/pygments/modeline.py b/pygments/modeline.py
index cba1cab2..54df90c4 100644
--- a/pygments/modeline.py
+++ b/pygments/modeline.py
@@ -5,7 +5,7 @@
A simple modeline parser (based on pymodeline).
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/plugin.py b/pygments/plugin.py
index 58662e96..f9ea0890 100644
--- a/pygments/plugin.py
+++ b/pygments/plugin.py
@@ -32,7 +32,7 @@
yourfilter = yourfilter:YourFilter
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
try:
diff --git a/pygments/regexopt.py b/pygments/regexopt.py
new file mode 100644
index 00000000..79903684
--- /dev/null
+++ b/pygments/regexopt.py
@@ -0,0 +1,92 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.regexopt
+ ~~~~~~~~~~~~~~~~~
+
+ An algorithm that generates optimized regexes for matching long lists of
+ literal strings.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+from re import escape
+from os.path import commonprefix
+from itertools import groupby
+from operator import itemgetter
+
+CS_ESCAPE = re.compile(r'[\^\\\-\]]')
+FIRST_ELEMENT = itemgetter(0)
+
+
+def make_charset(letters):
+ return '[' + CS_ESCAPE.sub(lambda m: '\\' + m.group(), ''.join(letters)) + ']'
+
+
+def regex_opt_inner(strings, open_paren):
+ """Return a regex that matches any string in the sorted list of strings."""
+ close_paren = open_paren and ')' or ''
+ # print strings, repr(open_paren)
+ if not strings:
+ # print '-> nothing left'
+ return ''
+ first = strings[0]
+ if len(strings) == 1:
+ # print '-> only 1 string'
+ return open_paren + escape(first) + close_paren
+ if not first:
+ # print '-> first string empty'
+ return open_paren + regex_opt_inner(strings[1:], '(?:') \
+ + '?' + close_paren
+ if len(first) == 1:
+ # multiple one-char strings? make a charset
+ oneletter = []
+ rest = []
+ for s in strings:
+ if len(s) == 1:
+ oneletter.append(s)
+ else:
+ rest.append(s)
+ if len(oneletter) > 1: # do we have more than one oneletter string?
+ if rest:
+ # print '-> 1-character + rest'
+ return open_paren + regex_opt_inner(rest, '') + '|' \
+ + make_charset(oneletter) + close_paren
+ # print '-> only 1-character'
+ return make_charset(oneletter)
+ prefix = commonprefix(strings)
+ if prefix:
+ plen = len(prefix)
+ # we have a prefix for all strings
+ # print '-> prefix:', prefix
+ return open_paren + escape(prefix) \
+ + regex_opt_inner([s[plen:] for s in strings], '(?:') \
+ + close_paren
+ # is there a suffix?
+ strings_rev = [s[::-1] for s in strings]
+ suffix = commonprefix(strings_rev)
+ if suffix:
+ slen = len(suffix)
+ # print '-> suffix:', suffix[::-1]
+ return open_paren \
+ + regex_opt_inner(sorted(s[:-slen] for s in strings), '(?:') \
+ + escape(suffix[::-1]) + close_paren
+ # recurse on common 1-string prefixes
+ # print '-> last resort'
+ return open_paren + \
+ '|'.join(regex_opt_inner(list(group[1]), '')
+ for group in groupby(strings, lambda s: s[0] == first[0])) \
+ + close_paren
+
+
+def regex_opt(strings, prefix='', suffix=''):
+ """Return a compiled regex that matches any string in the given list.
+
+ The strings to match must be literal strings, not regexes. They will be
+ regex-escaped.
+
+ *prefix* and *suffix* are pre- and appended to the final regex.
+ """
+ strings = sorted(strings)
+ return prefix + regex_opt_inner(strings, '(') + suffix
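Editor's note: regex_opt builds a single optimized alternation, returned as a plain pattern string, from a list of literal words; lexers can drop it into a word-boundary rule. A small sketch with an invented keyword list:

    import re
    from pygments.regexopt import regex_opt

    # keyword list invented for the example
    pattern = regex_opt(['if', 'elif', 'else', 'end'], prefix=r'\b', suffix=r'\b')
    print(pattern)                            # e.g. r'\b(e(?:l(?:if|se)|nd)|if)\b'
    print(bool(re.match(pattern, 'elif')))    # True
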
diff --git a/pygments/scanner.py b/pygments/scanner.py
index f469e694..35dbbadd 100644
--- a/pygments/scanner.py
+++ b/pygments/scanner.py
@@ -12,7 +12,7 @@
Have a look at the `DelphiLexer` to get an idea of how to use
this scanner.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
diff --git a/pygments/sphinxext.py b/pygments/sphinxext.py
new file mode 100644
index 00000000..e63d3d35
--- /dev/null
+++ b/pygments/sphinxext.py
@@ -0,0 +1,155 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.sphinxext
+ ~~~~~~~~~~~~~~~~~~
+
+ Sphinx extension to generate automatic documentation of lexers,
+ formatters and filters.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+import sys
+
+from docutils import nodes
+from docutils.statemachine import ViewList
+from sphinx.util.compat import Directive
+from sphinx.util.nodes import nested_parse_with_titles
+
+
+MODULEDOC = '''
+.. module:: %s
+
+%s
+%s
+'''
+
+LEXERDOC = '''
+.. class:: %s
+
+ :Short names: %s
+ :Filenames: %s
+ :MIME types: %s
+
+ %s
+
+'''
+
+FMTERDOC = '''
+.. class:: %s
+
+ :Short names: %s
+ :Filenames: %s
+
+ %s
+
+'''
+
+FILTERDOC = '''
+.. class:: %s
+
+ :Name: %s
+
+ %s
+
+'''
+
+class PygmentsDoc(Directive):
+ """
+ A directive to collect all lexers/formatters/filters and generate
+ autoclass directives for them.
+ """
+ has_content = False
+ required_arguments = 1
+ optional_arguments = 0
+ final_argument_whitespace = False
+ option_spec = {}
+
+ def run(self):
+ self.filenames = set()
+ if self.arguments[0] == 'lexers':
+ out = self.document_lexers()
+ elif self.arguments[0] == 'formatters':
+ out = self.document_formatters()
+ elif self.arguments[0] == 'filters':
+ out = self.document_filters()
+ else:
+ raise Exception('invalid argument for "pygmentsdoc" directive')
+ node = nodes.compound()
+ vl = ViewList(out.split('\n'), source='')
+ nested_parse_with_titles(self.state, vl, node)
+ for fn in self.filenames:
+ self.state.document.settings.record_dependencies.add(fn)
+ return node.children
+
+ def document_lexers(self):
+ from pygments.lexers._mapping import LEXERS
+ out = []
+ modules = {}
+ moduledocstrings = {}
+ for classname, data in sorted(LEXERS.items(), key=lambda x: x[0]):
+ module = data[0]
+ mod = __import__(module, None, None, [classname])
+ self.filenames.add(mod.__file__)
+ cls = getattr(mod, classname)
+ if not cls.__doc__:
+ print("Warning: %s does not have a docstring." % classname)
+ docstring = cls.__doc__
+ if isinstance(docstring, bytes):
+ docstring = docstring.decode('utf8')
+ modules.setdefault(module, []).append((
+ classname,
+ ', '.join(data[2]) or 'None',
+ ', '.join(data[3]).replace('*', '\\*').replace('_', '\\_') or 'None',
+ ', '.join(data[4]) or 'None',
+ docstring))
+ if module not in moduledocstrings:
+ moddoc = mod.__doc__
+ if isinstance(moddoc, bytes):
+ moddoc = moddoc.decode('utf8')
+ moduledocstrings[module] = moddoc
+
+ for module, lexers in sorted(modules.items(), key=lambda x: x[0]):
+ heading = moduledocstrings[module].splitlines()[4].strip().rstrip('.')
+ out.append(MODULEDOC % (module, heading, '-'*len(heading)))
+ for data in lexers:
+ out.append(LEXERDOC % data)
+
+ return ''.join(out)
+
+ def document_formatters(self):
+ from pygments.formatters import FORMATTERS
+
+ out = []
+ for classname, data in sorted(FORMATTERS.items(), key=lambda x: x[0]):
+ module = data[0]
+ mod = __import__(module, None, None, [classname])
+ self.filenames.add(mod.__file__)
+ cls = getattr(mod, classname)
+ docstring = cls.__doc__
+ if isinstance(docstring, bytes):
+ docstring = docstring.decode('utf8')
+ heading = cls.__name__
+ out.append(FMTERDOC % (heading, ', '.join(data[2]) or 'None',
+ ', '.join(data[3]).replace('*', '\\*') or 'None',
+ docstring))
+ return ''.join(out)
+
+ def document_filters(self):
+ from pygments.filters import FILTERS
+
+ out = []
+ for name, cls in FILTERS.items():
+ self.filenames.add(sys.modules[cls.__module__].__file__)
+ docstring = cls.__doc__
+ if isinstance(docstring, bytes):
+ docstring = docstring.decode('utf8')
+ out.append(FILTERDOC % (cls.__name__, name, docstring))
+ return ''.join(out)
+
+
+def setup(app):
+ app.add_directive('pygmentsdoc', PygmentsDoc)
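Editor's note: the directive is meant to be driven from the Sphinx build; a hedged sketch of the expected wiring (the conf.py contents are an assumption, not part of this commit):

    # conf.py (sketch)
    extensions = ['pygments.sphinxext']

    # A reST page then pulls in the generated class docs with, e.g.:
    #
    #   .. pygmentsdoc:: lexers
    #
    # PygmentsDoc.run() dispatches on that single argument ('lexers',
    # 'formatters' or 'filters') and renders one block per class from
    # the templates defined above.
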
diff --git a/pygments/style.py b/pygments/style.py
index 0fc01b40..b2b990ea 100644
--- a/pygments/style.py
+++ b/pygments/style.py
@@ -5,11 +5,12 @@
Basic style object.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.token import Token, STANDARD_TYPES
+from pygments.util import add_metaclass
class StyleMeta(type):
@@ -39,7 +40,7 @@ class StyleMeta(type):
continue
ndef = _styles.get(token.parent, None)
styledefs = obj.styles.get(token, '').split()
- if not ndef or token is None:
+ if not ndef or token is None:
ndef = ['', 0, 0, 0, '', '', 0, 0, 0]
elif 'noinherit' in styledefs and token is not Token:
ndef = _styles[Token][:]
@@ -104,8 +105,8 @@ class StyleMeta(type):
return len(cls._styles)
+@add_metaclass(StyleMeta)
class Style(object):
- __metaclass__ = StyleMeta
#: overall background color (``None`` means transparent)
background_color = '#ffffff'
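Editor's note: the hunk above replaces the Python-2-only __metaclass__ attribute with a decorator so StyleMeta applies on both Python 2 and 3. A minimal sketch of what a six-style add_metaclass helper does (the actual helper lives in pygments.util and may differ in detail):

    def add_metaclass(metaclass):
        """Class decorator that re-creates the decorated class with `metaclass`."""
        def wrapper(cls):
            body = dict(cls.__dict__)
            body.pop('__dict__', None)      # let the new class manage these itself
            body.pop('__weakref__', None)
            return metaclass(cls.__name__, cls.__bases__, body)
        return wrapper
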
diff --git a/pygments/styles/__init__.py b/pygments/styles/__init__.py
index 3d6ef73c..d7a0564a 100644
--- a/pygments/styles/__init__.py
+++ b/pygments/styles/__init__.py
@@ -5,7 +5,7 @@
Contains built-in styles.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -34,6 +34,13 @@ STYLE_MAP = {
'vs': 'vs::VisualStudioStyle',
'tango': 'tango::TangoStyle',
'rrt': 'rrt::RrtStyle',
+ 'xcode': 'xcode::XcodeStyle',
+ 'igor': 'igor::IgorStyle',
+ 'paraiso-light': 'paraiso_light::ParaisoLightStyle',
+ 'paraiso-dark': 'paraiso_dark::ParaisoDarkStyle',
+ 'lovelace': 'lovelace::LovelaceStyle',
+ 'algol': 'algol::AlgolStyle',
+ 'algol_nu': 'algol_nu::Algol_NuStyle',
}
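Editor's note: each new entry maps a public style name to 'module::ClassName' under pygments.styles, so the additions become available through the normal lookup helpers, for example:

    from pygments.styles import get_style_by_name, get_all_styles

    style = get_style_by_name('paraiso-dark')   # resolved via 'paraiso_dark::ParaisoDarkStyle'
    print(style.background_color)
    print(sorted(get_all_styles()))             # now also lists 'algol', 'lovelace', ...
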
diff --git a/pygments/styles/algol.py b/pygments/styles/algol.py
new file mode 100644
index 00000000..a8726009
--- /dev/null
+++ b/pygments/styles/algol.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.algol
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Algol publication style.
+
+ This style renders source code for publication of algorithms in
+ scientific papers and academic texts, where its format is frequently used.
+
+ It is based on the style of the revised Algol-60 language report[1].
+
+ o No colours, only black, white and shades of grey are used.
+ o Keywords are rendered in lowercase underline boldface.
+ o Builtins are rendered in lowercase boldface italic.
+ o Docstrings and pragmas are rendered in dark grey boldface.
+ o Library identifiers are rendered in dark grey boldface italic.
+ o Comments are rendered in grey italic.
+
+ To render keywords without underlining, refer to the `Algol_Nu` style.
+
+ For lowercase conversion of keywords and builtins in languages where
+ these are not or might not be lowercase, a supporting lexer is required.
+ The Algol and Modula-2 lexers automatically convert to lowercase whenever
+ this style is selected.
+
+ [1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`_
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, Operator
+
+
+class AlgolStyle(Style):
+
+ background_color = "#ffffff"
+ default_style = ""
+
+ styles = {
+ Comment: "italic #888",
+ Comment.Preproc: "bold noitalic #888",
+ Comment.Special: "bold noitalic #888",
+
+ Keyword: "underline bold",
+ Keyword.Declaration: "italic",
+
+ Name.Builtin: "bold italic",
+ Name.Builtin.Pseudo: "bold italic",
+ Name.Namespace: "bold italic #666",
+ Name.Class: "bold italic #666",
+ Name.Function: "bold italic #666",
+ Name.Variable: "bold italic #666",
+ Name.Constant: "bold italic #666",
+
+ Operator.Word: "bold",
+
+ String: "italic #666",
+
+ Error: "border:#FF0000"
+ }
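Since 'algol' is registered in STYLE_MAP above, the style can be selected by name. A hedged usage sketch follows; per the docstring, the lowercase rendering of keywords is done by a cooperating lexer (e.g. Modula-2), not by the style itself.

    from pygments import highlight
    from pygments.lexers import Modula2Lexer
    from pygments.formatters import HtmlFormatter

    code = 'MODULE Hello; BEGIN END Hello.'
    # Keywords come out bold and underlined in grey-scale, as described above.
    print(highlight(code, Modula2Lexer(), HtmlFormatter(style='algol')))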
diff --git a/pygments/styles/algol_nu.py b/pygments/styles/algol_nu.py
new file mode 100644
index 00000000..392838f2
--- /dev/null
+++ b/pygments/styles/algol_nu.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.algol_nu
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Algol publication style without underlining of keywords.
+
+ This style renders source code for publication of algorithms in
+ scientific papers and academic texts, where its format is frequently used.
+
+ It is based on the style of the revised Algol-60 language report[1].
+
+ o No colours, only black, white and shades of grey are used.
+ o Keywords are rendered in lowercase boldface.
+ o Builtins are rendered in lowercase boldface italic.
+ o Docstrings and pragmas are rendered in dark grey boldface.
+ o Library identifiers are rendered in dark grey boldface italic.
+ o Comments are rendered in grey italic.
+
+ To render keywords with underlining, refer to the `Algol` style.
+
+ For lowercase conversion of keywords and builtins in languages where
+ these are not or might not be lowercase, a supporting lexer is required.
+ The Algol and Modula-2 lexers automatically convert to lowercase whenever
+ this style is selected.
+
+ [1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`_
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, Operator
+
+
+class Algol_NuStyle(Style):
+
+ background_color = "#ffffff"
+ default_style = ""
+
+ styles = {
+ Comment: "italic #888",
+ Comment.Preproc: "bold noitalic #888",
+ Comment.Special: "bold noitalic #888",
+
+ Keyword: "bold",
+ Keyword.Declaration: "italic",
+
+ Name.Builtin: "bold italic",
+ Name.Builtin.Pseudo: "bold italic",
+ Name.Namespace: "bold italic #666",
+ Name.Class: "bold italic #666",
+ Name.Function: "bold italic #666",
+ Name.Variable: "bold italic #666",
+ Name.Constant: "bold italic #666",
+
+ Operator.Word: "bold",
+
+ String: "italic #666",
+
+ Error: "border:#FF0000"
+ }
diff --git a/pygments/styles/arduino.py b/pygments/styles/arduino.py
new file mode 100644
index 00000000..f6bcd1cd
--- /dev/null
+++ b/pygments/styles/arduino.py
@@ -0,0 +1,97 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.arduino
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Arduino® Syntax highlighting style.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class ArduinoStyle(Style):
+ """
+ The Arduino® language style. This style is designed to highlight Arduino source code, so expect the best results with it.
+ """
+
+ background_color = "#ffffff"
+ default_style = ""
+
+ styles = {
+ Whitespace: "", # class: 'w'
+ Error: "#a61717", # class: 'err'
+
+ Comment: "#95a5a6", # class: 'c'
+ Comment.Multiline: "", # class: 'cm'
+ Comment.Preproc: "#434f54", # class: 'cp'
+ Comment.Single: "", # class: 'c1'
+ Comment.Special: "", # class: 'cs'
+
+ Keyword: "#728E00", # class: 'k'
+ Keyword.Constant: "#00979D", # class: 'kc'
+ Keyword.Declaration: "", # class: 'kd'
+ Keyword.Namespace: "", # class: 'kn'
+ Keyword.Pseudo: "#00979D", # class: 'kp'
+ Keyword.Reserved: "", # class: 'kr'
+ Keyword.Type: "#00979D", # class: 'kt'
+
+ Operator: "#434f54", # class: 'o'
+ Operator.Word: "", # class: 'ow'
+
+ Name: "#434f54", # class: 'n'
+ Name.Attribute: "", # class: 'na'
+ Name.Builtin: "", # class: 'nb'
+ Name.Builtin.Pseudo: "", # class: 'bp'
+ Name.Class: "", # class: 'nc'
+ Name.Constant: "", # class: 'no'
+ Name.Decorator: "", # class: 'nd'
+ Name.Entity: "", # class: 'ni'
+ Name.Exception: "", # class: 'ne'
+ Name.Function: "#D35400", # class: 'nf'
+ Name.Property: "", # class: 'py'
+ Name.Label: "", # class: 'nl'
+ Name.Namespace: "", # class: 'nn'
+ Name.Other: "#728E00", # class: 'nx'
+ Name.Tag: "", # class: 'nt'
+ Name.Variable: "", # class: 'nv'
+ Name.Variable.Class: "", # class: 'vc'
+ Name.Variable.Global: "", # class: 'vg'
+ Name.Variable.Instance: "", # class: 'vi'
+
+ Number: "#434f54", # class: 'm'
+ Number.Float: "", # class: 'mf'
+ Number.Hex: "", # class: 'mh'
+ Number.Integer: "", # class: 'mi'
+ Number.Integer.Long: "", # class: 'il'
+ Number.Oct: "", # class: 'mo'
+
+ String: "#7F8C8D", # class: 's'
+ String.Backtick: "", # class: 'sb'
+ String.Char: "", # class: 'sc'
+ String.Doc: "", # class: 'sd'
+ String.Double: "", # class: 's2'
+ String.Escape: "", # class: 'se'
+ String.Heredoc: "", # class: 'sh'
+ String.Interpol: "", # class: 'si'
+ String.Other: "", # class: 'sx'
+ String.Regex: "", # class: 'sr'
+ String.Single: "", # class: 's1'
+ String.Symbol: "", # class: 'ss'
+
+ Generic: "", # class: 'g'
+ Generic.Deleted: "", # class: 'gd',
+ Generic.Emph: "", # class: 'ge'
+ Generic.Error: "", # class: 'gr'
+ Generic.Heading: "", # class: 'gh'
+ Generic.Inserted: "", # class: 'gi'
+ Generic.Output: "", # class: 'go'
+ Generic.Prompt: "", # class: 'gp'
+ Generic.Strong: "", # class: 'gs'
+ Generic.Subheading: "", # class: 'gu'
+ Generic.Traceback: "", # class: 'gt'
+ }
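A formatter accepts either a registered style name or a Style subclass, so ArduinoStyle can be used directly even though this hunk does not show a STYLE_MAP entry for it. A small sketch; the C++ lexer stands in here for Arduino sketches.

    from pygments import highlight
    from pygments.lexers import CppLexer
    from pygments.formatters import HtmlFormatter
    from pygments.styles.arduino import ArduinoStyle

    sketch = 'void setup() { pinMode(13, OUTPUT); }'
    print(highlight(sketch, CppLexer(), HtmlFormatter(style=ArduinoStyle)))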
diff --git a/pygments/styles/autumn.py b/pygments/styles/autumn.py
index 3960536b..2040659e 100644
--- a/pygments/styles/autumn.py
+++ b/pygments/styles/autumn.py
@@ -5,7 +5,7 @@
A colorful style, inspired by the terminal highlighting style.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/borland.py b/pygments/styles/borland.py
index 9858034e..2b1f4ca9 100644
--- a/pygments/styles/borland.py
+++ b/pygments/styles/borland.py
@@ -5,7 +5,7 @@
Style similar to the style used in the Borland IDEs.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/bw.py b/pygments/styles/bw.py
index 170442ad..56d78bd6 100644
--- a/pygments/styles/bw.py
+++ b/pygments/styles/bw.py
@@ -5,7 +5,7 @@
Simple black/white only style.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/colorful.py b/pygments/styles/colorful.py
index eb595467..ebedc02f 100644
--- a/pygments/styles/colorful.py
+++ b/pygments/styles/colorful.py
@@ -5,7 +5,7 @@
A colorful style, inspired by CodeRay.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/default.py b/pygments/styles/default.py
index 77bdac0d..df99768c 100644
--- a/pygments/styles/default.py
+++ b/pygments/styles/default.py
@@ -5,7 +5,7 @@
The default highlighting style.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/emacs.py b/pygments/styles/emacs.py
index 9f8b4074..27ae19ad 100644
--- a/pygments/styles/emacs.py
+++ b/pygments/styles/emacs.py
@@ -5,7 +5,7 @@
A highlighting style for Pygments, inspired by Emacs.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/friendly.py b/pygments/styles/friendly.py
index 732a1252..d5256a4b 100644
--- a/pygments/styles/friendly.py
+++ b/pygments/styles/friendly.py
@@ -5,7 +5,7 @@
A modern style based on the VIM pyte theme.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/fruity.py b/pygments/styles/fruity.py
index 45334159..99bbae6f 100644
--- a/pygments/styles/fruity.py
+++ b/pygments/styles/fruity.py
@@ -5,7 +5,7 @@
pygments version of my "fruity" vim theme.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/igor.py b/pygments/styles/igor.py
new file mode 100644
index 00000000..8f552709
--- /dev/null
+++ b/pygments/styles/igor.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.igor
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Igor Pro default style.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String
+
+
+class IgorStyle(Style):
+ """
+ Pygments version of the official colors for Igor Pro procedures.
+ """
+ default_style = ""
+
+ styles = {
+ Comment: 'italic #FF0000',
+ Keyword: '#0000FF',
+ Name.Function: '#C34E00',
+ Name.Decorator: '#CC00A3',
+ Name.Class: '#007575',
+ String: '#009C00'
+ }
diff --git a/pygments/styles/lovelace.py b/pygments/styles/lovelace.py
new file mode 100644
index 00000000..31bd5505
--- /dev/null
+++ b/pygments/styles/lovelace.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.lovelace
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lovelace by Miikka Salminen
+
+ Pygments style by Miikka Salminen (https://github.com/miikkas)
+ A desaturated, somewhat subdued style created for the Lovelace interactive
+ learning environment.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Punctuation, Generic, Whitespace
+
+
+class LovelaceStyle(Style):
+ """
+ The style used in Lovelace interactive learning environment. Tries to avoid
+ the "angry fruit salad" effect with desaturated and dim colours.
+ """
+ _KW_BLUE = '#2838b0'
+ _NAME_GREEN = '#388038'
+ _DOC_ORANGE = '#b85820'
+ _OW_PURPLE = '#a848a8'
+ _FUN_BROWN = '#785840'
+ _STR_RED = '#b83838'
+ _CLS_CYAN = '#287088'
+ _ESCAPE_LIME = '#709030'
+ _LABEL_CYAN = '#289870'
+ _EXCEPT_YELLOW = '#908828'
+
+ default_style = '#222222'
+
+ styles = {
+ Whitespace: '#a89028',
+ Comment: 'italic #888888',
+ Comment.Hashbang: _CLS_CYAN,
+ Comment.Multiline: '#888888',
+ Comment.Preproc: 'noitalic '+_LABEL_CYAN,
+
+ Keyword: _KW_BLUE,
+ Keyword.Constant: 'italic #444444',
+ Keyword.Declaration: 'italic',
+ Keyword.Type: 'italic',
+
+ Operator: '#666666',
+ Operator.Word: _OW_PURPLE,
+
+ Punctuation: '#888888',
+
+ Name.Attribute: _NAME_GREEN,
+ Name.Builtin: _NAME_GREEN,
+ Name.Builtin.Pseudo: 'italic',
+ Name.Class: _CLS_CYAN,
+ Name.Constant: _DOC_ORANGE,
+ Name.Decorator: _CLS_CYAN,
+ Name.Entity: _ESCAPE_LIME,
+ Name.Exception: _EXCEPT_YELLOW,
+ Name.Function: _FUN_BROWN,
+ Name.Label: _LABEL_CYAN,
+ Name.Namespace: _LABEL_CYAN,
+ Name.Tag: _KW_BLUE,
+ Name.Variable: '#b04040',
+ Name.Variable.Global:_EXCEPT_YELLOW,
+
+ String: _STR_RED,
+ String.Char: _OW_PURPLE,
+ String.Doc: 'italic '+_DOC_ORANGE,
+ String.Escape: _ESCAPE_LIME,
+ String.Interpol: 'underline',
+ String.Other: _OW_PURPLE,
+ String.Regex: _OW_PURPLE,
+
+ Number: '#444444',
+
+ Generic.Deleted: '#c02828',
+ Generic.Emph: 'italic',
+ Generic.Error: '#c02828',
+ Generic.Heading: '#666666',
+ Generic.Subheading: '#444444',
+ Generic.Inserted: _NAME_GREEN,
+ Generic.Output: '#666666',
+ Generic.Prompt: '#444444',
+ Generic.Strong: 'bold',
+ Generic.Traceback: _KW_BLUE,
+
+ Error: 'bg:'+_OW_PURPLE,
+ }
diff --git a/pygments/styles/manni.py b/pygments/styles/manni.py
index 036a2120..dd09f263 100644
--- a/pygments/styles/manni.py
+++ b/pygments/styles/manni.py
@@ -8,7 +8,7 @@
This is a port of the style used in the `php port`_ of pygments
by Manni. The style is called 'default' there.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/monokai.py b/pygments/styles/monokai.py
index 31dc83b2..9c2a0a87 100644
--- a/pygments/styles/monokai.py
+++ b/pygments/styles/monokai.py
@@ -7,7 +7,7 @@
http://www.monokai.nl/blog/2006/07/15/textmate-color-theme/
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -93,14 +93,14 @@ class MonokaiStyle(Style):
String.Symbol: "", # class: 'ss'
Generic: "", # class: 'g'
- Generic.Deleted: "", # class: 'gd',
+ Generic.Deleted: "#f92672", # class: 'gd',
Generic.Emph: "italic", # class: 'ge'
Generic.Error: "", # class: 'gr'
Generic.Heading: "", # class: 'gh'
- Generic.Inserted: "", # class: 'gi'
+ Generic.Inserted: "#a6e22e", # class: 'gi'
Generic.Output: "", # class: 'go'
Generic.Prompt: "", # class: 'gp'
Generic.Strong: "bold", # class: 'gs'
- Generic.Subheading: "", # class: 'gu'
+ Generic.Subheading: "#75715e", # class: 'gu'
Generic.Traceback: "", # class: 'gt'
}
diff --git a/pygments/styles/murphy.py b/pygments/styles/murphy.py
index dbf4eba9..1f83cb26 100644
--- a/pygments/styles/murphy.py
+++ b/pygments/styles/murphy.py
@@ -5,7 +5,7 @@
Murphy's style from CodeRay.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/native.py b/pygments/styles/native.py
index 0de84386..33ea3c17 100644
--- a/pygments/styles/native.py
+++ b/pygments/styles/native.py
@@ -5,7 +5,7 @@
pygments version of my "native" vim theme.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/paraiso_dark.py b/pygments/styles/paraiso_dark.py
new file mode 100644
index 00000000..f906f87d
--- /dev/null
+++ b/pygments/styles/paraiso_dark.py
@@ -0,0 +1,125 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.paraiso_dark
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Paraíso (Dark) by Jan T. Sott
+
+ Pygments template by Jan T. Sott (https://github.com/idleberg)
+ Created with Base16 Builder by Chris Kempson
+ (https://github.com/chriskempson/base16-builder).
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, Text, \
+ Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
+
+
+BACKGROUND = "#2f1e2e"
+CURRENT_LINE = "#41323f"
+SELECTION = "#4f424c"
+FOREGROUND = "#e7e9db"
+COMMENT = "#776e71"
+RED = "#ef6155"
+ORANGE = "#f99b15"
+YELLOW = "#fec418"
+GREEN = "#48b685"
+AQUA = "#5bc4bf"
+BLUE = "#06b6ef"
+PURPLE = "#815ba4"
+
+
+class ParaisoDarkStyle(Style):
+
+ default_style = ''
+
+ background_color = BACKGROUND
+ highlight_color = SELECTION
+
+ styles = {
+ # No corresponding class for the following:
+ Text: FOREGROUND, # class: ''
+ Whitespace: "", # class: 'w'
+ Error: RED, # class: 'err'
+ Other: "", # class 'x'
+
+ Comment: COMMENT, # class: 'c'
+ Comment.Multiline: "", # class: 'cm'
+ Comment.Preproc: "", # class: 'cp'
+ Comment.Single: "", # class: 'c1'
+ Comment.Special: "", # class: 'cs'
+
+ Keyword: PURPLE, # class: 'k'
+ Keyword.Constant: "", # class: 'kc'
+ Keyword.Declaration: "", # class: 'kd'
+ Keyword.Namespace: AQUA, # class: 'kn'
+ Keyword.Pseudo: "", # class: 'kp'
+ Keyword.Reserved: "", # class: 'kr'
+ Keyword.Type: YELLOW, # class: 'kt'
+
+ Operator: AQUA, # class: 'o'
+ Operator.Word: "", # class: 'ow' - like keywords
+
+ Punctuation: FOREGROUND, # class: 'p'
+
+ Name: FOREGROUND, # class: 'n'
+ Name.Attribute: BLUE, # class: 'na' - to be revised
+ Name.Builtin: "", # class: 'nb'
+ Name.Builtin.Pseudo: "", # class: 'bp'
+ Name.Class: YELLOW, # class: 'nc' - to be revised
+ Name.Constant: RED, # class: 'no' - to be revised
+ Name.Decorator: AQUA, # class: 'nd' - to be revised
+ Name.Entity: "", # class: 'ni'
+ Name.Exception: RED, # class: 'ne'
+ Name.Function: BLUE, # class: 'nf'
+ Name.Property: "", # class: 'py'
+ Name.Label: "", # class: 'nl'
+ Name.Namespace: YELLOW, # class: 'nn' - to be revised
+ Name.Other: BLUE, # class: 'nx'
+ Name.Tag: AQUA, # class: 'nt' - like a keyword
+ Name.Variable: RED, # class: 'nv' - to be revised
+ Name.Variable.Class: "", # class: 'vc' - to be revised
+ Name.Variable.Global: "", # class: 'vg' - to be revised
+ Name.Variable.Instance: "", # class: 'vi' - to be revised
+
+ Number: ORANGE, # class: 'm'
+ Number.Float: "", # class: 'mf'
+ Number.Hex: "", # class: 'mh'
+ Number.Integer: "", # class: 'mi'
+ Number.Integer.Long: "", # class: 'il'
+ Number.Oct: "", # class: 'mo'
+
+ Literal: ORANGE, # class: 'l'
+ Literal.Date: GREEN, # class: 'ld'
+
+ String: GREEN, # class: 's'
+ String.Backtick: "", # class: 'sb'
+ String.Char: FOREGROUND, # class: 'sc'
+ String.Doc: COMMENT, # class: 'sd' - like a comment
+ String.Double: "", # class: 's2'
+ String.Escape: ORANGE, # class: 'se'
+ String.Heredoc: "", # class: 'sh'
+ String.Interpol: ORANGE, # class: 'si'
+ String.Other: "", # class: 'sx'
+ String.Regex: "", # class: 'sr'
+ String.Single: "", # class: 's1'
+ String.Symbol: "", # class: 'ss'
+
+ Generic: "", # class: 'g'
+ Generic.Deleted: RED, # class: 'gd',
+ Generic.Emph: "italic", # class: 'ge'
+ Generic.Error: "", # class: 'gr'
+ Generic.Heading: "bold " + FOREGROUND, # class: 'gh'
+ Generic.Inserted: GREEN, # class: 'gi'
+ Generic.Output: "", # class: 'go'
+ Generic.Prompt: "bold " + COMMENT, # class: 'gp'
+ Generic.Strong: "bold", # class: 'gs'
+ Generic.Subheading: "bold " + AQUA, # class: 'gu'
+ Generic.Traceback: "", # class: 'gt'
+ }
diff --git a/pygments/styles/paraiso_light.py b/pygments/styles/paraiso_light.py
new file mode 100644
index 00000000..5424d122
--- /dev/null
+++ b/pygments/styles/paraiso_light.py
@@ -0,0 +1,125 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.paraiso_light
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Paraíso (Light) by Jan T. Sott
+
+ Pygments template by Jan T. Sott (https://github.com/idleberg)
+ Created with Base16 Builder by Chris Kempson
+ (https://github.com/chriskempson/base16-builder).
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, Text, \
+ Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
+
+
+BACKGROUND = "#e7e9db"
+CURRENT_LINE = "#b9b6b0"
+SELECTION = "#a39e9b"
+FOREGROUND = "#2f1e2e"
+COMMENT = "#8d8687"
+RED = "#ef6155"
+ORANGE = "#f99b15"
+YELLOW = "#fec418"
+GREEN = "#48b685"
+AQUA = "#5bc4bf"
+BLUE = "#06b6ef"
+PURPLE = "#815ba4"
+
+
+class ParaisoLightStyle(Style):
+
+ default_style = ''
+
+ background_color = BACKGROUND
+ highlight_color = SELECTION
+
+ styles = {
+ # No corresponding class for the following:
+ Text: FOREGROUND, # class: ''
+ Whitespace: "", # class: 'w'
+ Error: RED, # class: 'err'
+ Other: "", # class 'x'
+
+ Comment: COMMENT, # class: 'c'
+ Comment.Multiline: "", # class: 'cm'
+ Comment.Preproc: "", # class: 'cp'
+ Comment.Single: "", # class: 'c1'
+ Comment.Special: "", # class: 'cs'
+
+ Keyword: PURPLE, # class: 'k'
+ Keyword.Constant: "", # class: 'kc'
+ Keyword.Declaration: "", # class: 'kd'
+ Keyword.Namespace: AQUA, # class: 'kn'
+ Keyword.Pseudo: "", # class: 'kp'
+ Keyword.Reserved: "", # class: 'kr'
+ Keyword.Type: YELLOW, # class: 'kt'
+
+ Operator: AQUA, # class: 'o'
+ Operator.Word: "", # class: 'ow' - like keywords
+
+ Punctuation: FOREGROUND, # class: 'p'
+
+ Name: FOREGROUND, # class: 'n'
+ Name.Attribute: BLUE, # class: 'na' - to be revised
+ Name.Builtin: "", # class: 'nb'
+ Name.Builtin.Pseudo: "", # class: 'bp'
+ Name.Class: YELLOW, # class: 'nc' - to be revised
+ Name.Constant: RED, # class: 'no' - to be revised
+ Name.Decorator: AQUA, # class: 'nd' - to be revised
+ Name.Entity: "", # class: 'ni'
+ Name.Exception: RED, # class: 'ne'
+ Name.Function: BLUE, # class: 'nf'
+ Name.Property: "", # class: 'py'
+ Name.Label: "", # class: 'nl'
+ Name.Namespace: YELLOW, # class: 'nn' - to be revised
+ Name.Other: BLUE, # class: 'nx'
+ Name.Tag: AQUA, # class: 'nt' - like a keyword
+ Name.Variable: RED, # class: 'nv' - to be revised
+ Name.Variable.Class: "", # class: 'vc' - to be revised
+ Name.Variable.Global: "", # class: 'vg' - to be revised
+ Name.Variable.Instance: "", # class: 'vi' - to be revised
+
+ Number: ORANGE, # class: 'm'
+ Number.Float: "", # class: 'mf'
+ Number.Hex: "", # class: 'mh'
+ Number.Integer: "", # class: 'mi'
+ Number.Integer.Long: "", # class: 'il'
+ Number.Oct: "", # class: 'mo'
+
+ Literal: ORANGE, # class: 'l'
+ Literal.Date: GREEN, # class: 'ld'
+
+ String: GREEN, # class: 's'
+ String.Backtick: "", # class: 'sb'
+ String.Char: FOREGROUND, # class: 'sc'
+ String.Doc: COMMENT, # class: 'sd' - like a comment
+ String.Double: "", # class: 's2'
+ String.Escape: ORANGE, # class: 'se'
+ String.Heredoc: "", # class: 'sh'
+ String.Interpol: ORANGE, # class: 'si'
+ String.Other: "", # class: 'sx'
+ String.Regex: "", # class: 'sr'
+ String.Single: "", # class: 's1'
+ String.Symbol: "", # class: 'ss'
+
+ Generic: "", # class: 'g'
+ Generic.Deleted: RED, # class: 'gd',
+ Generic.Emph: "italic", # class: 'ge'
+ Generic.Error: "", # class: 'gr'
+ Generic.Heading: "bold " + FOREGROUND, # class: 'gh'
+ Generic.Inserted: GREEN, # class: 'gi'
+ Generic.Output: "", # class: 'go'
+ Generic.Prompt: "bold " + COMMENT, # class: 'gp'
+ Generic.Strong: "bold", # class: 'gs'
+ Generic.Subheading: "bold " + AQUA, # class: 'gu'
+ Generic.Traceback: "", # class: 'gt'
+ }
diff --git a/pygments/styles/pastie.py b/pygments/styles/pastie.py
index 2a2f386f..f65940be 100644
--- a/pygments/styles/pastie.py
+++ b/pygments/styles/pastie.py
@@ -7,7 +7,7 @@
.. _pastie: http://pastie.caboo.se/
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/perldoc.py b/pygments/styles/perldoc.py
index b8b67b29..47a097ca 100644
--- a/pygments/styles/perldoc.py
+++ b/pygments/styles/perldoc.py
@@ -7,7 +7,7 @@
.. _perldoc: http://perldoc.perl.org/
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/rrt.py b/pygments/styles/rrt.py
index 1a2fc6a4..342c9fc6 100644
--- a/pygments/styles/rrt.py
+++ b/pygments/styles/rrt.py
@@ -5,7 +5,7 @@
pygments "rrt" theme, based on Zap and Emacs defaults.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/tango.py b/pygments/styles/tango.py
index 7b1c4f3c..c65850bd 100644
--- a/pygments/styles/tango.py
+++ b/pygments/styles/tango.py
@@ -33,7 +33,7 @@
have been chosen to have the same style. Similarly, keywords (Keyword.*),
and Operator.Word (and, or, in) have been assigned the same style.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/trac.py b/pygments/styles/trac.py
index 714e36cc..bf36ce03 100644
--- a/pygments/styles/trac.py
+++ b/pygments/styles/trac.py
@@ -5,7 +5,7 @@
Port of the default trac highlighter design.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/vim.py b/pygments/styles/vim.py
index a5462db3..383fd8f0 100644
--- a/pygments/styles/vim.py
+++ b/pygments/styles/vim.py
@@ -5,7 +5,7 @@
A highlighting style for Pygments, inspired by vim.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/vs.py b/pygments/styles/vs.py
index 14a56faa..78efc547 100644
--- a/pygments/styles/vs.py
+++ b/pygments/styles/vs.py
@@ -5,7 +5,7 @@
Simple style with MS Visual Studio colors.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/pygments/styles/xcode.py b/pygments/styles/xcode.py
new file mode 100644
index 00000000..3dc9240d
--- /dev/null
+++ b/pygments/styles/xcode.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.xcode
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Style similar to the `Xcode` default theme.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Literal
+
+
+class XcodeStyle(Style):
+ """
+ Style similar to the Xcode default colouring theme.
+ """
+
+ default_style = ''
+
+ styles = {
+ Comment: '#177500',
+ Comment.Preproc: '#633820',
+
+ String: '#C41A16',
+ String.Char: '#2300CE',
+
+ Operator: '#000000',
+
+ Keyword: '#A90D91',
+
+ Name: '#000000',
+ Name.Attribute: '#836C28',
+ Name.Class: '#3F6E75',
+ Name.Function: '#000000',
+ Name.Builtin: '#A90D91',
+ # In Obj-C code this token is used to colour Cocoa types
+ Name.Builtin.Pseudo: '#5B269A',
+ Name.Variable: '#000000',
+ Name.Tag: '#000000',
+ Name.Decorator: '#000000',
+ # Workaround for a BUG here: lexer treats multiline method signatures as labels
+ Name.Label: '#000000',
+
+ Literal: '#1C01CE',
+ Number: '#1C01CE',
+ Error: '#000000',
+ }
diff --git a/pygments/token.py b/pygments/token.py
index 19a83f2e..bfdfc114 100644
--- a/pygments/token.py
+++ b/pygments/token.py
@@ -5,7 +5,7 @@
Basic token types and the standard tokens.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -49,6 +49,7 @@ Token = _TokenType()
# Special token types
Text = Token.Text
Whitespace = Text.Whitespace
+Escape = Token.Escape
Error = Token.Error
# Text that doesn't belong to this lexer (e.g. HTML in PHP)
Other = Token.Other
@@ -116,6 +117,7 @@ STANDARD_TYPES = {
Text: '',
Whitespace: 'w',
+ Escape: 'esc',
Error: 'err',
Other: 'x',
@@ -164,6 +166,7 @@ STANDARD_TYPES = {
String.Symbol: 'ss',
Number: 'm',
+ Number.Bin: 'mb',
Number.Float: 'mf',
Number.Hex: 'mh',
Number.Integer: 'mi',
@@ -176,6 +179,7 @@ STANDARD_TYPES = {
Punctuation: 'p',
Comment: 'c',
+ Comment.Hashbang: 'ch',
Comment.Multiline: 'cm',
Comment.Preproc: 'cp',
Comment.Single: 'c1',
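The new token types (Escape, Number.Bin, Comment.Hashbang) behave like any other node in the token hierarchy, and their STANDARD_TYPES entries supply the short CSS classes the HTML formatter emits. A quick sketch of what the additions guarantee:

    from pygments.token import Token, Comment, Number, STANDARD_TYPES

    assert Comment.Hashbang in Comment            # an ordinary subtype of Comment
    assert STANDARD_TYPES[Token.Escape] == 'esc'
    assert STANDARD_TYPES[Comment.Hashbang] == 'ch'
    assert STANDARD_TYPES[Number.Bin] == 'mb'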
diff --git a/pygments/unistring.py b/pygments/unistring.py
index 4b07028e..49a2819a 100644
--- a/pygments/unistring.py
+++ b/pygments/unistring.py
@@ -8,133 +8,210 @@
Inspired by chartypes_create.py from the MoinMoin project.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-from pygments.util import u_prefix
-Cc = u'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f'
+import sys
-Cf = u'\xad\u0600\u0601\u0602\u0603\u06dd\u070f\u17b4\u17b5\u200b\u200c\u200d\u200e\u200f\u202a\u202b\u202c\u202d\u202e\u2060\u2061\u2062\u2063\u2064\u206a\u206b\u206c\u206d\u206e\u206f\ufeff\ufff9\ufffa\ufffb'
+Cc = u'\x00-\x1f\x7f-\x9f'
-Cn = u'\u0378\u0379\u037f\u0380\u0381\u0382\u0383\u038b\u038d\u03a2\u0526\u0527\u0528\u0529\u052a\u052b\u052c\u052d\u052e\u052f\u0530\u0557\u0558\u0560\u0588\u058b\u058c\u058d\u058e\u058f\u0590\u05c8\u05c9\u05ca\u05cb\u05cc\u05cd\u05ce\u05cf\u05eb\u05ec\u05ed\u05ee\u05ef\u05f5\u05f6\u05f7\u05f8\u05f9\u05fa\u05fb\u05fc\u05fd\u05fe\u05ff\u0604\u0605\u061c\u061d\u0620\u065f\u070e\u074b\u074c\u07b2\u07b3\u07b4\u07b5\u07b6\u07b7\u07b8\u07b9\u07ba\u07bb\u07bc\u07bd\u07be\u07bf\u07fb\u07fc\u07fd\u07fe\u07ff\u082e\u082f\u083f\u0840\u0841\u0842\u0843\u0844\u0845\u0846\u0847\u0848\u0849\u084a\u084b\u084c\u084d\u084e\u084f\u0850\u0851\u0852\u0853\u0854\u0855\u0856\u0857\u0858\u0859\u085a\u085b\u085c\u085d\u085e\u085f\u0860\u0861\u0862\u0863\u0864\u0865\u0866\u0867\u0868\u0869\u086a\u086b\u086c\u086d\u086e\u086f\u0870\u0871\u0872\u0873\u0874\u0875\u0876\u0877\u0878\u0879\u087a\u087b\u087c\u087d\u087e\u087f\u0880\u0881\u0882\u0883\u0884\u0885\u0886\u0887\u0888\u0889\u088a\u088b\u088c\u088d\u088e\u088f\u0890\u0891\u0892\u0893\u0894\u0895\u0896\u0897\u0898\u0899\u089a\u089b\u089c\u089d\u089e\u089f\u08a0\u08a1\u08a2\u08a3\u08a4\u08a5\u08a6\u08a7\u08a8\u08a9\u08aa\u08ab\u08ac\u08ad\u08ae\u08af\u08b0\u08b1\u08b2\u08b3\u08b4\u08b5\u08b6\u08b7\u08b8\u08b9\u08ba\u08bb\u08bc\u08bd\u08be\u08bf\u08c0\u08c1\u08c2\u08c3\u08c4\u08c5\u08c6\u08c7\u08c8\u08c9\u08ca\u08cb\u08cc\u08cd\u08ce\u08cf\u08d0\u08d1\u08d2\u08d3\u08d4\u08d5\u08d6\u08d7\u08d8\u08d9\u08da\u08db\u08dc\u08dd\u08de\u08df\u08e0\u08e1\u08e2\u08e3\u08e4\u08e5\u08e6\u08e7\u08e8\u08e9\u08ea\u08eb\u08ec\u08ed\u08ee\u08ef\u08f0\u08f1\u08f2\u08f3\u08f4\u08f5\u08f6\u08f7\u08f8\u08f9\u08fa\u08fb\u08fc\u08fd\u08fe\u08ff\u093a\u093b\u094f\u0956\u0957\u0973\u0974\u0975\u0976\u0977\u0978\u0980\u0984\u098d\u098e\u0991\u0992\u09a9\u09b1\u09b3\u09b4\u09b5\u09ba\u09bb\u09c5\u09c6\u09c9\u09ca\u09cf\u09d0\u09d1\u09d2\u09d3\u09d4\u09d5\u09d6\u09d8\u09d9\u09da\u09db\u09de\u09e4\u09e5\u09fc\u09fd\u09fe\u09ff\u0a00\u0a04\u0a0b\u0a0c\u0a0d\u0a0e\u0a11\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a\u0a3b\u0a3d\u0a43\u0a44\u0a45\u0a46\u0a49\u0a4a\u0a4e\u0a4f\u0a50\u0a52\u0a53\u0a54\u0a55\u0a56\u0a57\u0a58\u0a5d\u0a5f\u0a60\u0a61\u0a62\u0a63\u0a64\u0a65\u0a76\u0a77\u0a78\u0a79\u0a7a\u0a7b\u0a7c\u0a7d\u0a7e\u0a7f\u0a80\u0a84\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba\u0abb\u0ac6\u0aca\u0ace\u0acf\u0ad1\u0ad2\u0ad3\u0ad4\u0ad5\u0ad6\u0ad7\u0ad8\u0ad9\u0ada\u0adb\u0adc\u0add\u0ade\u0adf\u0ae4\u0ae5\u0af0\u0af2\u0af3\u0af4\u0af5\u0af6\u0af7\u0af8\u0af9\u0afa\u0afb\u0afc\u0afd\u0afe\u0aff\u0b00\u0b04\u0b0d\u0b0e\u0b11\u0b12\u0b29\u0b31\u0b34\u0b3a\u0b3b\u0b45\u0b46\u0b49\u0b4a\u0b4e\u0b4f\u0b50\u0b51\u0b52\u0b53\u0b54\u0b55\u0b58\u0b59\u0b5a\u0b5b\u0b5e\u0b64\u0b65\u0b72\u0b73\u0b74\u0b75\u0b76\u0b77\u0b78\u0b79\u0b7a\u0b7b\u0b7c\u0b7d\u0b7e\u0b7f\u0b80\u0b81\u0b84\u0b8b\u0b8c\u0b8d\u0b91\u0b96\u0b97\u0b98\u0b9b\u0b9d\u0ba0\u0ba1\u0ba2\u0ba5\u0ba6\u0ba7\u0bab\u0bac\u0bad\u0bba\u0bbb\u0bbc\u0bbd\u0bc3\u0bc4\u0bc5\u0bc9\u0bce\u0bcf\u0bd1\u0bd2\u0bd3\u0bd4\u0bd5\u0bd6\u0bd8\u0bd9\u0bda\u0bdb\u0bdc\u0bdd\u0bde\u0bdf\u0be0\u0be1\u0be2\u0be3\u0be4\u0be5\u0bfb\u0bfc\u0bfd\u0bfe\u0bff\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a\u0c3b\u0c3c\u0c45\u0c49\u0c4e\u0c4f\u0c50\u0c51\u0c52\u0c53\u0c54\u0c57\u0c5a\u0c5b\u0c5c\u0c5d\u0c5e\u0c5f\u0c64\u0c65\u0c70\u0c71\u0c72\u0c73\u0c74\u0c75\u0c76\u0c77\u0c80\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba\u0cbb\u0cc5\u0cc9\u0cce\u0ccf\u0cd0\u0cd1\u0cd2\u0cd3\u0cd4\u0cd7\u0cd8\u0cd9\u0cda\u0cdb\u0cdc\u0cdd\u0cdf\u0ce4\u0ce5\u0cf0\u0cf3\u0cf4\u0cf5\u0cf6\u0cf7\u0cf8\u0cf9\u0cfa\
u0cfb\u0cfc\u0cfd\u0cfe\u0cff\u0d00\u0d01\u0d04\u0d0d\u0d11\u0d29\u0d3a\u0d3b\u0d3c\u0d45\u0d49\u0d4e\u0d4f\u0d50\u0d51\u0d52\u0d53\u0d54\u0d55\u0d56\u0d58\u0d59\u0d5a\u0d5b\u0d5c\u0d5d\u0d5e\u0d5f\u0d64\u0d65\u0d76\u0d77\u0d78\u0d80\u0d81\u0d84\u0d97\u0d98\u0d99\u0db2\u0dbc\u0dbe\u0dbf\u0dc7\u0dc8\u0dc9\u0dcb\u0dcc\u0dcd\u0dce\u0dd5\u0dd7\u0de0\u0de1\u0de2\u0de3\u0de4\u0de5\u0de6\u0de7\u0de8\u0de9\u0dea\u0deb\u0dec\u0ded\u0dee\u0def\u0df0\u0df1\u0df5\u0df6\u0df7\u0df8\u0df9\u0dfa\u0dfb\u0dfc\u0dfd\u0dfe\u0dff\u0e00\u0e3b\u0e3c\u0e3d\u0e3e\u0e5c\u0e5d\u0e5e\u0e5f\u0e60\u0e61\u0e62\u0e63\u0e64\u0e65\u0e66\u0e67\u0e68\u0e69\u0e6a\u0e6b\u0e6c\u0e6d\u0e6e\u0e6f\u0e70\u0e71\u0e72\u0e73\u0e74\u0e75\u0e76\u0e77\u0e78\u0e79\u0e7a\u0e7b\u0e7c\u0e7d\u0e7e\u0e7f\u0e80\u0e83\u0e85\u0e86\u0e89\u0e8b\u0e8c\u0e8e\u0e8f\u0e90\u0e91\u0e92\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8\u0ea9\u0eac\u0eba\u0ebe\u0ebf\u0ec5\u0ec7\u0ece\u0ecf\u0eda\u0edb\u0ede\u0edf\u0ee0\u0ee1\u0ee2\u0ee3\u0ee4\u0ee5\u0ee6\u0ee7\u0ee8\u0ee9\u0eea\u0eeb\u0eec\u0eed\u0eee\u0eef\u0ef0\u0ef1\u0ef2\u0ef3\u0ef4\u0ef5\u0ef6\u0ef7\u0ef8\u0ef9\u0efa\u0efb\u0efc\u0efd\u0efe\u0eff\u0f48\u0f6d\u0f6e\u0f6f\u0f70\u0f8c\u0f8d\u0f8e\u0f8f\u0f98\u0fbd\u0fcd\u0fd9\u0fda\u0fdb\u0fdc\u0fdd\u0fde\u0fdf\u0fe0\u0fe1\u0fe2\u0fe3\u0fe4\u0fe5\u0fe6\u0fe7\u0fe8\u0fe9\u0fea\u0feb\u0fec\u0fed\u0fee\u0fef\u0ff0\u0ff1\u0ff2\u0ff3\u0ff4\u0ff5\u0ff6\u0ff7\u0ff8\u0ff9\u0ffa\u0ffb\u0ffc\u0ffd\u0ffe\u0fff\u10c6\u10c7\u10c8\u10c9\u10ca\u10cb\u10cc\u10cd\u10ce\u10cf\u10fd\u10fe\u10ff\u1249\u124e\u124f\u1257\u1259\u125e\u125f\u1289\u128e\u128f\u12b1\u12b6\u12b7\u12bf\u12c1\u12c6\u12c7\u12d7\u1311\u1316\u1317\u135b\u135c\u135d\u135e\u137d\u137e\u137f\u139a\u139b\u139c\u139d\u139e\u139f\u13f5\u13f6\u13f7\u13f8\u13f9\u13fa\u13fb\u13fc\u13fd\u13fe\u13ff\u169d\u169e\u169f\u16f1\u16f2\u16f3\u16f4\u16f5\u16f6\u16f7\u16f8\u16f9\u16fa\u16fb\u16fc\u16fd\u16fe\u16ff\u170d\u1715\u1716\u1717\u1718\u1719\u171a\u171b\u171c\u171d\u171e\u171f\u1737\u1738\u1739\u173a\u173b\u173c\u173d\u173e\u173f\u1754\u1755\u1756\u1757\u1758\u1759\u175a\u175b\u175c\u175d\u175e\u175f\u176d\u1771\u1774\u1775\u1776\u1777\u1778\u1779\u177a\u177b\u177c\u177d\u177e\u177f\u17de\u17df\u17ea\u17eb\u17ec\u17ed\u17ee\u17ef\u17fa\u17fb\u17fc\u17fd\u17fe\u17ff\u180f\u181a\u181b\u181c\u181d\u181e\u181f\u1878\u1879\u187a\u187b\u187c\u187d\u187e\u187f\u18ab\u18ac\u18ad\u18ae\u18af\u18f6\u18f7\u18f8\u18f9\u18fa\u18fb\u18fc\u18fd\u18fe\u18ff\u191d\u191e\u191f\u192c\u192d\u192e\u192f\u193c\u193d\u193e\u193f\u1941\u1942\u1943\u196e\u196f\u1975\u1976\u1977\u1978\u1979\u197a\u197b\u197c\u197d\u197e\u197f\u19ac\u19ad\u19ae\u19af\u19ca\u19cb\u19cc\u19cd\u19ce\u19cf\u19db\u19dc\u19dd\u1a1c\u1a1d\u1a5f\u1a7d\u1a7e\u1a8a\u1a8b\u1a8c\u1a8d\u1a8e\u1a8f\u1a9a\u1a9b\u1a9c\u1a9d\u1a9e\u1a9f\u1aae\u1aaf\u1ab0\u1ab1\u1ab2\u1ab3\u1ab4\u1ab5\u1ab6\u1ab7\u1ab8\u1ab9\u1aba\u1abb\u1abc\u1abd\u1abe\u1abf\u1ac0\u1ac1\u1ac2\u1ac3\u1ac4\u1ac5\u1ac6\u1ac7\u1ac8\u1ac9\u1aca\u1acb\u1acc\u1acd\u1ace\u1acf\u1ad0\u1ad1\u1ad2\u1ad3\u1ad4\u1ad5\u1ad6\u1ad7\u1ad8\u1ad9\u1ada\u1adb\u1adc\u1add\u1ade\u1adf\u1ae0\u1ae1\u1ae2\u1ae3\u1ae4\u1ae5\u1ae6\u1ae7\u1ae8\u1ae9\u1aea\u1aeb\u1aec\u1aed\u1aee\u1aef\u1af0\u1af1\u1af2\u1af3\u1af4\u1af5\u1af6\u1af7\u1af8\u1af9\u1afa\u1afb\u1afc\u1afd\u1afe\u1aff\u1b4c\u1b4d\u1b4e\u1b4f\u1b7d\u1b7e\u1b7f\u1bab\u1bac\u1bad\u1bba\u1bbb\u1bbc\u1bbd\u1bbe\u1bbf\u1bc0\u1bc1\u1bc2\u1bc3\u1bc4\u1bc5\u1bc6\u1bc7\u1bc8\u1bc9\u1bca\u1bcb\u1bcc\u1bcd\u1bce\u1bcf\u1bd0\u1bd1\u1bd2\u1bd3\u1bd4\u1bd5\u1bd6\u1bd7\u1bd8\u1bd9\u1bda\u1bdb\u1bdc\u1b
dd\u1bde\u1bdf\u1be0\u1be1\u1be2\u1be3\u1be4\u1be5\u1be6\u1be7\u1be8\u1be9\u1bea\u1beb\u1bec\u1bed\u1bee\u1bef\u1bf0\u1bf1\u1bf2\u1bf3\u1bf4\u1bf5\u1bf6\u1bf7\u1bf8\u1bf9\u1bfa\u1bfb\u1bfc\u1bfd\u1bfe\u1bff\u1c38\u1c39\u1c3a\u1c4a\u1c4b\u1c4c\u1c80\u1c81\u1c82\u1c83\u1c84\u1c85\u1c86\u1c87\u1c88\u1c89\u1c8a\u1c8b\u1c8c\u1c8d\u1c8e\u1c8f\u1c90\u1c91\u1c92\u1c93\u1c94\u1c95\u1c96\u1c97\u1c98\u1c99\u1c9a\u1c9b\u1c9c\u1c9d\u1c9e\u1c9f\u1ca0\u1ca1\u1ca2\u1ca3\u1ca4\u1ca5\u1ca6\u1ca7\u1ca8\u1ca9\u1caa\u1cab\u1cac\u1cad\u1cae\u1caf\u1cb0\u1cb1\u1cb2\u1cb3\u1cb4\u1cb5\u1cb6\u1cb7\u1cb8\u1cb9\u1cba\u1cbb\u1cbc\u1cbd\u1cbe\u1cbf\u1cc0\u1cc1\u1cc2\u1cc3\u1cc4\u1cc5\u1cc6\u1cc7\u1cc8\u1cc9\u1cca\u1ccb\u1ccc\u1ccd\u1cce\u1ccf\u1cf3\u1cf4\u1cf5\u1cf6\u1cf7\u1cf8\u1cf9\u1cfa\u1cfb\u1cfc\u1cfd\u1cfe\u1cff\u1de7\u1de8\u1de9\u1dea\u1deb\u1dec\u1ded\u1dee\u1def\u1df0\u1df1\u1df2\u1df3\u1df4\u1df5\u1df6\u1df7\u1df8\u1df9\u1dfa\u1dfb\u1dfc\u1f16\u1f17\u1f1e\u1f1f\u1f46\u1f47\u1f4e\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e\u1f7f\u1fb5\u1fc5\u1fd4\u1fd5\u1fdc\u1ff0\u1ff1\u1ff5\u1fff\u2065\u2066\u2067\u2068\u2069\u2072\u2073\u208f\u2095\u2096\u2097\u2098\u2099\u209a\u209b\u209c\u209d\u209e\u209f\u20b9\u20ba\u20bb\u20bc\u20bd\u20be\u20bf\u20c0\u20c1\u20c2\u20c3\u20c4\u20c5\u20c6\u20c7\u20c8\u20c9\u20ca\u20cb\u20cc\u20cd\u20ce\u20cf\u20f1\u20f2\u20f3\u20f4\u20f5\u20f6\u20f7\u20f8\u20f9\u20fa\u20fb\u20fc\u20fd\u20fe\u20ff\u218a\u218b\u218c\u218d\u218e\u218f\u23e9\u23ea\u23eb\u23ec\u23ed\u23ee\u23ef\u23f0\u23f1\u23f2\u23f3\u23f4\u23f5\u23f6\u23f7\u23f8\u23f9\u23fa\u23fb\u23fc\u23fd\u23fe\u23ff\u2427\u2428\u2429\u242a\u242b\u242c\u242d\u242e\u242f\u2430\u2431\u2432\u2433\u2434\u2435\u2436\u2437\u2438\u2439\u243a\u243b\u243c\u243d\u243e\u243f\u244b\u244c\u244d\u244e\u244f\u2450\u2451\u2452\u2453\u2454\u2455\u2456\u2457\u2458\u2459\u245a\u245b\u245c\u245d\u245e\u245f\u26ce\u26e2\u26e4\u26e5\u26e6\u26e7\u2700\u2705\u270a\u270b\u2728\u274c\u274e\u2753\u2754\u2755\u275f\u2760\u2795\u2796\u2797\u27b0\u27bf\u27cb\u27cd\u27ce\u27cf\u2b4d\u2b4e\u2b4f\u2b5a\u2b5b\u2b5c\u2b5d\u2b5e\u2b5f\u2b60\u2b61\u2b62\u2b63\u2b64\u2b65\u2b66\u2b67\u2b68\u2b69\u2b6a\u2b6b\u2b6c\u2b6d\u2b6e\u2b6f\u2b70\u2b71\u2b72\u2b73\u2b74\u2b75\u2b76\u2b77\u2b78\u2b79\u2b7a\u2b7b\u2b7c\u2b7d\u2b7e\u2b7f\u2b80\u2b81\u2b82\u2b83\u2b84\u2b85\u2b86\u2b87\u2b88\u2b89\u2b8a\u2b8b\u2b8c\u2b8d\u2b8e\u2b8f\u2b90\u2b91\u2b92\u2b93\u2b94\u2b95\u2b96\u2b97\u2b98\u2b99\u2b9a\u2b9b\u2b9c\u2b9d\u2b9e\u2b9f\u2ba0\u2ba1\u2ba2\u2ba3\u2ba4\u2ba5\u2ba6\u2ba7\u2ba8\u2ba9\u2baa\u2bab\u2bac\u2bad\u2bae\u2baf\u2bb0\u2bb1\u2bb2\u2bb3\u2bb4\u2bb5\u2bb6\u2bb7\u2bb8\u2bb9\u2bba\u2bbb\u2bbc\u2bbd\u2bbe\u2bbf\u2bc0\u2bc1\u2bc2\u2bc3\u2bc4\u2bc5\u2bc6\u2bc7\u2bc8\u2bc9\u2bca\u2bcb\u2bcc\u2bcd\u2bce\u2bcf\u2bd0\u2bd1\u2bd2\u2bd3\u2bd4\u2bd5\u2bd6\u2bd7\u2bd8\u2bd9\u2bda\u2bdb\u2bdc\u2bdd\u2bde\u2bdf\u2be0\u2be1\u2be2\u2be3\u2be4\u2be5\u2be6\u2be7\u2be8\u2be9\u2bea\u2beb\u2bec\u2bed\u2bee\u2bef\u2bf0\u2bf1\u2bf2\u2bf3\u2bf4\u2bf5\u2bf6\u2bf7\u2bf8\u2bf9\u2bfa\u2bfb\u2bfc\u2bfd\u2bfe\u2bff\u2c2f\u2c5f\u2cf2\u2cf3\u2cf4\u2cf5\u2cf6\u2cf7\u2cf8\u2d26\u2d27\u2d28\u2d29\u2d2a\u2d2b\u2d2c\u2d2d\u2d2e\u2d2f\u2d66\u2d67\u2d68\u2d69\u2d6a\u2d6b\u2d6c\u2d6d\u2d6e\u2d70\u2d71\u2d72\u2d73\u2d74\u2d75\u2d76\u2d77\u2d78\u2d79\u2d7a\u2d7b\u2d7c\u2d7d\u2d7e\u2d7f\u2d97\u2d98\u2d99\u2d9a\u2d9b\u2d9c\u2d9d\u2d9e\u2d9f\u2da7\u2daf\u2db7\u2dbf\u2dc7\u2dcf\u2dd7\u2ddf\u2e32\u2e33\u2e34\u2e35\u2e36\u2e37\u2e38\u2e39\u2e3a\u2e3b\u2e3c\u2e3d\u2e3e\u2e3f\u2e40\u2e41\u2e42\u2e43\u2e44\u2e45\u2e46\u2e47\u2e48\u2e49\u2e4a\
u2e4b\u2e4c\u2e4d\u2e4e\u2e4f\u2e50\u2e51\u2e52\u2e53\u2e54\u2e55\u2e56\u2e57\u2e58\u2e59\u2e5a\u2e5b\u2e5c\u2e5d\u2e5e\u2e5f\u2e60\u2e61\u2e62\u2e63\u2e64\u2e65\u2e66\u2e67\u2e68\u2e69\u2e6a\u2e6b\u2e6c\u2e6d\u2e6e\u2e6f\u2e70\u2e71\u2e72\u2e73\u2e74\u2e75\u2e76\u2e77\u2e78\u2e79\u2e7a\u2e7b\u2e7c\u2e7d\u2e7e\u2e7f\u2e9a\u2ef4\u2ef5\u2ef6\u2ef7\u2ef8\u2ef9\u2efa\u2efb\u2efc\u2efd\u2efe\u2eff\u2fd6\u2fd7\u2fd8\u2fd9\u2fda\u2fdb\u2fdc\u2fdd\u2fde\u2fdf\u2fe0\u2fe1\u2fe2\u2fe3\u2fe4\u2fe5\u2fe6\u2fe7\u2fe8\u2fe9\u2fea\u2feb\u2fec\u2fed\u2fee\u2fef\u2ffc\u2ffd\u2ffe\u2fff\u3040\u3097\u3098\u3100\u3101\u3102\u3103\u3104\u312e\u312f\u3130\u318f\u31b8\u31b9\u31ba\u31bb\u31bc\u31bd\u31be\u31bf\u31e4\u31e5\u31e6\u31e7\u31e8\u31e9\u31ea\u31eb\u31ec\u31ed\u31ee\u31ef\u321f\u32ff\u4db6\u4db7\u4db8\u4db9\u4dba\u4dbb\u4dbc\u4dbd\u4dbe\u4dbf\u9fcc\u9fcd\u9fce\u9fcf\u9fd0\u9fd1\u9fd2\u9fd3\u9fd4\u9fd5\u9fd6\u9fd7\u9fd8\u9fd9\u9fda\u9fdb\u9fdc\u9fdd\u9fde\u9fdf\u9fe0\u9fe1\u9fe2\u9fe3\u9fe4\u9fe5\u9fe6\u9fe7\u9fe8\u9fe9\u9fea\u9feb\u9fec\u9fed\u9fee\u9fef\u9ff0\u9ff1\u9ff2\u9ff3\u9ff4\u9ff5\u9ff6\u9ff7\u9ff8\u9ff9\u9ffa\u9ffb\u9ffc\u9ffd\u9ffe\u9fff\ua48d\ua48e\ua48f\ua4c7\ua4c8\ua4c9\ua4ca\ua4cb\ua4cc\ua4cd\ua4ce\ua4cf\ua62c\ua62d\ua62e\ua62f\ua630\ua631\ua632\ua633\ua634\ua635\ua636\ua637\ua638\ua639\ua63a\ua63b\ua63c\ua63d\ua63e\ua63f\ua660\ua661\ua674\ua675\ua676\ua677\ua678\ua679\ua67a\ua67b\ua698\ua699\ua69a\ua69b\ua69c\ua69d\ua69e\ua69f\ua6f8\ua6f9\ua6fa\ua6fb\ua6fc\ua6fd\ua6fe\ua6ff\ua78d\ua78e\ua78f\ua790\ua791\ua792\ua793\ua794\ua795\ua796\ua797\ua798\ua799\ua79a\ua79b\ua79c\ua79d\ua79e\ua79f\ua7a0\ua7a1\ua7a2\ua7a3\ua7a4\ua7a5\ua7a6\ua7a7\ua7a8\ua7a9\ua7aa\ua7ab\ua7ac\ua7ad\ua7ae\ua7af\ua7b0\ua7b1\ua7b2\ua7b3\ua7b4\ua7b5\ua7b6\ua7b7\ua7b8\ua7b9\ua7ba\ua7bb\ua7bc\ua7bd\ua7be\ua7bf\ua7c0\ua7c1\ua7c2\ua7c3\ua7c4\ua7c5\ua7c6\ua7c7\ua7c8\ua7c9\ua7ca\ua7cb\ua7cc\ua7cd\ua7ce\ua7cf\ua7d0\ua7d1\ua7d2\ua7d3\ua7d4\ua7d5\ua7d6\ua7d7\ua7d8\ua7d9\ua7da\ua7db\ua7dc\ua7dd\ua7de\ua7df\ua7e0\ua7e1\ua7e2\ua7e3\ua7e4\ua7e5\ua7e6\ua7e7\ua7e8\ua7e9\ua7ea\ua7eb\ua7ec\ua7ed\ua7ee\ua7ef\ua7f0\ua7f1\ua7f2\ua7f3\ua7f4\ua7f5\ua7f6\ua7f7\ua7f8\ua7f9\ua7fa\ua82c\ua82d\ua82e\ua82f\ua83a\ua83b\ua83c\ua83d\ua83e\ua83f\ua878\ua879\ua87a\ua87b\ua87c\ua87d\ua87e\ua87f\ua8c5\ua8c6\ua8c7\ua8c8\ua8c9\ua8ca\ua8cb\ua8cc\ua8cd\ua8da\ua8db\ua8dc\ua8dd\ua8de\ua8df\ua8fc\ua8fd\ua8fe\ua8ff\ua954\ua955\ua956\ua957\ua958\ua959\ua95a\ua95b\ua95c\ua95d\ua95e\ua97d\ua97e\ua97f\ua9ce\ua9da\ua9db\ua9dc\ua9dd\ua9e0\ua9e1\ua9e2\ua9e3\ua9e4\ua9e5\ua9e6\ua9e7\ua9e8\ua9e9\ua9ea\ua9eb\ua9ec\ua9ed\ua9ee\ua9ef\ua9f0\ua9f1\ua9f2\ua9f3\ua9f4\ua9f5\ua9f6\ua9f7\ua9f8\ua9f9\ua9fa\ua9fb\ua9fc\ua9fd\ua9fe\ua9ff\uaa37\uaa38\uaa39\uaa3a\uaa3b\uaa3c\uaa3d\uaa3e\uaa3f\uaa4e\uaa4f\uaa5a\uaa5b\uaa7c\uaa7d\uaa7e\uaa7f\uaac3\uaac4\uaac5\uaac6\uaac7\uaac8\uaac9\uaaca\uaacb\uaacc\uaacd\uaace\uaacf\uaad0\uaad1\uaad2\uaad3\uaad4\uaad5\uaad6\uaad7\uaad8\uaad9\uaada\uaae0\uaae1\uaae2\uaae3\uaae4\uaae5\uaae6\uaae7\uaae8\uaae9\uaaea\uaaeb\uaaec\uaaed\uaaee\uaaef\uaaf0\uaaf1\uaaf2\uaaf3\uaaf4\uaaf5\uaaf6\uaaf7\uaaf8\uaaf9\uaafa\uaafb\uaafc\uaafd\uaafe\uaaff\uab00\uab01\uab02\uab03\uab04\uab05\uab06\uab07\uab08\uab09\uab0a\uab0b\uab0c\uab0d\uab0e\uab0f\uab10\uab11\uab12\uab13\uab14\uab15\uab16\uab17\uab18\uab19\uab1a\uab1b\uab1c\uab1d\uab1e\uab1f\uab20\uab21\uab22\uab23\uab24\uab25\uab26\uab27\uab28\uab29\uab2a\uab2b\uab2c\uab2d\uab2e\uab2f\uab30\uab31\uab32\uab33\uab34\uab35\uab36\uab37\uab38\uab39\uab3a\uab3b\uab3c\uab3d\uab3e\uab3f\uab40\uab41\uab42\uab43\uab44\uab45\uab46\uab
47\uab48\uab49\uab4a\uab4b\uab4c\uab4d\uab4e\uab4f\uab50\uab51\uab52\uab53\uab54\uab55\uab56\uab57\uab58\uab59\uab5a\uab5b\uab5c\uab5d\uab5e\uab5f\uab60\uab61\uab62\uab63\uab64\uab65\uab66\uab67\uab68\uab69\uab6a\uab6b\uab6c\uab6d\uab6e\uab6f\uab70\uab71\uab72\uab73\uab74\uab75\uab76\uab77\uab78\uab79\uab7a\uab7b\uab7c\uab7d\uab7e\uab7f\uab80\uab81\uab82\uab83\uab84\uab85\uab86\uab87\uab88\uab89\uab8a\uab8b\uab8c\uab8d\uab8e\uab8f\uab90\uab91\uab92\uab93\uab94\uab95\uab96\uab97\uab98\uab99\uab9a\uab9b\uab9c\uab9d\uab9e\uab9f\uaba0\uaba1\uaba2\uaba3\uaba4\uaba5\uaba6\uaba7\uaba8\uaba9\uabaa\uabab\uabac\uabad\uabae\uabaf\uabb0\uabb1\uabb2\uabb3\uabb4\uabb5\uabb6\uabb7\uabb8\uabb9\uabba\uabbb\uabbc\uabbd\uabbe\uabbf\uabee\uabef\uabfa\uabfb\uabfc\uabfd\uabfe\uabff\ud7a4\ud7a5\ud7a6\ud7a7\ud7a8\ud7a9\ud7aa\ud7ab\ud7ac\ud7ad\ud7ae\ud7af\ud7c7\ud7c8\ud7c9\ud7ca\ud7fc\ud7fd\ud7fe\ud7ff\ufa2e\ufa2f\ufa6e\ufa6f\ufada\ufadb\ufadc\ufadd\ufade\ufadf\ufae0\ufae1\ufae2\ufae3\ufae4\ufae5\ufae6\ufae7\ufae8\ufae9\ufaea\ufaeb\ufaec\ufaed\ufaee\ufaef\ufaf0\ufaf1\ufaf2\ufaf3\ufaf4\ufaf5\ufaf6\ufaf7\ufaf8\ufaf9\ufafa\ufafb\ufafc\ufafd\ufafe\ufaff\ufb07\ufb08\ufb09\ufb0a\ufb0b\ufb0c\ufb0d\ufb0e\ufb0f\ufb10\ufb11\ufb12\ufb18\ufb19\ufb1a\ufb1b\ufb1c\ufb37\ufb3d\ufb3f\ufb42\ufb45\ufbb2\ufbb3\ufbb4\ufbb5\ufbb6\ufbb7\ufbb8\ufbb9\ufbba\ufbbb\ufbbc\ufbbd\ufbbe\ufbbf\ufbc0\ufbc1\ufbc2\ufbc3\ufbc4\ufbc5\ufbc6\ufbc7\ufbc8\ufbc9\ufbca\ufbcb\ufbcc\ufbcd\ufbce\ufbcf\ufbd0\ufbd1\ufbd2\ufd40\ufd41\ufd42\ufd43\ufd44\ufd45\ufd46\ufd47\ufd48\ufd49\ufd4a\ufd4b\ufd4c\ufd4d\ufd4e\ufd4f\ufd90\ufd91\ufdc8\ufdc9\ufdca\ufdcb\ufdcc\ufdcd\ufdce\ufdcf\ufdd0\ufdd1\ufdd2\ufdd3\ufdd4\ufdd5\ufdd6\ufdd7\ufdd8\ufdd9\ufdda\ufddb\ufddc\ufddd\ufdde\ufddf\ufde0\ufde1\ufde2\ufde3\ufde4\ufde5\ufde6\ufde7\ufde8\ufde9\ufdea\ufdeb\ufdec\ufded\ufdee\ufdef\ufdfe\ufdff\ufe1a\ufe1b\ufe1c\ufe1d\ufe1e\ufe1f\ufe27\ufe28\ufe29\ufe2a\ufe2b\ufe2c\ufe2d\ufe2e\ufe2f\ufe53\ufe67\ufe6c\ufe6d\ufe6e\ufe6f\ufe75\ufefd\ufefe\uff00\uffbf\uffc0\uffc1\uffc8\uffc9\uffd0\uffd1\uffd8\uffd9\uffdd\uffde\uffdf\uffe7\uffef\ufff0\ufff1\ufff2\ufff3\ufff4\ufff5\ufff6\ufff7\ufff8\ufffe'
+Cf = u'\xad\u0600-\u0604\u061c\u06dd\u070f\u180e\u200b-\u200f\u202a-\u202e\u2060-\u2064\u2066-\u206f\ufeff\ufff9-\ufffb'
-Co = u'\ue000\ue001\ue002\ue003\ue004\ue005\ue006\ue007\ue008\ue009\ue00a\ue00b\ue00c\ue00d\ue00e\ue00f\ue010\ue011\ue012\ue013\ue014\ue015\ue016\ue017\ue018\ue019\ue01a\ue01b\ue01c\ue01d\ue01e\ue01f\ue020\ue021\ue022\ue023\ue024\ue025\ue026\ue027\ue028\ue029\ue02a\ue02b\ue02c\ue02d\ue02e\ue02f\ue030\ue031\ue032\ue033\ue034\ue035\ue036\ue037\ue038\ue039\ue03a\ue03b\ue03c\ue03d\ue03e\ue03f\ue040\ue041\ue042\ue043\ue044\ue045\ue046\ue047\ue048\ue049\ue04a\ue04b\ue04c\ue04d\ue04e\ue04f\ue050\ue051\ue052\ue053\ue054\ue055\ue056\ue057\ue058\ue059\ue05a\ue05b\ue05c\ue05d\ue05e\ue05f\ue060\ue061\ue062\ue063\ue064\ue065\ue066\ue067\ue068\ue069\ue06a\ue06b\ue06c\ue06d\ue06e\ue06f\ue070\ue071\ue072\ue073\ue074\ue075\ue076\ue077\ue078\ue079\ue07a\ue07b\ue07c\ue07d\ue07e\ue07f\ue080\ue081\ue082\ue083\ue084\ue085\ue086\ue087\ue088\ue089\ue08a\ue08b\ue08c\ue08d\ue08e\ue08f\ue090\ue091\ue092\ue093\ue094\ue095\ue096\ue097\ue098\ue099\ue09a\ue09b\ue09c\ue09d\ue09e\ue09f\ue0a0\ue0a1\ue0a2\ue0a3\ue0a4\ue0a5\ue0a6\ue0a7\ue0a8\ue0a9\ue0aa\ue0ab\ue0ac\ue0ad\ue0ae\ue0af\ue0b0\ue0b1\ue0b2\ue0b3\ue0b4\ue0b5\ue0b6\ue0b7\ue0b8\ue0b9\ue0ba\ue0bb\ue0bc\ue0bd\ue0be\ue0bf\ue0c0\ue0c1\ue0c2\ue0c3\ue0c4\ue0c5\ue0c6\ue0c7\ue0c8\ue0c9\ue0ca\ue0cb\ue0cc\ue0cd\ue0ce\ue0cf\ue0d0\ue0d1\ue0d2\ue0d3\ue0d4\ue0d5\ue0d6\ue0d7\ue0d8\ue0d9\ue0da\ue0db\ue0dc\ue0dd\ue0de\ue0df\ue0e0\ue0e1\ue0e2\ue0e3\ue0e4\ue0e5\ue0e6\ue0e7\ue0e8\ue0e9\ue0ea\ue0eb\ue0ec\ue0ed\ue0ee\ue0ef\ue0f0\ue0f1\ue0f2\ue0f3\ue0f4\ue0f5\ue0f6\ue0f7\ue0f8\ue0f9\ue0fa\ue0fb\ue0fc\ue0fd\ue0fe\ue0ff\ue100\ue101\ue102\ue103\ue104\ue105\ue106\ue107\ue108\ue109\ue10a\ue10b\ue10c\ue10d\ue10e\ue10f\ue110\ue111\ue112\ue113\ue114\ue115\ue116\ue117\ue118\ue119\ue11a\ue11b\ue11c\ue11d\ue11e\ue11f\ue120\ue121\ue122\ue123\ue124\ue125\ue126\ue127\ue128\ue129\ue12a\ue12b\ue12c\ue12d\ue12e\ue12f\ue130\ue131\ue132\ue133\ue134\ue135\ue136\ue137\ue138\ue139\ue13a\ue13b\ue13c\ue13d\ue13e\ue13f\ue140\ue141\ue142\ue143\ue144\ue145\ue146\ue147\ue148\ue149\ue14a\ue14b\ue14c\ue14d\ue14e\ue14f\ue150\ue151\ue152\ue153\ue154\ue155\ue156\ue157\ue158\ue159\ue15a\ue15b\ue15c\ue15d\ue15e\ue15f\ue160\ue161\ue162\ue163\ue164\ue165\ue166\ue167\ue168\ue169\ue16a\ue16b\ue16c\ue16d\ue16e\ue16f\ue170\ue171\ue172\ue173\ue174\ue175\ue176\ue177\ue178\ue179\ue17a\ue17b\ue17c\ue17d\ue17e\ue17f\ue180\ue181\ue182\ue183\ue184\ue185\ue186\ue187\ue188\ue189\ue18a\ue18b\ue18c\ue18d\ue18e\ue18f\ue190\ue191\ue192\ue193\ue194\ue195\ue196\ue197\ue198\ue199\ue19a\ue19b\ue19c\ue19d\ue19e\ue19f\ue1a0\ue1a1\ue1a2\ue1a3\ue1a4\ue1a5\ue1a6\ue1a7\ue1a8\ue1a9\ue1aa\ue1ab\ue1ac\ue1ad\ue1ae\ue1af\ue1b0\ue1b1\ue1b2\ue1b3\ue1b4\ue1b5\ue1b6\ue1b7\ue1b8\ue1b9\ue1ba\ue1bb\ue1bc\ue1bd\ue1be\ue1bf\ue1c0\ue1c1\ue1c2\ue1c3\ue1c4\ue1c5\ue1c6\ue1c7\ue1c8\ue1c9\ue1ca\ue1cb\ue1cc\ue1cd\ue1ce\ue1cf\ue1d0\ue1d1\ue1d2\ue1d3\ue1d4\ue1d5\ue1d6\ue1d7\ue1d8\ue1d9\ue1da\ue1db\ue1dc\ue1dd\ue1de\ue1df\ue1e0\ue1e1\ue1e2\ue1e3\ue1e4\ue1e5\ue1e6\ue1e7\ue1e8\ue1e9\ue1ea\ue1eb\ue1ec\ue1ed\ue1ee\ue1ef\ue1f0\ue1f1\ue1f2\ue1f3\ue1f4\ue1f5\ue1f6\ue1f7\ue1f8\ue1f9\ue1fa\ue1fb\ue1fc\ue1fd\ue1fe\ue1ff\ue200\ue201\ue202\ue203\ue204\ue205\ue206\ue207\ue208\ue209\ue20a\ue20b\ue20c\ue20d\ue20e\ue20f\ue210\ue211\ue212\ue213\ue214\ue215\ue216\ue217\ue218\ue219\ue21a\ue21b\ue21c\ue21d\ue21e\ue21f\ue220\ue221\ue222\ue223\ue224\ue225\ue226\ue227\ue228\ue229\ue22a\ue22b\ue22c\ue22d\ue22e\ue22f\ue230\ue231\ue232\ue233\ue234\ue235\ue236\ue237\ue238\ue239\ue23a\ue23b\ue23c\ue23d\ue23e\ue23f\ue240\ue241\ue242\ue243\ue244\ue245\ue246\ue247\ue248\ue249\ue24a\ue24b\ue24c\ue24d\ue24e\
ue24f\ue250\ue251\ue252\ue253\ue254\ue255\ue256\ue257\ue258\ue259\ue25a\ue25b\ue25c\ue25d\ue25e\ue25f\ue260\ue261\ue262\ue263\ue264\ue265\ue266\ue267\ue268\ue269\ue26a\ue26b\ue26c\ue26d\ue26e\ue26f\ue270\ue271\ue272\ue273\ue274\ue275\ue276\ue277\ue278\ue279\ue27a\ue27b\ue27c\ue27d\ue27e\ue27f\ue280\ue281\ue282\ue283\ue284\ue285\ue286\ue287\ue288\ue289\ue28a\ue28b\ue28c\ue28d\ue28e\ue28f\ue290\ue291\ue292\ue293\ue294\ue295\ue296\ue297\ue298\ue299\ue29a\ue29b\ue29c\ue29d\ue29e\ue29f\ue2a0\ue2a1\ue2a2\ue2a3\ue2a4\ue2a5\ue2a6\ue2a7\ue2a8\ue2a9\ue2aa\ue2ab\ue2ac\ue2ad\ue2ae\ue2af\ue2b0\ue2b1\ue2b2\ue2b3\ue2b4\ue2b5\ue2b6\ue2b7\ue2b8\ue2b9\ue2ba\ue2bb\ue2bc\ue2bd\ue2be\ue2bf\ue2c0\ue2c1\ue2c2\ue2c3\ue2c4\ue2c5\ue2c6\ue2c7\ue2c8\ue2c9\ue2ca\ue2cb\ue2cc\ue2cd\ue2ce\ue2cf\ue2d0\ue2d1\ue2d2\ue2d3\ue2d4\ue2d5\ue2d6\ue2d7\ue2d8\ue2d9\ue2da\ue2db\ue2dc\ue2dd\ue2de\ue2df\ue2e0\ue2e1\ue2e2\ue2e3\ue2e4\ue2e5\ue2e6\ue2e7\ue2e8\ue2e9\ue2ea\ue2eb\ue2ec\ue2ed\ue2ee\ue2ef\ue2f0\ue2f1\ue2f2\ue2f3\ue2f4\ue2f5\ue2f6\ue2f7\ue2f8\ue2f9\ue2fa\ue2fb\ue2fc\ue2fd\ue2fe\ue2ff\ue300\ue301\ue302\ue303\ue304\ue305\ue306\ue307\ue308\ue309\ue30a\ue30b\ue30c\ue30d\ue30e\ue30f\ue310\ue311\ue312\ue313\ue314\ue315\ue316\ue317\ue318\ue319\ue31a\ue31b\ue31c\ue31d\ue31e\ue31f\ue320\ue321\ue322\ue323\ue324\ue325\ue326\ue327\ue328\ue329\ue32a\ue32b\ue32c\ue32d\ue32e\ue32f\ue330\ue331\ue332\ue333\ue334\ue335\ue336\ue337\ue338\ue339\ue33a\ue33b\ue33c\ue33d\ue33e\ue33f\ue340\ue341\ue342\ue343\ue344\ue345\ue346\ue347\ue348\ue349\ue34a\ue34b\ue34c\ue34d\ue34e\ue34f\ue350\ue351\ue352\ue353\ue354\ue355\ue356\ue357\ue358\ue359\ue35a\ue35b\ue35c\ue35d\ue35e\ue35f\ue360\ue361\ue362\ue363\ue364\ue365\ue366\ue367\ue368\ue369\ue36a\ue36b\ue36c\ue36d\ue36e\ue36f\ue370\ue371\ue372\ue373\ue374\ue375\ue376\ue377\ue378\ue379\ue37a\ue37b\ue37c\ue37d\ue37e\ue37f\ue380\ue381\ue382\ue383\ue384\ue385\ue386\ue387\ue388\ue389\ue38a\ue38b\ue38c\ue38d\ue38e\ue38f\ue390\ue391\ue392\ue393\ue394\ue395\ue396\ue397\ue398\ue399\ue39a\ue39b\ue39c\ue39d\ue39e\ue39f\ue3a0\ue3a1\ue3a2\ue3a3\ue3a4\ue3a5\ue3a6\ue3a7\ue3a8\ue3a9\ue3aa\ue3ab\ue3ac\ue3ad\ue3ae\ue3af\ue3b0\ue3b1\ue3b2\ue3b3\ue3b4\ue3b5\ue3b6\ue3b7\ue3b8\ue3b9\ue3ba\ue3bb\ue3bc\ue3bd\ue3be\ue3bf\ue3c0\ue3c1\ue3c2\ue3c3\ue3c4\ue3c5\ue3c6\ue3c7\ue3c8\ue3c9\ue3ca\ue3cb\ue3cc\ue3cd\ue3ce\ue3cf\ue3d0\ue3d1\ue3d2\ue3d3\ue3d4\ue3d5\ue3d6\ue3d7\ue3d8\ue3d9\ue3da\ue3db\ue3dc\ue3dd\ue3de\ue3df\ue3e0\ue3e1\ue3e2\ue3e3\ue3e4\ue3e5\ue3e6\ue3e7\ue3e8\ue3e9\ue3ea\ue3eb\ue3ec\ue3ed\ue3ee\ue3ef\ue3f0\ue3f1\ue3f2\ue3f3\ue3f4\ue3f5\ue3f6\ue3f7\ue3f8\ue3f9\ue3fa\ue3fb\ue3fc\ue3fd\ue3fe\ue3ff\ue400\ue401\ue402\ue403\ue404\ue405\ue406\ue407\ue408\ue409\ue40a\ue40b\ue40c\ue40d\ue40e\ue40f\ue410\ue411\ue412\ue413\ue414\ue415\ue416\ue417\ue418\ue419\ue41a\ue41b\ue41c\ue41d\ue41e\ue41f\ue420\ue421\ue422\ue423\ue424\ue425\ue426\ue427\ue428\ue429\ue42a\ue42b\ue42c\ue42d\ue42e\ue42f\ue430\ue431\ue432\ue433\ue434\ue435\ue436\ue437\ue438\ue439\ue43a\ue43b\ue43c\ue43d\ue43e\ue43f\ue440\ue441\ue442\ue443\ue444\ue445\ue446\ue447\ue448\ue449\ue44a\ue44b\ue44c\ue44d\ue44e\ue44f\ue450\ue451\ue452\ue453\ue454\ue455\ue456\ue457\ue458\ue459\ue45a\ue45b\ue45c\ue45d\ue45e\ue45f\ue460\ue461\ue462\ue463\ue464\ue465\ue466\ue467\ue468\ue469\ue46a\ue46b\ue46c\ue46d\ue46e\ue46f\ue470\ue471\ue472\ue473\ue474\ue475\ue476\ue477\ue478\ue479\ue47a\ue47b\ue47c\ue47d\ue47e\ue47f\ue480\ue481\ue482\ue483\ue484\ue485\ue486\ue487\ue488\ue489\ue48a\ue48b\ue48c\ue48d\ue48e\ue48f\ue490\ue491\ue492\ue493\ue494\ue495\ue496\ue497\ue498\ue499\ue49a\ue49b\ue49c\ue49d\ue49e\ue4
9f\ue4a0\ue4a1\ue4a2\ue4a3\ue4a4\ue4a5\ue4a6\ue4a7\ue4a8\ue4a9\ue4aa\ue4ab\ue4ac\ue4ad\ue4ae\ue4af\ue4b0\ue4b1\ue4b2\ue4b3\ue4b4\ue4b5\ue4b6\ue4b7\ue4b8\ue4b9\ue4ba\ue4bb\ue4bc\ue4bd\ue4be\ue4bf\ue4c0\ue4c1\ue4c2\ue4c3\ue4c4\ue4c5\ue4c6\ue4c7\ue4c8\ue4c9\ue4ca\ue4cb\ue4cc\ue4cd\ue4ce\ue4cf\ue4d0\ue4d1\ue4d2\ue4d3\ue4d4\ue4d5\ue4d6\ue4d7\ue4d8\ue4d9\ue4da\ue4db\ue4dc\ue4dd\ue4de\ue4df\ue4e0\ue4e1\ue4e2\ue4e3\ue4e4\ue4e5\ue4e6\ue4e7\ue4e8\ue4e9\ue4ea\ue4eb\ue4ec\ue4ed\ue4ee\ue4ef\ue4f0\ue4f1\ue4f2\ue4f3\ue4f4\ue4f5\ue4f6\ue4f7\ue4f8\ue4f9\ue4fa\ue4fb\ue4fc\ue4fd\ue4fe\ue4ff\ue500\ue501\ue502\ue503\ue504\ue505\ue506\ue507\ue508\ue509\ue50a\ue50b\ue50c\ue50d\ue50e\ue50f\ue510\ue511\ue512\ue513\ue514\ue515\ue516\ue517\ue518\ue519\ue51a\ue51b\ue51c\ue51d\ue51e\ue51f\ue520\ue521\ue522\ue523\ue524\ue525\ue526\ue527\ue528\ue529\ue52a\ue52b\ue52c\ue52d\ue52e\ue52f\ue530\ue531\ue532\ue533\ue534\ue535\ue536\ue537\ue538\ue539\ue53a\ue53b\ue53c\ue53d\ue53e\ue53f\ue540\ue541\ue542\ue543\ue544\ue545\ue546\ue547\ue548\ue549\ue54a\ue54b\ue54c\ue54d\ue54e\ue54f\ue550\ue551\ue552\ue553\ue554\ue555\ue556\ue557\ue558\ue559\ue55a\ue55b\ue55c\ue55d\ue55e\ue55f\ue560\ue561\ue562\ue563\ue564\ue565\ue566\ue567\ue568\ue569\ue56a\ue56b\ue56c\ue56d\ue56e\ue56f\ue570\ue571\ue572\ue573\ue574\ue575\ue576\ue577\ue578\ue579\ue57a\ue57b\ue57c\ue57d\ue57e\ue57f\ue580\ue581\ue582\ue583\ue584\ue585\ue586\ue587\ue588\ue589\ue58a\ue58b\ue58c\ue58d\ue58e\ue58f\ue590\ue591\ue592\ue593\ue594\ue595\ue596\ue597\ue598\ue599\ue59a\ue59b\ue59c\ue59d\ue59e\ue59f\ue5a0\ue5a1\ue5a2\ue5a3\ue5a4\ue5a5\ue5a6\ue5a7\ue5a8\ue5a9\ue5aa\ue5ab\ue5ac\ue5ad\ue5ae\ue5af\ue5b0\ue5b1\ue5b2\ue5b3\ue5b4\ue5b5\ue5b6\ue5b7\ue5b8\ue5b9\ue5ba\ue5bb\ue5bc\ue5bd\ue5be\ue5bf\ue5c0\ue5c1\ue5c2\ue5c3\ue5c4\ue5c5\ue5c6\ue5c7\ue5c8\ue5c9\ue5ca\ue5cb\ue5cc\ue5cd\ue5ce\ue5cf\ue5d0\ue5d1\ue5d2\ue5d3\ue5d4\ue5d5\ue5d6\ue5d7\ue5d8\ue5d9\ue5da\ue5db\ue5dc\ue5dd\ue5de\ue5df\ue5e0\ue5e1\ue5e2\ue5e3\ue5e4\ue5e5\ue5e6\ue5e7\ue5e8\ue5e9\ue5ea\ue5eb\ue5ec\ue5ed\ue5ee\ue5ef\ue5f0\ue5f1\ue5f2\ue5f3\ue5f4\ue5f5\ue5f6\ue5f7\ue5f8\ue5f9\ue5fa\ue5fb\ue5fc\ue5fd\ue5fe\ue5ff\ue600\ue601\ue602\ue603\ue604\ue605\ue606\ue607\ue608\ue609\ue60a\ue60b\ue60c\ue60d\ue60e\ue60f\ue610\ue611\ue612\ue613\ue614\ue615\ue616\ue617\ue618\ue619\ue61a\ue61b\ue61c\ue61d\ue61e\ue61f\ue620\ue621\ue622\ue623\ue624\ue625\ue626\ue627\ue628\ue629\ue62a\ue62b\ue62c\ue62d\ue62e\ue62f\ue630\ue631\ue632\ue633\ue634\ue635\ue636\ue637\ue638\ue639\ue63a\ue63b\ue63c\ue63d\ue63e\ue63f\ue640\ue641\ue642\ue643\ue644\ue645\ue646\ue647\ue648\ue649\ue64a\ue64b\ue64c\ue64d\ue64e\ue64f\ue650\ue651\ue652\ue653\ue654\ue655\ue656\ue657\ue658\ue659\ue65a\ue65b\ue65c\ue65d\ue65e\ue65f\ue660\ue661\ue662\ue663\ue664\ue665\ue666\ue667\ue668\ue669\ue66a\ue66b\ue66c\ue66d\ue66e\ue66f\ue670\ue671\ue672\ue673\ue674\ue675\ue676\ue677\ue678\ue679\ue67a\ue67b\ue67c\ue67d\ue67e\ue67f\ue680\ue681\ue682\ue683\ue684\ue685\ue686\ue687\ue688\ue689\ue68a\ue68b\ue68c\ue68d\ue68e\ue68f\ue690\ue691\ue692\ue693\ue694\ue695\ue696\ue697\ue698\ue699\ue69a\ue69b\ue69c\ue69d\ue69e\ue69f\ue6a0\ue6a1\ue6a2\ue6a3\ue6a4\ue6a5\ue6a6\ue6a7\ue6a8\ue6a9\ue6aa\ue6ab\ue6ac\ue6ad\ue6ae\ue6af\ue6b0\ue6b1\ue6b2\ue6b3\ue6b4\ue6b5\ue6b6\ue6b7\ue6b8\ue6b9\ue6ba\ue6bb\ue6bc\ue6bd\ue6be\ue6bf\ue6c0\ue6c1\ue6c2\ue6c3\ue6c4\ue6c5\ue6c6\ue6c7\ue6c8\ue6c9\ue6ca\ue6cb\ue6cc\ue6cd\ue6ce\ue6cf\ue6d0\ue6d1\ue6d2\ue6d3\ue6d4\ue6d5\ue6d6\ue6d7\ue6d8\ue6d9\ue6da\ue6db\ue6dc\ue6dd\ue6de\ue6df\ue6e0\ue6e1\ue6e2\ue6e3\ue6e4\ue6e5\ue6e6\ue6e7\ue6e8\ue6e9\ue6ea\ue6eb\ue6ec\ue6ed\ue6ee\ue6ef\
ue6f0\ue6f1\ue6f2\ue6f3\ue6f4\ue6f5\ue6f6\ue6f7\ue6f8\ue6f9\ue6fa\ue6fb\ue6fc\ue6fd\ue6fe\ue6ff\ue700\ue701\ue702\ue703\ue704\ue705\ue706\ue707\ue708\ue709\ue70a\ue70b\ue70c\ue70d\ue70e\ue70f\ue710\ue711\ue712\ue713\ue714\ue715\ue716\ue717\ue718\ue719\ue71a\ue71b\ue71c\ue71d\ue71e\ue71f\ue720\ue721\ue722\ue723\ue724\ue725\ue726\ue727\ue728\ue729\ue72a\ue72b\ue72c\ue72d\ue72e\ue72f\ue730\ue731\ue732\ue733\ue734\ue735\ue736\ue737\ue738\ue739\ue73a\ue73b\ue73c\ue73d\ue73e\ue73f\ue740\ue741\ue742\ue743\ue744\ue745\ue746\ue747\ue748\ue749\ue74a\ue74b\ue74c\ue74d\ue74e\ue74f\ue750\ue751\ue752\ue753\ue754\ue755\ue756\ue757\ue758\ue759\ue75a\ue75b\ue75c\ue75d\ue75e\ue75f\ue760\ue761\ue762\ue763\ue764\ue765\ue766\ue767\ue768\ue769\ue76a\ue76b\ue76c\ue76d\ue76e\ue76f\ue770\ue771\ue772\ue773\ue774\ue775\ue776\ue777\ue778\ue779\ue77a\ue77b\ue77c\ue77d\ue77e\ue77f\ue780\ue781\ue782\ue783\ue784\ue785\ue786\ue787\ue788\ue789\ue78a\ue78b\ue78c\ue78d\ue78e\ue78f\ue790\ue791\ue792\ue793\ue794\ue795\ue796\ue797\ue798\ue799\ue79a\ue79b\ue79c\ue79d\ue79e\ue79f\ue7a0\ue7a1\ue7a2\ue7a3\ue7a4\ue7a5\ue7a6\ue7a7\ue7a8\ue7a9\ue7aa\ue7ab\ue7ac\ue7ad\ue7ae\ue7af\ue7b0\ue7b1\ue7b2\ue7b3\ue7b4\ue7b5\ue7b6\ue7b7\ue7b8\ue7b9\ue7ba\ue7bb\ue7bc\ue7bd\ue7be\ue7bf\ue7c0\ue7c1\ue7c2\ue7c3\ue7c4\ue7c5\ue7c6\ue7c7\ue7c8\ue7c9\ue7ca\ue7cb\ue7cc\ue7cd\ue7ce\ue7cf\ue7d0\ue7d1\ue7d2\ue7d3\ue7d4\ue7d5\ue7d6\ue7d7\ue7d8\ue7d9\ue7da\ue7db\ue7dc\ue7dd\ue7de\ue7df\ue7e0\ue7e1\ue7e2\ue7e3\ue7e4\ue7e5\ue7e6\ue7e7\ue7e8\ue7e9\ue7ea\ue7eb\ue7ec\ue7ed\ue7ee\ue7ef\ue7f0\ue7f1\ue7f2\ue7f3\ue7f4\ue7f5\ue7f6\ue7f7\ue7f8\ue7f9\ue7fa\ue7fb\ue7fc\ue7fd\ue7fe\ue7ff\ue800\ue801\ue802\ue803\ue804\ue805\ue806\ue807\ue808\ue809\ue80a\ue80b\ue80c\ue80d\ue80e\ue80f\ue810\ue811\ue812\ue813\ue814\ue815\ue816\ue817\ue818\ue819\ue81a\ue81b\ue81c\ue81d\ue81e\ue81f\ue820\ue821\ue822\ue823\ue824\ue825\ue826\ue827\ue828\ue829\ue82a\ue82b\ue82c\ue82d\ue82e\ue82f\ue830\ue831\ue832\ue833\ue834\ue835\ue836\ue837\ue838\ue839\ue83a\ue83b\ue83c\ue83d\ue83e\ue83f\ue840\ue841\ue842\ue843\ue844\ue845\ue846\ue847\ue848\ue849\ue84a\ue84b\ue84c\ue84d\ue84e\ue84f\ue850\ue851\ue852\ue853\ue854\ue855\ue856\ue857\ue858\ue859\ue85a\ue85b\ue85c\ue85d\ue85e\ue85f\ue860\ue861\ue862\ue863\ue864\ue865\ue866\ue867\ue868\ue869\ue86a\ue86b\ue86c\ue86d\ue86e\ue86f\ue870\ue871\ue872\ue873\ue874\ue875\ue876\ue877\ue878\ue879\ue87a\ue87b\ue87c\ue87d\ue87e\ue87f\ue880\ue881\ue882\ue883\ue884\ue885\ue886\ue887\ue888\ue889\ue88a\ue88b\ue88c\ue88d\ue88e\ue88f\ue890\ue891\ue892\ue893\ue894\ue895\ue896\ue897\ue898\ue899\ue89a\ue89b\ue89c\ue89d\ue89e\ue89f\ue8a0\ue8a1\ue8a2\ue8a3\ue8a4\ue8a5\ue8a6\ue8a7\ue8a8\ue8a9\ue8aa\ue8ab\ue8ac\ue8ad\ue8ae\ue8af\ue8b0\ue8b1\ue8b2\ue8b3\ue8b4\ue8b5\ue8b6\ue8b7\ue8b8\ue8b9\ue8ba\ue8bb\ue8bc\ue8bd\ue8be\ue8bf\ue8c0\ue8c1\ue8c2\ue8c3\ue8c4\ue8c5\ue8c6\ue8c7\ue8c8\ue8c9\ue8ca\ue8cb\ue8cc\ue8cd\ue8ce\ue8cf\ue8d0\ue8d1\ue8d2\ue8d3\ue8d4\ue8d5\ue8d6\ue8d7\ue8d8\ue8d9\ue8da\ue8db\ue8dc\ue8dd\ue8de\ue8df\ue8e0\ue8e1\ue8e2\ue8e3\ue8e4\ue8e5\ue8e6\ue8e7\ue8e8\ue8e9\ue8ea\ue8eb\ue8ec\ue8ed\ue8ee\ue8ef\ue8f0\ue8f1\ue8f2\ue8f3\ue8f4\ue8f5\ue8f6\ue8f7\ue8f8\ue8f9\ue8fa\ue8fb\ue8fc\ue8fd\ue8fe\ue8ff\ue900\ue901\ue902\ue903\ue904\ue905\ue906\ue907\ue908\ue909\ue90a\ue90b\ue90c\ue90d\ue90e\ue90f\ue910\ue911\ue912\ue913\ue914\ue915\ue916\ue917\ue918\ue919\ue91a\ue91b\ue91c\ue91d\ue91e\ue91f\ue920\ue921\ue922\ue923\ue924\ue925\ue926\ue927\ue928\ue929\ue92a\ue92b\ue92c\ue92d\ue92e\ue92f\ue930\ue931\ue932\ue933\ue934\ue935\ue936\ue937\ue938\ue939\ue93a\ue93b\ue93c\ue93d\ue93e\ue93f\ue9
40\ue941\ue942\ue943\ue944\ue945\ue946\ue947\ue948\ue949\ue94a\ue94b\ue94c\ue94d\ue94e\ue94f\ue950\ue951\ue952\ue953\ue954\ue955\ue956\ue957\ue958\ue959\ue95a\ue95b\ue95c\ue95d\ue95e\ue95f\ue960\ue961\ue962\ue963\ue964\ue965\ue966\ue967\ue968\ue969\ue96a\ue96b\ue96c\ue96d\ue96e\ue96f\ue970\ue971\ue972\ue973\ue974\ue975\ue976\ue977\ue978\ue979\ue97a\ue97b\ue97c\ue97d\ue97e\ue97f\ue980\ue981\ue982\ue983\ue984\ue985\ue986\ue987\ue988\ue989\ue98a\ue98b\ue98c\ue98d\ue98e\ue98f\ue990\ue991\ue992\ue993\ue994\ue995\ue996\ue997\ue998\ue999\ue99a\ue99b\ue99c\ue99d\ue99e\ue99f\ue9a0\ue9a1\ue9a2\ue9a3\ue9a4\ue9a5\ue9a6\ue9a7\ue9a8\ue9a9\ue9aa\ue9ab\ue9ac\ue9ad\ue9ae\ue9af\ue9b0\ue9b1\ue9b2\ue9b3\ue9b4\ue9b5\ue9b6\ue9b7\ue9b8\ue9b9\ue9ba\ue9bb\ue9bc\ue9bd\ue9be\ue9bf\ue9c0\ue9c1\ue9c2\ue9c3\ue9c4\ue9c5\ue9c6\ue9c7\ue9c8\ue9c9\ue9ca\ue9cb\ue9cc\ue9cd\ue9ce\ue9cf\ue9d0\ue9d1\ue9d2\ue9d3\ue9d4\ue9d5\ue9d6\ue9d7\ue9d8\ue9d9\ue9da\ue9db\ue9dc\ue9dd\ue9de\ue9df\ue9e0\ue9e1\ue9e2\ue9e3\ue9e4\ue9e5\ue9e6\ue9e7\ue9e8\ue9e9\ue9ea\ue9eb\ue9ec\ue9ed\ue9ee\ue9ef\ue9f0\ue9f1\ue9f2\ue9f3\ue9f4\ue9f5\ue9f6\ue9f7\ue9f8\ue9f9\ue9fa\ue9fb\ue9fc\ue9fd\ue9fe\ue9ff\uea00\uea01\uea02\uea03\uea04\uea05\uea06\uea07\uea08\uea09\uea0a\uea0b\uea0c\uea0d\uea0e\uea0f\uea10\uea11\uea12\uea13\uea14\uea15\uea16\uea17\uea18\uea19\uea1a\uea1b\uea1c\uea1d\uea1e\uea1f\uea20\uea21\uea22\uea23\uea24\uea25\uea26\uea27\uea28\uea29\uea2a\uea2b\uea2c\uea2d\uea2e\uea2f\uea30\uea31\uea32\uea33\uea34\uea35\uea36\uea37\uea38\uea39\uea3a\uea3b\uea3c\uea3d\uea3e\uea3f\uea40\uea41\uea42\uea43\uea44\uea45\uea46\uea47\uea48\uea49\uea4a\uea4b\uea4c\uea4d\uea4e\uea4f\uea50\uea51\uea52\uea53\uea54\uea55\uea56\uea57\uea58\uea59\uea5a\uea5b\uea5c\uea5d\uea5e\uea5f\uea60\uea61\uea62\uea63\uea64\uea65\uea66\uea67\uea68\uea69\uea6a\uea6b\uea6c\uea6d\uea6e\uea6f\uea70\uea71\uea72\uea73\uea74\uea75\uea76\uea77\uea78\uea79\uea7a\uea7b\uea7c\uea7d\uea7e\uea7f\uea80\uea81\uea82\uea83\uea84\uea85\uea86\uea87\uea88\uea89\uea8a\uea8b\uea8c\uea8d\uea8e\uea8f\uea90\uea91\uea92\uea93\uea94\uea95\uea96\uea97\uea98\uea99\uea9a\uea9b\uea9c\uea9d\uea9e\uea9f\ueaa0\ueaa1\ueaa2\ueaa3\ueaa4\ueaa5\ueaa6\ueaa7\ueaa8\ueaa9\ueaaa\ueaab\ueaac\ueaad\ueaae\ueaaf\ueab0\ueab1\ueab2\ueab3\ueab4\ueab5\ueab6\ueab7\ueab8\ueab9\ueaba\ueabb\ueabc\ueabd\ueabe\ueabf\ueac0\ueac1\ueac2\ueac3\ueac4\ueac5\ueac6\ueac7\ueac8\ueac9\ueaca\ueacb\ueacc\ueacd\ueace\ueacf\uead0\uead1\uead2\uead3\uead4\uead5\uead6\uead7\uead8\uead9\ueada\ueadb\ueadc\ueadd\ueade\ueadf\ueae0\ueae1\ueae2\ueae3\ueae4\ueae5\ueae6\ueae7\ueae8\ueae9\ueaea\ueaeb\ueaec\ueaed\ueaee\ueaef\ueaf0\ueaf1\ueaf2\ueaf3\ueaf4\ueaf5\ueaf6\ueaf7\ueaf8\ueaf9\ueafa\ueafb\ueafc\ueafd\ueafe\ueaff\ueb00\ueb01\ueb02\ueb03\ueb04\ueb05\ueb06\ueb07\ueb08\ueb09\ueb0a\ueb0b\ueb0c\ueb0d\ueb0e\ueb0f\ueb10\ueb11\ueb12\ueb13\ueb14\ueb15\ueb16\ueb17\ueb18\ueb19\ueb1a\ueb1b\ueb1c\ueb1d\ueb1e\ueb1f\ueb20\ueb21\ueb22\ueb23\ueb24\ueb25\ueb26\ueb27\ueb28\ueb29\ueb2a\ueb2b\ueb2c\ueb2d\ueb2e\ueb2f\ueb30\ueb31\ueb32\ueb33\ueb34\ueb35\ueb36\ueb37\ueb38\ueb39\ueb3a\ueb3b\ueb3c\ueb3d\ueb3e\ueb3f\ueb40\ueb41\ueb42\ueb43\ueb44\ueb45\ueb46\ueb47\ueb48\ueb49\ueb4a\ueb4b\ueb4c\ueb4d\ueb4e\ueb4f\ueb50\ueb51\ueb52\ueb53\ueb54\ueb55\ueb56\ueb57\ueb58\ueb59\ueb5a\ueb5b\ueb5c\ueb5d\ueb5e\ueb5f\ueb60\ueb61\ueb62\ueb63\ueb64\ueb65\ueb66\ueb67\ueb68\ueb69\ueb6a\ueb6b\ueb6c\ueb6d\ueb6e\ueb6f\ueb70\ueb71\ueb72\ueb73\ueb74\ueb75\ueb76\ueb77\ueb78\ueb79\ueb7a\ueb7b\ueb7c\ueb7d\ueb7e\ueb7f\ueb80\ueb81\ueb82\ueb83\ueb84\ueb85\ueb86\ueb87\ueb88\ueb89\ueb8a\ueb8b\ueb8c\ueb8d\ueb8e\ueb8f\ueb90\
ueb91\ueb92\ueb93\ueb94\ueb95\ueb96\ueb97\ueb98\ueb99\ueb9a\ueb9b\ueb9c\ueb9d\ueb9e\ueb9f\ueba0\ueba1\ueba2\ueba3\ueba4\ueba5\ueba6\ueba7\ueba8\ueba9\uebaa\uebab\uebac\uebad\uebae\uebaf\uebb0\uebb1\uebb2\uebb3\uebb4\uebb5\uebb6\uebb7\uebb8\uebb9\uebba\uebbb\uebbc\uebbd\uebbe\uebbf\uebc0\uebc1\uebc2\uebc3\uebc4\uebc5\uebc6\uebc7\uebc8\uebc9\uebca\uebcb\uebcc\uebcd\uebce\uebcf\uebd0\uebd1\uebd2\uebd3\uebd4\uebd5\uebd6\uebd7\uebd8\uebd9\uebda\uebdb\uebdc\uebdd\uebde\uebdf\uebe0\uebe1\uebe2\uebe3\uebe4\uebe5\uebe6\uebe7\uebe8\uebe9\uebea\uebeb\uebec\uebed\uebee\uebef\uebf0\uebf1\uebf2\uebf3\uebf4\uebf5\uebf6\uebf7\uebf8\uebf9\uebfa\uebfb\uebfc\uebfd\uebfe\uebff\uec00\uec01\uec02\uec03\uec04\uec05\uec06\uec07\uec08\uec09\uec0a\uec0b\uec0c\uec0d\uec0e\uec0f\uec10\uec11\uec12\uec13\uec14\uec15\uec16\uec17\uec18\uec19\uec1a\uec1b\uec1c\uec1d\uec1e\uec1f\uec20\uec21\uec22\uec23\uec24\uec25\uec26\uec27\uec28\uec29\uec2a\uec2b\uec2c\uec2d\uec2e\uec2f\uec30\uec31\uec32\uec33\uec34\uec35\uec36\uec37\uec38\uec39\uec3a\uec3b\uec3c\uec3d\uec3e\uec3f\uec40\uec41\uec42\uec43\uec44\uec45\uec46\uec47\uec48\uec49\uec4a\uec4b\uec4c\uec4d\uec4e\uec4f\uec50\uec51\uec52\uec53\uec54\uec55\uec56\uec57\uec58\uec59\uec5a\uec5b\uec5c\uec5d\uec5e\uec5f\uec60\uec61\uec62\uec63\uec64\uec65\uec66\uec67\uec68\uec69\uec6a\uec6b\uec6c\uec6d\uec6e\uec6f\uec70\uec71\uec72\uec73\uec74\uec75\uec76\uec77\uec78\uec79\uec7a\uec7b\uec7c\uec7d\uec7e\uec7f\uec80\uec81\uec82\uec83\uec84\uec85\uec86\uec87\uec88\uec89\uec8a\uec8b\uec8c\uec8d\uec8e\uec8f\uec90\uec91\uec92\uec93\uec94\uec95\uec96\uec97\uec98\uec99\uec9a\uec9b\uec9c\uec9d\uec9e\uec9f\ueca0\ueca1\ueca2\ueca3\ueca4\ueca5\ueca6\ueca7\ueca8\ueca9\uecaa\uecab\uecac\uecad\uecae\uecaf\uecb0\uecb1\uecb2\uecb3\uecb4\uecb5\uecb6\uecb7\uecb8\uecb9\uecba\uecbb\uecbc\uecbd\uecbe\uecbf\uecc0\uecc1\uecc2\uecc3\uecc4\uecc5\uecc6\uecc7\uecc8\uecc9\uecca\ueccb\ueccc\ueccd\uecce\ueccf\uecd0\uecd1\uecd2\uecd3\uecd4\uecd5\uecd6\uecd7\uecd8\uecd9\uecda\uecdb\uecdc\uecdd\uecde\uecdf\uece0\uece1\uece2\uece3\uece4\uece5\uece6\uece7\uece8\uece9\uecea\ueceb\uecec\ueced\uecee\uecef\uecf0\uecf1\uecf2\uecf3\uecf4\uecf5\uecf6\uecf7\uecf8\uecf9\uecfa\uecfb\uecfc\uecfd\uecfe\uecff\ued00\ued01\ued02\ued03\ued04\ued05\ued06\ued07\ued08\ued09\ued0a\ued0b\ued0c\ued0d\ued0e\ued0f\ued10\ued11\ued12\ued13\ued14\ued15\ued16\ued17\ued18\ued19\ued1a\ued1b\ued1c\ued1d\ued1e\ued1f\ued20\ued21\ued22\ued23\ued24\ued25\ued26\ued27\ued28\ued29\ued2a\ued2b\ued2c\ued2d\ued2e\ued2f\ued30\ued31\ued32\ued33\ued34\ued35\ued36\ued37\ued38\ued39\ued3a\ued3b\ued3c\ued3d\ued3e\ued3f\ued40\ued41\ued42\ued43\ued44\ued45\ued46\ued47\ued48\ued49\ued4a\ued4b\ued4c\ued4d\ued4e\ued4f\ued50\ued51\ued52\ued53\ued54\ued55\ued56\ued57\ued58\ued59\ued5a\ued5b\ued5c\ued5d\ued5e\ued5f\ued60\ued61\ued62\ued63\ued64\ued65\ued66\ued67\ued68\ued69\ued6a\ued6b\ued6c\ued6d\ued6e\ued6f\ued70\ued71\ued72\ued73\ued74\ued75\ued76\ued77\ued78\ued79\ued7a\ued7b\ued7c\ued7d\ued7e\ued7f\ued80\ued81\ued82\ued83\ued84\ued85\ued86\ued87\ued88\ued89\ued8a\ued8b\ued8c\ued8d\ued8e\ued8f\ued90\ued91\ued92\ued93\ued94\ued95\ued96\ued97\ued98\ued99\ued9a\ued9b\ued9c\ued9d\ued9e\ued9f\ueda0\ueda1\ueda2\ueda3\ueda4\ueda5\ueda6\ueda7\ueda8\ueda9\uedaa\uedab\uedac\uedad\uedae\uedaf\uedb0\uedb1\uedb2\uedb3\uedb4\uedb5\uedb6\uedb7\uedb8\uedb9\uedba\uedbb\uedbc\uedbd\uedbe\uedbf\uedc0\uedc1\uedc2\uedc3\uedc4\uedc5\uedc6\uedc7\uedc8\uedc9\uedca\uedcb\uedcc\uedcd\uedce\uedcf\uedd0\uedd1\uedd2\uedd3\uedd4\uedd5\uedd6\uedd7\uedd8\uedd9\uedda\ueddb\ueddc\ueddd\uedde\ueddf\uede0\ued
e1\uede2\uede3\uede4\uede5\uede6\uede7\uede8\uede9\uedea\uedeb\uedec\ueded\uedee\uedef\uedf0\uedf1\uedf2\uedf3\uedf4\uedf5\uedf6\uedf7\uedf8\uedf9\uedfa\uedfb\uedfc\uedfd\uedfe\uedff\uee00\uee01\uee02\uee03\uee04\uee05\uee06\uee07\uee08\uee09\uee0a\uee0b\uee0c\uee0d\uee0e\uee0f\uee10\uee11\uee12\uee13\uee14\uee15\uee16\uee17\uee18\uee19\uee1a\uee1b\uee1c\uee1d\uee1e\uee1f\uee20\uee21\uee22\uee23\uee24\uee25\uee26\uee27\uee28\uee29\uee2a\uee2b\uee2c\uee2d\uee2e\uee2f\uee30\uee31\uee32\uee33\uee34\uee35\uee36\uee37\uee38\uee39\uee3a\uee3b\uee3c\uee3d\uee3e\uee3f\uee40\uee41\uee42\uee43\uee44\uee45\uee46\uee47\uee48\uee49\uee4a\uee4b\uee4c\uee4d\uee4e\uee4f\uee50\uee51\uee52\uee53\uee54\uee55\uee56\uee57\uee58\uee59\uee5a\uee5b\uee5c\uee5d\uee5e\uee5f\uee60\uee61\uee62\uee63\uee64\uee65\uee66\uee67\uee68\uee69\uee6a\uee6b\uee6c\uee6d\uee6e\uee6f\uee70\uee71\uee72\uee73\uee74\uee75\uee76\uee77\uee78\uee79\uee7a\uee7b\uee7c\uee7d\uee7e\uee7f\uee80\uee81\uee82\uee83\uee84\uee85\uee86\uee87\uee88\uee89\uee8a\uee8b\uee8c\uee8d\uee8e\uee8f\uee90\uee91\uee92\uee93\uee94\uee95\uee96\uee97\uee98\uee99\uee9a\uee9b\uee9c\uee9d\uee9e\uee9f\ueea0\ueea1\ueea2\ueea3\ueea4\ueea5\ueea6\ueea7\ueea8\ueea9\ueeaa\ueeab\ueeac\ueead\ueeae\ueeaf\ueeb0\ueeb1\ueeb2\ueeb3\ueeb4\ueeb5\ueeb6\ueeb7\ueeb8\ueeb9\ueeba\ueebb\ueebc\ueebd\ueebe\ueebf\ueec0\ueec1\ueec2\ueec3\ueec4\ueec5\ueec6\ueec7\ueec8\ueec9\ueeca\ueecb\ueecc\ueecd\ueece\ueecf\ueed0\ueed1\ueed2\ueed3\ueed4\ueed5\ueed6\ueed7\ueed8\ueed9\ueeda\ueedb\ueedc\ueedd\ueede\ueedf\ueee0\ueee1\ueee2\ueee3\ueee4\ueee5\ueee6\ueee7\ueee8\ueee9\ueeea\ueeeb\ueeec\ueeed\ueeee\ueeef\ueef0\ueef1\ueef2\ueef3\ueef4\ueef5\ueef6\ueef7\ueef8\ueef9\ueefa\ueefb\ueefc\ueefd\ueefe\ueeff\uef00\uef01\uef02\uef03\uef04\uef05\uef06\uef07\uef08\uef09\uef0a\uef0b\uef0c\uef0d\uef0e\uef0f\uef10\uef11\uef12\uef13\uef14\uef15\uef16\uef17\uef18\uef19\uef1a\uef1b\uef1c\uef1d\uef1e\uef1f\uef20\uef21\uef22\uef23\uef24\uef25\uef26\uef27\uef28\uef29\uef2a\uef2b\uef2c\uef2d\uef2e\uef2f\uef30\uef31\uef32\uef33\uef34\uef35\uef36\uef37\uef38\uef39\uef3a\uef3b\uef3c\uef3d\uef3e\uef3f\uef40\uef41\uef42\uef43\uef44\uef45\uef46\uef47\uef48\uef49\uef4a\uef4b\uef4c\uef4d\uef4e\uef4f\uef50\uef51\uef52\uef53\uef54\uef55\uef56\uef57\uef58\uef59\uef5a\uef5b\uef5c\uef5d\uef5e\uef5f\uef60\uef61\uef62\uef63\uef64\uef65\uef66\uef67\uef68\uef69\uef6a\uef6b\uef6c\uef6d\uef6e\uef6f\uef70\uef71\uef72\uef73\uef74\uef75\uef76\uef77\uef78\uef79\uef7a\uef7b\uef7c\uef7d\uef7e\uef7f\uef80\uef81\uef82\uef83\uef84\uef85\uef86\uef87\uef88\uef89\uef8a\uef8b\uef8c\uef8d\uef8e\uef8f\uef90\uef91\uef92\uef93\uef94\uef95\uef96\uef97\uef98\uef99\uef9a\uef9b\uef9c\uef9d\uef9e\uef9f\uefa0\uefa1\uefa2\uefa3\uefa4\uefa5\uefa6\uefa7\uefa8\uefa9\uefaa\uefab\uefac\uefad\uefae\uefaf\uefb0\uefb1\uefb2\uefb3\uefb4\uefb5\uefb6\uefb7\uefb8\uefb9\uefba\uefbb\uefbc\uefbd\uefbe\uefbf\uefc0\uefc1\uefc2\uefc3\uefc4\uefc5\uefc6\uefc7\uefc8\uefc9\uefca\uefcb\uefcc\uefcd\uefce\uefcf\uefd0\uefd1\uefd2\uefd3\uefd4\uefd5\uefd6\uefd7\uefd8\uefd9\uefda\uefdb\uefdc\uefdd\uefde\uefdf\uefe0\uefe1\uefe2\uefe3\uefe4\uefe5\uefe6\uefe7\uefe8\uefe9\uefea\uefeb\uefec\uefed\uefee\uefef\ueff0\ueff1\ueff2\ueff3\ueff4\ueff5\ueff6\ueff7\ueff8\ueff9\ueffa\ueffb\ueffc\ueffd\ueffe\uefff\uf000\uf001\uf002\uf003\uf004\uf005\uf006\uf007\uf008\uf009\uf00a\uf00b\uf00c\uf00d\uf00e\uf00f\uf010\uf011\uf012\uf013\uf014\uf015\uf016\uf017\uf018\uf019\uf01a\uf01b\uf01c\uf01d\uf01e\uf01f\uf020\uf021\uf022\uf023\uf024\uf025\uf026\uf027\uf028\uf029\uf02a\uf02b\uf02c\uf02d\uf02e\uf02f\uf030\uf031\
uf032\uf033\uf034\uf035\uf036\uf037\uf038\uf039\uf03a\uf03b\uf03c\uf03d\uf03e\uf03f\uf040\uf041\uf042\uf043\uf044\uf045\uf046\uf047\uf048\uf049\uf04a\uf04b\uf04c\uf04d\uf04e\uf04f\uf050\uf051\uf052\uf053\uf054\uf055\uf056\uf057\uf058\uf059\uf05a\uf05b\uf05c\uf05d\uf05e\uf05f\uf060\uf061\uf062\uf063\uf064\uf065\uf066\uf067\uf068\uf069\uf06a\uf06b\uf06c\uf06d\uf06e\uf06f\uf070\uf071\uf072\uf073\uf074\uf075\uf076\uf077\uf078\uf079\uf07a\uf07b\uf07c\uf07d\uf07e\uf07f\uf080\uf081\uf082\uf083\uf084\uf085\uf086\uf087\uf088\uf089\uf08a\uf08b\uf08c\uf08d\uf08e\uf08f\uf090\uf091\uf092\uf093\uf094\uf095\uf096\uf097\uf098\uf099\uf09a\uf09b\uf09c\uf09d\uf09e\uf09f\uf0a0\uf0a1\uf0a2\uf0a3\uf0a4\uf0a5\uf0a6\uf0a7\uf0a8\uf0a9\uf0aa\uf0ab\uf0ac\uf0ad\uf0ae\uf0af\uf0b0\uf0b1\uf0b2\uf0b3\uf0b4\uf0b5\uf0b6\uf0b7\uf0b8\uf0b9\uf0ba\uf0bb\uf0bc\uf0bd\uf0be\uf0bf\uf0c0\uf0c1\uf0c2\uf0c3\uf0c4\uf0c5\uf0c6\uf0c7\uf0c8\uf0c9\uf0ca\uf0cb\uf0cc\uf0cd\uf0ce\uf0cf\uf0d0\uf0d1\uf0d2\uf0d3\uf0d4\uf0d5\uf0d6\uf0d7\uf0d8\uf0d9\uf0da\uf0db\uf0dc\uf0dd\uf0de\uf0df\uf0e0\uf0e1\uf0e2\uf0e3\uf0e4\uf0e5\uf0e6\uf0e7\uf0e8\uf0e9\uf0ea\uf0eb\uf0ec\uf0ed\uf0ee\uf0ef\uf0f0\uf0f1\uf0f2\uf0f3\uf0f4\uf0f5\uf0f6\uf0f7\uf0f8\uf0f9\uf0fa\uf0fb\uf0fc\uf0fd\uf0fe\uf0ff\uf100\uf101\uf102\uf103\uf104\uf105\uf106\uf107\uf108\uf109\uf10a\uf10b\uf10c\uf10d\uf10e\uf10f\uf110\uf111\uf112\uf113\uf114\uf115\uf116\uf117\uf118\uf119\uf11a\uf11b\uf11c\uf11d\uf11e\uf11f\uf120\uf121\uf122\uf123\uf124\uf125\uf126\uf127\uf128\uf129\uf12a\uf12b\uf12c\uf12d\uf12e\uf12f\uf130\uf131\uf132\uf133\uf134\uf135\uf136\uf137\uf138\uf139\uf13a\uf13b\uf13c\uf13d\uf13e\uf13f\uf140\uf141\uf142\uf143\uf144\uf145\uf146\uf147\uf148\uf149\uf14a\uf14b\uf14c\uf14d\uf14e\uf14f\uf150\uf151\uf152\uf153\uf154\uf155\uf156\uf157\uf158\uf159\uf15a\uf15b\uf15c\uf15d\uf15e\uf15f\uf160\uf161\uf162\uf163\uf164\uf165\uf166\uf167\uf168\uf169\uf16a\uf16b\uf16c\uf16d\uf16e\uf16f\uf170\uf171\uf172\uf173\uf174\uf175\uf176\uf177\uf178\uf179\uf17a\uf17b\uf17c\uf17d\uf17e\uf17f\uf180\uf181\uf182\uf183\uf184\uf185\uf186\uf187\uf188\uf189\uf18a\uf18b\uf18c\uf18d\uf18e\uf18f\uf190\uf191\uf192\uf193\uf194\uf195\uf196\uf197\uf198\uf199\uf19a\uf19b\uf19c\uf19d\uf19e\uf19f\uf1a0\uf1a1\uf1a2\uf1a3\uf1a4\uf1a5\uf1a6\uf1a7\uf1a8\uf1a9\uf1aa\uf1ab\uf1ac\uf1ad\uf1ae\uf1af\uf1b0\uf1b1\uf1b2\uf1b3\uf1b4\uf1b5\uf1b6\uf1b7\uf1b8\uf1b9\uf1ba\uf1bb\uf1bc\uf1bd\uf1be\uf1bf\uf1c0\uf1c1\uf1c2\uf1c3\uf1c4\uf1c5\uf1c6\uf1c7\uf1c8\uf1c9\uf1ca\uf1cb\uf1cc\uf1cd\uf1ce\uf1cf\uf1d0\uf1d1\uf1d2\uf1d3\uf1d4\uf1d5\uf1d6\uf1d7\uf1d8\uf1d9\uf1da\uf1db\uf1dc\uf1dd\uf1de\uf1df\uf1e0\uf1e1\uf1e2\uf1e3\uf1e4\uf1e5\uf1e6\uf1e7\uf1e8\uf1e9\uf1ea\uf1eb\uf1ec\uf1ed\uf1ee\uf1ef\uf1f0\uf1f1\uf1f2\uf1f3\uf1f4\uf1f5\uf1f6\uf1f7\uf1f8\uf1f9\uf1fa\uf1fb\uf1fc\uf1fd\uf1fe\uf1ff\uf200\uf201\uf202\uf203\uf204\uf205\uf206\uf207\uf208\uf209\uf20a\uf20b\uf20c\uf20d\uf20e\uf20f\uf210\uf211\uf212\uf213\uf214\uf215\uf216\uf217\uf218\uf219\uf21a\uf21b\uf21c\uf21d\uf21e\uf21f\uf220\uf221\uf222\uf223\uf224\uf225\uf226\uf227\uf228\uf229\uf22a\uf22b\uf22c\uf22d\uf22e\uf22f\uf230\uf231\uf232\uf233\uf234\uf235\uf236\uf237\uf238\uf239\uf23a\uf23b\uf23c\uf23d\uf23e\uf23f\uf240\uf241\uf242\uf243\uf244\uf245\uf246\uf247\uf248\uf249\uf24a\uf24b\uf24c\uf24d\uf24e\uf24f\uf250\uf251\uf252\uf253\uf254\uf255\uf256\uf257\uf258\uf259\uf25a\uf25b\uf25c\uf25d\uf25e\uf25f\uf260\uf261\uf262\uf263\uf264\uf265\uf266\uf267\uf268\uf269\uf26a\uf26b\uf26c\uf26d\uf26e\uf26f\uf270\uf271\uf272\uf273\uf274\uf275\uf276\uf277\uf278\uf279\uf27a\uf27b\uf27c\uf27d\uf27e\uf27f\uf280\uf281\uf2
82\uf283\uf284\uf285\uf286\uf287\uf288\uf289\uf28a\uf28b\uf28c\uf28d\uf28e\uf28f\uf290\uf291\uf292\uf293\uf294\uf295\uf296\uf297\uf298\uf299\uf29a\uf29b\uf29c\uf29d\uf29e\uf29f\uf2a0\uf2a1\uf2a2\uf2a3\uf2a4\uf2a5\uf2a6\uf2a7\uf2a8\uf2a9\uf2aa\uf2ab\uf2ac\uf2ad\uf2ae\uf2af\uf2b0\uf2b1\uf2b2\uf2b3\uf2b4\uf2b5\uf2b6\uf2b7\uf2b8\uf2b9\uf2ba\uf2bb\uf2bc\uf2bd\uf2be\uf2bf\uf2c0\uf2c1\uf2c2\uf2c3\uf2c4\uf2c5\uf2c6\uf2c7\uf2c8\uf2c9\uf2ca\uf2cb\uf2cc\uf2cd\uf2ce\uf2cf\uf2d0\uf2d1\uf2d2\uf2d3\uf2d4\uf2d5\uf2d6\uf2d7\uf2d8\uf2d9\uf2da\uf2db\uf2dc\uf2dd\uf2de\uf2df\uf2e0\uf2e1\uf2e2\uf2e3\uf2e4\uf2e5\uf2e6\uf2e7\uf2e8\uf2e9\uf2ea\uf2eb\uf2ec\uf2ed\uf2ee\uf2ef\uf2f0\uf2f1\uf2f2\uf2f3\uf2f4\uf2f5\uf2f6\uf2f7\uf2f8\uf2f9\uf2fa\uf2fb\uf2fc\uf2fd\uf2fe\uf2ff\uf300\uf301\uf302\uf303\uf304\uf305\uf306\uf307\uf308\uf309\uf30a\uf30b\uf30c\uf30d\uf30e\uf30f\uf310\uf311\uf312\uf313\uf314\uf315\uf316\uf317\uf318\uf319\uf31a\uf31b\uf31c\uf31d\uf31e\uf31f\uf320\uf321\uf322\uf323\uf324\uf325\uf326\uf327\uf328\uf329\uf32a\uf32b\uf32c\uf32d\uf32e\uf32f\uf330\uf331\uf332\uf333\uf334\uf335\uf336\uf337\uf338\uf339\uf33a\uf33b\uf33c\uf33d\uf33e\uf33f\uf340\uf341\uf342\uf343\uf344\uf345\uf346\uf347\uf348\uf349\uf34a\uf34b\uf34c\uf34d\uf34e\uf34f\uf350\uf351\uf352\uf353\uf354\uf355\uf356\uf357\uf358\uf359\uf35a\uf35b\uf35c\uf35d\uf35e\uf35f\uf360\uf361\uf362\uf363\uf364\uf365\uf366\uf367\uf368\uf369\uf36a\uf36b\uf36c\uf36d\uf36e\uf36f\uf370\uf371\uf372\uf373\uf374\uf375\uf376\uf377\uf378\uf379\uf37a\uf37b\uf37c\uf37d\uf37e\uf37f\uf380\uf381\uf382\uf383\uf384\uf385\uf386\uf387\uf388\uf389\uf38a\uf38b\uf38c\uf38d\uf38e\uf38f\uf390\uf391\uf392\uf393\uf394\uf395\uf396\uf397\uf398\uf399\uf39a\uf39b\uf39c\uf39d\uf39e\uf39f\uf3a0\uf3a1\uf3a2\uf3a3\uf3a4\uf3a5\uf3a6\uf3a7\uf3a8\uf3a9\uf3aa\uf3ab\uf3ac\uf3ad\uf3ae\uf3af\uf3b0\uf3b1\uf3b2\uf3b3\uf3b4\uf3b5\uf3b6\uf3b7\uf3b8\uf3b9\uf3ba\uf3bb\uf3bc\uf3bd\uf3be\uf3bf\uf3c0\uf3c1\uf3c2\uf3c3\uf3c4\uf3c5\uf3c6\uf3c7\uf3c8\uf3c9\uf3ca\uf3cb\uf3cc\uf3cd\uf3ce\uf3cf\uf3d0\uf3d1\uf3d2\uf3d3\uf3d4\uf3d5\uf3d6\uf3d7\uf3d8\uf3d9\uf3da\uf3db\uf3dc\uf3dd\uf3de\uf3df\uf3e0\uf3e1\uf3e2\uf3e3\uf3e4\uf3e5\uf3e6\uf3e7\uf3e8\uf3e9\uf3ea\uf3eb\uf3ec\uf3ed\uf3ee\uf3ef\uf3f0\uf3f1\uf3f2\uf3f3\uf3f4\uf3f5\uf3f6\uf3f7\uf3f8\uf3f9\uf3fa\uf3fb\uf3fc\uf3fd\uf3fe\uf3ff\uf400\uf401\uf402\uf403\uf404\uf405\uf406\uf407\uf408\uf409\uf40a\uf40b\uf40c\uf40d\uf40e\uf40f\uf410\uf411\uf412\uf413\uf414\uf415\uf416\uf417\uf418\uf419\uf41a\uf41b\uf41c\uf41d\uf41e\uf41f\uf420\uf421\uf422\uf423\uf424\uf425\uf426\uf427\uf428\uf429\uf42a\uf42b\uf42c\uf42d\uf42e\uf42f\uf430\uf431\uf432\uf433\uf434\uf435\uf436\uf437\uf438\uf439\uf43a\uf43b\uf43c\uf43d\uf43e\uf43f\uf440\uf441\uf442\uf443\uf444\uf445\uf446\uf447\uf448\uf449\uf44a\uf44b\uf44c\uf44d\uf44e\uf44f\uf450\uf451\uf452\uf453\uf454\uf455\uf456\uf457\uf458\uf459\uf45a\uf45b\uf45c\uf45d\uf45e\uf45f\uf460\uf461\uf462\uf463\uf464\uf465\uf466\uf467\uf468\uf469\uf46a\uf46b\uf46c\uf46d\uf46e\uf46f\uf470\uf471\uf472\uf473\uf474\uf475\uf476\uf477\uf478\uf479\uf47a\uf47b\uf47c\uf47d\uf47e\uf47f\uf480\uf481\uf482\uf483\uf484\uf485\uf486\uf487\uf488\uf489\uf48a\uf48b\uf48c\uf48d\uf48e\uf48f\uf490\uf491\uf492\uf493\uf494\uf495\uf496\uf497\uf498\uf499\uf49a\uf49b\uf49c\uf49d\uf49e\uf49f\uf4a0\uf4a1\uf4a2\uf4a3\uf4a4\uf4a5\uf4a6\uf4a7\uf4a8\uf4a9\uf4aa\uf4ab\uf4ac\uf4ad\uf4ae\uf4af\uf4b0\uf4b1\uf4b2\uf4b3\uf4b4\uf4b5\uf4b6\uf4b7\uf4b8\uf4b9\uf4ba\uf4bb\uf4bc\uf4bd\uf4be\uf4bf\uf4c0\uf4c1\uf4c2\uf4c3\uf4c4\uf4c5\uf4c6\uf4c7\uf4c8\uf4c9\uf4ca\uf4cb\uf4cc\uf4cd\uf4ce\uf4cf\uf4d0\uf4d1\uf4d2\
uf4d3\uf4d4\uf4d5\uf4d6\uf4d7\uf4d8\uf4d9\uf4da\uf4db\uf4dc\uf4dd\uf4de\uf4df\uf4e0\uf4e1\uf4e2\uf4e3\uf4e4\uf4e5\uf4e6\uf4e7\uf4e8\uf4e9\uf4ea\uf4eb\uf4ec\uf4ed\uf4ee\uf4ef\uf4f0\uf4f1\uf4f2\uf4f3\uf4f4\uf4f5\uf4f6\uf4f7\uf4f8\uf4f9\uf4fa\uf4fb\uf4fc\uf4fd\uf4fe\uf4ff\uf500\uf501\uf502\uf503\uf504\uf505\uf506\uf507\uf508\uf509\uf50a\uf50b\uf50c\uf50d\uf50e\uf50f\uf510\uf511\uf512\uf513\uf514\uf515\uf516\uf517\uf518\uf519\uf51a\uf51b\uf51c\uf51d\uf51e\uf51f\uf520\uf521\uf522\uf523\uf524\uf525\uf526\uf527\uf528\uf529\uf52a\uf52b\uf52c\uf52d\uf52e\uf52f\uf530\uf531\uf532\uf533\uf534\uf535\uf536\uf537\uf538\uf539\uf53a\uf53b\uf53c\uf53d\uf53e\uf53f\uf540\uf541\uf542\uf543\uf544\uf545\uf546\uf547\uf548\uf549\uf54a\uf54b\uf54c\uf54d\uf54e\uf54f\uf550\uf551\uf552\uf553\uf554\uf555\uf556\uf557\uf558\uf559\uf55a\uf55b\uf55c\uf55d\uf55e\uf55f\uf560\uf561\uf562\uf563\uf564\uf565\uf566\uf567\uf568\uf569\uf56a\uf56b\uf56c\uf56d\uf56e\uf56f\uf570\uf571\uf572\uf573\uf574\uf575\uf576\uf577\uf578\uf579\uf57a\uf57b\uf57c\uf57d\uf57e\uf57f\uf580\uf581\uf582\uf583\uf584\uf585\uf586\uf587\uf588\uf589\uf58a\uf58b\uf58c\uf58d\uf58e\uf58f\uf590\uf591\uf592\uf593\uf594\uf595\uf596\uf597\uf598\uf599\uf59a\uf59b\uf59c\uf59d\uf59e\uf59f\uf5a0\uf5a1\uf5a2\uf5a3\uf5a4\uf5a5\uf5a6\uf5a7\uf5a8\uf5a9\uf5aa\uf5ab\uf5ac\uf5ad\uf5ae\uf5af\uf5b0\uf5b1\uf5b2\uf5b3\uf5b4\uf5b5\uf5b6\uf5b7\uf5b8\uf5b9\uf5ba\uf5bb\uf5bc\uf5bd\uf5be\uf5bf\uf5c0\uf5c1\uf5c2\uf5c3\uf5c4\uf5c5\uf5c6\uf5c7\uf5c8\uf5c9\uf5ca\uf5cb\uf5cc\uf5cd\uf5ce\uf5cf\uf5d0\uf5d1\uf5d2\uf5d3\uf5d4\uf5d5\uf5d6\uf5d7\uf5d8\uf5d9\uf5da\uf5db\uf5dc\uf5dd\uf5de\uf5df\uf5e0\uf5e1\uf5e2\uf5e3\uf5e4\uf5e5\uf5e6\uf5e7\uf5e8\uf5e9\uf5ea\uf5eb\uf5ec\uf5ed\uf5ee\uf5ef\uf5f0\uf5f1\uf5f2\uf5f3\uf5f4\uf5f5\uf5f6\uf5f7\uf5f8\uf5f9\uf5fa\uf5fb\uf5fc\uf5fd\uf5fe\uf5ff\uf600\uf601\uf602\uf603\uf604\uf605\uf606\uf607\uf608\uf609\uf60a\uf60b\uf60c\uf60d\uf60e\uf60f\uf610\uf611\uf612\uf613\uf614\uf615\uf616\uf617\uf618\uf619\uf61a\uf61b\uf61c\uf61d\uf61e\uf61f\uf620\uf621\uf622\uf623\uf624\uf625\uf626\uf627\uf628\uf629\uf62a\uf62b\uf62c\uf62d\uf62e\uf62f\uf630\uf631\uf632\uf633\uf634\uf635\uf636\uf637\uf638\uf639\uf63a\uf63b\uf63c\uf63d\uf63e\uf63f\uf640\uf641\uf642\uf643\uf644\uf645\uf646\uf647\uf648\uf649\uf64a\uf64b\uf64c\uf64d\uf64e\uf64f\uf650\uf651\uf652\uf653\uf654\uf655\uf656\uf657\uf658\uf659\uf65a\uf65b\uf65c\uf65d\uf65e\uf65f\uf660\uf661\uf662\uf663\uf664\uf665\uf666\uf667\uf668\uf669\uf66a\uf66b\uf66c\uf66d\uf66e\uf66f\uf670\uf671\uf672\uf673\uf674\uf675\uf676\uf677\uf678\uf679\uf67a\uf67b\uf67c\uf67d\uf67e\uf67f\uf680\uf681\uf682\uf683\uf684\uf685\uf686\uf687\uf688\uf689\uf68a\uf68b\uf68c\uf68d\uf68e\uf68f\uf690\uf691\uf692\uf693\uf694\uf695\uf696\uf697\uf698\uf699\uf69a\uf69b\uf69c\uf69d\uf69e\uf69f\uf6a0\uf6a1\uf6a2\uf6a3\uf6a4\uf6a5\uf6a6\uf6a7\uf6a8\uf6a9\uf6aa\uf6ab\uf6ac\uf6ad\uf6ae\uf6af\uf6b0\uf6b1\uf6b2\uf6b3\uf6b4\uf6b5\uf6b6\uf6b7\uf6b8\uf6b9\uf6ba\uf6bb\uf6bc\uf6bd\uf6be\uf6bf\uf6c0\uf6c1\uf6c2\uf6c3\uf6c4\uf6c5\uf6c6\uf6c7\uf6c8\uf6c9\uf6ca\uf6cb\uf6cc\uf6cd\uf6ce\uf6cf\uf6d0\uf6d1\uf6d2\uf6d3\uf6d4\uf6d5\uf6d6\uf6d7\uf6d8\uf6d9\uf6da\uf6db\uf6dc\uf6dd\uf6de\uf6df\uf6e0\uf6e1\uf6e2\uf6e3\uf6e4\uf6e5\uf6e6\uf6e7\uf6e8\uf6e9\uf6ea\uf6eb\uf6ec\uf6ed\uf6ee\uf6ef\uf6f0\uf6f1\uf6f2\uf6f3\uf6f4\uf6f5\uf6f6\uf6f7\uf6f8\uf6f9\uf6fa\uf6fb\uf6fc\uf6fd\uf6fe\uf6ff\uf700\uf701\uf702\uf703\uf704\uf705\uf706\uf707\uf708\uf709\uf70a\uf70b\uf70c\uf70d\uf70e\uf70f\uf710\uf711\uf712\uf713\uf714\uf715\uf716\uf717\uf718\uf719\uf71a\uf71b\uf71c\uf71d\uf71e\uf71f\uf720\uf721\uf722\uf7
23\uf724\uf725\uf726\uf727\uf728\uf729\uf72a\uf72b\uf72c\uf72d\uf72e\uf72f\uf730\uf731\uf732\uf733\uf734\uf735\uf736\uf737\uf738\uf739\uf73a\uf73b\uf73c\uf73d\uf73e\uf73f\uf740\uf741\uf742\uf743\uf744\uf745\uf746\uf747\uf748\uf749\uf74a\uf74b\uf74c\uf74d\uf74e\uf74f\uf750\uf751\uf752\uf753\uf754\uf755\uf756\uf757\uf758\uf759\uf75a\uf75b\uf75c\uf75d\uf75e\uf75f\uf760\uf761\uf762\uf763\uf764\uf765\uf766\uf767\uf768\uf769\uf76a\uf76b\uf76c\uf76d\uf76e\uf76f\uf770\uf771\uf772\uf773\uf774\uf775\uf776\uf777\uf778\uf779\uf77a\uf77b\uf77c\uf77d\uf77e\uf77f\uf780\uf781\uf782\uf783\uf784\uf785\uf786\uf787\uf788\uf789\uf78a\uf78b\uf78c\uf78d\uf78e\uf78f\uf790\uf791\uf792\uf793\uf794\uf795\uf796\uf797\uf798\uf799\uf79a\uf79b\uf79c\uf79d\uf79e\uf79f\uf7a0\uf7a1\uf7a2\uf7a3\uf7a4\uf7a5\uf7a6\uf7a7\uf7a8\uf7a9\uf7aa\uf7ab\uf7ac\uf7ad\uf7ae\uf7af\uf7b0\uf7b1\uf7b2\uf7b3\uf7b4\uf7b5\uf7b6\uf7b7\uf7b8\uf7b9\uf7ba\uf7bb\uf7bc\uf7bd\uf7be\uf7bf\uf7c0\uf7c1\uf7c2\uf7c3\uf7c4\uf7c5\uf7c6\uf7c7\uf7c8\uf7c9\uf7ca\uf7cb\uf7cc\uf7cd\uf7ce\uf7cf\uf7d0\uf7d1\uf7d2\uf7d3\uf7d4\uf7d5\uf7d6\uf7d7\uf7d8\uf7d9\uf7da\uf7db\uf7dc\uf7dd\uf7de\uf7df\uf7e0\uf7e1\uf7e2\uf7e3\uf7e4\uf7e5\uf7e6\uf7e7\uf7e8\uf7e9\uf7ea\uf7eb\uf7ec\uf7ed\uf7ee\uf7ef\uf7f0\uf7f1\uf7f2\uf7f3\uf7f4\uf7f5\uf7f6\uf7f7\uf7f8\uf7f9\uf7fa\uf7fb\uf7fc\uf7fd\uf7fe\uf7ff\uf800\uf801\uf802\uf803\uf804\uf805\uf806\uf807\uf808\uf809\uf80a\uf80b\uf80c\uf80d\uf80e\uf80f\uf810\uf811\uf812\uf813\uf814\uf815\uf816\uf817\uf818\uf819\uf81a\uf81b\uf81c\uf81d\uf81e\uf81f\uf820\uf821\uf822\uf823\uf824\uf825\uf826\uf827\uf828\uf829\uf82a\uf82b\uf82c\uf82d\uf82e\uf82f\uf830\uf831\uf832\uf833\uf834\uf835\uf836\uf837\uf838\uf839\uf83a\uf83b\uf83c\uf83d\uf83e\uf83f\uf840\uf841\uf842\uf843\uf844\uf845\uf846\uf847\uf848\uf849\uf84a\uf84b\uf84c\uf84d\uf84e\uf84f\uf850\uf851\uf852\uf853\uf854\uf855\uf856\uf857\uf858\uf859\uf85a\uf85b\uf85c\uf85d\uf85e\uf85f\uf860\uf861\uf862\uf863\uf864\uf865\uf866\uf867\uf868\uf869\uf86a\uf86b\uf86c\uf86d\uf86e\uf86f\uf870\uf871\uf872\uf873\uf874\uf875\uf876\uf877\uf878\uf879\uf87a\uf87b\uf87c\uf87d\uf87e\uf87f\uf880\uf881\uf882\uf883\uf884\uf885\uf886\uf887\uf888\uf889\uf88a\uf88b\uf88c\uf88d\uf88e\uf88f\uf890\uf891\uf892\uf893\uf894\uf895\uf896\uf897\uf898\uf899\uf89a\uf89b\uf89c\uf89d\uf89e\uf89f\uf8a0\uf8a1\uf8a2\uf8a3\uf8a4\uf8a5\uf8a6\uf8a7\uf8a8\uf8a9\uf8aa\uf8ab\uf8ac\uf8ad\uf8ae\uf8af\uf8b0\uf8b1\uf8b2\uf8b3\uf8b4\uf8b5\uf8b6\uf8b7\uf8b8\uf8b9\uf8ba\uf8bb\uf8bc\uf8bd\uf8be\uf8bf\uf8c0\uf8c1\uf8c2\uf8c3\uf8c4\uf8c5\uf8c6\uf8c7\uf8c8\uf8c9\uf8ca\uf8cb\uf8cc\uf8cd\uf8ce\uf8cf\uf8d0\uf8d1\uf8d2\uf8d3\uf8d4\uf8d5\uf8d6\uf8d7\uf8d8\uf8d9\uf8da\uf8db\uf8dc\uf8dd\uf8de\uf8df\uf8e0\uf8e1\uf8e2\uf8e3\uf8e4\uf8e5\uf8e6\uf8e7\uf8e8\uf8e9\uf8ea\uf8eb\uf8ec\uf8ed\uf8ee\uf8ef\uf8f0\uf8f1\uf8f2\uf8f3\uf8f4\uf8f5\uf8f6\uf8f7\uf8f8\uf8f9\uf8fa\uf8fb\uf8fc\uf8fd\uf8fe\uf8ff'
+Cn = u'\u0378-\u0379\u037f-\u0383\u038b\u038d\u03a2\u0528-\u0530\u0557-\u0558\u0560\u0588\u058b-\u058e\u0590\u05c8-\u05cf\u05eb-\u05ef\u05f5-\u05ff\u0605\u061d\u070e\u074b-\u074c\u07b2-\u07bf\u07fb-\u07ff\u082e-\u082f\u083f\u085c-\u085d\u085f-\u089f\u08a1\u08ad-\u08e3\u08ff\u0978\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09c5-\u09c6\u09c9-\u09ca\u09cf-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09fc-\u0a00\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a50\u0a52-\u0a58\u0a5d\u0a5f-\u0a65\u0a76-\u0a80\u0a84\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0acf\u0ad1-\u0adf\u0ae4-\u0ae5\u0af2-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34\u0b3a-\u0b3b\u0b45-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b64-\u0b65\u0b78-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bcf\u0bd1-\u0bd6\u0bd8-\u0be5\u0bfb-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3c\u0c45\u0c49\u0c4e-\u0c54\u0c57\u0c5a-\u0c5f\u0c64-\u0c65\u0c70-\u0c77\u0c80-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbb\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce4-\u0ce5\u0cf0\u0cf3-\u0d01\u0d04\u0d0d\u0d11\u0d3b-\u0d3c\u0d45\u0d49\u0d4f-\u0d56\u0d58-\u0d5f\u0d64-\u0d65\u0d76-\u0d78\u0d80-\u0d81\u0d84\u0d97-\u0d99\u0db2\u0dbc\u0dbe-\u0dbf\u0dc7-\u0dc9\u0dcb-\u0dce\u0dd5\u0dd7\u0de0-\u0df1\u0df5-\u0e00\u0e3b-\u0e3e\u0e5c-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0edb\u0ee0-\u0eff\u0f48\u0f6d-\u0f70\u0f98\u0fbd\u0fcd\u0fdb-\u0fff\u10c6\u10c8-\u10cc\u10ce-\u10cf\u1249\u124e-\u124f\u1257\u1259\u125e-\u125f\u1289\u128e-\u128f\u12b1\u12b6-\u12b7\u12bf\u12c1\u12c6-\u12c7\u12d7\u1311\u1316-\u1317\u135b-\u135c\u137d-\u137f\u139a-\u139f\u13f5-\u13ff\u169d-\u169f\u16f1-\u16ff\u170d\u1715-\u171f\u1737-\u173f\u1754-\u175f\u176d\u1771\u1774-\u177f\u17de-\u17df\u17ea-\u17ef\u17fa-\u17ff\u180f\u181a-\u181f\u1878-\u187f\u18ab-\u18af\u18f6-\u18ff\u191d-\u191f\u192c-\u192f\u193c-\u193f\u1941-\u1943\u196e-\u196f\u1975-\u197f\u19ac-\u19af\u19ca-\u19cf\u19db-\u19dd\u1a1c-\u1a1d\u1a5f\u1a7d-\u1a7e\u1a8a-\u1a8f\u1a9a-\u1a9f\u1aae-\u1aff\u1b4c-\u1b4f\u1b7d-\u1b7f\u1bf4-\u1bfb\u1c38-\u1c3a\u1c4a-\u1c4c\u1c80-\u1cbf\u1cc8-\u1ccf\u1cf7-\u1cff\u1de7-\u1dfb\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fc5\u1fd4-\u1fd5\u1fdc\u1ff0-\u1ff1\u1ff5\u1fff\u2065\u2072-\u2073\u208f\u209d-\u209f\u20bb-\u20cf\u20f1-\u20ff\u218a-\u218f\u23f4-\u23ff\u2427-\u243f\u244b-\u245f\u2700\u2b4d-\u2b4f\u2b5a-\u2bff\u2c2f\u2c5f\u2cf4-\u2cf8\u2d26\u2d28-\u2d2c\u2d2e-\u2d2f\u2d68-\u2d6e\u2d71-\u2d7e\u2d97-\u2d9f\u2da7\u2daf\u2db7\u2dbf\u2dc7\u2dcf\u2dd7\u2ddf\u2e3c-\u2e7f\u2e9a\u2ef4-\u2eff\u2fd6-\u2fef\u2ffc-\u2fff\u3040\u3097-\u3098\u3100-\u3104\u312e-\u3130\u318f\u31bb-\u31bf\u31e4-\u31ef\u321f\u32ff\u4db6-\u4dbf\u9fcd-\u9fff\ua48d-\ua48f\ua4c7-\ua4cf\ua62c-\ua63f\ua698-\ua69e\ua6f8-\ua6ff\ua78f\ua794-\ua79f\ua7ab-\ua7f7\ua82c-\ua82f\ua83a-\ua83f\ua878-\ua87f\ua8c5-\ua8cd\ua8da-\ua8df\ua8fc-\ua8ff\ua954-\ua95e\ua97d-\ua97f\ua9ce\ua9da-\ua9dd\ua9e0-\ua9ff\uaa37-\uaa3f\uaa4e-\uaa4f\uaa5a-\uaa5b\uaa7c-\uaa7f\uaac3-\uaada\uaaf7-\uab00\uab07-\uab08\uab0f-\uab10\uab17-\uab1f\uab27\uab2f-\uabbf\uabee-\uabef\uabfa-\uabff\ud7a4-\ud7af\ud7c7-\ud7ca\ud7fc-\ud7ff\ufa6e-\ufa6f\ufada-\ufaff\ufb07-\ufb12\ufb18-\ufb1c\ufb37\ufb3
d\ufb3f\ufb42\ufb45\ufbc2-\ufbd2\ufd40-\ufd4f\ufd90-\ufd91\ufdc8-\ufdef\ufdfe-\ufdff\ufe1a-\ufe1f\ufe27-\ufe2f\ufe53\ufe67\ufe6c-\ufe6f\ufe75\ufefd-\ufefe\uff00\uffbf-\uffc1\uffc8-\uffc9\uffd0-\uffd1\uffd8-\uffd9\uffdd-\uffdf\uffe7\uffef-\ufff8\ufffe-\uffff'
+
+Co = u'\ue000-\uf8ff'
try:
- Cs = eval(u_prefix + r"'\ud800\ud801\ud802\ud803\ud804\ud805\ud806\ud807\ud808\ud809\ud80a\ud80b\ud80c\ud80d\ud80e\ud80f\ud810\ud811\ud812\ud813\ud814\ud815\ud816\ud817\ud818\ud819\ud81a\ud81b\ud81c\ud81d\ud81e\ud81f\ud820\ud821\ud822\ud823\ud824\ud825\ud826\ud827\ud828\ud829\ud82a\ud82b\ud82c\ud82d\ud82e\ud82f\ud830\ud831\ud832\ud833\ud834\ud835\ud836\ud837\ud838\ud839\ud83a\ud83b\ud83c\ud83d\ud83e\ud83f\ud840\ud841\ud842\ud843\ud844\ud845\ud846\ud847\ud848\ud849\ud84a\ud84b\ud84c\ud84d\ud84e\ud84f\ud850\ud851\ud852\ud853\ud854\ud855\ud856\ud857\ud858\ud859\ud85a\ud85b\ud85c\ud85d\ud85e\ud85f\ud860\ud861\ud862\ud863\ud864\ud865\ud866\ud867\ud868\ud869\ud86a\ud86b\ud86c\ud86d\ud86e\ud86f\ud870\ud871\ud872\ud873\ud874\ud875\ud876\ud877\ud878\ud879\ud87a\ud87b\ud87c\ud87d\ud87e\ud87f\ud880\ud881\ud882\ud883\ud884\ud885\ud886\ud887\ud888\ud889\ud88a\ud88b\ud88c\ud88d\ud88e\ud88f\ud890\ud891\ud892\ud893\ud894\ud895\ud896\ud897\ud898\ud899\ud89a\ud89b\ud89c\ud89d\ud89e\ud89f\ud8a0\ud8a1\ud8a2\ud8a3\ud8a4\ud8a5\ud8a6\ud8a7\ud8a8\ud8a9\ud8aa\ud8ab\ud8ac\ud8ad\ud8ae\ud8af\ud8b0\ud8b1\ud8b2\ud8b3\ud8b4\ud8b5\ud8b6\ud8b7\ud8b8\ud8b9\ud8ba\ud8bb\ud8bc\ud8bd\ud8be\ud8bf\ud8c0\ud8c1\ud8c2\ud8c3\ud8c4\ud8c5\ud8c6\ud8c7\ud8c8\ud8c9\ud8ca\ud8cb\ud8cc\ud8cd\ud8ce\ud8cf\ud8d0\ud8d1\ud8d2\ud8d3\ud8d4\ud8d5\ud8d6\ud8d7\ud8d8\ud8d9\ud8da\ud8db\ud8dc\ud8dd\ud8de\ud8df\ud8e0\ud8e1\ud8e2\ud8e3\ud8e4\ud8e5\ud8e6\ud8e7\ud8e8\ud8e9\ud8ea\ud8eb\ud8ec\ud8ed\ud8ee\ud8ef\ud8f0\ud8f1\ud8f2\ud8f3\ud8f4\ud8f5\ud8f6\ud8f7\ud8f8\ud8f9\ud8fa\ud8fb\ud8fc\ud8fd\ud8fe\ud8ff\ud900\ud901\ud902\ud903\ud904\ud905\ud906\ud907\ud908\ud909\ud90a\ud90b\ud90c\ud90d\ud90e\ud90f\ud910\ud911\ud912\ud913\ud914\ud915\ud916\ud917\ud918\ud919\ud91a\ud91b\ud91c\ud91d\ud91e\ud91f\ud920\ud921\ud922\ud923\ud924\ud925\ud926\ud927\ud928\ud929\ud92a\ud92b\ud92c\ud92d\ud92e\ud92f\ud930\ud931\ud932\ud933\ud934\ud935\ud936\ud937\ud938\ud939\ud93a\ud93b\ud93c\ud93d\ud93e\ud93f\ud940\ud941\ud942\ud943\ud944\ud945\ud946\ud947\ud948\ud949\ud94a\ud94b\ud94c\ud94d\ud94e\ud94f\ud950\ud951\ud952\ud953\ud954\ud955\ud956\ud957\ud958\ud959\ud95a\ud95b\ud95c\ud95d\ud95e\ud95f\ud960\ud961\ud962\ud963\ud964\ud965\ud966\ud967\ud968\ud969\ud96a\ud96b\ud96c\ud96d\ud96e\ud96f\ud970\ud971\ud972\ud973\ud974\ud975\ud976\ud977\ud978\ud979\ud97a\ud97b\ud97c\ud97d\ud97e\ud97f\ud980\ud981\ud982\ud983\ud984\ud985\ud986\ud987\ud988\ud989\ud98a\ud98b\ud98c\ud98d\ud98e\ud98f\ud990\ud991\ud992\ud993\ud994\ud995\ud996\ud997\ud998\ud999\ud99a\ud99b\ud99c\ud99d\ud99e\ud99f\ud9a0\ud9a1\ud9a2\ud9a3\ud9a4\ud9a5\ud9a6\ud9a7\ud9a8\ud9a9\ud9aa\ud9ab\ud9ac\ud9ad\ud9ae\ud9af\ud9b0\ud9b1\ud9b2\ud9b3\ud9b4\ud9b5\ud9b6\ud9b7\ud9b8\ud9b9\ud9ba\ud9bb\ud9bc\ud9bd\ud9be\ud9bf\ud9c0\ud9c1\ud9c2\ud9c3\ud9c4\ud9c5\ud9c6\ud9c7\ud9c8\ud9c9\ud9ca\ud9cb\ud9cc\ud9cd\ud9ce\ud9cf\ud9d0\ud9d1\ud9d2\ud9d3\ud9d4\ud9d5\ud9d6\ud9d7\ud9d8\ud9d9\ud9da\ud9db\ud9dc\ud9dd\ud9de\ud9df\ud9e0\ud9e1\ud9e2\ud9e3\ud9e4\ud9e5\ud9e6\ud9e7\ud9e8\ud9e9\ud9ea\ud9eb\ud9ec\ud9ed\ud9ee\ud9ef\ud9f0\ud9f1\ud9f2\ud9f3\ud9f4\ud9f5\ud9f6\ud9f7\ud9f8\ud9f9\ud9fa\ud9fb\ud9fc\ud9fd\ud9fe\ud9ff\uda00\uda01\uda02\uda03\uda04\uda05\uda06\uda07\uda08\uda09\uda0a\uda0b\uda0c\uda0d\uda0e\uda0f\uda10\uda11\uda12\uda13\uda14\uda15\uda16\uda17\uda18\uda19\uda1a\uda1b\uda1c\uda1d\uda1e\uda1f\uda20\uda21\uda22\uda23\uda24\uda25\uda26\uda27\uda28\uda29\uda2a\uda2b\uda2c\uda2d\uda2e\uda2f\uda30\uda31\uda32\uda33\uda34\uda35\uda36\uda37\uda38\uda39\uda3a\uda3b\uda3c\uda3d\uda3e\uda3f\uda40\uda41\uda42\uda43\uda44\uda45\uda46\uda47\uda48\uda49\uda4a\uda4b\
uda4c\uda4d\uda4e\uda4f\uda50\uda51\uda52\uda53\uda54\uda55\uda56\uda57\uda58\uda59\uda5a\uda5b\uda5c\uda5d\uda5e\uda5f\uda60\uda61\uda62\uda63\uda64\uda65\uda66\uda67\uda68\uda69\uda6a\uda6b\uda6c\uda6d\uda6e\uda6f\uda70\uda71\uda72\uda73\uda74\uda75\uda76\uda77\uda78\uda79\uda7a\uda7b\uda7c\uda7d\uda7e\uda7f\uda80\uda81\uda82\uda83\uda84\uda85\uda86\uda87\uda88\uda89\uda8a\uda8b\uda8c\uda8d\uda8e\uda8f\uda90\uda91\uda92\uda93\uda94\uda95\uda96\uda97\uda98\uda99\uda9a\uda9b\uda9c\uda9d\uda9e\uda9f\udaa0\udaa1\udaa2\udaa3\udaa4\udaa5\udaa6\udaa7\udaa8\udaa9\udaaa\udaab\udaac\udaad\udaae\udaaf\udab0\udab1\udab2\udab3\udab4\udab5\udab6\udab7\udab8\udab9\udaba\udabb\udabc\udabd\udabe\udabf\udac0\udac1\udac2\udac3\udac4\udac5\udac6\udac7\udac8\udac9\udaca\udacb\udacc\udacd\udace\udacf\udad0\udad1\udad2\udad3\udad4\udad5\udad6\udad7\udad8\udad9\udada\udadb\udadc\udadd\udade\udadf\udae0\udae1\udae2\udae3\udae4\udae5\udae6\udae7\udae8\udae9\udaea\udaeb\udaec\udaed\udaee\udaef\udaf0\udaf1\udaf2\udaf3\udaf4\udaf5\udaf6\udaf7\udaf8\udaf9\udafa\udafb\udafc\udafd\udafe\udaff\udb00\udb01\udb02\udb03\udb04\udb05\udb06\udb07\udb08\udb09\udb0a\udb0b\udb0c\udb0d\udb0e\udb0f\udb10\udb11\udb12\udb13\udb14\udb15\udb16\udb17\udb18\udb19\udb1a\udb1b\udb1c\udb1d\udb1e\udb1f\udb20\udb21\udb22\udb23\udb24\udb25\udb26\udb27\udb28\udb29\udb2a\udb2b\udb2c\udb2d\udb2e\udb2f\udb30\udb31\udb32\udb33\udb34\udb35\udb36\udb37\udb38\udb39\udb3a\udb3b\udb3c\udb3d\udb3e\udb3f\udb40\udb41\udb42\udb43\udb44\udb45\udb46\udb47\udb48\udb49\udb4a\udb4b\udb4c\udb4d\udb4e\udb4f\udb50\udb51\udb52\udb53\udb54\udb55\udb56\udb57\udb58\udb59\udb5a\udb5b\udb5c\udb5d\udb5e\udb5f\udb60\udb61\udb62\udb63\udb64\udb65\udb66\udb67\udb68\udb69\udb6a\udb6b\udb6c\udb6d\udb6e\udb6f\udb70\udb71\udb72\udb73\udb74\udb75\udb76\udb77\udb78\udb79\udb7a\udb7b\udb7c\udb7d\udb7e\udb7f\udb80\udb81\udb82\udb83\udb84\udb85\udb86\udb87\udb88\udb89\udb8a\udb8b\udb8c\udb8d\udb8e\udb8f\udb90\udb91\udb92\udb93\udb94\udb95\udb96\udb97\udb98\udb99\udb9a\udb9b\udb9c\udb9d\udb9e\udb9f\udba0\udba1\udba2\udba3\udba4\udba5\udba6\udba7\udba8\udba9\udbaa\udbab\udbac\udbad\udbae\udbaf\udbb0\udbb1\udbb2\udbb3\udbb4\udbb5\udbb6\udbb7\udbb8\udbb9\udbba\udbbb\udbbc\udbbd\udbbe\udbbf\udbc0\udbc1\udbc2\udbc3\udbc4\udbc5\udbc6\udbc7\udbc8\udbc9\udbca\udbcb\udbcc\udbcd\udbce\udbcf\udbd0\udbd1\udbd2\udbd3\udbd4\udbd5\udbd6\udbd7\udbd8\udbd9\udbda\udbdb\udbdc\udbdd\udbde\udbdf\udbe0\udbe1\udbe2\udbe3\udbe4\udbe5\udbe6\udbe7\udbe8\udbe9\udbea\udbeb\udbec\udbed\udbee\udbef\udbf0\udbf1\udbf2\udbf3\udbf4\udbf5\udbf6\udbf7\udbf8\udbf9\udbfa\udbfb\udbfc\udbfd\udbfe\udbff\\\udc00\udc01\udc02\udc03\udc04\udc05\udc06\udc07\udc08\udc09\udc0a\udc0b\udc0c\udc0d\udc0e\udc0f\udc10\udc11\udc12\udc13\udc14\udc15\udc16\udc17\udc18\udc19\udc1a\udc1b\udc1c\udc1d\udc1e\udc1f\udc20\udc21\udc22\udc23\udc24\udc25\udc26\udc27\udc28\udc29\udc2a\udc2b\udc2c\udc2d\udc2e\udc2f\udc30\udc31\udc32\udc33\udc34\udc35\udc36\udc37\udc38\udc39\udc3a\udc3b\udc3c\udc3d\udc3e\udc3f\udc40\udc41\udc42\udc43\udc44\udc45\udc46\udc47\udc48\udc49\udc4a\udc4b\udc4c\udc4d\udc4e\udc4f\udc50\udc51\udc52\udc53\udc54\udc55\udc56\udc57\udc58\udc59\udc5a\udc5b\udc5c\udc5d\udc5e\udc5f\udc60\udc61\udc62\udc63\udc64\udc65\udc66\udc67\udc68\udc69\udc6a\udc6b\udc6c\udc6d\udc6e\udc6f\udc70\udc71\udc72\udc73\udc74\udc75\udc76\udc77\udc78\udc79\udc7a\udc7b\udc7c\udc7d\udc7e\udc7f\udc80\udc81\udc82\udc83\udc84\udc85\udc86\udc87\udc88\udc89\udc8a\udc8b\udc8c\udc8d\udc8e\udc8f\udc90\udc91\udc92\udc93\udc94\udc95\udc96\udc97\udc98\udc99\udc9a\udc9b\u
dc9c\udc9d\udc9e\udc9f\udca0\udca1\udca2\udca3\udca4\udca5\udca6\udca7\udca8\udca9\udcaa\udcab\udcac\udcad\udcae\udcaf\udcb0\udcb1\udcb2\udcb3\udcb4\udcb5\udcb6\udcb7\udcb8\udcb9\udcba\udcbb\udcbc\udcbd\udcbe\udcbf\udcc0\udcc1\udcc2\udcc3\udcc4\udcc5\udcc6\udcc7\udcc8\udcc9\udcca\udccb\udccc\udccd\udcce\udccf\udcd0\udcd1\udcd2\udcd3\udcd4\udcd5\udcd6\udcd7\udcd8\udcd9\udcda\udcdb\udcdc\udcdd\udcde\udcdf\udce0\udce1\udce2\udce3\udce4\udce5\udce6\udce7\udce8\udce9\udcea\udceb\udcec\udced\udcee\udcef\udcf0\udcf1\udcf2\udcf3\udcf4\udcf5\udcf6\udcf7\udcf8\udcf9\udcfa\udcfb\udcfc\udcfd\udcfe\udcff\udd00\udd01\udd02\udd03\udd04\udd05\udd06\udd07\udd08\udd09\udd0a\udd0b\udd0c\udd0d\udd0e\udd0f\udd10\udd11\udd12\udd13\udd14\udd15\udd16\udd17\udd18\udd19\udd1a\udd1b\udd1c\udd1d\udd1e\udd1f\udd20\udd21\udd22\udd23\udd24\udd25\udd26\udd27\udd28\udd29\udd2a\udd2b\udd2c\udd2d\udd2e\udd2f\udd30\udd31\udd32\udd33\udd34\udd35\udd36\udd37\udd38\udd39\udd3a\udd3b\udd3c\udd3d\udd3e\udd3f\udd40\udd41\udd42\udd43\udd44\udd45\udd46\udd47\udd48\udd49\udd4a\udd4b\udd4c\udd4d\udd4e\udd4f\udd50\udd51\udd52\udd53\udd54\udd55\udd56\udd57\udd58\udd59\udd5a\udd5b\udd5c\udd5d\udd5e\udd5f\udd60\udd61\udd62\udd63\udd64\udd65\udd66\udd67\udd68\udd69\udd6a\udd6b\udd6c\udd6d\udd6e\udd6f\udd70\udd71\udd72\udd73\udd74\udd75\udd76\udd77\udd78\udd79\udd7a\udd7b\udd7c\udd7d\udd7e\udd7f\udd80\udd81\udd82\udd83\udd84\udd85\udd86\udd87\udd88\udd89\udd8a\udd8b\udd8c\udd8d\udd8e\udd8f\udd90\udd91\udd92\udd93\udd94\udd95\udd96\udd97\udd98\udd99\udd9a\udd9b\udd9c\udd9d\udd9e\udd9f\udda0\udda1\udda2\udda3\udda4\udda5\udda6\udda7\udda8\udda9\uddaa\uddab\uddac\uddad\uddae\uddaf\uddb0\uddb1\uddb2\uddb3\uddb4\uddb5\uddb6\uddb7\uddb8\uddb9\uddba\uddbb\uddbc\uddbd\uddbe\uddbf\uddc0\uddc1\uddc2\uddc3\uddc4\uddc5\uddc6\uddc7\uddc8\uddc9\uddca\uddcb\uddcc\uddcd\uddce\uddcf\uddd0\uddd1\uddd2\uddd3\uddd4\uddd5\uddd6\uddd7\uddd8\uddd9\uddda\udddb\udddc\udddd\uddde\udddf\udde0\udde1\udde2\udde3\udde4\udde5\udde6\udde7\udde8\udde9\uddea\uddeb\uddec\udded\uddee\uddef\uddf0\uddf1\uddf2\uddf3\uddf4\uddf5\uddf6\uddf7\uddf8\uddf9\uddfa\uddfb\uddfc\uddfd\uddfe\uddff\ude00\ude01\ude02\ude03\ude04\ude05\ude06\ude07\ude08\ude09\ude0a\ude0b\ude0c\ude0d\ude0e\ude0f\ude10\ude11\ude12\ude13\ude14\ude15\ude16\ude17\ude18\ude19\ude1a\ude1b\ude1c\ude1d\ude1e\ude1f\ude20\ude21\ude22\ude23\ude24\ude25\ude26\ude27\ude28\ude29\ude2a\ude2b\ude2c\ude2d\ude2e\ude2f\ude30\ude31\ude32\ude33\ude34\ude35\ude36\ude37\ude38\ude39\ude3a\ude3b\ude3c\ude3d\ude3e\ude3f\ude40\ude41\ude42\ude43\ude44\ude45\ude46\ude47\ude48\ude49\ude4a\ude4b\ude4c\ude4d\ude4e\ude4f\ude50\ude51\ude52\ude53\ude54\ude55\ude56\ude57\ude58\ude59\ude5a\ude5b\ude5c\ude5d\ude5e\ude5f\ude60\ude61\ude62\ude63\ude64\ude65\ude66\ude67\ude68\ude69\ude6a\ude6b\ude6c\ude6d\ude6e\ude6f\ude70\ude71\ude72\ude73\ude74\ude75\ude76\ude77\ude78\ude79\ude7a\ude7b\ude7c\ude7d\ude7e\ude7f\ude80\ude81\ude82\ude83\ude84\ude85\ude86\ude87\ude88\ude89\ude8a\ude8b\ude8c\ude8d\ude8e\ude8f\ude90\ude91\ude92\ude93\ude94\ude95\ude96\ude97\ude98\ude99\ude9a\ude9b\ude9c\ude9d\ude9e\ude9f\udea0\udea1\udea2\udea3\udea4\udea5\udea6\udea7\udea8\udea9\udeaa\udeab\udeac\udead\udeae\udeaf\udeb0\udeb1\udeb2\udeb3\udeb4\udeb5\udeb6\udeb7\udeb8\udeb9\udeba\udebb\udebc\udebd\udebe\udebf\udec0\udec1\udec2\udec3\udec4\udec5\udec6\udec7\udec8\udec9\udeca\udecb\udecc\udecd\udece\udecf\uded0\uded1\uded2\uded3\uded4\uded5\uded6\uded7\uded8\uded9\udeda\udedb\udedc\udedd\udede\udedf\udee0\udee1\udee2\udee3\udee4\udee5\udee6\udee7\udee8\udee9\udeea\udeeb\udee
c\udeed\udeee\udeef\udef0\udef1\udef2\udef3\udef4\udef5\udef6\udef7\udef8\udef9\udefa\udefb\udefc\udefd\udefe\udeff\udf00\udf01\udf02\udf03\udf04\udf05\udf06\udf07\udf08\udf09\udf0a\udf0b\udf0c\udf0d\udf0e\udf0f\udf10\udf11\udf12\udf13\udf14\udf15\udf16\udf17\udf18\udf19\udf1a\udf1b\udf1c\udf1d\udf1e\udf1f\udf20\udf21\udf22\udf23\udf24\udf25\udf26\udf27\udf28\udf29\udf2a\udf2b\udf2c\udf2d\udf2e\udf2f\udf30\udf31\udf32\udf33\udf34\udf35\udf36\udf37\udf38\udf39\udf3a\udf3b\udf3c\udf3d\udf3e\udf3f\udf40\udf41\udf42\udf43\udf44\udf45\udf46\udf47\udf48\udf49\udf4a\udf4b\udf4c\udf4d\udf4e\udf4f\udf50\udf51\udf52\udf53\udf54\udf55\udf56\udf57\udf58\udf59\udf5a\udf5b\udf5c\udf5d\udf5e\udf5f\udf60\udf61\udf62\udf63\udf64\udf65\udf66\udf67\udf68\udf69\udf6a\udf6b\udf6c\udf6d\udf6e\udf6f\udf70\udf71\udf72\udf73\udf74\udf75\udf76\udf77\udf78\udf79\udf7a\udf7b\udf7c\udf7d\udf7e\udf7f\udf80\udf81\udf82\udf83\udf84\udf85\udf86\udf87\udf88\udf89\udf8a\udf8b\udf8c\udf8d\udf8e\udf8f\udf90\udf91\udf92\udf93\udf94\udf95\udf96\udf97\udf98\udf99\udf9a\udf9b\udf9c\udf9d\udf9e\udf9f\udfa0\udfa1\udfa2\udfa3\udfa4\udfa5\udfa6\udfa7\udfa8\udfa9\udfaa\udfab\udfac\udfad\udfae\udfaf\udfb0\udfb1\udfb2\udfb3\udfb4\udfb5\udfb6\udfb7\udfb8\udfb9\udfba\udfbb\udfbc\udfbd\udfbe\udfbf\udfc0\udfc1\udfc2\udfc3\udfc4\udfc5\udfc6\udfc7\udfc8\udfc9\udfca\udfcb\udfcc\udfcd\udfce\udfcf\udfd0\udfd1\udfd2\udfd3\udfd4\udfd5\udfd6\udfd7\udfd8\udfd9\udfda\udfdb\udfdc\udfdd\udfde\udfdf\udfe0\udfe1\udfe2\udfe3\udfe4\udfe5\udfe6\udfe7\udfe8\udfe9\udfea\udfeb\udfec\udfed\udfee\udfef\udff0\udff1\udff2\udff3\udff4\udff5\udff6\udff7\udff8\udff9\udffa\udffb\udffc\udffd\udffe\udfff'")
+ Cs = eval(r"u'\ud800-\udbff\\\udc00\udc01-\udfff'")
except UnicodeDecodeError:
- Cs = '' # Jython can't handle isolated surrogates
+ Cs = '' # Jython can't handle isolated surrogates
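
The hunk above replaces the exhaustive, code-point-by-code-point surrogate list with a compact range expression, still wrapped in eval() so that Jython, which cannot represent isolated surrogates in source, falls back to an empty string instead of failing at import time. A minimal standalone sketch of that fallback pattern follows; the regex use at the end is purely illustrative and an assumption on my part, not something taken from pygments.unistring.

import re

# Sketch (not part of the diff): keep the module importable even where
# isolated surrogate literals are rejected, by building them via eval().
Co = u'\ue000-\uf8ff'  # private-use range, as defined in the hunk above

try:
    # Build the surrogate range at runtime so the literal never appears in source.
    Cs = eval(r"u'\ud800-\udbff\\\udc00\udc01-\udfff'")
except UnicodeDecodeError:
    Cs = u''  # Jython: degrade to an empty class instead of failing at import

# One possible use (illustrative only): a character class matching
# private-use or surrogate code points.
nonstandard = re.compile(u'[%s%s]' % (Co, Cs))
print(bool(nonstandard.match(u'\ue123')))  # True
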
-Ll = u'abcdefghijklmnopqrstuvwxyz\xaa\xb5\xba\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e\u017f\u0180\u0183\u0185\u0188\u018c\u018d\u0192\u0195\u0199\u019a\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9\u01ba\u01bd\u01be\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233\u0234\u0235\u0236\u0237\u0238\u0239\u023c\u023f\u0240\u0242\u0247\u0249\u024b\u024d\u024f\u0250\u0251\u0252\u0253\u0254\u0255\u0256\u0257\u0258\u0259\u025a\u025b\u025c\u025d\u025e\u025f\u0260\u0261\u0262\u0263\u0264\u0265\u0266\u0267\u0268\u0269\u026a\u026b\u026c\u026d\u026e\u026f\u0270\u0271\u0272\u0273\u0274\u0275\u0276\u0277\u0278\u0279\u027a\u027b\u027c\u027d\u027e\u027f\u0280\u0281\u0282\u0283\u0284\u0285\u0286\u0287\u0288\u0289\u028a\u028b\u028c\u028d\u028e\u028f\u0290\u0291\u0292\u0293\u0295\u0296\u0297\u0298\u0299\u029a\u029b\u029c\u029d\u029e\u029f\u02a0\u02a1\u02a2\u02a3\u02a4\u02a5\u02a6\u02a7\u02a8\u02a9\u02aa\u02ab\u02ac\u02ad\u02ae\u02af\u0371\u0373\u0377\u037b\u037c\u037d\u0390\u03ac\u03ad\u03ae\u03af\u03b0\u03b1\u03b2\u03b3\u03b4\u03b5\u03b6\u03b7\u03b8\u03b9\u03ba\u03bb\u03bc\u03bd\u03be\u03bf\u03c0\u03c1\u03c2\u03c3\u03c4\u03c5\u03c6\u03c7\u03c8\u03c9\u03ca\u03cb\u03cc\u03cd\u03ce\u03d0\u03d1\u03d5\u03d6\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef\u03f0\u03f1\u03f2\u03f3\u03f5\u03f8\u03fb\u03fc\u0430\u0431\u0432\u0433\u0434\u0435\u0436\u0437\u0438\u0439\u043a\u043b\u043c\u043d\u043e\u043f\u0440\u0441\u0442\u0443\u0444\u0445\u0446\u0447\u0448\u0449\u044a\u044b\u044c\u044d\u044e\u044f\u0450\u0451\u0452\u0453\u0454\u0455\u0456\u0457\u0458\u0459\u045a\u045b\u045c\u045d\u045e\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0561\u0562\u0563\u0564\u0565\u0566\u0567\u0568\u0569\u056a\u056b\u056c\u056d\u056e\u056f\u0570\u0571\u0572\u0573\u0574\u0575\u0576\u0577\u0578\u0579\u057a\u057b\u057c\u057d\u057e\u057f\u0580\u0581\u0582\u0583\u0584\u0585\u0586\u0587\u1d00\u1d01\u1d02\u1d03\u1d04\u1d05\u1d06\u1d07\u1d08\u1d09\u1d0a\u1d0b\u1d0c\u1d0d\u1d0e\u1d0f\u1d10\u1d11\u1d12\u1d13\u1d14\u1d15\u1d16\u1d17\u1d18\u1d19\u1d1a\u1d1b\u1d1c\u1d1d\u1d1e\u1d1f\u1d20\u1d21\u1d22\u1d23\u1d24\u1d25\u1d26\u1d27\u1d28\u1d29\u1d2a\u1d2b\u1d62\u1d63\u1d64\u1d65\u1d66\u1d67\u1d68\u1d69\u1d6a\u1d6b\u1d6c\u1d6d\u1d6e\u1d6f\u1
d70\u1d71\u1d72\u1d73\u1d74\u1d75\u1d76\u1d77\u1d79\u1d7a\u1d7b\u1d7c\u1d7d\u1d7e\u1d7f\u1d80\u1d81\u1d82\u1d83\u1d84\u1d85\u1d86\u1d87\u1d88\u1d89\u1d8a\u1d8b\u1d8c\u1d8d\u1d8e\u1d8f\u1d90\u1d91\u1d92\u1d93\u1d94\u1d95\u1d96\u1d97\u1d98\u1d99\u1d9a\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e29\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95\u1e96\u1e97\u1e98\u1e99\u1e9a\u1e9b\u1e9c\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff\u1f00\u1f01\u1f02\u1f03\u1f04\u1f05\u1f06\u1f07\u1f10\u1f11\u1f12\u1f13\u1f14\u1f15\u1f20\u1f21\u1f22\u1f23\u1f24\u1f25\u1f26\u1f27\u1f30\u1f31\u1f32\u1f33\u1f34\u1f35\u1f36\u1f37\u1f40\u1f41\u1f42\u1f43\u1f44\u1f45\u1f50\u1f51\u1f52\u1f53\u1f54\u1f55\u1f56\u1f57\u1f60\u1f61\u1f62\u1f63\u1f64\u1f65\u1f66\u1f67\u1f70\u1f71\u1f72\u1f73\u1f74\u1f75\u1f76\u1f77\u1f78\u1f79\u1f7a\u1f7b\u1f7c\u1f7d\u1f80\u1f81\u1f82\u1f83\u1f84\u1f85\u1f86\u1f87\u1f90\u1f91\u1f92\u1f93\u1f94\u1f95\u1f96\u1f97\u1fa0\u1fa1\u1fa2\u1fa3\u1fa4\u1fa5\u1fa6\u1fa7\u1fb0\u1fb1\u1fb2\u1fb3\u1fb4\u1fb6\u1fb7\u1fbe\u1fc2\u1fc3\u1fc4\u1fc6\u1fc7\u1fd0\u1fd1\u1fd2\u1fd3\u1fd6\u1fd7\u1fe0\u1fe1\u1fe2\u1fe3\u1fe4\u1fe5\u1fe6\u1fe7\u1ff2\u1ff3\u1ff4\u1ff6\u1ff7\u210a\u210e\u210f\u2113\u212f\u2134\u2139\u213c\u213d\u2146\u2147\u2148\u2149\u214e\u2184\u2c30\u2c31\u2c32\u2c33\u2c34\u2c35\u2c36\u2c37\u2c38\u2c39\u2c3a\u2c3b\u2c3c\u2c3d\u2c3e\u2c3f\u2c40\u2c41\u2c42\u2c43\u2c44\u2c45\u2c46\u2c47\u2c48\u2c49\u2c4a\u2c4b\u2c4c\u2c4d\u2c4e\u2c4f\u2c50\u2c51\u2c52\u2c53\u2c54\u2c55\u2c56\u2c57\u2c58\u2c59\u2c5a\u2c5b\u2c5c\u2c5d\u2c5e\u2c61\u2c65\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73\u2c74\u2c76\u2c77\u2c78\u2c79\u2c7a\u2c7b\u2c7c\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3\u2ce4\u2cec\u2cee\u2d00\u2d01\u2d02\u2d03\u2d04\u2d05\u2d06\u2d07\u2d08\u2d09\u2d0a\u2d0b\u2d0c\u2d0d\u2d0e\u2d0f\u2d10\u2d11\u2d12\u2d13\u2d14\u2d15\u2d16\u2d17\u2d18\u2d19\u2d1a\u2d1b\u2d1c\u2d1d\u2d1e\u2d1f\u2d20\u2d21\u2d22\u2d23\u2d24\u2d25\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\ua685\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f\ua730\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771\ua772\ua773\ua774\ua775\ua776\ua777\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787\ua78c\ufb00\ufb01\ufb02\ufb03\ufb04\ufb05\ufb06\ufb13\ufb14\ufb15\ufb16\ufb17\uff41\uff42\uff43\uff44\uff45\uff46\uff47\uff48\uff49\uff4a\uff4b\uff4c\uff4d\uff4e\uff4f\uff50\uff51\uff52\uff53\uff54\uff55\uff56\uff57\uff58\uff59\uff5a
'
+Ll = u'a-z\xb5\xdf-\xf6\xf8-\xff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01ba\u01bd-\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u0293\u0295-\u02af\u0371\u0373\u0377\u037b-\u037d\u0390\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527\u0561-\u0587\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e29\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f15\u1f20-\u1f27\u1f30-\u1f37\u1f40-\u1f45\u1f50-\u1f57\u1f60-\u1f67\u1f70-\u1f7d\u1f80-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fb4\u1fb6-\u1fb7\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fc7\u1fd0-\u1fd3\u1fd6-\u1fd7\u1fe0-\u1fe7\u1ff2-\u1ff4\u1ff6-\u1ff7\u210a\u210e-\u210f\u2113\u212f\u2134\u2139\u213c-\u213d\u2146-\u2149\u214e\u2184\u2c30-\u2c5e\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7b\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2ce4\u2cec\u2cee\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\ua685\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697\ua723\ua725\ua7
27\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771-\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787\ua78c\ua78e\ua791\ua793\ua7a1\ua7a3\ua7a5\ua7a7\ua7a9\ua7fa\ufb00-\ufb06\ufb13-\ufb17\uff41-\uff5a'
-Lm = u'\u02b0\u02b1\u02b2\u02b3\u02b4\u02b5\u02b6\u02b7\u02b8\u02b9\u02ba\u02bb\u02bc\u02bd\u02be\u02bf\u02c0\u02c1\u02c6\u02c7\u02c8\u02c9\u02ca\u02cb\u02cc\u02cd\u02ce\u02cf\u02d0\u02d1\u02e0\u02e1\u02e2\u02e3\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5\u06e6\u07f4\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78\u1c79\u1c7a\u1c7b\u1c7c\u1c7d\u1d2c\u1d2d\u1d2e\u1d2f\u1d30\u1d31\u1d32\u1d33\u1d34\u1d35\u1d36\u1d37\u1d38\u1d39\u1d3a\u1d3b\u1d3c\u1d3d\u1d3e\u1d3f\u1d40\u1d41\u1d42\u1d43\u1d44\u1d45\u1d46\u1d47\u1d48\u1d49\u1d4a\u1d4b\u1d4c\u1d4d\u1d4e\u1d4f\u1d50\u1d51\u1d52\u1d53\u1d54\u1d55\u1d56\u1d57\u1d58\u1d59\u1d5a\u1d5b\u1d5c\u1d5d\u1d5e\u1d5f\u1d60\u1d61\u1d78\u1d9b\u1d9c\u1d9d\u1d9e\u1d9f\u1da0\u1da1\u1da2\u1da3\u1da4\u1da5\u1da6\u1da7\u1da8\u1da9\u1daa\u1dab\u1dac\u1dad\u1dae\u1daf\u1db0\u1db1\u1db2\u1db3\u1db4\u1db5\u1db6\u1db7\u1db8\u1db9\u1dba\u1dbb\u1dbc\u1dbd\u1dbe\u1dbf\u2071\u207f\u2090\u2091\u2092\u2093\u2094\u2c7d\u2d6f\u2e2f\u3005\u3031\u3032\u3033\u3034\u3035\u303b\u309d\u309e\u30fc\u30fd\u30fe\ua015\ua4f8\ua4f9\ua4fa\ua4fb\ua4fc\ua4fd\ua60c\ua67f\ua717\ua718\ua719\ua71a\ua71b\ua71c\ua71d\ua71e\ua71f\ua770\ua788\ua9cf\uaa70\uaadd\uff70\uff9e\uff9f'
+Lm = u'\u02b0-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5-\u06e6\u07f4-\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78-\u1c7d\u1d2c-\u1d6a\u1d78\u1d9b-\u1dbf\u2071\u207f\u2090-\u209c\u2c7c-\u2c7d\u2d6f\u2e2f\u3005\u3031-\u3035\u303b\u309d-\u309e\u30fc-\u30fe\ua015\ua4f8-\ua4fd\ua60c\ua67f\ua717-\ua71f\ua770\ua788\ua7f8-\ua7f9\ua9cf\uaa70\uaadd\uaaf3-\uaaf4\uff70\uff9e-\uff9f'
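
The Ll and Lm hunks above make the same kind of change: per-code-point enumerations are collapsed into the a-b range notation that regex character classes accept directly (plus a handful of code points from a newer Unicode version). A small helper, purely illustrative and not part of pygments, that expands such a range string back into the individual code points it covers:

# Illustrative helper (not from pygments): expand a category string written
# in character-class range notation, e.g. u'\u02b0-\u02c1\u02c6-\u02d1',
# into the set of code points it denotes.
def expand(ranges):
    points = set()
    i = 0
    while i < len(ranges):
        if i + 2 < len(ranges) and ranges[i + 1] == u'-':
            lo, hi = ord(ranges[i]), ord(ranges[i + 2])
            points.update(range(lo, hi + 1))  # inclusive range a-b
            i += 3
        else:
            points.add(ord(ranges[i]))        # single code point
            i += 1
    return points

# e.g. the start of the new Lm definition above:
print([hex(c) for c in sorted(expand(u'\u02b0-\u02c1\u02c6-\u02d1'))])
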
-Lo = u'\u01bb\u01c0\u01c1\u01c2\u01c3\u0294\u05d0\u05d1\u05d2\u05d3\u05d4\u05d5\u05d6\u05d7\u05d8\u05d9\u05da\u05db\u05dc\u05dd\u05de\u05df\u05e0\u05e1\u05e2\u05e3\u05e4\u05e5\u05e6\u05e7\u05e8\u05e9\u05ea\u05f0\u05f1\u05f2\u0621\u0622\u0623\u0624\u0625\u0626\u0627\u0628\u0629\u062a\u062b\u062c\u062d\u062e\u062f\u0630\u0631\u0632\u0633\u0634\u0635\u0636\u0637\u0638\u0639\u063a\u063b\u063c\u063d\u063e\u063f\u0641\u0642\u0643\u0644\u0645\u0646\u0647\u0648\u0649\u064a\u066e\u066f\u0671\u0672\u0673\u0674\u0675\u0676\u0677\u0678\u0679\u067a\u067b\u067c\u067d\u067e\u067f\u0680\u0681\u0682\u0683\u0684\u0685\u0686\u0687\u0688\u0689\u068a\u068b\u068c\u068d\u068e\u068f\u0690\u0691\u0692\u0693\u0694\u0695\u0696\u0697\u0698\u0699\u069a\u069b\u069c\u069d\u069e\u069f\u06a0\u06a1\u06a2\u06a3\u06a4\u06a5\u06a6\u06a7\u06a8\u06a9\u06aa\u06ab\u06ac\u06ad\u06ae\u06af\u06b0\u06b1\u06b2\u06b3\u06b4\u06b5\u06b6\u06b7\u06b8\u06b9\u06ba\u06bb\u06bc\u06bd\u06be\u06bf\u06c0\u06c1\u06c2\u06c3\u06c4\u06c5\u06c6\u06c7\u06c8\u06c9\u06ca\u06cb\u06cc\u06cd\u06ce\u06cf\u06d0\u06d1\u06d2\u06d3\u06d5\u06ee\u06ef\u06fa\u06fb\u06fc\u06ff\u0710\u0712\u0713\u0714\u0715\u0716\u0717\u0718\u0719\u071a\u071b\u071c\u071d\u071e\u071f\u0720\u0721\u0722\u0723\u0724\u0725\u0726\u0727\u0728\u0729\u072a\u072b\u072c\u072d\u072e\u072f\u074d\u074e\u074f\u0750\u0751\u0752\u0753\u0754\u0755\u0756\u0757\u0758\u0759\u075a\u075b\u075c\u075d\u075e\u075f\u0760\u0761\u0762\u0763\u0764\u0765\u0766\u0767\u0768\u0769\u076a\u076b\u076c\u076d\u076e\u076f\u0770\u0771\u0772\u0773\u0774\u0775\u0776\u0777\u0778\u0779\u077a\u077b\u077c\u077d\u077e\u077f\u0780\u0781\u0782\u0783\u0784\u0785\u0786\u0787\u0788\u0789\u078a\u078b\u078c\u078d\u078e\u078f\u0790\u0791\u0792\u0793\u0794\u0795\u0796\u0797\u0798\u0799\u079a\u079b\u079c\u079d\u079e\u079f\u07a0\u07a1\u07a2\u07a3\u07a4\u07a5\u07b1\u07ca\u07cb\u07cc\u07cd\u07ce\u07cf\u07d0\u07d1\u07d2\u07d3\u07d4\u07d5\u07d6\u07d7\u07d8\u07d9\u07da\u07db\u07dc\u07dd\u07de\u07df\u07e0\u07e1\u07e2\u07e3\u07e4\u07e5\u07e6\u07e7\u07e8\u07e9\u07ea\u0800\u0801\u0802\u0803\u0804\u0805\u0806\u0807\u0808\u0809\u080a\u080b\u080c\u080d\u080e\u080f\u0810\u0811\u0812\u0813\u0814\u0815\u0904\u0905\u0906\u0907\u0908\u0909\u090a\u090b\u090c\u090d\u090e\u090f\u0910\u0911\u0912\u0913\u0914\u0915\u0916\u0917\u0918\u0919\u091a\u091b\u091c\u091d\u091e\u091f\u0920\u0921\u0922\u0923\u0924\u0925\u0926\u0927\u0928\u0929\u092a\u092b\u092c\u092d\u092e\u092f\u0930\u0931\u0932\u0933\u0934\u0935\u0936\u0937\u0938\u0939\u093d\u0950\u0958\u0959\u095a\u095b\u095c\u095d\u095e\u095f\u0960\u0961\u0972\u0979\u097a\u097b\u097c\u097d\u097e\u097f\u0985\u0986\u0987\u0988\u0989\u098a\u098b\u098c\u098f\u0990\u0993\u0994\u0995\u0996\u0997\u0998\u0999\u099a\u099b\u099c\u099d\u099e\u099f\u09a0\u09a1\u09a2\u09a3\u09a4\u09a5\u09a6\u09a7\u09a8\u09aa\u09ab\u09ac\u09ad\u09ae\u09af\u09b0\u09b2\u09b6\u09b7\u09b8\u09b9\u09bd\u09ce\u09dc\u09dd\u09df\u09e0\u09e1\u09f0\u09f1\u0a05\u0a06\u0a07\u0a08\u0a09\u0a0a\u0a0f\u0a10\u0a13\u0a14\u0a15\u0a16\u0a17\u0a18\u0a19\u0a1a\u0a1b\u0a1c\u0a1d\u0a1e\u0a1f\u0a20\u0a21\u0a22\u0a23\u0a24\u0a25\u0a26\u0a27\u0a28\u0a2a\u0a2b\u0a2c\u0a2d\u0a2e\u0a2f\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59\u0a5a\u0a5b\u0a5c\u0a5e\u0a72\u0a73\u0a74\u0a85\u0a86\u0a87\u0a88\u0a89\u0a8a\u0a8b\u0a8c\u0a8d\u0a8f\u0a90\u0a91\u0a93\u0a94\u0a95\u0a96\u0a97\u0a98\u0a99\u0a9a\u0a9b\u0a9c\u0a9d\u0a9e\u0a9f\u0aa0\u0aa1\u0aa2\u0aa3\u0aa4\u0aa5\u0aa6\u0aa7\u0aa8\u0aaa\u0aab\u0aac\u0aad\u0aae\u0aaf\u0ab0\u0ab2\u0ab3\u0ab5\u0ab6\u0ab7\u0ab8\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\
u0b05\u0b06\u0b07\u0b08\u0b09\u0b0a\u0b0b\u0b0c\u0b0f\u0b10\u0b13\u0b14\u0b15\u0b16\u0b17\u0b18\u0b19\u0b1a\u0b1b\u0b1c\u0b1d\u0b1e\u0b1f\u0b20\u0b21\u0b22\u0b23\u0b24\u0b25\u0b26\u0b27\u0b28\u0b2a\u0b2b\u0b2c\u0b2d\u0b2e\u0b2f\u0b30\u0b32\u0b33\u0b35\u0b36\u0b37\u0b38\u0b39\u0b3d\u0b5c\u0b5d\u0b5f\u0b60\u0b61\u0b71\u0b83\u0b85\u0b86\u0b87\u0b88\u0b89\u0b8a\u0b8e\u0b8f\u0b90\u0b92\u0b93\u0b94\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8\u0ba9\u0baa\u0bae\u0baf\u0bb0\u0bb1\u0bb2\u0bb3\u0bb4\u0bb5\u0bb6\u0bb7\u0bb8\u0bb9\u0bd0\u0c05\u0c06\u0c07\u0c08\u0c09\u0c0a\u0c0b\u0c0c\u0c0e\u0c0f\u0c10\u0c12\u0c13\u0c14\u0c15\u0c16\u0c17\u0c18\u0c19\u0c1a\u0c1b\u0c1c\u0c1d\u0c1e\u0c1f\u0c20\u0c21\u0c22\u0c23\u0c24\u0c25\u0c26\u0c27\u0c28\u0c2a\u0c2b\u0c2c\u0c2d\u0c2e\u0c2f\u0c30\u0c31\u0c32\u0c33\u0c35\u0c36\u0c37\u0c38\u0c39\u0c3d\u0c58\u0c59\u0c60\u0c61\u0c85\u0c86\u0c87\u0c88\u0c89\u0c8a\u0c8b\u0c8c\u0c8e\u0c8f\u0c90\u0c92\u0c93\u0c94\u0c95\u0c96\u0c97\u0c98\u0c99\u0c9a\u0c9b\u0c9c\u0c9d\u0c9e\u0c9f\u0ca0\u0ca1\u0ca2\u0ca3\u0ca4\u0ca5\u0ca6\u0ca7\u0ca8\u0caa\u0cab\u0cac\u0cad\u0cae\u0caf\u0cb0\u0cb1\u0cb2\u0cb3\u0cb5\u0cb6\u0cb7\u0cb8\u0cb9\u0cbd\u0cde\u0ce0\u0ce1\u0d05\u0d06\u0d07\u0d08\u0d09\u0d0a\u0d0b\u0d0c\u0d0e\u0d0f\u0d10\u0d12\u0d13\u0d14\u0d15\u0d16\u0d17\u0d18\u0d19\u0d1a\u0d1b\u0d1c\u0d1d\u0d1e\u0d1f\u0d20\u0d21\u0d22\u0d23\u0d24\u0d25\u0d26\u0d27\u0d28\u0d2a\u0d2b\u0d2c\u0d2d\u0d2e\u0d2f\u0d30\u0d31\u0d32\u0d33\u0d34\u0d35\u0d36\u0d37\u0d38\u0d39\u0d3d\u0d60\u0d61\u0d7a\u0d7b\u0d7c\u0d7d\u0d7e\u0d7f\u0d85\u0d86\u0d87\u0d88\u0d89\u0d8a\u0d8b\u0d8c\u0d8d\u0d8e\u0d8f\u0d90\u0d91\u0d92\u0d93\u0d94\u0d95\u0d96\u0d9a\u0d9b\u0d9c\u0d9d\u0d9e\u0d9f\u0da0\u0da1\u0da2\u0da3\u0da4\u0da5\u0da6\u0da7\u0da8\u0da9\u0daa\u0dab\u0dac\u0dad\u0dae\u0daf\u0db0\u0db1\u0db3\u0db4\u0db5\u0db6\u0db7\u0db8\u0db9\u0dba\u0dbb\u0dbd\u0dc0\u0dc1\u0dc2\u0dc3\u0dc4\u0dc5\u0dc6\u0e01\u0e02\u0e03\u0e04\u0e05\u0e06\u0e07\u0e08\u0e09\u0e0a\u0e0b\u0e0c\u0e0d\u0e0e\u0e0f\u0e10\u0e11\u0e12\u0e13\u0e14\u0e15\u0e16\u0e17\u0e18\u0e19\u0e1a\u0e1b\u0e1c\u0e1d\u0e1e\u0e1f\u0e20\u0e21\u0e22\u0e23\u0e24\u0e25\u0e26\u0e27\u0e28\u0e29\u0e2a\u0e2b\u0e2c\u0e2d\u0e2e\u0e2f\u0e30\u0e32\u0e33\u0e40\u0e41\u0e42\u0e43\u0e44\u0e45\u0e81\u0e82\u0e84\u0e87\u0e88\u0e8a\u0e8d\u0e94\u0e95\u0e96\u0e97\u0e99\u0e9a\u0e9b\u0e9c\u0e9d\u0e9e\u0e9f\u0ea1\u0ea2\u0ea3\u0ea5\u0ea7\u0eaa\u0eab\u0ead\u0eae\u0eaf\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0\u0ec1\u0ec2\u0ec3\u0ec4\u0edc\u0edd\u0f00\u0f40\u0f41\u0f42\u0f43\u0f44\u0f45\u0f46\u0f47\u0f49\u0f4a\u0f4b\u0f4c\u0f4d\u0f4e\u0f4f\u0f50\u0f51\u0f52\u0f53\u0f54\u0f55\u0f56\u0f57\u0f58\u0f59\u0f5a\u0f5b\u0f5c\u0f5d\u0f5e\u0f5f\u0f60\u0f61\u0f62\u0f63\u0f64\u0f65\u0f66\u0f67\u0f68\u0f69\u0f6a\u0f6b\u0f6c\u0f88\u0f89\u0f8a\u0f8b\u1000\u1001\u1002\u1003\u1004\u1005\u1006\u1007\u1008\u1009\u100a\u100b\u100c\u100d\u100e\u100f\u1010\u1011\u1012\u1013\u1014\u1015\u1016\u1017\u1018\u1019\u101a\u101b\u101c\u101d\u101e\u101f\u1020\u1021\u1022\u1023\u1024\u1025\u1026\u1027\u1028\u1029\u102a\u103f\u1050\u1051\u1052\u1053\u1054\u1055\u105a\u105b\u105c\u105d\u1061\u1065\u1066\u106e\u106f\u1070\u1075\u1076\u1077\u1078\u1079\u107a\u107b\u107c\u107d\u107e\u107f\u1080\u1081\u108e\u10d0\u10d1\u10d2\u10d3\u10d4\u10d5\u10d6\u10d7\u10d8\u10d9\u10da\u10db\u10dc\u10dd\u10de\u10df\u10e0\u10e1\u10e2\u10e3\u10e4\u10e5\u10e6\u10e7\u10e8\u10e9\u10ea\u10eb\u10ec\u10ed\u10ee\u10ef\u10f0\u10f1\u10f2\u10f3\u10f4\u10f5\u10f6\u10f7\u10f8\u10f9\u10fa\u1100\u1101\u1102\u1103\u1104\u1105\u1106\u1107\u1108\u1109\u110a\u110b\u110c\u110d\u110e\u110f\u11
10\u1111\u1112\u1113\u1114\u1115\u1116\u1117\u1118\u1119\u111a\u111b\u111c\u111d\u111e\u111f\u1120\u1121\u1122\u1123\u1124\u1125\u1126\u1127\u1128\u1129\u112a\u112b\u112c\u112d\u112e\u112f\u1130\u1131\u1132\u1133\u1134\u1135\u1136\u1137\u1138\u1139\u113a\u113b\u113c\u113d\u113e\u113f\u1140\u1141\u1142\u1143\u1144\u1145\u1146\u1147\u1148\u1149\u114a\u114b\u114c\u114d\u114e\u114f\u1150\u1151\u1152\u1153\u1154\u1155\u1156\u1157\u1158\u1159\u115a\u115b\u115c\u115d\u115e\u115f\u1160\u1161\u1162\u1163\u1164\u1165\u1166\u1167\u1168\u1169\u116a\u116b\u116c\u116d\u116e\u116f\u1170\u1171\u1172\u1173\u1174\u1175\u1176\u1177\u1178\u1179\u117a\u117b\u117c\u117d\u117e\u117f\u1180\u1181\u1182\u1183\u1184\u1185\u1186\u1187\u1188\u1189\u118a\u118b\u118c\u118d\u118e\u118f\u1190\u1191\u1192\u1193\u1194\u1195\u1196\u1197\u1198\u1199\u119a\u119b\u119c\u119d\u119e\u119f\u11a0\u11a1\u11a2\u11a3\u11a4\u11a5\u11a6\u11a7\u11a8\u11a9\u11aa\u11ab\u11ac\u11ad\u11ae\u11af\u11b0\u11b1\u11b2\u11b3\u11b4\u11b5\u11b6\u11b7\u11b8\u11b9\u11ba\u11bb\u11bc\u11bd\u11be\u11bf\u11c0\u11c1\u11c2\u11c3\u11c4\u11c5\u11c6\u11c7\u11c8\u11c9\u11ca\u11cb\u11cc\u11cd\u11ce\u11cf\u11d0\u11d1\u11d2\u11d3\u11d4\u11d5\u11d6\u11d7\u11d8\u11d9\u11da\u11db\u11dc\u11dd\u11de\u11df\u11e0\u11e1\u11e2\u11e3\u11e4\u11e5\u11e6\u11e7\u11e8\u11e9\u11ea\u11eb\u11ec\u11ed\u11ee\u11ef\u11f0\u11f1\u11f2\u11f3\u11f4\u11f5\u11f6\u11f7\u11f8\u11f9\u11fa\u11fb\u11fc\u11fd\u11fe\u11ff\u1200\u1201\u1202\u1203\u1204\u1205\u1206\u1207\u1208\u1209\u120a\u120b\u120c\u120d\u120e\u120f\u1210\u1211\u1212\u1213\u1214\u1215\u1216\u1217\u1218\u1219\u121a\u121b\u121c\u121d\u121e\u121f\u1220\u1221\u1222\u1223\u1224\u1225\u1226\u1227\u1228\u1229\u122a\u122b\u122c\u122d\u122e\u122f\u1230\u1231\u1232\u1233\u1234\u1235\u1236\u1237\u1238\u1239\u123a\u123b\u123c\u123d\u123e\u123f\u1240\u1241\u1242\u1243\u1244\u1245\u1246\u1247\u1248\u124a\u124b\u124c\u124d\u1250\u1251\u1252\u1253\u1254\u1255\u1256\u1258\u125a\u125b\u125c\u125d\u1260\u1261\u1262\u1263\u1264\u1265\u1266\u1267\u1268\u1269\u126a\u126b\u126c\u126d\u126e\u126f\u1270\u1271\u1272\u1273\u1274\u1275\u1276\u1277\u1278\u1279\u127a\u127b\u127c\u127d\u127e\u127f\u1280\u1281\u1282\u1283\u1284\u1285\u1286\u1287\u1288\u128a\u128b\u128c\u128d\u1290\u1291\u1292\u1293\u1294\u1295\u1296\u1297\u1298\u1299\u129a\u129b\u129c\u129d\u129e\u129f\u12a0\u12a1\u12a2\u12a3\u12a4\u12a5\u12a6\u12a7\u12a8\u12a9\u12aa\u12ab\u12ac\u12ad\u12ae\u12af\u12b0\u12b2\u12b3\u12b4\u12b5\u12b8\u12b9\u12ba\u12bb\u12bc\u12bd\u12be\u12c0\u12c2\u12c3\u12c4\u12c5\u12c8\u12c9\u12ca\u12cb\u12cc\u12cd\u12ce\u12cf\u12d0\u12d1\u12d2\u12d3\u12d4\u12d5\u12d6\u12d8\u12d9\u12da\u12db\u12dc\u12dd\u12de\u12df\u12e0\u12e1\u12e2\u12e3\u12e4\u12e5\u12e6\u12e7\u12e8\u12e9\u12ea\u12eb\u12ec\u12ed\u12ee\u12ef\u12f0\u12f1\u12f2\u12f3\u12f4\u12f5\u12f6\u12f7\u12f8\u12f9\u12fa\u12fb\u12fc\u12fd\u12fe\u12ff\u1300\u1301\u1302\u1303\u1304\u1305\u1306\u1307\u1308\u1309\u130a\u130b\u130c\u130d\u130e\u130f\u1310\u1312\u1313\u1314\u1315\u1318\u1319\u131a\u131b\u131c\u131d\u131e\u131f\u1320\u1321\u1322\u1323\u1324\u1325\u1326\u1327\u1328\u1329\u132a\u132b\u132c\u132d\u132e\u132f\u1330\u1331\u1332\u1333\u1334\u1335\u1336\u1337\u1338\u1339\u133a\u133b\u133c\u133d\u133e\u133f\u1340\u1341\u1342\u1343\u1344\u1345\u1346\u1347\u1348\u1349\u134a\u134b\u134c\u134d\u134e\u134f\u1350\u1351\u1352\u1353\u1354\u1355\u1356\u1357\u1358\u1359\u135a\u1380\u1381\u1382\u1383\u1384\u1385\u1386\u1387\u1388\u1389\u138a\u138b\u138c\u138d\u138e\u138f\u13a0\u13a1\u13a2\u13a3\u13a4\u13a5\u13a6\u13a7\u13a8\u13a9\u13aa\
u13ab\u13ac\u13ad\u13ae\u13af\u13b0\u13b1\u13b2\u13b3\u13b4\u13b5\u13b6\u13b7\u13b8\u13b9\u13ba\u13bb\u13bc\u13bd\u13be\u13bf\u13c0\u13c1\u13c2\u13c3\u13c4\u13c5\u13c6\u13c7\u13c8\u13c9\u13ca\u13cb\u13cc\u13cd\u13ce\u13cf\u13d0\u13d1\u13d2\u13d3\u13d4\u13d5\u13d6\u13d7\u13d8\u13d9\u13da\u13db\u13dc\u13dd\u13de\u13df\u13e0\u13e1\u13e2\u13e3\u13e4\u13e5\u13e6\u13e7\u13e8\u13e9\u13ea\u13eb\u13ec\u13ed\u13ee\u13ef\u13f0\u13f1\u13f2\u13f3\u13f4\u1401\u1402\u1403\u1404\u1405\u1406\u1407\u1408\u1409\u140a\u140b\u140c\u140d\u140e\u140f\u1410\u1411\u1412\u1413\u1414\u1415\u1416\u1417\u1418\u1419\u141a\u141b\u141c\u141d\u141e\u141f\u1420\u1421\u1422\u1423\u1424\u1425\u1426\u1427\u1428\u1429\u142a\u142b\u142c\u142d\u142e\u142f\u1430\u1431\u1432\u1433\u1434\u1435\u1436\u1437\u1438\u1439\u143a\u143b\u143c\u143d\u143e\u143f\u1440\u1441\u1442\u1443\u1444\u1445\u1446\u1447\u1448\u1449\u144a\u144b\u144c\u144d\u144e\u144f\u1450\u1451\u1452\u1453\u1454\u1455\u1456\u1457\u1458\u1459\u145a\u145b\u145c\u145d\u145e\u145f\u1460\u1461\u1462\u1463\u1464\u1465\u1466\u1467\u1468\u1469\u146a\u146b\u146c\u146d\u146e\u146f\u1470\u1471\u1472\u1473\u1474\u1475\u1476\u1477\u1478\u1479\u147a\u147b\u147c\u147d\u147e\u147f\u1480\u1481\u1482\u1483\u1484\u1485\u1486\u1487\u1488\u1489\u148a\u148b\u148c\u148d\u148e\u148f\u1490\u1491\u1492\u1493\u1494\u1495\u1496\u1497\u1498\u1499\u149a\u149b\u149c\u149d\u149e\u149f\u14a0\u14a1\u14a2\u14a3\u14a4\u14a5\u14a6\u14a7\u14a8\u14a9\u14aa\u14ab\u14ac\u14ad\u14ae\u14af\u14b0\u14b1\u14b2\u14b3\u14b4\u14b5\u14b6\u14b7\u14b8\u14b9\u14ba\u14bb\u14bc\u14bd\u14be\u14bf\u14c0\u14c1\u14c2\u14c3\u14c4\u14c5\u14c6\u14c7\u14c8\u14c9\u14ca\u14cb\u14cc\u14cd\u14ce\u14cf\u14d0\u14d1\u14d2\u14d3\u14d4\u14d5\u14d6\u14d7\u14d8\u14d9\u14da\u14db\u14dc\u14dd\u14de\u14df\u14e0\u14e1\u14e2\u14e3\u14e4\u14e5\u14e6\u14e7\u14e8\u14e9\u14ea\u14eb\u14ec\u14ed\u14ee\u14ef\u14f0\u14f1\u14f2\u14f3\u14f4\u14f5\u14f6\u14f7\u14f8\u14f9\u14fa\u14fb\u14fc\u14fd\u14fe\u14ff\u1500\u1501\u1502\u1503\u1504\u1505\u1506\u1507\u1508\u1509\u150a\u150b\u150c\u150d\u150e\u150f\u1510\u1511\u1512\u1513\u1514\u1515\u1516\u1517\u1518\u1519\u151a\u151b\u151c\u151d\u151e\u151f\u1520\u1521\u1522\u1523\u1524\u1525\u1526\u1527\u1528\u1529\u152a\u152b\u152c\u152d\u152e\u152f\u1530\u1531\u1532\u1533\u1534\u1535\u1536\u1537\u1538\u1539\u153a\u153b\u153c\u153d\u153e\u153f\u1540\u1541\u1542\u1543\u1544\u1545\u1546\u1547\u1548\u1549\u154a\u154b\u154c\u154d\u154e\u154f\u1550\u1551\u1552\u1553\u1554\u1555\u1556\u1557\u1558\u1559\u155a\u155b\u155c\u155d\u155e\u155f\u1560\u1561\u1562\u1563\u1564\u1565\u1566\u1567\u1568\u1569\u156a\u156b\u156c\u156d\u156e\u156f\u1570\u1571\u1572\u1573\u1574\u1575\u1576\u1577\u1578\u1579\u157a\u157b\u157c\u157d\u157e\u157f\u1580\u1581\u1582\u1583\u1584\u1585\u1586\u1587\u1588\u1589\u158a\u158b\u158c\u158d\u158e\u158f\u1590\u1591\u1592\u1593\u1594\u1595\u1596\u1597\u1598\u1599\u159a\u159b\u159c\u159d\u159e\u159f\u15a0\u15a1\u15a2\u15a3\u15a4\u15a5\u15a6\u15a7\u15a8\u15a9\u15aa\u15ab\u15ac\u15ad\u15ae\u15af\u15b0\u15b1\u15b2\u15b3\u15b4\u15b5\u15b6\u15b7\u15b8\u15b9\u15ba\u15bb\u15bc\u15bd\u15be\u15bf\u15c0\u15c1\u15c2\u15c3\u15c4\u15c5\u15c6\u15c7\u15c8\u15c9\u15ca\u15cb\u15cc\u15cd\u15ce\u15cf\u15d0\u15d1\u15d2\u15d3\u15d4\u15d5\u15d6\u15d7\u15d8\u15d9\u15da\u15db\u15dc\u15dd\u15de\u15df\u15e0\u15e1\u15e2\u15e3\u15e4\u15e5\u15e6\u15e7\u15e8\u15e9\u15ea\u15eb\u15ec\u15ed\u15ee\u15ef\u15f0\u15f1\u15f2\u15f3\u15f4\u15f5\u15f6\u15f7\u15f8\u15f9\u15fa\u15fb\u15fc\u15fd\u15fe\u15ff\u1600\u1601\u1602\u1603\u1604\u1605\u1606\u16
07\u1608\u1609\u160a\u160b\u160c\u160d\u160e\u160f\u1610\u1611\u1612\u1613\u1614\u1615\u1616\u1617\u1618\u1619\u161a\u161b\u161c\u161d\u161e\u161f\u1620\u1621\u1622\u1623\u1624\u1625\u1626\u1627\u1628\u1629\u162a\u162b\u162c\u162d\u162e\u162f\u1630\u1631\u1632\u1633\u1634\u1635\u1636\u1637\u1638\u1639\u163a\u163b\u163c\u163d\u163e\u163f\u1640\u1641\u1642\u1643\u1644\u1645\u1646\u1647\u1648\u1649\u164a\u164b\u164c\u164d\u164e\u164f\u1650\u1651\u1652\u1653\u1654\u1655\u1656\u1657\u1658\u1659\u165a\u165b\u165c\u165d\u165e\u165f\u1660\u1661\u1662\u1663\u1664\u1665\u1666\u1667\u1668\u1669\u166a\u166b\u166c\u166f\u1670\u1671\u1672\u1673\u1674\u1675\u1676\u1677\u1678\u1679\u167a\u167b\u167c\u167d\u167e\u167f\u1681\u1682\u1683\u1684\u1685\u1686\u1687\u1688\u1689\u168a\u168b\u168c\u168d\u168e\u168f\u1690\u1691\u1692\u1693\u1694\u1695\u1696\u1697\u1698\u1699\u169a\u16a0\u16a1\u16a2\u16a3\u16a4\u16a5\u16a6\u16a7\u16a8\u16a9\u16aa\u16ab\u16ac\u16ad\u16ae\u16af\u16b0\u16b1\u16b2\u16b3\u16b4\u16b5\u16b6\u16b7\u16b8\u16b9\u16ba\u16bb\u16bc\u16bd\u16be\u16bf\u16c0\u16c1\u16c2\u16c3\u16c4\u16c5\u16c6\u16c7\u16c8\u16c9\u16ca\u16cb\u16cc\u16cd\u16ce\u16cf\u16d0\u16d1\u16d2\u16d3\u16d4\u16d5\u16d6\u16d7\u16d8\u16d9\u16da\u16db\u16dc\u16dd\u16de\u16df\u16e0\u16e1\u16e2\u16e3\u16e4\u16e5\u16e6\u16e7\u16e8\u16e9\u16ea\u1700\u1701\u1702\u1703\u1704\u1705\u1706\u1707\u1708\u1709\u170a\u170b\u170c\u170e\u170f\u1710\u1711\u1720\u1721\u1722\u1723\u1724\u1725\u1726\u1727\u1728\u1729\u172a\u172b\u172c\u172d\u172e\u172f\u1730\u1731\u1740\u1741\u1742\u1743\u1744\u1745\u1746\u1747\u1748\u1749\u174a\u174b\u174c\u174d\u174e\u174f\u1750\u1751\u1760\u1761\u1762\u1763\u1764\u1765\u1766\u1767\u1768\u1769\u176a\u176b\u176c\u176e\u176f\u1770\u1780\u1781\u1782\u1783\u1784\u1785\u1786\u1787\u1788\u1789\u178a\u178b\u178c\u178d\u178e\u178f\u1790\u1791\u1792\u1793\u1794\u1795\u1796\u1797\u1798\u1799\u179a\u179b\u179c\u179d\u179e\u179f\u17a0\u17a1\u17a2\u17a3\u17a4\u17a5\u17a6\u17a7\u17a8\u17a9\u17aa\u17ab\u17ac\u17ad\u17ae\u17af\u17b0\u17b1\u17b2\u17b3\u17dc\u1820\u1821\u1822\u1823\u1824\u1825\u1826\u1827\u1828\u1829\u182a\u182b\u182c\u182d\u182e\u182f\u1830\u1831\u1832\u1833\u1834\u1835\u1836\u1837\u1838\u1839\u183a\u183b\u183c\u183d\u183e\u183f\u1840\u1841\u1842\u1844\u1845\u1846\u1847\u1848\u1849\u184a\u184b\u184c\u184d\u184e\u184f\u1850\u1851\u1852\u1853\u1854\u1855\u1856\u1857\u1858\u1859\u185a\u185b\u185c\u185d\u185e\u185f\u1860\u1861\u1862\u1863\u1864\u1865\u1866\u1867\u1868\u1869\u186a\u186b\u186c\u186d\u186e\u186f\u1870\u1871\u1872\u1873\u1874\u1875\u1876\u1877\u1880\u1881\u1882\u1883\u1884\u1885\u1886\u1887\u1888\u1889\u188a\u188b\u188c\u188d\u188e\u188f\u1890\u1891\u1892\u1893\u1894\u1895\u1896\u1897\u1898\u1899\u189a\u189b\u189c\u189d\u189e\u189f\u18a0\u18a1\u18a2\u18a3\u18a4\u18a5\u18a6\u18a7\u18a8\u18aa\u18b0\u18b1\u18b2\u18b3\u18b4\u18b5\u18b6\u18b7\u18b8\u18b9\u18ba\u18bb\u18bc\u18bd\u18be\u18bf\u18c0\u18c1\u18c2\u18c3\u18c4\u18c5\u18c6\u18c7\u18c8\u18c9\u18ca\u18cb\u18cc\u18cd\u18ce\u18cf\u18d0\u18d1\u18d2\u18d3\u18d4\u18d5\u18d6\u18d7\u18d8\u18d9\u18da\u18db\u18dc\u18dd\u18de\u18df\u18e0\u18e1\u18e2\u18e3\u18e4\u18e5\u18e6\u18e7\u18e8\u18e9\u18ea\u18eb\u18ec\u18ed\u18ee\u18ef\u18f0\u18f1\u18f2\u18f3\u18f4\u18f5\u1900\u1901\u1902\u1903\u1904\u1905\u1906\u1907\u1908\u1909\u190a\u190b\u190c\u190d\u190e\u190f\u1910\u1911\u1912\u1913\u1914\u1915\u1916\u1917\u1918\u1919\u191a\u191b\u191c\u1950\u1951\u1952\u1953\u1954\u1955\u1956\u1957\u1958\u1959\u195a\u195b\u195c\u195d\u195e\u195f\u1960\u1961\u1962\u1963\u1964\u1965\u1966\
u1967\u1968\u1969\u196a\u196b\u196c\u196d\u1970\u1971\u1972\u1973\u1974\u1980\u1981\u1982\u1983\u1984\u1985\u1986\u1987\u1988\u1989\u198a\u198b\u198c\u198d\u198e\u198f\u1990\u1991\u1992\u1993\u1994\u1995\u1996\u1997\u1998\u1999\u199a\u199b\u199c\u199d\u199e\u199f\u19a0\u19a1\u19a2\u19a3\u19a4\u19a5\u19a6\u19a7\u19a8\u19a9\u19aa\u19ab\u19c1\u19c2\u19c3\u19c4\u19c5\u19c6\u19c7\u1a00\u1a01\u1a02\u1a03\u1a04\u1a05\u1a06\u1a07\u1a08\u1a09\u1a0a\u1a0b\u1a0c\u1a0d\u1a0e\u1a0f\u1a10\u1a11\u1a12\u1a13\u1a14\u1a15\u1a16\u1a20\u1a21\u1a22\u1a23\u1a24\u1a25\u1a26\u1a27\u1a28\u1a29\u1a2a\u1a2b\u1a2c\u1a2d\u1a2e\u1a2f\u1a30\u1a31\u1a32\u1a33\u1a34\u1a35\u1a36\u1a37\u1a38\u1a39\u1a3a\u1a3b\u1a3c\u1a3d\u1a3e\u1a3f\u1a40\u1a41\u1a42\u1a43\u1a44\u1a45\u1a46\u1a47\u1a48\u1a49\u1a4a\u1a4b\u1a4c\u1a4d\u1a4e\u1a4f\u1a50\u1a51\u1a52\u1a53\u1a54\u1b05\u1b06\u1b07\u1b08\u1b09\u1b0a\u1b0b\u1b0c\u1b0d\u1b0e\u1b0f\u1b10\u1b11\u1b12\u1b13\u1b14\u1b15\u1b16\u1b17\u1b18\u1b19\u1b1a\u1b1b\u1b1c\u1b1d\u1b1e\u1b1f\u1b20\u1b21\u1b22\u1b23\u1b24\u1b25\u1b26\u1b27\u1b28\u1b29\u1b2a\u1b2b\u1b2c\u1b2d\u1b2e\u1b2f\u1b30\u1b31\u1b32\u1b33\u1b45\u1b46\u1b47\u1b48\u1b49\u1b4a\u1b4b\u1b83\u1b84\u1b85\u1b86\u1b87\u1b88\u1b89\u1b8a\u1b8b\u1b8c\u1b8d\u1b8e\u1b8f\u1b90\u1b91\u1b92\u1b93\u1b94\u1b95\u1b96\u1b97\u1b98\u1b99\u1b9a\u1b9b\u1b9c\u1b9d\u1b9e\u1b9f\u1ba0\u1bae\u1baf\u1c00\u1c01\u1c02\u1c03\u1c04\u1c05\u1c06\u1c07\u1c08\u1c09\u1c0a\u1c0b\u1c0c\u1c0d\u1c0e\u1c0f\u1c10\u1c11\u1c12\u1c13\u1c14\u1c15\u1c16\u1c17\u1c18\u1c19\u1c1a\u1c1b\u1c1c\u1c1d\u1c1e\u1c1f\u1c20\u1c21\u1c22\u1c23\u1c4d\u1c4e\u1c4f\u1c5a\u1c5b\u1c5c\u1c5d\u1c5e\u1c5f\u1c60\u1c61\u1c62\u1c63\u1c64\u1c65\u1c66\u1c67\u1c68\u1c69\u1c6a\u1c6b\u1c6c\u1c6d\u1c6e\u1c6f\u1c70\u1c71\u1c72\u1c73\u1c74\u1c75\u1c76\u1c77\u1ce9\u1cea\u1ceb\u1cec\u1cee\u1cef\u1cf0\u1cf1\u2135\u2136\u2137\u2138\u2d30\u2d31\u2d32\u2d33\u2d34\u2d35\u2d36\u2d37\u2d38\u2d39\u2d3a\u2d3b\u2d3c\u2d3d\u2d3e\u2d3f\u2d40\u2d41\u2d42\u2d43\u2d44\u2d45\u2d46\u2d47\u2d48\u2d49\u2d4a\u2d4b\u2d4c\u2d4d\u2d4e\u2d4f\u2d50\u2d51\u2d52\u2d53\u2d54\u2d55\u2d56\u2d57\u2d58\u2d59\u2d5a\u2d5b\u2d5c\u2d5d\u2d5e\u2d5f\u2d60\u2d61\u2d62\u2d63\u2d64\u2d65\u2d80\u2d81\u2d82\u2d83\u2d84\u2d85\u2d86\u2d87\u2d88\u2d89\u2d8a\u2d8b\u2d8c\u2d8d\u2d8e\u2d8f\u2d90\u2d91\u2d92\u2d93\u2d94\u2d95\u2d96\u2da0\u2da1\u2da2\u2da3\u2da4\u2da5\u2da6\u2da8\u2da9\u2daa\u2dab\u2dac\u2dad\u2dae\u2db0\u2db1\u2db2\u2db3\u2db4\u2db5\u2db6\u2db8\u2db9\u2dba\u2dbb\u2dbc\u2dbd\u2dbe\u2dc0\u2dc1\u2dc2\u2dc3\u2dc4\u2dc5\u2dc6\u2dc8\u2dc9\u2dca\u2dcb\u2dcc\u2dcd\u2dce\u2dd0\u2dd1\u2dd2\u2dd3\u2dd4\u2dd5\u2dd6\u2dd8\u2dd9\u2dda\u2ddb\u2ddc\u2ddd\u2dde\u3006\u303c\u3041\u3042\u3043\u3044\u3045\u3046\u3047\u3048\u3049\u304a\u304b\u304c\u304d\u304e\u304f\u3050\u3051\u3052\u3053\u3054\u3055\u3056\u3057\u3058\u3059\u305a\u305b\u305c\u305d\u305e\u305f\u3060\u3061\u3062\u3063\u3064\u3065\u3066\u3067\u3068\u3069\u306a\u306b\u306c\u306d\u306e\u306f\u3070\u3071\u3072\u3073\u3074\u3075\u3076\u3077\u3078\u3079\u307a\u307b\u307c\u307d\u307e\u307f\u3080\u3081\u3082\u3083\u3084\u3085\u3086\u3087\u3088\u3089\u308a\u308b\u308c\u308d\u308e\u308f\u3090\u3091\u3092\u3093\u3094\u3095\u3096\u309f\u30a1\u30a2\u30a3\u30a4\u30a5\u30a6\u30a7\u30a8\u30a9\u30aa\u30ab\u30ac\u30ad\u30ae\u30af\u30b0\u30b1\u30b2\u30b3\u30b4\u30b5\u30b6\u30b7\u30b8\u30b9\u30ba\u30bb\u30bc\u30bd\u30be\u30bf\u30c0\u30c1\u30c2\u30c3\u30c4\u30c5\u30c6\u30c7\u30c8\u30c9\u30ca\u30cb\u30cc\u30cd\u30ce\u30cf\u30d0\u30d1\u30d2\u30d3\u30d4\u30d5\u30d6\u30d7\u30d8\u30d9\u30da\u30db\u30dc\u30dd\u30de\u30df\u30e0\u30
e1\u30e2\u30e3\u30e4\u30e5\u30e6\u30e7\u30e8\u30e9\u30ea\u30eb\u30ec\u30ed\u30ee\u30ef\u30f0\u30f1\u30f2\u30f3\u30f4\u30f5\u30f6\u30f7\u30f8\u30f9\u30fa\u30ff\u3105\u3106\u3107\u3108\u3109\u310a\u310b\u310c\u310d\u310e\u310f\u3110\u3111\u3112\u3113\u3114\u3115\u3116\u3117\u3118\u3119\u311a\u311b\u311c\u311d\u311e\u311f\u3120\u3121\u3122\u3123\u3124\u3125\u3126\u3127\u3128\u3129\u312a\u312b\u312c\u312d\u3131\u3132\u3133\u3134\u3135\u3136\u3137\u3138\u3139\u313a\u313b\u313c\u313d\u313e\u313f\u3140\u3141\u3142\u3143\u3144\u3145\u3146\u3147\u3148\u3149\u314a\u314b\u314c\u314d\u314e\u314f\u3150\u3151\u3152\u3153\u3154\u3155\u3156\u3157\u3158\u3159\u315a\u315b\u315c\u315d\u315e\u315f\u3160\u3161\u3162\u3163\u3164\u3165\u3166\u3167\u3168\u3169\u316a\u316b\u316c\u316d\u316e\u316f\u3170\u3171\u3172\u3173\u3174\u3175\u3176\u3177\u3178\u3179\u317a\u317b\u317c\u317d\u317e\u317f\u3180\u3181\u3182\u3183\u3184\u3185\u3186\u3187\u3188\u3189\u318a\u318b\u318c\u318d\u318e\u31a0\u31a1\u31a2\u31a3\u31a4\u31a5\u31a6\u31a7\u31a8\u31a9\u31aa\u31ab\u31ac\u31ad\u31ae\u31af\u31b0\u31b1\u31b2\u31b3\u31b4\u31b5\u31b6\u31b7\u31f0\u31f1\u31f2\u31f3\u31f4\u31f5\u31f6\u31f7\u31f8\u31f9\u31fa\u31fb\u31fc\u31fd\u31fe\u31ff\u3400\u3401\u3402\u3403\u3404\u3405\u3406\u3407\u3408\u3409\u340a\u340b\u340c\u340d\u340e\u340f\u3410\u3411\u3412\u3413\u3414\u3415\u3416\u3417\u3418\u3419\u341a\u341b\u341c\u341d\u341e\u341f\u3420\u3421\u3422\u3423\u3424\u3425\u3426\u3427\u3428\u3429\u342a\u342b\u342c\u342d\u342e\u342f\u3430\u3431\u3432\u3433\u3434\u3435\u3436\u3437\u3438\u3439\u343a\u343b\u343c\u343d\u343e\u343f\u3440\u3441\u3442\u3443\u3444\u3445\u3446\u3447\u3448\u3449\u344a\u344b\u344c\u344d\u344e\u344f\u3450\u3451\u3452\u3453\u3454\u3455\u3456\u3457\u3458\u3459\u345a\u345b\u345c\u345d\u345e\u345f\u3460\u3461\u3462\u3463\u3464\u3465\u3466\u3467\u3468\u3469\u346a\u346b\u346c\u346d\u346e\u346f\u3470\u3471\u3472\u3473\u3474\u3475\u3476\u3477\u3478\u3479\u347a\u347b\u347c\u347d\u347e\u347f\u3480\u3481\u3482\u3483\u3484\u3485\u3486\u3487\u3488\u3489\u348a\u348b\u348c\u348d\u348e\u348f\u3490\u3491\u3492\u3493\u3494\u3495\u3496\u3497\u3498\u3499\u349a\u349b\u349c\u349d\u349e\u349f\u34a0\u34a1\u34a2\u34a3\u34a4\u34a5\u34a6\u34a7\u34a8\u34a9\u34aa\u34ab\u34ac\u34ad\u34ae\u34af\u34b0\u34b1\u34b2\u34b3\u34b4\u34b5\u34b6\u34b7\u34b8\u34b9\u34ba\u34bb\u34bc\u34bd\u34be\u34bf\u34c0\u34c1\u34c2\u34c3\u34c4\u34c5\u34c6\u34c7\u34c8\u34c9\u34ca\u34cb\u34cc\u34cd\u34ce\u34cf\u34d0\u34d1\u34d2\u34d3\u34d4\u34d5\u34d6\u34d7\u34d8\u34d9\u34da\u34db\u34dc\u34dd\u34de\u34df\u34e0\u34e1\u34e2\u34e3\u34e4\u34e5\u34e6\u34e7\u34e8\u34e9\u34ea\u34eb\u34ec\u34ed\u34ee\u34ef\u34f0\u34f1\u34f2\u34f3\u34f4\u34f5\u34f6\u34f7\u34f8\u34f9\u34fa\u34fb\u34fc\u34fd\u34fe\u34ff\u3500\u3501\u3502\u3503\u3504\u3505\u3506\u3507\u3508\u3509\u350a\u350b\u350c\u350d\u350e\u350f\u3510\u3511\u3512\u3513\u3514\u3515\u3516\u3517\u3518\u3519\u351a\u351b\u351c\u351d\u351e\u351f\u3520\u3521\u3522\u3523\u3524\u3525\u3526\u3527\u3528\u3529\u352a\u352b\u352c\u352d\u352e\u352f\u3530\u3531\u3532\u3533\u3534\u3535\u3536\u3537\u3538\u3539\u353a\u353b\u353c\u353d\u353e\u353f\u3540\u3541\u3542\u3543\u3544\u3545\u3546\u3547\u3548\u3549\u354a\u354b\u354c\u354d\u354e\u354f\u3550\u3551\u3552\u3553\u3554\u3555\u3556\u3557\u3558\u3559\u355a\u355b\u355c\u355d\u355e\u355f\u3560\u3561\u3562\u3563\u3564\u3565\u3566\u3567\u3568\u3569\u356a\u356b\u356c\u356d\u356e\u356f\u3570\u3571\u3572\u3573\u3574\u3575\u3576\u3577\u3578\u3579\u357a\u357b\u357c\u357d\u357e\u357f\u3580\u3581\u3582\u3583\u3584\u3585\u3586\
u3587\u3588\u3589\u358a\u358b\u358c\u358d\u358e\u358f\u3590\u3591\u3592\u3593\u3594\u3595\u3596\u3597\u3598\u3599\u359a\u359b\u359c\u359d\u359e\u359f\u35a0\u35a1\u35a2\u35a3\u35a4\u35a5\u35a6\u35a7\u35a8\u35a9\u35aa\u35ab\u35ac\u35ad\u35ae\u35af\u35b0\u35b1\u35b2\u35b3\u35b4\u35b5\u35b6\u35b7\u35b8\u35b9\u35ba\u35bb\u35bc\u35bd\u35be\u35bf\u35c0\u35c1\u35c2\u35c3\u35c4\u35c5\u35c6\u35c7\u35c8\u35c9\u35ca\u35cb\u35cc\u35cd\u35ce\u35cf\u35d0\u35d1\u35d2\u35d3\u35d4\u35d5\u35d6\u35d7\u35d8\u35d9\u35da\u35db\u35dc\u35dd\u35de\u35df\u35e0\u35e1\u35e2\u35e3\u35e4\u35e5\u35e6\u35e7\u35e8\u35e9\u35ea\u35eb\u35ec\u35ed\u35ee\u35ef\u35f0\u35f1\u35f2\u35f3\u35f4\u35f5\u35f6\u35f7\u35f8\u35f9\u35fa\u35fb\u35fc\u35fd\u35fe\u35ff\u3600\u3601\u3602\u3603\u3604\u3605\u3606\u3607\u3608\u3609\u360a\u360b\u360c\u360d\u360e\u360f\u3610\u3611\u3612\u3613\u3614\u3615\u3616\u3617\u3618\u3619\u361a\u361b\u361c\u361d\u361e\u361f\u3620\u3621\u3622\u3623\u3624\u3625\u3626\u3627\u3628\u3629\u362a\u362b\u362c\u362d\u362e\u362f\u3630\u3631\u3632\u3633\u3634\u3635\u3636\u3637\u3638\u3639\u363a\u363b\u363c\u363d\u363e\u363f\u3640\u3641\u3642\u3643\u3644\u3645\u3646\u3647\u3648\u3649\u364a\u364b\u364c\u364d\u364e\u364f\u3650\u3651\u3652\u3653\u3654\u3655\u3656\u3657\u3658\u3659\u365a\u365b\u365c\u365d\u365e\u365f\u3660\u3661\u3662\u3663\u3664\u3665\u3666\u3667\u3668\u3669\u366a\u366b\u366c\u366d\u366e\u366f\u3670\u3671\u3672\u3673\u3674\u3675\u3676\u3677\u3678\u3679\u367a\u367b\u367c\u367d\u367e\u367f\u3680\u3681\u3682\u3683\u3684\u3685\u3686\u3687\u3688\u3689\u368a\u368b\u368c\u368d\u368e\u368f\u3690\u3691\u3692\u3693\u3694\u3695\u3696\u3697\u3698\u3699\u369a\u369b\u369c\u369d\u369e\u369f\u36a0\u36a1\u36a2\u36a3\u36a4\u36a5\u36a6\u36a7\u36a8\u36a9\u36aa\u36ab\u36ac\u36ad\u36ae\u36af\u36b0\u36b1\u36b2\u36b3\u36b4\u36b5\u36b6\u36b7\u36b8\u36b9\u36ba\u36bb\u36bc\u36bd\u36be\u36bf\u36c0\u36c1\u36c2\u36c3\u36c4\u36c5\u36c6\u36c7\u36c8\u36c9\u36ca\u36cb\u36cc\u36cd\u36ce\u36cf\u36d0\u36d1\u36d2\u36d3\u36d4\u36d5\u36d6\u36d7\u36d8\u36d9\u36da\u36db\u36dc\u36dd\u36de\u36df\u36e0\u36e1\u36e2\u36e3\u36e4\u36e5\u36e6\u36e7\u36e8\u36e9\u36ea\u36eb\u36ec\u36ed\u36ee\u36ef\u36f0\u36f1\u36f2\u36f3\u36f4\u36f5\u36f6\u36f7\u36f8\u36f9\u36fa\u36fb\u36fc\u36fd\u36fe\u36ff\u3700\u3701\u3702\u3703\u3704\u3705\u3706\u3707\u3708\u3709\u370a\u370b\u370c\u370d\u370e\u370f\u3710\u3711\u3712\u3713\u3714\u3715\u3716\u3717\u3718\u3719\u371a\u371b\u371c\u371d\u371e\u371f\u3720\u3721\u3722\u3723\u3724\u3725\u3726\u3727\u3728\u3729\u372a\u372b\u372c\u372d\u372e\u372f\u3730\u3731\u3732\u3733\u3734\u3735\u3736\u3737\u3738\u3739\u373a\u373b\u373c\u373d\u373e\u373f\u3740\u3741\u3742\u3743\u3744\u3745\u3746\u3747\u3748\u3749\u374a\u374b\u374c\u374d\u374e\u374f\u3750\u3751\u3752\u3753\u3754\u3755\u3756\u3757\u3758\u3759\u375a\u375b\u375c\u375d\u375e\u375f\u3760\u3761\u3762\u3763\u3764\u3765\u3766\u3767\u3768\u3769\u376a\u376b\u376c\u376d\u376e\u376f\u3770\u3771\u3772\u3773\u3774\u3775\u3776\u3777\u3778\u3779\u377a\u377b\u377c\u377d\u377e\u377f\u3780\u3781\u3782\u3783\u3784\u3785\u3786\u3787\u3788\u3789\u378a\u378b\u378c\u378d\u378e\u378f\u3790\u3791\u3792\u3793\u3794\u3795\u3796\u3797\u3798\u3799\u379a\u379b\u379c\u379d\u379e\u379f\u37a0\u37a1\u37a2\u37a3\u37a4\u37a5\u37a6\u37a7\u37a8\u37a9\u37aa\u37ab\u37ac\u37ad\u37ae\u37af\u37b0\u37b1\u37b2\u37b3\u37b4\u37b5\u37b6\u37b7\u37b8\u37b9\u37ba\u37bb\u37bc\u37bd\u37be\u37bf\u37c0\u37c1\u37c2\u37c3\u37c4\u37c5\u37c6\u37c7\u37c8\u37c9\u37ca\u37cb\u37cc\u37cd\u37ce\u37cf\u37d0\u37d1\u37d2\u37d3\u37d4\u37d5\u37d6\u37
d7\u37d8\u37d9\u37da\u37db\u37dc\u37dd\u37de\u37df\u37e0\u37e1\u37e2\u37e3\u37e4\u37e5\u37e6\u37e7\u37e8\u37e9\u37ea\u37eb\u37ec\u37ed\u37ee\u37ef\u37f0\u37f1\u37f2\u37f3\u37f4\u37f5\u37f6\u37f7\u37f8\u37f9\u37fa\u37fb\u37fc\u37fd\u37fe\u37ff\u3800\u3801\u3802\u3803\u3804\u3805\u3806\u3807\u3808\u3809\u380a\u380b\u380c\u380d\u380e\u380f\u3810\u3811\u3812\u3813\u3814\u3815\u3816\u3817\u3818\u3819\u381a\u381b\u381c\u381d\u381e\u381f\u3820\u3821\u3822\u3823\u3824\u3825\u3826\u3827\u3828\u3829\u382a\u382b\u382c\u382d\u382e\u382f\u3830\u3831\u3832\u3833\u3834\u3835\u3836\u3837\u3838\u3839\u383a\u383b\u383c\u383d\u383e\u383f\u3840\u3841\u3842\u3843\u3844\u3845\u3846\u3847\u3848\u3849\u384a\u384b\u384c\u384d\u384e\u384f\u3850\u3851\u3852\u3853\u3854\u3855\u3856\u3857\u3858\u3859\u385a\u385b\u385c\u385d\u385e\u385f\u3860\u3861\u3862\u3863\u3864\u3865\u3866\u3867\u3868\u3869\u386a\u386b\u386c\u386d\u386e\u386f\u3870\u3871\u3872\u3873\u3874\u3875\u3876\u3877\u3878\u3879\u387a\u387b\u387c\u387d\u387e\u387f\u3880\u3881\u3882\u3883\u3884\u3885\u3886\u3887\u3888\u3889\u388a\u388b\u388c\u388d\u388e\u388f\u3890\u3891\u3892\u3893\u3894\u3895\u3896\u3897\u3898\u3899\u389a\u389b\u389c\u389d\u389e\u389f\u38a0\u38a1\u38a2\u38a3\u38a4\u38a5\u38a6\u38a7\u38a8\u38a9\u38aa\u38ab\u38ac\u38ad\u38ae\u38af\u38b0\u38b1\u38b2\u38b3\u38b4\u38b5\u38b6\u38b7\u38b8\u38b9\u38ba\u38bb\u38bc\u38bd\u38be\u38bf\u38c0\u38c1\u38c2\u38c3\u38c4\u38c5\u38c6\u38c7\u38c8\u38c9\u38ca\u38cb\u38cc\u38cd\u38ce\u38cf\u38d0\u38d1\u38d2\u38d3\u38d4\u38d5\u38d6\u38d7\u38d8\u38d9\u38da\u38db\u38dc\u38dd\u38de\u38df\u38e0\u38e1\u38e2\u38e3\u38e4\u38e5\u38e6\u38e7\u38e8\u38e9\u38ea\u38eb\u38ec\u38ed\u38ee\u38ef\u38f0\u38f1\u38f2\u38f3\u38f4\u38f5\u38f6\u38f7\u38f8\u38f9\u38fa\u38fb\u38fc\u38fd\u38fe\u38ff\u3900\u3901\u3902\u3903\u3904\u3905\u3906\u3907\u3908\u3909\u390a\u390b\u390c\u390d\u390e\u390f\u3910\u3911\u3912\u3913\u3914\u3915\u3916\u3917\u3918\u3919\u391a\u391b\u391c\u391d\u391e\u391f\u3920\u3921\u3922\u3923\u3924\u3925\u3926\u3927\u3928\u3929\u392a\u392b\u392c\u392d\u392e\u392f\u3930\u3931\u3932\u3933\u3934\u3935\u3936\u3937\u3938\u3939\u393a\u393b\u393c\u393d\u393e\u393f\u3940\u3941\u3942\u3943\u3944\u3945\u3946\u3947\u3948\u3949\u394a\u394b\u394c\u394d\u394e\u394f\u3950\u3951\u3952\u3953\u3954\u3955\u3956\u3957\u3958\u3959\u395a\u395b\u395c\u395d\u395e\u395f\u3960\u3961\u3962\u3963\u3964\u3965\u3966\u3967\u3968\u3969\u396a\u396b\u396c\u396d\u396e\u396f\u3970\u3971\u3972\u3973\u3974\u3975\u3976\u3977\u3978\u3979\u397a\u397b\u397c\u397d\u397e\u397f\u3980\u3981\u3982\u3983\u3984\u3985\u3986\u3987\u3988\u3989\u398a\u398b\u398c\u398d\u398e\u398f\u3990\u3991\u3992\u3993\u3994\u3995\u3996\u3997\u3998\u3999\u399a\u399b\u399c\u399d\u399e\u399f\u39a0\u39a1\u39a2\u39a3\u39a4\u39a5\u39a6\u39a7\u39a8\u39a9\u39aa\u39ab\u39ac\u39ad\u39ae\u39af\u39b0\u39b1\u39b2\u39b3\u39b4\u39b5\u39b6\u39b7\u39b8\u39b9\u39ba\u39bb\u39bc\u39bd\u39be\u39bf\u39c0\u39c1\u39c2\u39c3\u39c4\u39c5\u39c6\u39c7\u39c8\u39c9\u39ca\u39cb\u39cc\u39cd\u39ce\u39cf\u39d0\u39d1\u39d2\u39d3\u39d4\u39d5\u39d6\u39d7\u39d8\u39d9\u39da\u39db\u39dc\u39dd\u39de\u39df\u39e0\u39e1\u39e2\u39e3\u39e4\u39e5\u39e6\u39e7\u39e8\u39e9\u39ea\u39eb\u39ec\u39ed\u39ee\u39ef\u39f0\u39f1\u39f2\u39f3\u39f4\u39f5\u39f6\u39f7\u39f8\u39f9\u39fa\u39fb\u39fc\u39fd\u39fe\u39ff\u3a00\u3a01\u3a02\u3a03\u3a04\u3a05\u3a06\u3a07\u3a08\u3a09\u3a0a\u3a0b\u3a0c\u3a0d\u3a0e\u3a0f\u3a10\u3a11\u3a12\u3a13\u3a14\u3a15\u3a16\u3a17\u3a18\u3a19\u3a1a\u3a1b\u3a1c\u3a1d\u3a1e\u3a1f\u3a20\u3a21\u3a22\u3a23\u3a24\u3a25\u3a26\u3a27\
u3a28\u3a29\u3a2a\u3a2b\u3a2c\u3a2d\u3a2e\u3a2f\u3a30\u3a31\u3a32\u3a33\u3a34\u3a35\u3a36\u3a37\u3a38\u3a39\u3a3a\u3a3b\u3a3c\u3a3d\u3a3e\u3a3f\u3a40\u3a41\u3a42\u3a43\u3a44\u3a45\u3a46\u3a47\u3a48\u3a49\u3a4a\u3a4b\u3a4c\u3a4d\u3a4e\u3a4f\u3a50\u3a51\u3a52\u3a53\u3a54\u3a55\u3a56\u3a57\u3a58\u3a59\u3a5a\u3a5b\u3a5c\u3a5d\u3a5e\u3a5f\u3a60\u3a61\u3a62\u3a63\u3a64\u3a65\u3a66\u3a67\u3a68\u3a69\u3a6a\u3a6b\u3a6c\u3a6d\u3a6e\u3a6f\u3a70\u3a71\u3a72\u3a73\u3a74\u3a75\u3a76\u3a77\u3a78\u3a79\u3a7a\u3a7b\u3a7c\u3a7d\u3a7e\u3a7f\u3a80\u3a81\u3a82\u3a83\u3a84\u3a85\u3a86\u3a87\u3a88\u3a89\u3a8a\u3a8b\u3a8c\u3a8d\u3a8e\u3a8f\u3a90\u3a91\u3a92\u3a93\u3a94\u3a95\u3a96\u3a97\u3a98\u3a99\u3a9a\u3a9b\u3a9c\u3a9d\u3a9e\u3a9f\u3aa0\u3aa1\u3aa2\u3aa3\u3aa4\u3aa5\u3aa6\u3aa7\u3aa8\u3aa9\u3aaa\u3aab\u3aac\u3aad\u3aae\u3aaf\u3ab0\u3ab1\u3ab2\u3ab3\u3ab4\u3ab5\u3ab6\u3ab7\u3ab8\u3ab9\u3aba\u3abb\u3abc\u3abd\u3abe\u3abf\u3ac0\u3ac1\u3ac2\u3ac3\u3ac4\u3ac5\u3ac6\u3ac7\u3ac8\u3ac9\u3aca\u3acb\u3acc\u3acd\u3ace\u3acf\u3ad0\u3ad1\u3ad2\u3ad3\u3ad4\u3ad5\u3ad6\u3ad7\u3ad8\u3ad9\u3ada\u3adb\u3adc\u3add\u3ade\u3adf\u3ae0\u3ae1\u3ae2\u3ae3\u3ae4\u3ae5\u3ae6\u3ae7\u3ae8\u3ae9\u3aea\u3aeb\u3aec\u3aed\u3aee\u3aef\u3af0\u3af1\u3af2\u3af3\u3af4\u3af5\u3af6\u3af7\u3af8\u3af9\u3afa\u3afb\u3afc\u3afd\u3afe\u3aff\u3b00\u3b01\u3b02\u3b03\u3b04\u3b05\u3b06\u3b07\u3b08\u3b09\u3b0a\u3b0b\u3b0c\u3b0d\u3b0e\u3b0f\u3b10\u3b11\u3b12\u3b13\u3b14\u3b15\u3b16\u3b17\u3b18\u3b19\u3b1a\u3b1b\u3b1c\u3b1d\u3b1e\u3b1f\u3b20\u3b21\u3b22\u3b23\u3b24\u3b25\u3b26\u3b27\u3b28\u3b29\u3b2a\u3b2b\u3b2c\u3b2d\u3b2e\u3b2f\u3b30\u3b31\u3b32\u3b33\u3b34\u3b35\u3b36\u3b37\u3b38\u3b39\u3b3a\u3b3b\u3b3c\u3b3d\u3b3e\u3b3f\u3b40\u3b41\u3b42\u3b43\u3b44\u3b45\u3b46\u3b47\u3b48\u3b49\u3b4a\u3b4b\u3b4c\u3b4d\u3b4e\u3b4f\u3b50\u3b51\u3b52\u3b53\u3b54\u3b55\u3b56\u3b57\u3b58\u3b59\u3b5a\u3b5b\u3b5c\u3b5d\u3b5e\u3b5f\u3b60\u3b61\u3b62\u3b63\u3b64\u3b65\u3b66\u3b67\u3b68\u3b69\u3b6a\u3b6b\u3b6c\u3b6d\u3b6e\u3b6f\u3b70\u3b71\u3b72\u3b73\u3b74\u3b75\u3b76\u3b77\u3b78\u3b79\u3b7a\u3b7b\u3b7c\u3b7d\u3b7e\u3b7f\u3b80\u3b81\u3b82\u3b83\u3b84\u3b85\u3b86\u3b87\u3b88\u3b89\u3b8a\u3b8b\u3b8c\u3b8d\u3b8e\u3b8f\u3b90\u3b91\u3b92\u3b93\u3b94\u3b95\u3b96\u3b97\u3b98\u3b99\u3b9a\u3b9b\u3b9c\u3b9d\u3b9e\u3b9f\u3ba0\u3ba1\u3ba2\u3ba3\u3ba4\u3ba5\u3ba6\u3ba7\u3ba8\u3ba9\u3baa\u3bab\u3bac\u3bad\u3bae\u3baf\u3bb0\u3bb1\u3bb2\u3bb3\u3bb4\u3bb5\u3bb6\u3bb7\u3bb8\u3bb9\u3bba\u3bbb\u3bbc\u3bbd\u3bbe\u3bbf\u3bc0\u3bc1\u3bc2\u3bc3\u3bc4\u3bc5\u3bc6\u3bc7\u3bc8\u3bc9\u3bca\u3bcb\u3bcc\u3bcd\u3bce\u3bcf\u3bd0\u3bd1\u3bd2\u3bd3\u3bd4\u3bd5\u3bd6\u3bd7\u3bd8\u3bd9\u3bda\u3bdb\u3bdc\u3bdd\u3bde\u3bdf\u3be0\u3be1\u3be2\u3be3\u3be4\u3be5\u3be6\u3be7\u3be8\u3be9\u3bea\u3beb\u3bec\u3bed\u3bee\u3bef\u3bf0\u3bf1\u3bf2\u3bf3\u3bf4\u3bf5\u3bf6\u3bf7\u3bf8\u3bf9\u3bfa\u3bfb\u3bfc\u3bfd\u3bfe\u3bff\u3c00\u3c01\u3c02\u3c03\u3c04\u3c05\u3c06\u3c07\u3c08\u3c09\u3c0a\u3c0b\u3c0c\u3c0d\u3c0e\u3c0f\u3c10\u3c11\u3c12\u3c13\u3c14\u3c15\u3c16\u3c17\u3c18\u3c19\u3c1a\u3c1b\u3c1c\u3c1d\u3c1e\u3c1f\u3c20\u3c21\u3c22\u3c23\u3c24\u3c25\u3c26\u3c27\u3c28\u3c29\u3c2a\u3c2b\u3c2c\u3c2d\u3c2e\u3c2f\u3c30\u3c31\u3c32\u3c33\u3c34\u3c35\u3c36\u3c37\u3c38\u3c39\u3c3a\u3c3b\u3c3c\u3c3d\u3c3e\u3c3f\u3c40\u3c41\u3c42\u3c43\u3c44\u3c45\u3c46\u3c47\u3c48\u3c49\u3c4a\u3c4b\u3c4c\u3c4d\u3c4e\u3c4f\u3c50\u3c51\u3c52\u3c53\u3c54\u3c55\u3c56\u3c57\u3c58\u3c59\u3c5a\u3c5b\u3c5c\u3c5d\u3c5e\u3c5f\u3c60\u3c61\u3c62\u3c63\u3c64\u3c65\u3c66\u3c67\u3c68\u3c69\u3c6a\u3c6b\u3c6c\u3c6d\u3c6e\u3c6f\u3c70\u3c71\u3c72\u3c73\u3c74\u3c75\u3c76\u3c77\u3c
78\u3c79\u3c7a\u3c7b\u3c7c\u3c7d\u3c7e\u3c7f\u3c80\u3c81\u3c82\u3c83\u3c84\u3c85\u3c86\u3c87\u3c88\u3c89\u3c8a\u3c8b\u3c8c\u3c8d\u3c8e\u3c8f\u3c90\u3c91\u3c92\u3c93\u3c94\u3c95\u3c96\u3c97\u3c98\u3c99\u3c9a\u3c9b\u3c9c\u3c9d\u3c9e\u3c9f\u3ca0\u3ca1\u3ca2\u3ca3\u3ca4\u3ca5\u3ca6\u3ca7\u3ca8\u3ca9\u3caa\u3cab\u3cac\u3cad\u3cae\u3caf\u3cb0\u3cb1\u3cb2\u3cb3\u3cb4\u3cb5\u3cb6\u3cb7\u3cb8\u3cb9\u3cba\u3cbb\u3cbc\u3cbd\u3cbe\u3cbf\u3cc0\u3cc1\u3cc2\u3cc3\u3cc4\u3cc5\u3cc6\u3cc7\u3cc8\u3cc9\u3cca\u3ccb\u3ccc\u3ccd\u3cce\u3ccf\u3cd0\u3cd1\u3cd2\u3cd3\u3cd4\u3cd5\u3cd6\u3cd7\u3cd8\u3cd9\u3cda\u3cdb\u3cdc\u3cdd\u3cde\u3cdf\u3ce0\u3ce1\u3ce2\u3ce3\u3ce4\u3ce5\u3ce6\u3ce7\u3ce8\u3ce9\u3cea\u3ceb\u3cec\u3ced\u3cee\u3cef\u3cf0\u3cf1\u3cf2\u3cf3\u3cf4\u3cf5\u3cf6\u3cf7\u3cf8\u3cf9\u3cfa\u3cfb\u3cfc\u3cfd\u3cfe\u3cff\u3d00\u3d01\u3d02\u3d03\u3d04\u3d05\u3d06\u3d07\u3d08\u3d09\u3d0a\u3d0b\u3d0c\u3d0d\u3d0e\u3d0f\u3d10\u3d11\u3d12\u3d13\u3d14\u3d15\u3d16\u3d17\u3d18\u3d19\u3d1a\u3d1b\u3d1c\u3d1d\u3d1e\u3d1f\u3d20\u3d21\u3d22\u3d23\u3d24\u3d25\u3d26\u3d27\u3d28\u3d29\u3d2a\u3d2b\u3d2c\u3d2d\u3d2e\u3d2f\u3d30\u3d31\u3d32\u3d33\u3d34\u3d35\u3d36\u3d37\u3d38\u3d39\u3d3a\u3d3b\u3d3c\u3d3d\u3d3e\u3d3f\u3d40\u3d41\u3d42\u3d43\u3d44\u3d45\u3d46\u3d47\u3d48\u3d49\u3d4a\u3d4b\u3d4c\u3d4d\u3d4e\u3d4f\u3d50\u3d51\u3d52\u3d53\u3d54\u3d55\u3d56\u3d57\u3d58\u3d59\u3d5a\u3d5b\u3d5c\u3d5d\u3d5e\u3d5f\u3d60\u3d61\u3d62\u3d63\u3d64\u3d65\u3d66\u3d67\u3d68\u3d69\u3d6a\u3d6b\u3d6c\u3d6d\u3d6e\u3d6f\u3d70\u3d71\u3d72\u3d73\u3d74\u3d75\u3d76\u3d77\u3d78\u3d79\u3d7a\u3d7b\u3d7c\u3d7d\u3d7e\u3d7f\u3d80\u3d81\u3d82\u3d83\u3d84\u3d85\u3d86\u3d87\u3d88\u3d89\u3d8a\u3d8b\u3d8c\u3d8d\u3d8e\u3d8f\u3d90\u3d91\u3d92\u3d93\u3d94\u3d95\u3d96\u3d97\u3d98\u3d99\u3d9a\u3d9b\u3d9c\u3d9d\u3d9e\u3d9f\u3da0\u3da1\u3da2\u3da3\u3da4\u3da5\u3da6\u3da7\u3da8\u3da9\u3daa\u3dab\u3dac\u3dad\u3dae\u3daf\u3db0\u3db1\u3db2\u3db3\u3db4\u3db5\u3db6\u3db7\u3db8\u3db9\u3dba\u3dbb\u3dbc\u3dbd\u3dbe\u3dbf\u3dc0\u3dc1\u3dc2\u3dc3\u3dc4\u3dc5\u3dc6\u3dc7\u3dc8\u3dc9\u3dca\u3dcb\u3dcc\u3dcd\u3dce\u3dcf\u3dd0\u3dd1\u3dd2\u3dd3\u3dd4\u3dd5\u3dd6\u3dd7\u3dd8\u3dd9\u3dda\u3ddb\u3ddc\u3ddd\u3dde\u3ddf\u3de0\u3de1\u3de2\u3de3\u3de4\u3de5\u3de6\u3de7\u3de8\u3de9\u3dea\u3deb\u3dec\u3ded\u3dee\u3def\u3df0\u3df1\u3df2\u3df3\u3df4\u3df5\u3df6\u3df7\u3df8\u3df9\u3dfa\u3dfb\u3dfc\u3dfd\u3dfe\u3dff\u3e00\u3e01\u3e02\u3e03\u3e04\u3e05\u3e06\u3e07\u3e08\u3e09\u3e0a\u3e0b\u3e0c\u3e0d\u3e0e\u3e0f\u3e10\u3e11\u3e12\u3e13\u3e14\u3e15\u3e16\u3e17\u3e18\u3e19\u3e1a\u3e1b\u3e1c\u3e1d\u3e1e\u3e1f\u3e20\u3e21\u3e22\u3e23\u3e24\u3e25\u3e26\u3e27\u3e28\u3e29\u3e2a\u3e2b\u3e2c\u3e2d\u3e2e\u3e2f\u3e30\u3e31\u3e32\u3e33\u3e34\u3e35\u3e36\u3e37\u3e38\u3e39\u3e3a\u3e3b\u3e3c\u3e3d\u3e3e\u3e3f\u3e40\u3e41\u3e42\u3e43\u3e44\u3e45\u3e46\u3e47\u3e48\u3e49\u3e4a\u3e4b\u3e4c\u3e4d\u3e4e\u3e4f\u3e50\u3e51\u3e52\u3e53\u3e54\u3e55\u3e56\u3e57\u3e58\u3e59\u3e5a\u3e5b\u3e5c\u3e5d\u3e5e\u3e5f\u3e60\u3e61\u3e62\u3e63\u3e64\u3e65\u3e66\u3e67\u3e68\u3e69\u3e6a\u3e6b\u3e6c\u3e6d\u3e6e\u3e6f\u3e70\u3e71\u3e72\u3e73\u3e74\u3e75\u3e76\u3e77\u3e78\u3e79\u3e7a\u3e7b\u3e7c\u3e7d\u3e7e\u3e7f\u3e80\u3e81\u3e82\u3e83\u3e84\u3e85\u3e86\u3e87\u3e88\u3e89\u3e8a\u3e8b\u3e8c\u3e8d\u3e8e\u3e8f\u3e90\u3e91\u3e92\u3e93\u3e94\u3e95\u3e96\u3e97\u3e98\u3e99\u3e9a\u3e9b\u3e9c\u3e9d\u3e9e\u3e9f\u3ea0\u3ea1\u3ea2\u3ea3\u3ea4\u3ea5\u3ea6\u3ea7\u3ea8\u3ea9\u3eaa\u3eab\u3eac\u3ead\u3eae\u3eaf\u3eb0\u3eb1\u3eb2\u3eb3\u3eb4\u3eb5\u3eb6\u3eb7\u3eb8\u3eb9\u3eba\u3ebb\u3ebc\u3ebd\u3ebe\u3ebf\u3ec0\u3ec1\u3ec2\u3ec3\u3ec4\u3ec5\u3ec6\u3ec7\u3ec8\
u3ec9\u3eca\u3ecb\u3ecc\u3ecd\u3ece\u3ecf\u3ed0\u3ed1\u3ed2\u3ed3\u3ed4\u3ed5\u3ed6\u3ed7\u3ed8\u3ed9\u3eda\u3edb\u3edc\u3edd\u3ede\u3edf\u3ee0\u3ee1\u3ee2\u3ee3\u3ee4\u3ee5\u3ee6\u3ee7\u3ee8\u3ee9\u3eea\u3eeb\u3eec\u3eed\u3eee\u3eef\u3ef0\u3ef1\u3ef2\u3ef3\u3ef4\u3ef5\u3ef6\u3ef7\u3ef8\u3ef9\u3efa\u3efb\u3efc\u3efd\u3efe\u3eff\u3f00\u3f01\u3f02\u3f03\u3f04\u3f05\u3f06\u3f07\u3f08\u3f09\u3f0a\u3f0b\u3f0c\u3f0d\u3f0e\u3f0f\u3f10\u3f11\u3f12\u3f13\u3f14\u3f15\u3f16\u3f17\u3f18\u3f19\u3f1a\u3f1b\u3f1c\u3f1d\u3f1e\u3f1f\u3f20\u3f21\u3f22\u3f23\u3f24\u3f25\u3f26\u3f27\u3f28\u3f29\u3f2a\u3f2b\u3f2c\u3f2d\u3f2e\u3f2f\u3f30\u3f31\u3f32\u3f33\u3f34\u3f35\u3f36\u3f37\u3f38\u3f39\u3f3a\u3f3b\u3f3c\u3f3d\u3f3e\u3f3f\u3f40\u3f41\u3f42\u3f43\u3f44\u3f45\u3f46\u3f47\u3f48\u3f49\u3f4a\u3f4b\u3f4c\u3f4d\u3f4e\u3f4f\u3f50\u3f51\u3f52\u3f53\u3f54\u3f55\u3f56\u3f57\u3f58\u3f59\u3f5a\u3f5b\u3f5c\u3f5d\u3f5e\u3f5f\u3f60\u3f61\u3f62\u3f63\u3f64\u3f65\u3f66\u3f67\u3f68\u3f69\u3f6a\u3f6b\u3f6c\u3f6d\u3f6e\u3f6f\u3f70\u3f71\u3f72\u3f73\u3f74\u3f75\u3f76\u3f77\u3f78\u3f79\u3f7a\u3f7b\u3f7c\u3f7d\u3f7e\u3f7f\u3f80\u3f81\u3f82\u3f83\u3f84\u3f85\u3f86\u3f87\u3f88\u3f89\u3f8a\u3f8b\u3f8c\u3f8d\u3f8e\u3f8f\u3f90\u3f91\u3f92\u3f93\u3f94\u3f95\u3f96\u3f97\u3f98\u3f99\u3f9a\u3f9b\u3f9c\u3f9d\u3f9e\u3f9f\u3fa0\u3fa1\u3fa2\u3fa3\u3fa4\u3fa5\u3fa6\u3fa7\u3fa8\u3fa9\u3faa\u3fab\u3fac\u3fad\u3fae\u3faf\u3fb0\u3fb1\u3fb2\u3fb3\u3fb4\u3fb5\u3fb6\u3fb7\u3fb8\u3fb9\u3fba\u3fbb\u3fbc\u3fbd\u3fbe\u3fbf\u3fc0\u3fc1\u3fc2\u3fc3\u3fc4\u3fc5\u3fc6\u3fc7\u3fc8\u3fc9\u3fca\u3fcb\u3fcc\u3fcd\u3fce\u3fcf\u3fd0\u3fd1\u3fd2\u3fd3\u3fd4\u3fd5\u3fd6\u3fd7\u3fd8\u3fd9\u3fda\u3fdb\u3fdc\u3fdd\u3fde\u3fdf\u3fe0\u3fe1\u3fe2\u3fe3\u3fe4\u3fe5\u3fe6\u3fe7\u3fe8\u3fe9\u3fea\u3feb\u3fec\u3fed\u3fee\u3fef\u3ff0\u3ff1\u3ff2\u3ff3\u3ff4\u3ff5\u3ff6\u3ff7\u3ff8\u3ff9\u3ffa\u3ffb\u3ffc\u3ffd\u3ffe\u3fff\u4000\u4001\u4002\u4003\u4004\u4005\u4006\u4007\u4008\u4009\u400a\u400b\u400c\u400d\u400e\u400f\u4010\u4011\u4012\u4013\u4014\u4015\u4016\u4017\u4018\u4019\u401a\u401b\u401c\u401d\u401e\u401f\u4020\u4021\u4022\u4023\u4024\u4025\u4026\u4027\u4028\u4029\u402a\u402b\u402c\u402d\u402e\u402f\u4030\u4031\u4032\u4033\u4034\u4035\u4036\u4037\u4038\u4039\u403a\u403b\u403c\u403d\u403e\u403f\u4040\u4041\u4042\u4043\u4044\u4045\u4046\u4047\u4048\u4049\u404a\u404b\u404c\u404d\u404e\u404f\u4050\u4051\u4052\u4053\u4054\u4055\u4056\u4057\u4058\u4059\u405a\u405b\u405c\u405d\u405e\u405f\u4060\u4061\u4062\u4063\u4064\u4065\u4066\u4067\u4068\u4069\u406a\u406b\u406c\u406d\u406e\u406f\u4070\u4071\u4072\u4073\u4074\u4075\u4076\u4077\u4078\u4079\u407a\u407b\u407c\u407d\u407e\u407f\u4080\u4081\u4082\u4083\u4084\u4085\u4086\u4087\u4088\u4089\u408a\u408b\u408c\u408d\u408e\u408f\u4090\u4091\u4092\u4093\u4094\u4095\u4096\u4097\u4098\u4099\u409a\u409b\u409c\u409d\u409e\u409f\u40a0\u40a1\u40a2\u40a3\u40a4\u40a5\u40a6\u40a7\u40a8\u40a9\u40aa\u40ab\u40ac\u40ad\u40ae\u40af\u40b0\u40b1\u40b2\u40b3\u40b4\u40b5\u40b6\u40b7\u40b8\u40b9\u40ba\u40bb\u40bc\u40bd\u40be\u40bf\u40c0\u40c1\u40c2\u40c3\u40c4\u40c5\u40c6\u40c7\u40c8\u40c9\u40ca\u40cb\u40cc\u40cd\u40ce\u40cf\u40d0\u40d1\u40d2\u40d3\u40d4\u40d5\u40d6\u40d7\u40d8\u40d9\u40da\u40db\u40dc\u40dd\u40de\u40df\u40e0\u40e1\u40e2\u40e3\u40e4\u40e5\u40e6\u40e7\u40e8\u40e9\u40ea\u40eb\u40ec\u40ed\u40ee\u40ef\u40f0\u40f1\u40f2\u40f3\u40f4\u40f5\u40f6\u40f7\u40f8\u40f9\u40fa\u40fb\u40fc\u40fd\u40fe\u40ff\u4100\u4101\u4102\u4103\u4104\u4105\u4106\u4107\u4108\u4109\u410a\u410b\u410c\u410d\u410e\u410f\u4110\u4111\u4112\u4113\u4114\u4115\u4116\u4117\u4118\u41
19\u411a\u411b\u411c\u411d\u411e\u411f\u4120\u4121\u4122\u4123\u4124\u4125\u4126\u4127\u4128\u4129\u412a\u412b\u412c\u412d\u412e\u412f\u4130\u4131\u4132\u4133\u4134\u4135\u4136\u4137\u4138\u4139\u413a\u413b\u413c\u413d\u413e\u413f\u4140\u4141\u4142\u4143\u4144\u4145\u4146\u4147\u4148\u4149\u414a\u414b\u414c\u414d\u414e\u414f\u4150\u4151\u4152\u4153\u4154\u4155\u4156\u4157\u4158\u4159\u415a\u415b\u415c\u415d\u415e\u415f\u4160\u4161\u4162\u4163\u4164\u4165\u4166\u4167\u4168\u4169\u416a\u416b\u416c\u416d\u416e\u416f\u4170\u4171\u4172\u4173\u4174\u4175\u4176\u4177\u4178\u4179\u417a\u417b\u417c\u417d\u417e\u417f\u4180\u4181\u4182\u4183\u4184\u4185\u4186\u4187\u4188\u4189\u418a\u418b\u418c\u418d\u418e\u418f\u4190\u4191\u4192\u4193\u4194\u4195\u4196\u4197\u4198\u4199\u419a\u419b\u419c\u419d\u419e\u419f\u41a0\u41a1\u41a2\u41a3\u41a4\u41a5\u41a6\u41a7\u41a8\u41a9\u41aa\u41ab\u41ac\u41ad\u41ae\u41af\u41b0\u41b1\u41b2\u41b3\u41b4\u41b5\u41b6\u41b7\u41b8\u41b9\u41ba\u41bb\u41bc\u41bd\u41be\u41bf\u41c0\u41c1\u41c2\u41c3\u41c4\u41c5\u41c6\u41c7\u41c8\u41c9\u41ca\u41cb\u41cc\u41cd\u41ce\u41cf\u41d0\u41d1\u41d2\u41d3\u41d4\u41d5\u41d6\u41d7\u41d8\u41d9\u41da\u41db\u41dc\u41dd\u41de\u41df\u41e0\u41e1\u41e2\u41e3\u41e4\u41e5\u41e6\u41e7\u41e8\u41e9\u41ea\u41eb\u41ec\u41ed\u41ee\u41ef\u41f0\u41f1\u41f2\u41f3\u41f4\u41f5\u41f6\u41f7\u41f8\u41f9\u41fa\u41fb\u41fc\u41fd\u41fe\u41ff\u4200\u4201\u4202\u4203\u4204\u4205\u4206\u4207\u4208\u4209\u420a\u420b\u420c\u420d\u420e\u420f\u4210\u4211\u4212\u4213\u4214\u4215\u4216\u4217\u4218\u4219\u421a\u421b\u421c\u421d\u421e\u421f\u4220\u4221\u4222\u4223\u4224\u4225\u4226\u4227\u4228\u4229\u422a\u422b\u422c\u422d\u422e\u422f\u4230\u4231\u4232\u4233\u4234\u4235\u4236\u4237\u4238\u4239\u423a\u423b\u423c\u423d\u423e\u423f\u4240\u4241\u4242\u4243\u4244\u4245\u4246\u4247\u4248\u4249\u424a\u424b\u424c\u424d\u424e\u424f\u4250\u4251\u4252\u4253\u4254\u4255\u4256\u4257\u4258\u4259\u425a\u425b\u425c\u425d\u425e\u425f\u4260\u4261\u4262\u4263\u4264\u4265\u4266\u4267\u4268\u4269\u426a\u426b\u426c\u426d\u426e\u426f\u4270\u4271\u4272\u4273\u4274\u4275\u4276\u4277\u4278\u4279\u427a\u427b\u427c\u427d\u427e\u427f\u4280\u4281\u4282\u4283\u4284\u4285\u4286\u4287\u4288\u4289\u428a\u428b\u428c\u428d\u428e\u428f\u4290\u4291\u4292\u4293\u4294\u4295\u4296\u4297\u4298\u4299\u429a\u429b\u429c\u429d\u429e\u429f\u42a0\u42a1\u42a2\u42a3\u42a4\u42a5\u42a6\u42a7\u42a8\u42a9\u42aa\u42ab\u42ac\u42ad\u42ae\u42af\u42b0\u42b1\u42b2\u42b3\u42b4\u42b5\u42b6\u42b7\u42b8\u42b9\u42ba\u42bb\u42bc\u42bd\u42be\u42bf\u42c0\u42c1\u42c2\u42c3\u42c4\u42c5\u42c6\u42c7\u42c8\u42c9\u42ca\u42cb\u42cc\u42cd\u42ce\u42cf\u42d0\u42d1\u42d2\u42d3\u42d4\u42d5\u42d6\u42d7\u42d8\u42d9\u42da\u42db\u42dc\u42dd\u42de\u42df\u42e0\u42e1\u42e2\u42e3\u42e4\u42e5\u42e6\u42e7\u42e8\u42e9\u42ea\u42eb\u42ec\u42ed\u42ee\u42ef\u42f0\u42f1\u42f2\u42f3\u42f4\u42f5\u42f6\u42f7\u42f8\u42f9\u42fa\u42fb\u42fc\u42fd\u42fe\u42ff\u4300\u4301\u4302\u4303\u4304\u4305\u4306\u4307\u4308\u4309\u430a\u430b\u430c\u430d\u430e\u430f\u4310\u4311\u4312\u4313\u4314\u4315\u4316\u4317\u4318\u4319\u431a\u431b\u431c\u431d\u431e\u431f\u4320\u4321\u4322\u4323\u4324\u4325\u4326\u4327\u4328\u4329\u432a\u432b\u432c\u432d\u432e\u432f\u4330\u4331\u4332\u4333\u4334\u4335\u4336\u4337\u4338\u4339\u433a\u433b\u433c\u433d\u433e\u433f\u4340\u4341\u4342\u4343\u4344\u4345\u4346\u4347\u4348\u4349\u434a\u434b\u434c\u434d\u434e\u434f\u4350\u4351\u4352\u4353\u4354\u4355\u4356\u4357\u4358\u4359\u435a\u435b\u435c\u435d\u435e\u435f\u4360\u4361\u4362\u4363\u4364\u4365\u4366\u4367\u4368\u4369\
u436a\u436b\u436c\u436d\u436e\u436f\u4370\u4371\u4372\u4373\u4374\u4375\u4376\u4377\u4378\u4379\u437a\u437b\u437c\u437d\u437e\u437f\u4380\u4381\u4382\u4383\u4384\u4385\u4386\u4387\u4388\u4389\u438a\u438b\u438c\u438d\u438e\u438f\u4390\u4391\u4392\u4393\u4394\u4395\u4396\u4397\u4398\u4399\u439a\u439b\u439c\u439d\u439e\u439f\u43a0\u43a1\u43a2\u43a3\u43a4\u43a5\u43a6\u43a7\u43a8\u43a9\u43aa\u43ab\u43ac\u43ad\u43ae\u43af\u43b0\u43b1\u43b2\u43b3\u43b4\u43b5\u43b6\u43b7\u43b8\u43b9\u43ba\u43bb\u43bc\u43bd\u43be\u43bf\u43c0\u43c1\u43c2\u43c3\u43c4\u43c5\u43c6\u43c7\u43c8\u43c9\u43ca\u43cb\u43cc\u43cd\u43ce\u43cf\u43d0\u43d1\u43d2\u43d3\u43d4\u43d5\u43d6\u43d7\u43d8\u43d9\u43da\u43db\u43dc\u43dd\u43de\u43df\u43e0\u43e1\u43e2\u43e3\u43e4\u43e5\u43e6\u43e7\u43e8\u43e9\u43ea\u43eb\u43ec\u43ed\u43ee\u43ef\u43f0\u43f1\u43f2\u43f3\u43f4\u43f5\u43f6\u43f7\u43f8\u43f9\u43fa\u43fb\u43fc\u43fd\u43fe\u43ff\u4400\u4401\u4402\u4403\u4404\u4405\u4406\u4407\u4408\u4409\u440a\u440b\u440c\u440d\u440e\u440f\u4410\u4411\u4412\u4413\u4414\u4415\u4416\u4417\u4418\u4419\u441a\u441b\u441c\u441d\u441e\u441f\u4420\u4421\u4422\u4423\u4424\u4425\u4426\u4427\u4428\u4429\u442a\u442b\u442c\u442d\u442e\u442f\u4430\u4431\u4432\u4433\u4434\u4435\u4436\u4437\u4438\u4439\u443a\u443b\u443c\u443d\u443e\u443f\u4440\u4441\u4442\u4443\u4444\u4445\u4446\u4447\u4448\u4449\u444a\u444b\u444c\u444d\u444e\u444f\u4450\u4451\u4452\u4453\u4454\u4455\u4456\u4457\u4458\u4459\u445a\u445b\u445c\u445d\u445e\u445f\u4460\u4461\u4462\u4463\u4464\u4465\u4466\u4467\u4468\u4469\u446a\u446b\u446c\u446d\u446e\u446f\u4470\u4471\u4472\u4473\u4474\u4475\u4476\u4477\u4478\u4479\u447a\u447b\u447c\u447d\u447e\u447f\u4480\u4481\u4482\u4483\u4484\u4485\u4486\u4487\u4488\u4489\u448a\u448b\u448c\u448d\u448e\u448f\u4490\u4491\u4492\u4493\u4494\u4495\u4496\u4497\u4498\u4499\u449a\u449b\u449c\u449d\u449e\u449f\u44a0\u44a1\u44a2\u44a3\u44a4\u44a5\u44a6\u44a7\u44a8\u44a9\u44aa\u44ab\u44ac\u44ad\u44ae\u44af\u44b0\u44b1\u44b2\u44b3\u44b4\u44b5\u44b6\u44b7\u44b8\u44b9\u44ba\u44bb\u44bc\u44bd\u44be\u44bf\u44c0\u44c1\u44c2\u44c3\u44c4\u44c5\u44c6\u44c7\u44c8\u44c9\u44ca\u44cb\u44cc\u44cd\u44ce\u44cf\u44d0\u44d1\u44d2\u44d3\u44d4\u44d5\u44d6\u44d7\u44d8\u44d9\u44da\u44db\u44dc\u44dd\u44de\u44df\u44e0\u44e1\u44e2\u44e3\u44e4\u44e5\u44e6\u44e7\u44e8\u44e9\u44ea\u44eb\u44ec\u44ed\u44ee\u44ef\u44f0\u44f1\u44f2\u44f3\u44f4\u44f5\u44f6\u44f7\u44f8\u44f9\u44fa\u44fb\u44fc\u44fd\u44fe\u44ff\u4500\u4501\u4502\u4503\u4504\u4505\u4506\u4507\u4508\u4509\u450a\u450b\u450c\u450d\u450e\u450f\u4510\u4511\u4512\u4513\u4514\u4515\u4516\u4517\u4518\u4519\u451a\u451b\u451c\u451d\u451e\u451f\u4520\u4521\u4522\u4523\u4524\u4525\u4526\u4527\u4528\u4529\u452a\u452b\u452c\u452d\u452e\u452f\u4530\u4531\u4532\u4533\u4534\u4535\u4536\u4537\u4538\u4539\u453a\u453b\u453c\u453d\u453e\u453f\u4540\u4541\u4542\u4543\u4544\u4545\u4546\u4547\u4548\u4549\u454a\u454b\u454c\u454d\u454e\u454f\u4550\u4551\u4552\u4553\u4554\u4555\u4556\u4557\u4558\u4559\u455a\u455b\u455c\u455d\u455e\u455f\u4560\u4561\u4562\u4563\u4564\u4565\u4566\u4567\u4568\u4569\u456a\u456b\u456c\u456d\u456e\u456f\u4570\u4571\u4572\u4573\u4574\u4575\u4576\u4577\u4578\u4579\u457a\u457b\u457c\u457d\u457e\u457f\u4580\u4581\u4582\u4583\u4584\u4585\u4586\u4587\u4588\u4589\u458a\u458b\u458c\u458d\u458e\u458f\u4590\u4591\u4592\u4593\u4594\u4595\u4596\u4597\u4598\u4599\u459a\u459b\u459c\u459d\u459e\u459f\u45a0\u45a1\u45a2\u45a3\u45a4\u45a5\u45a6\u45a7\u45a8\u45a9\u45aa\u45ab\u45ac\u45ad\u45ae\u45af\u45b0\u45b1\u45b2\u45b3\u45b4\u45b5\u45b6\u45b7\u45b8\u45b9\u45
ba\u45bb\u45bc\u45bd\u45be\u45bf\u45c0\u45c1\u45c2\u45c3\u45c4\u45c5\u45c6\u45c7\u45c8\u45c9\u45ca\u45cb\u45cc\u45cd\u45ce\u45cf\u45d0\u45d1\u45d2\u45d3\u45d4\u45d5\u45d6\u45d7\u45d8\u45d9\u45da\u45db\u45dc\u45dd\u45de\u45df\u45e0\u45e1\u45e2\u45e3\u45e4\u45e5\u45e6\u45e7\u45e8\u45e9\u45ea\u45eb\u45ec\u45ed\u45ee\u45ef\u45f0\u45f1\u45f2\u45f3\u45f4\u45f5\u45f6\u45f7\u45f8\u45f9\u45fa\u45fb\u45fc\u45fd\u45fe\u45ff\u4600\u4601\u4602\u4603\u4604\u4605\u4606\u4607\u4608\u4609\u460a\u460b\u460c\u460d\u460e\u460f\u4610\u4611\u4612\u4613\u4614\u4615\u4616\u4617\u4618\u4619\u461a\u461b\u461c\u461d\u461e\u461f\u4620\u4621\u4622\u4623\u4624\u4625\u4626\u4627\u4628\u4629\u462a\u462b\u462c\u462d\u462e\u462f\u4630\u4631\u4632\u4633\u4634\u4635\u4636\u4637\u4638\u4639\u463a\u463b\u463c\u463d\u463e\u463f\u4640\u4641\u4642\u4643\u4644\u4645\u4646\u4647\u4648\u4649\u464a\u464b\u464c\u464d\u464e\u464f\u4650\u4651\u4652\u4653\u4654\u4655\u4656\u4657\u4658\u4659\u465a\u465b\u465c\u465d\u465e\u465f\u4660\u4661\u4662\u4663\u4664\u4665\u4666\u4667\u4668\u4669\u466a\u466b\u466c\u466d\u466e\u466f\u4670\u4671\u4672\u4673\u4674\u4675\u4676\u4677\u4678\u4679\u467a\u467b\u467c\u467d\u467e\u467f\u4680\u4681\u4682\u4683\u4684\u4685\u4686\u4687\u4688\u4689\u468a\u468b\u468c\u468d\u468e\u468f\u4690\u4691\u4692\u4693\u4694\u4695\u4696\u4697\u4698\u4699\u469a\u469b\u469c\u469d\u469e\u469f\u46a0\u46a1\u46a2\u46a3\u46a4\u46a5\u46a6\u46a7\u46a8\u46a9\u46aa\u46ab\u46ac\u46ad\u46ae\u46af\u46b0\u46b1\u46b2\u46b3\u46b4\u46b5\u46b6\u46b7\u46b8\u46b9\u46ba\u46bb\u46bc\u46bd\u46be\u46bf\u46c0\u46c1\u46c2\u46c3\u46c4\u46c5\u46c6\u46c7\u46c8\u46c9\u46ca\u46cb\u46cc\u46cd\u46ce\u46cf\u46d0\u46d1\u46d2\u46d3\u46d4\u46d5\u46d6\u46d7\u46d8\u46d9\u46da\u46db\u46dc\u46dd\u46de\u46df\u46e0\u46e1\u46e2\u46e3\u46e4\u46e5\u46e6\u46e7\u46e8\u46e9\u46ea\u46eb\u46ec\u46ed\u46ee\u46ef\u46f0\u46f1\u46f2\u46f3\u46f4\u46f5\u46f6\u46f7\u46f8\u46f9\u46fa\u46fb\u46fc\u46fd\u46fe\u46ff\u4700\u4701\u4702\u4703\u4704\u4705\u4706\u4707\u4708\u4709\u470a\u470b\u470c\u470d\u470e\u470f\u4710\u4711\u4712\u4713\u4714\u4715\u4716\u4717\u4718\u4719\u471a\u471b\u471c\u471d\u471e\u471f\u4720\u4721\u4722\u4723\u4724\u4725\u4726\u4727\u4728\u4729\u472a\u472b\u472c\u472d\u472e\u472f\u4730\u4731\u4732\u4733\u4734\u4735\u4736\u4737\u4738\u4739\u473a\u473b\u473c\u473d\u473e\u473f\u4740\u4741\u4742\u4743\u4744\u4745\u4746\u4747\u4748\u4749\u474a\u474b\u474c\u474d\u474e\u474f\u4750\u4751\u4752\u4753\u4754\u4755\u4756\u4757\u4758\u4759\u475a\u475b\u475c\u475d\u475e\u475f\u4760\u4761\u4762\u4763\u4764\u4765\u4766\u4767\u4768\u4769\u476a\u476b\u476c\u476d\u476e\u476f\u4770\u4771\u4772\u4773\u4774\u4775\u4776\u4777\u4778\u4779\u477a\u477b\u477c\u477d\u477e\u477f\u4780\u4781\u4782\u4783\u4784\u4785\u4786\u4787\u4788\u4789\u478a\u478b\u478c\u478d\u478e\u478f\u4790\u4791\u4792\u4793\u4794\u4795\u4796\u4797\u4798\u4799\u479a\u479b\u479c\u479d\u479e\u479f\u47a0\u47a1\u47a2\u47a3\u47a4\u47a5\u47a6\u47a7\u47a8\u47a9\u47aa\u47ab\u47ac\u47ad\u47ae\u47af\u47b0\u47b1\u47b2\u47b3\u47b4\u47b5\u47b6\u47b7\u47b8\u47b9\u47ba\u47bb\u47bc\u47bd\u47be\u47bf\u47c0\u47c1\u47c2\u47c3\u47c4\u47c5\u47c6\u47c7\u47c8\u47c9\u47ca\u47cb\u47cc\u47cd\u47ce\u47cf\u47d0\u47d1\u47d2\u47d3\u47d4\u47d5\u47d6\u47d7\u47d8\u47d9\u47da\u47db\u47dc\u47dd\u47de\u47df\u47e0\u47e1\u47e2\u47e3\u47e4\u47e5\u47e6\u47e7\u47e8\u47e9\u47ea\u47eb\u47ec\u47ed\u47ee\u47ef\u47f0\u47f1\u47f2\u47f3\u47f4\u47f5\u47f6\u47f7\u47f8\u47f9\u47fa\u47fb\u47fc\u47fd\u47fe\u47ff\u4800\u4801\u4802\u4803\u4804\u4805\u4806\u4807\u4808\u4809\u480a\
u480b\u480c\u480d\u480e\u480f\u4810\u4811\u4812\u4813\u4814\u4815\u4816\u4817\u4818\u4819\u481a\u481b\u481c\u481d\u481e\u481f\u4820\u4821\u4822\u4823\u4824\u4825\u4826\u4827\u4828\u4829\u482a\u482b\u482c\u482d\u482e\u482f\u4830\u4831\u4832\u4833\u4834\u4835\u4836\u4837\u4838\u4839\u483a\u483b\u483c\u483d\u483e\u483f\u4840\u4841\u4842\u4843\u4844\u4845\u4846\u4847\u4848\u4849\u484a\u484b\u484c\u484d\u484e\u484f\u4850\u4851\u4852\u4853\u4854\u4855\u4856\u4857\u4858\u4859\u485a\u485b\u485c\u485d\u485e\u485f\u4860\u4861\u4862\u4863\u4864\u4865\u4866\u4867\u4868\u4869\u486a\u486b\u486c\u486d\u486e\u486f\u4870\u4871\u4872\u4873\u4874\u4875\u4876\u4877\u4878\u4879\u487a\u487b\u487c\u487d\u487e\u487f\u4880\u4881\u4882\u4883\u4884\u4885\u4886\u4887\u4888\u4889\u488a\u488b\u488c\u488d\u488e\u488f\u4890\u4891\u4892\u4893\u4894\u4895\u4896\u4897\u4898\u4899\u489a\u489b\u489c\u489d\u489e\u489f\u48a0\u48a1\u48a2\u48a3\u48a4\u48a5\u48a6\u48a7\u48a8\u48a9\u48aa\u48ab\u48ac\u48ad\u48ae\u48af\u48b0\u48b1\u48b2\u48b3\u48b4\u48b5\u48b6\u48b7\u48b8\u48b9\u48ba\u48bb\u48bc\u48bd\u48be\u48bf\u48c0\u48c1\u48c2\u48c3\u48c4\u48c5\u48c6\u48c7\u48c8\u48c9\u48ca\u48cb\u48cc\u48cd\u48ce\u48cf\u48d0\u48d1\u48d2\u48d3\u48d4\u48d5\u48d6\u48d7\u48d8\u48d9\u48da\u48db\u48dc\u48dd\u48de\u48df\u48e0\u48e1\u48e2\u48e3\u48e4\u48e5\u48e6\u48e7\u48e8\u48e9\u48ea\u48eb\u48ec\u48ed\u48ee\u48ef\u48f0\u48f1\u48f2\u48f3\u48f4\u48f5\u48f6\u48f7\u48f8\u48f9\u48fa\u48fb\u48fc\u48fd\u48fe\u48ff\u4900\u4901\u4902\u4903\u4904\u4905\u4906\u4907\u4908\u4909\u490a\u490b\u490c\u490d\u490e\u490f\u4910\u4911\u4912\u4913\u4914\u4915\u4916\u4917\u4918\u4919\u491a\u491b\u491c\u491d\u491e\u491f\u4920\u4921\u4922\u4923\u4924\u4925\u4926\u4927\u4928\u4929\u492a\u492b\u492c\u492d\u492e\u492f\u4930\u4931\u4932\u4933\u4934\u4935\u4936\u4937\u4938\u4939\u493a\u493b\u493c\u493d\u493e\u493f\u4940\u4941\u4942\u4943\u4944\u4945\u4946\u4947\u4948\u4949\u494a\u494b\u494c\u494d\u494e\u494f\u4950\u4951\u4952\u4953\u4954\u4955\u4956\u4957\u4958\u4959\u495a\u495b\u495c\u495d\u495e\u495f\u4960\u4961\u4962\u4963\u4964\u4965\u4966\u4967\u4968\u4969\u496a\u496b\u496c\u496d\u496e\u496f\u4970\u4971\u4972\u4973\u4974\u4975\u4976\u4977\u4978\u4979\u497a\u497b\u497c\u497d\u497e\u497f\u4980\u4981\u4982\u4983\u4984\u4985\u4986\u4987\u4988\u4989\u498a\u498b\u498c\u498d\u498e\u498f\u4990\u4991\u4992\u4993\u4994\u4995\u4996\u4997\u4998\u4999\u499a\u499b\u499c\u499d\u499e\u499f\u49a0\u49a1\u49a2\u49a3\u49a4\u49a5\u49a6\u49a7\u49a8\u49a9\u49aa\u49ab\u49ac\u49ad\u49ae\u49af\u49b0\u49b1\u49b2\u49b3\u49b4\u49b5\u49b6\u49b7\u49b8\u49b9\u49ba\u49bb\u49bc\u49bd\u49be\u49bf\u49c0\u49c1\u49c2\u49c3\u49c4\u49c5\u49c6\u49c7\u49c8\u49c9\u49ca\u49cb\u49cc\u49cd\u49ce\u49cf\u49d0\u49d1\u49d2\u49d3\u49d4\u49d5\u49d6\u49d7\u49d8\u49d9\u49da\u49db\u49dc\u49dd\u49de\u49df\u49e0\u49e1\u49e2\u49e3\u49e4\u49e5\u49e6\u49e7\u49e8\u49e9\u49ea\u49eb\u49ec\u49ed\u49ee\u49ef\u49f0\u49f1\u49f2\u49f3\u49f4\u49f5\u49f6\u49f7\u49f8\u49f9\u49fa\u49fb\u49fc\u49fd\u49fe\u49ff\u4a00\u4a01\u4a02\u4a03\u4a04\u4a05\u4a06\u4a07\u4a08\u4a09\u4a0a\u4a0b\u4a0c\u4a0d\u4a0e\u4a0f\u4a10\u4a11\u4a12\u4a13\u4a14\u4a15\u4a16\u4a17\u4a18\u4a19\u4a1a\u4a1b\u4a1c\u4a1d\u4a1e\u4a1f\u4a20\u4a21\u4a22\u4a23\u4a24\u4a25\u4a26\u4a27\u4a28\u4a29\u4a2a\u4a2b\u4a2c\u4a2d\u4a2e\u4a2f\u4a30\u4a31\u4a32\u4a33\u4a34\u4a35\u4a36\u4a37\u4a38\u4a39\u4a3a\u4a3b\u4a3c\u4a3d\u4a3e\u4a3f\u4a40\u4a41\u4a42\u4a43\u4a44\u4a45\u4a46\u4a47\u4a48\u4a49\u4a4a\u4a4b\u4a4c\u4a4d\u4a4e\u4a4f\u4a50\u4a51\u4a52\u4a53\u4a54\u4a55\u4a56\u4a57\u4a58\u4a59\u4a5a\u4a
5b\u4a5c\u4a5d\u4a5e\u4a5f\u4a60\u4a61\u4a62\u4a63\u4a64\u4a65\u4a66\u4a67\u4a68\u4a69\u4a6a\u4a6b\u4a6c\u4a6d\u4a6e\u4a6f\u4a70\u4a71\u4a72\u4a73\u4a74\u4a75\u4a76\u4a77\u4a78\u4a79\u4a7a\u4a7b\u4a7c\u4a7d\u4a7e\u4a7f\u4a80\u4a81\u4a82\u4a83\u4a84\u4a85\u4a86\u4a87\u4a88\u4a89\u4a8a\u4a8b\u4a8c\u4a8d\u4a8e\u4a8f\u4a90\u4a91\u4a92\u4a93\u4a94\u4a95\u4a96\u4a97\u4a98\u4a99\u4a9a\u4a9b\u4a9c\u4a9d\u4a9e\u4a9f\u4aa0\u4aa1\u4aa2\u4aa3\u4aa4\u4aa5\u4aa6\u4aa7\u4aa8\u4aa9\u4aaa\u4aab\u4aac\u4aad\u4aae\u4aaf\u4ab0\u4ab1\u4ab2\u4ab3\u4ab4\u4ab5\u4ab6\u4ab7\u4ab8\u4ab9\u4aba\u4abb\u4abc\u4abd\u4abe\u4abf\u4ac0\u4ac1\u4ac2\u4ac3\u4ac4\u4ac5\u4ac6\u4ac7\u4ac8\u4ac9\u4aca\u4acb\u4acc\u4acd\u4ace\u4acf\u4ad0\u4ad1\u4ad2\u4ad3\u4ad4\u4ad5\u4ad6\u4ad7\u4ad8\u4ad9\u4ada\u4adb\u4adc\u4add\u4ade\u4adf\u4ae0\u4ae1\u4ae2\u4ae3\u4ae4\u4ae5\u4ae6\u4ae7\u4ae8\u4ae9\u4aea\u4aeb\u4aec\u4aed\u4aee\u4aef\u4af0\u4af1\u4af2\u4af3\u4af4\u4af5\u4af6\u4af7\u4af8\u4af9\u4afa\u4afb\u4afc\u4afd\u4afe\u4aff\u4b00\u4b01\u4b02\u4b03\u4b04\u4b05\u4b06\u4b07\u4b08\u4b09\u4b0a\u4b0b\u4b0c\u4b0d\u4b0e\u4b0f\u4b10\u4b11\u4b12\u4b13\u4b14\u4b15\u4b16\u4b17\u4b18\u4b19\u4b1a\u4b1b\u4b1c\u4b1d\u4b1e\u4b1f\u4b20\u4b21\u4b22\u4b23\u4b24\u4b25\u4b26\u4b27\u4b28\u4b29\u4b2a\u4b2b\u4b2c\u4b2d\u4b2e\u4b2f\u4b30\u4b31\u4b32\u4b33\u4b34\u4b35\u4b36\u4b37\u4b38\u4b39\u4b3a\u4b3b\u4b3c\u4b3d\u4b3e\u4b3f\u4b40\u4b41\u4b42\u4b43\u4b44\u4b45\u4b46\u4b47\u4b48\u4b49\u4b4a\u4b4b\u4b4c\u4b4d\u4b4e\u4b4f\u4b50\u4b51\u4b52\u4b53\u4b54\u4b55\u4b56\u4b57\u4b58\u4b59\u4b5a\u4b5b\u4b5c\u4b5d\u4b5e\u4b5f\u4b60\u4b61\u4b62\u4b63\u4b64\u4b65\u4b66\u4b67\u4b68\u4b69\u4b6a\u4b6b\u4b6c\u4b6d\u4b6e\u4b6f\u4b70\u4b71\u4b72\u4b73\u4b74\u4b75\u4b76\u4b77\u4b78\u4b79\u4b7a\u4b7b\u4b7c\u4b7d\u4b7e\u4b7f\u4b80\u4b81\u4b82\u4b83\u4b84\u4b85\u4b86\u4b87\u4b88\u4b89\u4b8a\u4b8b\u4b8c\u4b8d\u4b8e\u4b8f\u4b90\u4b91\u4b92\u4b93\u4b94\u4b95\u4b96\u4b97\u4b98\u4b99\u4b9a\u4b9b\u4b9c\u4b9d\u4b9e\u4b9f\u4ba0\u4ba1\u4ba2\u4ba3\u4ba4\u4ba5\u4ba6\u4ba7\u4ba8\u4ba9\u4baa\u4bab\u4bac\u4bad\u4bae\u4baf\u4bb0\u4bb1\u4bb2\u4bb3\u4bb4\u4bb5\u4bb6\u4bb7\u4bb8\u4bb9\u4bba\u4bbb\u4bbc\u4bbd\u4bbe\u4bbf\u4bc0\u4bc1\u4bc2\u4bc3\u4bc4\u4bc5\u4bc6\u4bc7\u4bc8\u4bc9\u4bca\u4bcb\u4bcc\u4bcd\u4bce\u4bcf\u4bd0\u4bd1\u4bd2\u4bd3\u4bd4\u4bd5\u4bd6\u4bd7\u4bd8\u4bd9\u4bda\u4bdb\u4bdc\u4bdd\u4bde\u4bdf\u4be0\u4be1\u4be2\u4be3\u4be4\u4be5\u4be6\u4be7\u4be8\u4be9\u4bea\u4beb\u4bec\u4bed\u4bee\u4bef\u4bf0\u4bf1\u4bf2\u4bf3\u4bf4\u4bf5\u4bf6\u4bf7\u4bf8\u4bf9\u4bfa\u4bfb\u4bfc\u4bfd\u4bfe\u4bff\u4c00\u4c01\u4c02\u4c03\u4c04\u4c05\u4c06\u4c07\u4c08\u4c09\u4c0a\u4c0b\u4c0c\u4c0d\u4c0e\u4c0f\u4c10\u4c11\u4c12\u4c13\u4c14\u4c15\u4c16\u4c17\u4c18\u4c19\u4c1a\u4c1b\u4c1c\u4c1d\u4c1e\u4c1f\u4c20\u4c21\u4c22\u4c23\u4c24\u4c25\u4c26\u4c27\u4c28\u4c29\u4c2a\u4c2b\u4c2c\u4c2d\u4c2e\u4c2f\u4c30\u4c31\u4c32\u4c33\u4c34\u4c35\u4c36\u4c37\u4c38\u4c39\u4c3a\u4c3b\u4c3c\u4c3d\u4c3e\u4c3f\u4c40\u4c41\u4c42\u4c43\u4c44\u4c45\u4c46\u4c47\u4c48\u4c49\u4c4a\u4c4b\u4c4c\u4c4d\u4c4e\u4c4f\u4c50\u4c51\u4c52\u4c53\u4c54\u4c55\u4c56\u4c57\u4c58\u4c59\u4c5a\u4c5b\u4c5c\u4c5d\u4c5e\u4c5f\u4c60\u4c61\u4c62\u4c63\u4c64\u4c65\u4c66\u4c67\u4c68\u4c69\u4c6a\u4c6b\u4c6c\u4c6d\u4c6e\u4c6f\u4c70\u4c71\u4c72\u4c73\u4c74\u4c75\u4c76\u4c77\u4c78\u4c79\u4c7a\u4c7b\u4c7c\u4c7d\u4c7e\u4c7f\u4c80\u4c81\u4c82\u4c83\u4c84\u4c85\u4c86\u4c87\u4c88\u4c89\u4c8a\u4c8b\u4c8c\u4c8d\u4c8e\u4c8f\u4c90\u4c91\u4c92\u4c93\u4c94\u4c95\u4c96\u4c97\u4c98\u4c99\u4c9a\u4c9b\u4c9c\u4c9d\u4c9e\u4c9f\u4ca0\u4ca1\u4ca2\u4ca3\u4ca4\u4ca5\u4ca6\u4ca7\u4ca8\u4ca9\u4caa\u4cab\
u4cac\u4cad\u4cae\u4caf\u4cb0\u4cb1\u4cb2\u4cb3\u4cb4\u4cb5\u4cb6\u4cb7\u4cb8\u4cb9\u4cba\u4cbb\u4cbc\u4cbd\u4cbe\u4cbf\u4cc0\u4cc1\u4cc2\u4cc3\u4cc4\u4cc5\u4cc6\u4cc7\u4cc8\u4cc9\u4cca\u4ccb\u4ccc\u4ccd\u4cce\u4ccf\u4cd0\u4cd1\u4cd2\u4cd3\u4cd4\u4cd5\u4cd6\u4cd7\u4cd8\u4cd9\u4cda\u4cdb\u4cdc\u4cdd\u4cde\u4cdf\u4ce0\u4ce1\u4ce2\u4ce3\u4ce4\u4ce5\u4ce6\u4ce7\u4ce8\u4ce9\u4cea\u4ceb\u4cec\u4ced\u4cee\u4cef\u4cf0\u4cf1\u4cf2\u4cf3\u4cf4\u4cf5\u4cf6\u4cf7\u4cf8\u4cf9\u4cfa\u4cfb\u4cfc\u4cfd\u4cfe\u4cff\u4d00\u4d01\u4d02\u4d03\u4d04\u4d05\u4d06\u4d07\u4d08\u4d09\u4d0a\u4d0b\u4d0c\u4d0d\u4d0e\u4d0f\u4d10\u4d11\u4d12\u4d13\u4d14\u4d15\u4d16\u4d17\u4d18\u4d19\u4d1a\u4d1b\u4d1c\u4d1d\u4d1e\u4d1f\u4d20\u4d21\u4d22\u4d23\u4d24\u4d25\u4d26\u4d27\u4d28\u4d29\u4d2a\u4d2b\u4d2c\u4d2d\u4d2e\u4d2f\u4d30\u4d31\u4d32\u4d33\u4d34\u4d35\u4d36\u4d37\u4d38\u4d39\u4d3a\u4d3b\u4d3c\u4d3d\u4d3e\u4d3f\u4d40\u4d41\u4d42\u4d43\u4d44\u4d45\u4d46\u4d47\u4d48\u4d49\u4d4a\u4d4b\u4d4c\u4d4d\u4d4e\u4d4f\u4d50\u4d51\u4d52\u4d53\u4d54\u4d55\u4d56\u4d57\u4d58\u4d59\u4d5a\u4d5b\u4d5c\u4d5d\u4d5e\u4d5f\u4d60\u4d61\u4d62\u4d63\u4d64\u4d65\u4d66\u4d67\u4d68\u4d69\u4d6a\u4d6b\u4d6c\u4d6d\u4d6e\u4d6f\u4d70\u4d71\u4d72\u4d73\u4d74\u4d75\u4d76\u4d77\u4d78\u4d79\u4d7a\u4d7b\u4d7c\u4d7d\u4d7e\u4d7f\u4d80\u4d81\u4d82\u4d83\u4d84\u4d85\u4d86\u4d87\u4d88\u4d89\u4d8a\u4d8b\u4d8c\u4d8d\u4d8e\u4d8f\u4d90\u4d91\u4d92\u4d93\u4d94\u4d95\u4d96\u4d97\u4d98\u4d99\u4d9a\u4d9b\u4d9c\u4d9d\u4d9e\u4d9f\u4da0\u4da1\u4da2\u4da3\u4da4\u4da5\u4da6\u4da7\u4da8\u4da9\u4daa\u4dab\u4dac\u4dad\u4dae\u4daf\u4db0\u4db1\u4db2\u4db3\u4db4\u4db5\u4e00\u4e01\u4e02\u4e03\u4e04\u4e05\u4e06\u4e07\u4e08\u4e09\u4e0a\u4e0b\u4e0c\u4e0d\u4e0e\u4e0f\u4e10\u4e11\u4e12\u4e13\u4e14\u4e15\u4e16\u4e17\u4e18\u4e19\u4e1a\u4e1b\u4e1c\u4e1d\u4e1e\u4e1f\u4e20\u4e21\u4e22\u4e23\u4e24\u4e25\u4e26\u4e27\u4e28\u4e29\u4e2a\u4e2b\u4e2c\u4e2d\u4e2e\u4e2f\u4e30\u4e31\u4e32\u4e33\u4e34\u4e35\u4e36\u4e37\u4e38\u4e39\u4e3a\u4e3b\u4e3c\u4e3d\u4e3e\u4e3f\u4e40\u4e41\u4e42\u4e43\u4e44\u4e45\u4e46\u4e47\u4e48\u4e49\u4e4a\u4e4b\u4e4c\u4e4d\u4e4e\u4e4f\u4e50\u4e51\u4e52\u4e53\u4e54\u4e55\u4e56\u4e57\u4e58\u4e59\u4e5a\u4e5b\u4e5c\u4e5d\u4e5e\u4e5f\u4e60\u4e61\u4e62\u4e63\u4e64\u4e65\u4e66\u4e67\u4e68\u4e69\u4e6a\u4e6b\u4e6c\u4e6d\u4e6e\u4e6f\u4e70\u4e71\u4e72\u4e73\u4e74\u4e75\u4e76\u4e77\u4e78\u4e79\u4e7a\u4e7b\u4e7c\u4e7d\u4e7e\u4e7f\u4e80\u4e81\u4e82\u4e83\u4e84\u4e85\u4e86\u4e87\u4e88\u4e89\u4e8a\u4e8b\u4e8c\u4e8d\u4e8e\u4e8f\u4e90\u4e91\u4e92\u4e93\u4e94\u4e95\u4e96\u4e97\u4e98\u4e99\u4e9a\u4e9b\u4e9c\u4e9d\u4e9e\u4e9f\u4ea0\u4ea1\u4ea2\u4ea3\u4ea4\u4ea5\u4ea6\u4ea7\u4ea8\u4ea9\u4eaa\u4eab\u4eac\u4ead\u4eae\u4eaf\u4eb0\u4eb1\u4eb2\u4eb3\u4eb4\u4eb5\u4eb6\u4eb7\u4eb8\u4eb9\u4eba\u4ebb\u4ebc\u4ebd\u4ebe\u4ebf\u4ec0\u4ec1\u4ec2\u4ec3\u4ec4\u4ec5\u4ec6\u4ec7\u4ec8\u4ec9\u4eca\u4ecb\u4ecc\u4ecd\u4ece\u4ecf\u4ed0\u4ed1\u4ed2\u4ed3\u4ed4\u4ed5\u4ed6\u4ed7\u4ed8\u4ed9\u4eda\u4edb\u4edc\u4edd\u4ede\u4edf\u4ee0\u4ee1\u4ee2\u4ee3\u4ee4\u4ee5\u4ee6\u4ee7\u4ee8\u4ee9\u4eea\u4eeb\u4eec\u4eed\u4eee\u4eef\u4ef0\u4ef1\u4ef2\u4ef3\u4ef4\u4ef5\u4ef6\u4ef7\u4ef8\u4ef9\u4efa\u4efb\u4efc\u4efd\u4efe\u4eff\u4f00\u4f01\u4f02\u4f03\u4f04\u4f05\u4f06\u4f07\u4f08\u4f09\u4f0a\u4f0b\u4f0c\u4f0d\u4f0e\u4f0f\u4f10\u4f11\u4f12\u4f13\u4f14\u4f15\u4f16\u4f17\u4f18\u4f19\u4f1a\u4f1b\u4f1c\u4f1d\u4f1e\u4f1f\u4f20\u4f21\u4f22\u4f23\u4f24\u4f25\u4f26\u4f27\u4f28\u4f29\u4f2a\u4f2b\u4f2c\u4f2d\u4f2e\u4f2f\u4f30\u4f31\u4f32\u4f33\u4f34\u4f35\u4f36\u4f37\u4f38\u4f39\u4f3a\u4f3b\u4f3c\u4f3d\u4f3e\u4f3f\u4f40\u4f41\u4f42\u4f43\u4f44\u4f45\u4f
46\u4f47\u4f48\u4f49\u4f4a\u4f4b\u4f4c\u4f4d\u4f4e\u4f4f\u4f50\u4f51\u4f52\u4f53\u4f54\u4f55\u4f56\u4f57\u4f58\u4f59\u4f5a\u4f5b\u4f5c\u4f5d\u4f5e\u4f5f\u4f60\u4f61\u4f62\u4f63\u4f64\u4f65\u4f66\u4f67\u4f68\u4f69\u4f6a\u4f6b\u4f6c\u4f6d\u4f6e\u4f6f\u4f70\u4f71\u4f72\u4f73\u4f74\u4f75\u4f76\u4f77\u4f78\u4f79\u4f7a\u4f7b\u4f7c\u4f7d\u4f7e\u4f7f\u4f80\u4f81\u4f82\u4f83\u4f84\u4f85\u4f86\u4f87\u4f88\u4f89\u4f8a\u4f8b\u4f8c\u4f8d\u4f8e\u4f8f\u4f90\u4f91\u4f92\u4f93\u4f94\u4f95\u4f96\u4f97\u4f98\u4f99\u4f9a\u4f9b\u4f9c\u4f9d\u4f9e\u4f9f\u4fa0\u4fa1\u4fa2\u4fa3\u4fa4\u4fa5\u4fa6\u4fa7\u4fa8\u4fa9\u4faa\u4fab\u4fac\u4fad\u4fae\u4faf\u4fb0\u4fb1\u4fb2\u4fb3\u4fb4\u4fb5\u4fb6\u4fb7\u4fb8\u4fb9\u4fba\u4fbb\u4fbc\u4fbd\u4fbe\u4fbf\u4fc0\u4fc1\u4fc2\u4fc3\u4fc4\u4fc5\u4fc6\u4fc7\u4fc8\u4fc9\u4fca\u4fcb\u4fcc\u4fcd\u4fce\u4fcf\u4fd0\u4fd1\u4fd2\u4fd3\u4fd4\u4fd5\u4fd6\u4fd7\u4fd8\u4fd9\u4fda\u4fdb\u4fdc\u4fdd\u4fde\u4fdf\u4fe0\u4fe1\u4fe2\u4fe3\u4fe4\u4fe5\u4fe6\u4fe7\u4fe8\u4fe9\u4fea\u4feb\u4fec\u4fed\u4fee\u4fef\u4ff0\u4ff1\u4ff2\u4ff3\u4ff4\u4ff5\u4ff6\u4ff7\u4ff8\u4ff9\u4ffa\u4ffb\u4ffc\u4ffd\u4ffe\u4fff\u5000\u5001\u5002\u5003\u5004\u5005\u5006\u5007\u5008\u5009\u500a\u500b\u500c\u500d\u500e\u500f\u5010\u5011\u5012\u5013\u5014\u5015\u5016\u5017\u5018\u5019\u501a\u501b\u501c\u501d\u501e\u501f\u5020\u5021\u5022\u5023\u5024\u5025\u5026\u5027\u5028\u5029\u502a\u502b\u502c\u502d\u502e\u502f\u5030\u5031\u5032\u5033\u5034\u5035\u5036\u5037\u5038\u5039\u503a\u503b\u503c\u503d\u503e\u503f\u5040\u5041\u5042\u5043\u5044\u5045\u5046\u5047\u5048\u5049\u504a\u504b\u504c\u504d\u504e\u504f\u5050\u5051\u5052\u5053\u5054\u5055\u5056\u5057\u5058\u5059\u505a\u505b\u505c\u505d\u505e\u505f\u5060\u5061\u5062\u5063\u5064\u5065\u5066\u5067\u5068\u5069\u506a\u506b\u506c\u506d\u506e\u506f\u5070\u5071\u5072\u5073\u5074\u5075\u5076\u5077\u5078\u5079\u507a\u507b\u507c\u507d\u507e\u507f\u5080\u5081\u5082\u5083\u5084\u5085\u5086\u5087\u5088\u5089\u508a\u508b\u508c\u508d\u508e\u508f\u5090\u5091\u5092\u5093\u5094\u5095\u5096\u5097\u5098\u5099\u509a\u509b\u509c\u509d\u509e\u509f\u50a0\u50a1\u50a2\u50a3\u50a4\u50a5\u50a6\u50a7\u50a8\u50a9\u50aa\u50ab\u50ac\u50ad\u50ae\u50af\u50b0\u50b1\u50b2\u50b3\u50b4\u50b5\u50b6\u50b7\u50b8\u50b9\u50ba\u50bb\u50bc\u50bd\u50be\u50bf\u50c0\u50c1\u50c2\u50c3\u50c4\u50c5\u50c6\u50c7\u50c8\u50c9\u50ca\u50cb\u50cc\u50cd\u50ce\u50cf\u50d0\u50d1\u50d2\u50d3\u50d4\u50d5\u50d6\u50d7\u50d8\u50d9\u50da\u50db\u50dc\u50dd\u50de\u50df\u50e0\u50e1\u50e2\u50e3\u50e4\u50e5\u50e6\u50e7\u50e8\u50e9\u50ea\u50eb\u50ec\u50ed\u50ee\u50ef\u50f0\u50f1\u50f2\u50f3\u50f4\u50f5\u50f6\u50f7\u50f8\u50f9\u50fa\u50fb\u50fc\u50fd\u50fe\u50ff\u5100\u5101\u5102\u5103\u5104\u5105\u5106\u5107\u5108\u5109\u510a\u510b\u510c\u510d\u510e\u510f\u5110\u5111\u5112\u5113\u5114\u5115\u5116\u5117\u5118\u5119\u511a\u511b\u511c\u511d\u511e\u511f\u5120\u5121\u5122\u5123\u5124\u5125\u5126\u5127\u5128\u5129\u512a\u512b\u512c\u512d\u512e\u512f\u5130\u5131\u5132\u5133\u5134\u5135\u5136\u5137\u5138\u5139\u513a\u513b\u513c\u513d\u513e\u513f\u5140\u5141\u5142\u5143\u5144\u5145\u5146\u5147\u5148\u5149\u514a\u514b\u514c\u514d\u514e\u514f\u5150\u5151\u5152\u5153\u5154\u5155\u5156\u5157\u5158\u5159\u515a\u515b\u515c\u515d\u515e\u515f\u5160\u5161\u5162\u5163\u5164\u5165\u5166\u5167\u5168\u5169\u516a\u516b\u516c\u516d\u516e\u516f\u5170\u5171\u5172\u5173\u5174\u5175\u5176\u5177\u5178\u5179\u517a\u517b\u517c\u517d\u517e\u517f\u5180\u5181\u5182\u5183\u5184\u5185\u5186\u5187\u5188\u5189\u518a\u518b\u518c\u518d\u518e\u518f\u5190\u5191\u5192\u5193\u5194\u5195\u5196\
u5197\u5198\u5199\u519a\u519b\u519c\u519d\u519e\u519f\u51a0\u51a1\u51a2\u51a3\u51a4\u51a5\u51a6\u51a7\u51a8\u51a9\u51aa\u51ab\u51ac\u51ad\u51ae\u51af\u51b0\u51b1\u51b2\u51b3\u51b4\u51b5\u51b6\u51b7\u51b8\u51b9\u51ba\u51bb\u51bc\u51bd\u51be\u51bf\u51c0\u51c1\u51c2\u51c3\u51c4\u51c5\u51c6\u51c7\u51c8\u51c9\u51ca\u51cb\u51cc\u51cd\u51ce\u51cf\u51d0\u51d1\u51d2\u51d3\u51d4\u51d5\u51d6\u51d7\u51d8\u51d9\u51da\u51db\u51dc\u51dd\u51de\u51df\u51e0\u51e1\u51e2\u51e3\u51e4\u51e5\u51e6\u51e7\u51e8\u51e9\u51ea\u51eb\u51ec\u51ed\u51ee\u51ef\u51f0\u51f1\u51f2\u51f3\u51f4\u51f5\u51f6\u51f7\u51f8\u51f9\u51fa\u51fb\u51fc\u51fd\u51fe\u51ff\u5200\u5201\u5202\u5203\u5204\u5205\u5206\u5207\u5208\u5209\u520a\u520b\u520c\u520d\u520e\u520f\u5210\u5211\u5212\u5213\u5214\u5215\u5216\u5217\u5218\u5219\u521a\u521b\u521c\u521d\u521e\u521f\u5220\u5221\u5222\u5223\u5224\u5225\u5226\u5227\u5228\u5229\u522a\u522b\u522c\u522d\u522e\u522f\u5230\u5231\u5232\u5233\u5234\u5235\u5236\u5237\u5238\u5239\u523a\u523b\u523c\u523d\u523e\u523f\u5240\u5241\u5242\u5243\u5244\u5245\u5246\u5247\u5248\u5249\u524a\u524b\u524c\u524d\u524e\u524f\u5250\u5251\u5252\u5253\u5254\u5255\u5256\u5257\u5258\u5259\u525a\u525b\u525c\u525d\u525e\u525f\u5260\u5261\u5262\u5263\u5264\u5265\u5266\u5267\u5268\u5269\u526a\u526b\u526c\u526d\u526e\u526f\u5270\u5271\u5272\u5273\u5274\u5275\u5276\u5277\u5278\u5279\u527a\u527b\u527c\u527d\u527e\u527f\u5280\u5281\u5282\u5283\u5284\u5285\u5286\u5287\u5288\u5289\u528a\u528b\u528c\u528d\u528e\u528f\u5290\u5291\u5292\u5293\u5294\u5295\u5296\u5297\u5298\u5299\u529a\u529b\u529c\u529d\u529e\u529f\u52a0\u52a1\u52a2\u52a3\u52a4\u52a5\u52a6\u52a7\u52a8\u52a9\u52aa\u52ab\u52ac\u52ad\u52ae\u52af\u52b0\u52b1\u52b2\u52b3\u52b4\u52b5\u52b6\u52b7\u52b8\u52b9\u52ba\u52bb\u52bc\u52bd\u52be\u52bf\u52c0\u52c1\u52c2\u52c3\u52c4\u52c5\u52c6\u52c7\u52c8\u52c9\u52ca\u52cb\u52cc\u52cd\u52ce\u52cf\u52d0\u52d1\u52d2\u52d3\u52d4\u52d5\u52d6\u52d7\u52d8\u52d9\u52da\u52db\u52dc\u52dd\u52de\u52df\u52e0\u52e1\u52e2\u52e3\u52e4\u52e5\u52e6\u52e7\u52e8\u52e9\u52ea\u52eb\u52ec\u52ed\u52ee\u52ef\u52f0\u52f1\u52f2\u52f3\u52f4\u52f5\u52f6\u52f7\u52f8\u52f9\u52fa\u52fb\u52fc\u52fd\u52fe\u52ff\u5300\u5301\u5302\u5303\u5304\u5305\u5306\u5307\u5308\u5309\u530a\u530b\u530c\u530d\u530e\u530f\u5310\u5311\u5312\u5313\u5314\u5315\u5316\u5317\u5318\u5319\u531a\u531b\u531c\u531d\u531e\u531f\u5320\u5321\u5322\u5323\u5324\u5325\u5326\u5327\u5328\u5329\u532a\u532b\u532c\u532d\u532e\u532f\u5330\u5331\u5332\u5333\u5334\u5335\u5336\u5337\u5338\u5339\u533a\u533b\u533c\u533d\u533e\u533f\u5340\u5341\u5342\u5343\u5344\u5345\u5346\u5347\u5348\u5349\u534a\u534b\u534c\u534d\u534e\u534f\u5350\u5351\u5352\u5353\u5354\u5355\u5356\u5357\u5358\u5359\u535a\u535b\u535c\u535d\u535e\u535f\u5360\u5361\u5362\u5363\u5364\u5365\u5366\u5367\u5368\u5369\u536a\u536b\u536c\u536d\u536e\u536f\u5370\u5371\u5372\u5373\u5374\u5375\u5376\u5377\u5378\u5379\u537a\u537b\u537c\u537d\u537e\u537f\u5380\u5381\u5382\u5383\u5384\u5385\u5386\u5387\u5388\u5389\u538a\u538b\u538c\u538d\u538e\u538f\u5390\u5391\u5392\u5393\u5394\u5395\u5396\u5397\u5398\u5399\u539a\u539b\u539c\u539d\u539e\u539f\u53a0\u53a1\u53a2\u53a3\u53a4\u53a5\u53a6\u53a7\u53a8\u53a9\u53aa\u53ab\u53ac\u53ad\u53ae\u53af\u53b0\u53b1\u53b2\u53b3\u53b4\u53b5\u53b6\u53b7\u53b8\u53b9\u53ba\u53bb\u53bc\u53bd\u53be\u53bf\u53c0\u53c1\u53c2\u53c3\u53c4\u53c5\u53c6\u53c7\u53c8\u53c9\u53ca\u53cb\u53cc\u53cd\u53ce\u53cf\u53d0\u53d1\u53d2\u53d3\u53d4\u53d5\u53d6\u53d7\u53d8\u53d9\u53da\u53db\u53dc\u53dd\u53de\u53df\u53e0\u53e1\u53e2\u53e3\u53e4\u53e5\u53e6\u53
e7\u53e8\u53e9\u53ea\u53eb\u53ec\u53ed\u53ee\u53ef\u53f0\u53f1\u53f2\u53f3\u53f4\u53f5\u53f6\u53f7\u53f8\u53f9\u53fa\u53fb\u53fc\u53fd\u53fe\u53ff\u5400\u5401\u5402\u5403\u5404\u5405\u5406\u5407\u5408\u5409\u540a\u540b\u540c\u540d\u540e\u540f\u5410\u5411\u5412\u5413\u5414\u5415\u5416\u5417\u5418\u5419\u541a\u541b\u541c\u541d\u541e\u541f\u5420\u5421\u5422\u5423\u5424\u5425\u5426\u5427\u5428\u5429\u542a\u542b\u542c\u542d\u542e\u542f\u5430\u5431\u5432\u5433\u5434\u5435\u5436\u5437\u5438\u5439\u543a\u543b\u543c\u543d\u543e\u543f\u5440\u5441\u5442\u5443\u5444\u5445\u5446\u5447\u5448\u5449\u544a\u544b\u544c\u544d\u544e\u544f\u5450\u5451\u5452\u5453\u5454\u5455\u5456\u5457\u5458\u5459\u545a\u545b\u545c\u545d\u545e\u545f\u5460\u5461\u5462\u5463\u5464\u5465\u5466\u5467\u5468\u5469\u546a\u546b\u546c\u546d\u546e\u546f\u5470\u5471\u5472\u5473\u5474\u5475\u5476\u5477\u5478\u5479\u547a\u547b\u547c\u547d\u547e\u547f\u5480\u5481\u5482\u5483\u5484\u5485\u5486\u5487\u5488\u5489\u548a\u548b\u548c\u548d\u548e\u548f\u5490\u5491\u5492\u5493\u5494\u5495\u5496\u5497\u5498\u5499\u549a\u549b\u549c\u549d\u549e\u549f\u54a0\u54a1\u54a2\u54a3\u54a4\u54a5\u54a6\u54a7\u54a8\u54a9\u54aa\u54ab\u54ac\u54ad\u54ae\u54af\u54b0\u54b1\u54b2\u54b3\u54b4\u54b5\u54b6\u54b7\u54b8\u54b9\u54ba\u54bb\u54bc\u54bd\u54be\u54bf\u54c0\u54c1\u54c2\u54c3\u54c4\u54c5\u54c6\u54c7\u54c8\u54c9\u54ca\u54cb\u54cc\u54cd\u54ce\u54cf\u54d0\u54d1\u54d2\u54d3\u54d4\u54d5\u54d6\u54d7\u54d8\u54d9\u54da\u54db\u54dc\u54dd\u54de\u54df\u54e0\u54e1\u54e2\u54e3\u54e4\u54e5\u54e6\u54e7\u54e8\u54e9\u54ea\u54eb\u54ec\u54ed\u54ee\u54ef\u54f0\u54f1\u54f2\u54f3\u54f4\u54f5\u54f6\u54f7\u54f8\u54f9\u54fa\u54fb\u54fc\u54fd\u54fe\u54ff\u5500\u5501\u5502\u5503\u5504\u5505\u5506\u5507\u5508\u5509\u550a\u550b\u550c\u550d\u550e\u550f\u5510\u5511\u5512\u5513\u5514\u5515\u5516\u5517\u5518\u5519\u551a\u551b\u551c\u551d\u551e\u551f\u5520\u5521\u5522\u5523\u5524\u5525\u5526\u5527\u5528\u5529\u552a\u552b\u552c\u552d\u552e\u552f\u5530\u5531\u5532\u5533\u5534\u5535\u5536\u5537\u5538\u5539\u553a\u553b\u553c\u553d\u553e\u553f\u5540\u5541\u5542\u5543\u5544\u5545\u5546\u5547\u5548\u5549\u554a\u554b\u554c\u554d\u554e\u554f\u5550\u5551\u5552\u5553\u5554\u5555\u5556\u5557\u5558\u5559\u555a\u555b\u555c\u555d\u555e\u555f\u5560\u5561\u5562\u5563\u5564\u5565\u5566\u5567\u5568\u5569\u556a\u556b\u556c\u556d\u556e\u556f\u5570\u5571\u5572\u5573\u5574\u5575\u5576\u5577\u5578\u5579\u557a\u557b\u557c\u557d\u557e\u557f\u5580\u5581\u5582\u5583\u5584\u5585\u5586\u5587\u5588\u5589\u558a\u558b\u558c\u558d\u558e\u558f\u5590\u5591\u5592\u5593\u5594\u5595\u5596\u5597\u5598\u5599\u559a\u559b\u559c\u559d\u559e\u559f\u55a0\u55a1\u55a2\u55a3\u55a4\u55a5\u55a6\u55a7\u55a8\u55a9\u55aa\u55ab\u55ac\u55ad\u55ae\u55af\u55b0\u55b1\u55b2\u55b3\u55b4\u55b5\u55b6\u55b7\u55b8\u55b9\u55ba\u55bb\u55bc\u55bd\u55be\u55bf\u55c0\u55c1\u55c2\u55c3\u55c4\u55c5\u55c6\u55c7\u55c8\u55c9\u55ca\u55cb\u55cc\u55cd\u55ce\u55cf\u55d0\u55d1\u55d2\u55d3\u55d4\u55d5\u55d6\u55d7\u55d8\u55d9\u55da\u55db\u55dc\u55dd\u55de\u55df\u55e0\u55e1\u55e2\u55e3\u55e4\u55e5\u55e6\u55e7\u55e8\u55e9\u55ea\u55eb\u55ec\u55ed\u55ee\u55ef\u55f0\u55f1\u55f2\u55f3\u55f4\u55f5\u55f6\u55f7\u55f8\u55f9\u55fa\u55fb\u55fc\u55fd\u55fe\u55ff\u5600\u5601\u5602\u5603\u5604\u5605\u5606\u5607\u5608\u5609\u560a\u560b\u560c\u560d\u560e\u560f\u5610\u5611\u5612\u5613\u5614\u5615\u5616\u5617\u5618\u5619\u561a\u561b\u561c\u561d\u561e\u561f\u5620\u5621\u5622\u5623\u5624\u5625\u5626\u5627\u5628\u5629\u562a\u562b\u562c\u562d\u562e\u562f\u5630\u5631\u5632\u5633\u5634\u5635\u5636\u5637\
u5638\u5639\u563a\u563b\u563c\u563d\u563e\u563f\u5640\u5641\u5642\u5643\u5644\u5645\u5646\u5647\u5648\u5649\u564a\u564b\u564c\u564d\u564e\u564f\u5650\u5651\u5652\u5653\u5654\u5655\u5656\u5657\u5658\u5659\u565a\u565b\u565c\u565d\u565e\u565f\u5660\u5661\u5662\u5663\u5664\u5665\u5666\u5667\u5668\u5669\u566a\u566b\u566c\u566d\u566e\u566f\u5670\u5671\u5672\u5673\u5674\u5675\u5676\u5677\u5678\u5679\u567a\u567b\u567c\u567d\u567e\u567f\u5680\u5681\u5682\u5683\u5684\u5685\u5686\u5687\u5688\u5689\u568a\u568b\u568c\u568d\u568e\u568f\u5690\u5691\u5692\u5693\u5694\u5695\u5696\u5697\u5698\u5699\u569a\u569b\u569c\u569d\u569e\u569f\u56a0\u56a1\u56a2\u56a3\u56a4\u56a5\u56a6\u56a7\u56a8\u56a9\u56aa\u56ab\u56ac\u56ad\u56ae\u56af\u56b0\u56b1\u56b2\u56b3\u56b4\u56b5\u56b6\u56b7\u56b8\u56b9\u56ba\u56bb\u56bc\u56bd\u56be\u56bf\u56c0\u56c1\u56c2\u56c3\u56c4\u56c5\u56c6\u56c7\u56c8\u56c9\u56ca\u56cb\u56cc\u56cd\u56ce\u56cf\u56d0\u56d1\u56d2\u56d3\u56d4\u56d5\u56d6\u56d7\u56d8\u56d9\u56da\u56db\u56dc\u56dd\u56de\u56df\u56e0\u56e1\u56e2\u56e3\u56e4\u56e5\u56e6\u56e7\u56e8\u56e9\u56ea\u56eb\u56ec\u56ed\u56ee\u56ef\u56f0\u56f1\u56f2\u56f3\u56f4\u56f5\u56f6\u56f7\u56f8\u56f9\u56fa\u56fb\u56fc\u56fd\u56fe\u56ff\u5700\u5701\u5702\u5703\u5704\u5705\u5706\u5707\u5708\u5709\u570a\u570b\u570c\u570d\u570e\u570f\u5710\u5711\u5712\u5713\u5714\u5715\u5716\u5717\u5718\u5719\u571a\u571b\u571c\u571d\u571e\u571f\u5720\u5721\u5722\u5723\u5724\u5725\u5726\u5727\u5728\u5729\u572a\u572b\u572c\u572d\u572e\u572f\u5730\u5731\u5732\u5733\u5734\u5735\u5736\u5737\u5738\u5739\u573a\u573b\u573c\u573d\u573e\u573f\u5740\u5741\u5742\u5743\u5744\u5745\u5746\u5747\u5748\u5749\u574a\u574b\u574c\u574d\u574e\u574f\u5750\u5751\u5752\u5753\u5754\u5755\u5756\u5757\u5758\u5759\u575a\u575b\u575c\u575d\u575e\u575f\u5760\u5761\u5762\u5763\u5764\u5765\u5766\u5767\u5768\u5769\u576a\u576b\u576c\u576d\u576e\u576f\u5770\u5771\u5772\u5773\u5774\u5775\u5776\u5777\u5778\u5779\u577a\u577b\u577c\u577d\u577e\u577f\u5780\u5781\u5782\u5783\u5784\u5785\u5786\u5787\u5788\u5789\u578a\u578b\u578c\u578d\u578e\u578f\u5790\u5791\u5792\u5793\u5794\u5795\u5796\u5797\u5798\u5799\u579a\u579b\u579c\u579d\u579e\u579f\u57a0\u57a1\u57a2\u57a3\u57a4\u57a5\u57a6\u57a7\u57a8\u57a9\u57aa\u57ab\u57ac\u57ad\u57ae\u57af\u57b0\u57b1\u57b2\u57b3\u57b4\u57b5\u57b6\u57b7\u57b8\u57b9\u57ba\u57bb\u57bc\u57bd\u57be\u57bf\u57c0\u57c1\u57c2\u57c3\u57c4\u57c5\u57c6\u57c7\u57c8\u57c9\u57ca\u57cb\u57cc\u57cd\u57ce\u57cf\u57d0\u57d1\u57d2\u57d3\u57d4\u57d5\u57d6\u57d7\u57d8\u57d9\u57da\u57db\u57dc\u57dd\u57de\u57df\u57e0\u57e1\u57e2\u57e3\u57e4\u57e5\u57e6\u57e7\u57e8\u57e9\u57ea\u57eb\u57ec\u57ed\u57ee\u57ef\u57f0\u57f1\u57f2\u57f3\u57f4\u57f5\u57f6\u57f7\u57f8\u57f9\u57fa\u57fb\u57fc\u57fd\u57fe\u57ff\u5800\u5801\u5802\u5803\u5804\u5805\u5806\u5807\u5808\u5809\u580a\u580b\u580c\u580d\u580e\u580f\u5810\u5811\u5812\u5813\u5814\u5815\u5816\u5817\u5818\u5819\u581a\u581b\u581c\u581d\u581e\u581f\u5820\u5821\u5822\u5823\u5824\u5825\u5826\u5827\u5828\u5829\u582a\u582b\u582c\u582d\u582e\u582f\u5830\u5831\u5832\u5833\u5834\u5835\u5836\u5837\u5838\u5839\u583a\u583b\u583c\u583d\u583e\u583f\u5840\u5841\u5842\u5843\u5844\u5845\u5846\u5847\u5848\u5849\u584a\u584b\u584c\u584d\u584e\u584f\u5850\u5851\u5852\u5853\u5854\u5855\u5856\u5857\u5858\u5859\u585a\u585b\u585c\u585d\u585e\u585f\u5860\u5861\u5862\u5863\u5864\u5865\u5866\u5867\u5868\u5869\u586a\u586b\u586c\u586d\u586e\u586f\u5870\u5871\u5872\u5873\u5874\u5875\u5876\u5877\u5878\u5879\u587a\u587b\u587c\u587d\u587e\u587f\u5880\u5881\u5882\u5883\u5884\u5885\u5886\u5887\u58
88\u5889\u588a\u588b\u588c\u588d\u588e\u588f\u5890\u5891\u5892\u5893\u5894\u5895\u5896\u5897\u5898\u5899\u589a\u589b\u589c\u589d\u589e\u589f\u58a0\u58a1\u58a2\u58a3\u58a4\u58a5\u58a6\u58a7\u58a8\u58a9\u58aa\u58ab\u58ac\u58ad\u58ae\u58af\u58b0\u58b1\u58b2\u58b3\u58b4\u58b5\u58b6\u58b7\u58b8\u58b9\u58ba\u58bb\u58bc\u58bd\u58be\u58bf\u58c0\u58c1\u58c2\u58c3\u58c4\u58c5\u58c6\u58c7\u58c8\u58c9\u58ca\u58cb\u58cc\u58cd\u58ce\u58cf\u58d0\u58d1\u58d2\u58d3\u58d4\u58d5\u58d6\u58d7\u58d8\u58d9\u58da\u58db\u58dc\u58dd\u58de\u58df\u58e0\u58e1\u58e2\u58e3\u58e4\u58e5\u58e6\u58e7\u58e8\u58e9\u58ea\u58eb\u58ec\u58ed\u58ee\u58ef\u58f0\u58f1\u58f2\u58f3\u58f4\u58f5\u58f6\u58f7\u58f8\u58f9\u58fa\u58fb\u58fc\u58fd\u58fe\u58ff\u5900\u5901\u5902\u5903\u5904\u5905\u5906\u5907\u5908\u5909\u590a\u590b\u590c\u590d\u590e\u590f\u5910\u5911\u5912\u5913\u5914\u5915\u5916\u5917\u5918\u5919\u591a\u591b\u591c\u591d\u591e\u591f\u5920\u5921\u5922\u5923\u5924\u5925\u5926\u5927\u5928\u5929\u592a\u592b\u592c\u592d\u592e\u592f\u5930\u5931\u5932\u5933\u5934\u5935\u5936\u5937\u5938\u5939\u593a\u593b\u593c\u593d\u593e\u593f\u5940\u5941\u5942\u5943\u5944\u5945\u5946\u5947\u5948\u5949\u594a\u594b\u594c\u594d\u594e\u594f\u5950\u5951\u5952\u5953\u5954\u5955\u5956\u5957\u5958\u5959\u595a\u595b\u595c\u595d\u595e\u595f\u5960\u5961\u5962\u5963\u5964\u5965\u5966\u5967\u5968\u5969\u596a\u596b\u596c\u596d\u596e\u596f\u5970\u5971\u5972\u5973\u5974\u5975\u5976\u5977\u5978\u5979\u597a\u597b\u597c\u597d\u597e\u597f\u5980\u5981\u5982\u5983\u5984\u5985\u5986\u5987\u5988\u5989\u598a\u598b\u598c\u598d\u598e\u598f\u5990\u5991\u5992\u5993\u5994\u5995\u5996\u5997\u5998\u5999\u599a\u599b\u599c\u599d\u599e\u599f\u59a0\u59a1\u59a2\u59a3\u59a4\u59a5\u59a6\u59a7\u59a8\u59a9\u59aa\u59ab\u59ac\u59ad\u59ae\u59af\u59b0\u59b1\u59b2\u59b3\u59b4\u59b5\u59b6\u59b7\u59b8\u59b9\u59ba\u59bb\u59bc\u59bd\u59be\u59bf\u59c0\u59c1\u59c2\u59c3\u59c4\u59c5\u59c6\u59c7\u59c8\u59c9\u59ca\u59cb\u59cc\u59cd\u59ce\u59cf\u59d0\u59d1\u59d2\u59d3\u59d4\u59d5\u59d6\u59d7\u59d8\u59d9\u59da\u59db\u59dc\u59dd\u59de\u59df\u59e0\u59e1\u59e2\u59e3\u59e4\u59e5\u59e6\u59e7\u59e8\u59e9\u59ea\u59eb\u59ec\u59ed\u59ee\u59ef\u59f0\u59f1\u59f2\u59f3\u59f4\u59f5\u59f6\u59f7\u59f8\u59f9\u59fa\u59fb\u59fc\u59fd\u59fe\u59ff\u5a00\u5a01\u5a02\u5a03\u5a04\u5a05\u5a06\u5a07\u5a08\u5a09\u5a0a\u5a0b\u5a0c\u5a0d\u5a0e\u5a0f\u5a10\u5a11\u5a12\u5a13\u5a14\u5a15\u5a16\u5a17\u5a18\u5a19\u5a1a\u5a1b\u5a1c\u5a1d\u5a1e\u5a1f\u5a20\u5a21\u5a22\u5a23\u5a24\u5a25\u5a26\u5a27\u5a28\u5a29\u5a2a\u5a2b\u5a2c\u5a2d\u5a2e\u5a2f\u5a30\u5a31\u5a32\u5a33\u5a34\u5a35\u5a36\u5a37\u5a38\u5a39\u5a3a\u5a3b\u5a3c\u5a3d\u5a3e\u5a3f\u5a40\u5a41\u5a42\u5a43\u5a44\u5a45\u5a46\u5a47\u5a48\u5a49\u5a4a\u5a4b\u5a4c\u5a4d\u5a4e\u5a4f\u5a50\u5a51\u5a52\u5a53\u5a54\u5a55\u5a56\u5a57\u5a58\u5a59\u5a5a\u5a5b\u5a5c\u5a5d\u5a5e\u5a5f\u5a60\u5a61\u5a62\u5a63\u5a64\u5a65\u5a66\u5a67\u5a68\u5a69\u5a6a\u5a6b\u5a6c\u5a6d\u5a6e\u5a6f\u5a70\u5a71\u5a72\u5a73\u5a74\u5a75\u5a76\u5a77\u5a78\u5a79\u5a7a\u5a7b\u5a7c\u5a7d\u5a7e\u5a7f\u5a80\u5a81\u5a82\u5a83\u5a84\u5a85\u5a86\u5a87\u5a88\u5a89\u5a8a\u5a8b\u5a8c\u5a8d\u5a8e\u5a8f\u5a90\u5a91\u5a92\u5a93\u5a94\u5a95\u5a96\u5a97\u5a98\u5a99\u5a9a\u5a9b\u5a9c\u5a9d\u5a9e\u5a9f\u5aa0\u5aa1\u5aa2\u5aa3\u5aa4\u5aa5\u5aa6\u5aa7\u5aa8\u5aa9\u5aaa\u5aab\u5aac\u5aad\u5aae\u5aaf\u5ab0\u5ab1\u5ab2\u5ab3\u5ab4\u5ab5\u5ab6\u5ab7\u5ab8\u5ab9\u5aba\u5abb\u5abc\u5abd\u5abe\u5abf\u5ac0\u5ac1\u5ac2\u5ac3\u5ac4\u5ac5\u5ac6\u5ac7\u5ac8\u5ac9\u5aca\u5acb\u5acc\u5acd\u5ace\u5acf\u5ad0\u5ad1\u5ad2\u5ad3\u5ad4\u5ad5\u5ad6\u5ad7\u5ad8\
u5ad9\u5ada\u5adb\u5adc\u5add\u5ade\u5adf\u5ae0\u5ae1\u5ae2\u5ae3\u5ae4\u5ae5\u5ae6\u5ae7\u5ae8\u5ae9\u5aea\u5aeb\u5aec\u5aed\u5aee\u5aef\u5af0\u5af1\u5af2\u5af3\u5af4\u5af5\u5af6\u5af7\u5af8\u5af9\u5afa\u5afb\u5afc\u5afd\u5afe\u5aff\u5b00\u5b01\u5b02\u5b03\u5b04\u5b05\u5b06\u5b07\u5b08\u5b09\u5b0a\u5b0b\u5b0c\u5b0d\u5b0e\u5b0f\u5b10\u5b11\u5b12\u5b13\u5b14\u5b15\u5b16\u5b17\u5b18\u5b19\u5b1a\u5b1b\u5b1c\u5b1d\u5b1e\u5b1f\u5b20\u5b21\u5b22\u5b23\u5b24\u5b25\u5b26\u5b27\u5b28\u5b29\u5b2a\u5b2b\u5b2c\u5b2d\u5b2e\u5b2f\u5b30\u5b31\u5b32\u5b33\u5b34\u5b35\u5b36\u5b37\u5b38\u5b39\u5b3a\u5b3b\u5b3c\u5b3d\u5b3e\u5b3f\u5b40\u5b41\u5b42\u5b43\u5b44\u5b45\u5b46\u5b47\u5b48\u5b49\u5b4a\u5b4b\u5b4c\u5b4d\u5b4e\u5b4f\u5b50\u5b51\u5b52\u5b53\u5b54\u5b55\u5b56\u5b57\u5b58\u5b59\u5b5a\u5b5b\u5b5c\u5b5d\u5b5e\u5b5f\u5b60\u5b61\u5b62\u5b63\u5b64\u5b65\u5b66\u5b67\u5b68\u5b69\u5b6a\u5b6b\u5b6c\u5b6d\u5b6e\u5b6f\u5b70\u5b71\u5b72\u5b73\u5b74\u5b75\u5b76\u5b77\u5b78\u5b79\u5b7a\u5b7b\u5b7c\u5b7d\u5b7e\u5b7f\u5b80\u5b81\u5b82\u5b83\u5b84\u5b85\u5b86\u5b87\u5b88\u5b89\u5b8a\u5b8b\u5b8c\u5b8d\u5b8e\u5b8f\u5b90\u5b91\u5b92\u5b93\u5b94\u5b95\u5b96\u5b97\u5b98\u5b99\u5b9a\u5b9b\u5b9c\u5b9d\u5b9e\u5b9f\u5ba0\u5ba1\u5ba2\u5ba3\u5ba4\u5ba5\u5ba6\u5ba7\u5ba8\u5ba9\u5baa\u5bab\u5bac\u5bad\u5bae\u5baf\u5bb0\u5bb1\u5bb2\u5bb3\u5bb4\u5bb5\u5bb6\u5bb7\u5bb8\u5bb9\u5bba\u5bbb\u5bbc\u5bbd\u5bbe\u5bbf\u5bc0\u5bc1\u5bc2\u5bc3\u5bc4\u5bc5\u5bc6\u5bc7\u5bc8\u5bc9\u5bca\u5bcb\u5bcc\u5bcd\u5bce\u5bcf\u5bd0\u5bd1\u5bd2\u5bd3\u5bd4\u5bd5\u5bd6\u5bd7\u5bd8\u5bd9\u5bda\u5bdb\u5bdc\u5bdd\u5bde\u5bdf\u5be0\u5be1\u5be2\u5be3\u5be4\u5be5\u5be6\u5be7\u5be8\u5be9\u5bea\u5beb\u5bec\u5bed\u5bee\u5bef\u5bf0\u5bf1\u5bf2\u5bf3\u5bf4\u5bf5\u5bf6\u5bf7\u5bf8\u5bf9\u5bfa\u5bfb\u5bfc\u5bfd\u5bfe\u5bff\u5c00\u5c01\u5c02\u5c03\u5c04\u5c05\u5c06\u5c07\u5c08\u5c09\u5c0a\u5c0b\u5c0c\u5c0d\u5c0e\u5c0f\u5c10\u5c11\u5c12\u5c13\u5c14\u5c15\u5c16\u5c17\u5c18\u5c19\u5c1a\u5c1b\u5c1c\u5c1d\u5c1e\u5c1f\u5c20\u5c21\u5c22\u5c23\u5c24\u5c25\u5c26\u5c27\u5c28\u5c29\u5c2a\u5c2b\u5c2c\u5c2d\u5c2e\u5c2f\u5c30\u5c31\u5c32\u5c33\u5c34\u5c35\u5c36\u5c37\u5c38\u5c39\u5c3a\u5c3b\u5c3c\u5c3d\u5c3e\u5c3f\u5c40\u5c41\u5c42\u5c43\u5c44\u5c45\u5c46\u5c47\u5c48\u5c49\u5c4a\u5c4b\u5c4c\u5c4d\u5c4e\u5c4f\u5c50\u5c51\u5c52\u5c53\u5c54\u5c55\u5c56\u5c57\u5c58\u5c59\u5c5a\u5c5b\u5c5c\u5c5d\u5c5e\u5c5f\u5c60\u5c61\u5c62\u5c63\u5c64\u5c65\u5c66\u5c67\u5c68\u5c69\u5c6a\u5c6b\u5c6c\u5c6d\u5c6e\u5c6f\u5c70\u5c71\u5c72\u5c73\u5c74\u5c75\u5c76\u5c77\u5c78\u5c79\u5c7a\u5c7b\u5c7c\u5c7d\u5c7e\u5c7f\u5c80\u5c81\u5c82\u5c83\u5c84\u5c85\u5c86\u5c87\u5c88\u5c89\u5c8a\u5c8b\u5c8c\u5c8d\u5c8e\u5c8f\u5c90\u5c91\u5c92\u5c93\u5c94\u5c95\u5c96\u5c97\u5c98\u5c99\u5c9a\u5c9b\u5c9c\u5c9d\u5c9e\u5c9f\u5ca0\u5ca1\u5ca2\u5ca3\u5ca4\u5ca5\u5ca6\u5ca7\u5ca8\u5ca9\u5caa\u5cab\u5cac\u5cad\u5cae\u5caf\u5cb0\u5cb1\u5cb2\u5cb3\u5cb4\u5cb5\u5cb6\u5cb7\u5cb8\u5cb9\u5cba\u5cbb\u5cbc\u5cbd\u5cbe\u5cbf\u5cc0\u5cc1\u5cc2\u5cc3\u5cc4\u5cc5\u5cc6\u5cc7\u5cc8\u5cc9\u5cca\u5ccb\u5ccc\u5ccd\u5cce\u5ccf\u5cd0\u5cd1\u5cd2\u5cd3\u5cd4\u5cd5\u5cd6\u5cd7\u5cd8\u5cd9\u5cda\u5cdb\u5cdc\u5cdd\u5cde\u5cdf\u5ce0\u5ce1\u5ce2\u5ce3\u5ce4\u5ce5\u5ce6\u5ce7\u5ce8\u5ce9\u5cea\u5ceb\u5cec\u5ced\u5cee\u5cef\u5cf0\u5cf1\u5cf2\u5cf3\u5cf4\u5cf5\u5cf6\u5cf7\u5cf8\u5cf9\u5cfa\u5cfb\u5cfc\u5cfd\u5cfe\u5cff\u5d00\u5d01\u5d02\u5d03\u5d04\u5d05\u5d06\u5d07\u5d08\u5d09\u5d0a\u5d0b\u5d0c\u5d0d\u5d0e\u5d0f\u5d10\u5d11\u5d12\u5d13\u5d14\u5d15\u5d16\u5d17\u5d18\u5d19\u5d1a\u5d1b\u5d1c\u5d1d\u5d1e\u5d1f\u5d20\u5d21\u5d22\u5d23\u5d24\u5d25\u5d26\u5d27\u5d28\u5d
29\u5d2a\u5d2b\u5d2c\u5d2d\u5d2e\u5d2f\u5d30\u5d31\u5d32\u5d33\u5d34\u5d35\u5d36\u5d37\u5d38\u5d39\u5d3a\u5d3b\u5d3c\u5d3d\u5d3e\u5d3f\u5d40\u5d41\u5d42\u5d43\u5d44\u5d45\u5d46\u5d47\u5d48\u5d49\u5d4a\u5d4b\u5d4c\u5d4d\u5d4e\u5d4f\u5d50\u5d51\u5d52\u5d53\u5d54\u5d55\u5d56\u5d57\u5d58\u5d59\u5d5a\u5d5b\u5d5c\u5d5d\u5d5e\u5d5f\u5d60\u5d61\u5d62\u5d63\u5d64\u5d65\u5d66\u5d67\u5d68\u5d69\u5d6a\u5d6b\u5d6c\u5d6d\u5d6e\u5d6f\u5d70\u5d71\u5d72\u5d73\u5d74\u5d75\u5d76\u5d77\u5d78\u5d79\u5d7a\u5d7b\u5d7c\u5d7d\u5d7e\u5d7f\u5d80\u5d81\u5d82\u5d83\u5d84\u5d85\u5d86\u5d87\u5d88\u5d89\u5d8a\u5d8b\u5d8c\u5d8d\u5d8e\u5d8f\u5d90\u5d91\u5d92\u5d93\u5d94\u5d95\u5d96\u5d97\u5d98\u5d99\u5d9a\u5d9b\u5d9c\u5d9d\u5d9e\u5d9f\u5da0\u5da1\u5da2\u5da3\u5da4\u5da5\u5da6\u5da7\u5da8\u5da9\u5daa\u5dab\u5dac\u5dad\u5dae\u5daf\u5db0\u5db1\u5db2\u5db3\u5db4\u5db5\u5db6\u5db7\u5db8\u5db9\u5dba\u5dbb\u5dbc\u5dbd\u5dbe\u5dbf\u5dc0\u5dc1\u5dc2\u5dc3\u5dc4\u5dc5\u5dc6\u5dc7\u5dc8\u5dc9\u5dca\u5dcb\u5dcc\u5dcd\u5dce\u5dcf\u5dd0\u5dd1\u5dd2\u5dd3\u5dd4\u5dd5\u5dd6\u5dd7\u5dd8\u5dd9\u5dda\u5ddb\u5ddc\u5ddd\u5dde\u5ddf\u5de0\u5de1\u5de2\u5de3\u5de4\u5de5\u5de6\u5de7\u5de8\u5de9\u5dea\u5deb\u5dec\u5ded\u5dee\u5def\u5df0\u5df1\u5df2\u5df3\u5df4\u5df5\u5df6\u5df7\u5df8\u5df9\u5dfa\u5dfb\u5dfc\u5dfd\u5dfe\u5dff\u5e00\u5e01\u5e02\u5e03\u5e04\u5e05\u5e06\u5e07\u5e08\u5e09\u5e0a\u5e0b\u5e0c\u5e0d\u5e0e\u5e0f\u5e10\u5e11\u5e12\u5e13\u5e14\u5e15\u5e16\u5e17\u5e18\u5e19\u5e1a\u5e1b\u5e1c\u5e1d\u5e1e\u5e1f\u5e20\u5e21\u5e22\u5e23\u5e24\u5e25\u5e26\u5e27\u5e28\u5e29\u5e2a\u5e2b\u5e2c\u5e2d\u5e2e\u5e2f\u5e30\u5e31\u5e32\u5e33\u5e34\u5e35\u5e36\u5e37\u5e38\u5e39\u5e3a\u5e3b\u5e3c\u5e3d\u5e3e\u5e3f\u5e40\u5e41\u5e42\u5e43\u5e44\u5e45\u5e46\u5e47\u5e48\u5e49\u5e4a\u5e4b\u5e4c\u5e4d\u5e4e\u5e4f\u5e50\u5e51\u5e52\u5e53\u5e54\u5e55\u5e56\u5e57\u5e58\u5e59\u5e5a\u5e5b\u5e5c\u5e5d\u5e5e\u5e5f\u5e60\u5e61\u5e62\u5e63\u5e64\u5e65\u5e66\u5e67\u5e68\u5e69\u5e6a\u5e6b\u5e6c\u5e6d\u5e6e\u5e6f\u5e70\u5e71\u5e72\u5e73\u5e74\u5e75\u5e76\u5e77\u5e78\u5e79\u5e7a\u5e7b\u5e7c\u5e7d\u5e7e\u5e7f\u5e80\u5e81\u5e82\u5e83\u5e84\u5e85\u5e86\u5e87\u5e88\u5e89\u5e8a\u5e8b\u5e8c\u5e8d\u5e8e\u5e8f\u5e90\u5e91\u5e92\u5e93\u5e94\u5e95\u5e96\u5e97\u5e98\u5e99\u5e9a\u5e9b\u5e9c\u5e9d\u5e9e\u5e9f\u5ea0\u5ea1\u5ea2\u5ea3\u5ea4\u5ea5\u5ea6\u5ea7\u5ea8\u5ea9\u5eaa\u5eab\u5eac\u5ead\u5eae\u5eaf\u5eb0\u5eb1\u5eb2\u5eb3\u5eb4\u5eb5\u5eb6\u5eb7\u5eb8\u5eb9\u5eba\u5ebb\u5ebc\u5ebd\u5ebe\u5ebf\u5ec0\u5ec1\u5ec2\u5ec3\u5ec4\u5ec5\u5ec6\u5ec7\u5ec8\u5ec9\u5eca\u5ecb\u5ecc\u5ecd\u5ece\u5ecf\u5ed0\u5ed1\u5ed2\u5ed3\u5ed4\u5ed5\u5ed6\u5ed7\u5ed8\u5ed9\u5eda\u5edb\u5edc\u5edd\u5ede\u5edf\u5ee0\u5ee1\u5ee2\u5ee3\u5ee4\u5ee5\u5ee6\u5ee7\u5ee8\u5ee9\u5eea\u5eeb\u5eec\u5eed\u5eee\u5eef\u5ef0\u5ef1\u5ef2\u5ef3\u5ef4\u5ef5\u5ef6\u5ef7\u5ef8\u5ef9\u5efa\u5efb\u5efc\u5efd\u5efe\u5eff\u5f00\u5f01\u5f02\u5f03\u5f04\u5f05\u5f06\u5f07\u5f08\u5f09\u5f0a\u5f0b\u5f0c\u5f0d\u5f0e\u5f0f\u5f10\u5f11\u5f12\u5f13\u5f14\u5f15\u5f16\u5f17\u5f18\u5f19\u5f1a\u5f1b\u5f1c\u5f1d\u5f1e\u5f1f\u5f20\u5f21\u5f22\u5f23\u5f24\u5f25\u5f26\u5f27\u5f28\u5f29\u5f2a\u5f2b\u5f2c\u5f2d\u5f2e\u5f2f\u5f30\u5f31\u5f32\u5f33\u5f34\u5f35\u5f36\u5f37\u5f38\u5f39\u5f3a\u5f3b\u5f3c\u5f3d\u5f3e\u5f3f\u5f40\u5f41\u5f42\u5f43\u5f44\u5f45\u5f46\u5f47\u5f48\u5f49\u5f4a\u5f4b\u5f4c\u5f4d\u5f4e\u5f4f\u5f50\u5f51\u5f52\u5f53\u5f54\u5f55\u5f56\u5f57\u5f58\u5f59\u5f5a\u5f5b\u5f5c\u5f5d\u5f5e\u5f5f\u5f60\u5f61\u5f62\u5f63\u5f64\u5f65\u5f66\u5f67\u5f68\u5f69\u5f6a\u5f6b\u5f6c\u5f6d\u5f6e\u5f6f\u5f70\u5f71\u5f72\u5f73\u5f74\u5f75\u5f76\u5f77\u5f78\u5f79\
u5f7a\u5f7b\u5f7c\u5f7d\u5f7e\u5f7f\u5f80\u5f81\u5f82\u5f83\u5f84\u5f85\u5f86\u5f87\u5f88\u5f89\u5f8a\u5f8b\u5f8c\u5f8d\u5f8e\u5f8f\u5f90\u5f91\u5f92\u5f93\u5f94\u5f95\u5f96\u5f97\u5f98\u5f99\u5f9a\u5f9b\u5f9c\u5f9d\u5f9e\u5f9f\u5fa0\u5fa1\u5fa2\u5fa3\u5fa4\u5fa5\u5fa6\u5fa7\u5fa8\u5fa9\u5faa\u5fab\u5fac\u5fad\u5fae\u5faf\u5fb0\u5fb1\u5fb2\u5fb3\u5fb4\u5fb5\u5fb6\u5fb7\u5fb8\u5fb9\u5fba\u5fbb\u5fbc\u5fbd\u5fbe\u5fbf\u5fc0\u5fc1\u5fc2\u5fc3\u5fc4\u5fc5\u5fc6\u5fc7\u5fc8\u5fc9\u5fca\u5fcb\u5fcc\u5fcd\u5fce\u5fcf\u5fd0\u5fd1\u5fd2\u5fd3\u5fd4\u5fd5\u5fd6\u5fd7\u5fd8\u5fd9\u5fda\u5fdb\u5fdc\u5fdd\u5fde\u5fdf\u5fe0\u5fe1\u5fe2\u5fe3\u5fe4\u5fe5\u5fe6\u5fe7\u5fe8\u5fe9\u5fea\u5feb\u5fec\u5fed\u5fee\u5fef\u5ff0\u5ff1\u5ff2\u5ff3\u5ff4\u5ff5\u5ff6\u5ff7\u5ff8\u5ff9\u5ffa\u5ffb\u5ffc\u5ffd\u5ffe\u5fff\u6000\u6001\u6002\u6003\u6004\u6005\u6006\u6007\u6008\u6009\u600a\u600b\u600c\u600d\u600e\u600f\u6010\u6011\u6012\u6013\u6014\u6015\u6016\u6017\u6018\u6019\u601a\u601b\u601c\u601d\u601e\u601f\u6020\u6021\u6022\u6023\u6024\u6025\u6026\u6027\u6028\u6029\u602a\u602b\u602c\u602d\u602e\u602f\u6030\u6031\u6032\u6033\u6034\u6035\u6036\u6037\u6038\u6039\u603a\u603b\u603c\u603d\u603e\u603f\u6040\u6041\u6042\u6043\u6044\u6045\u6046\u6047\u6048\u6049\u604a\u604b\u604c\u604d\u604e\u604f\u6050\u6051\u6052\u6053\u6054\u6055\u6056\u6057\u6058\u6059\u605a\u605b\u605c\u605d\u605e\u605f\u6060\u6061\u6062\u6063\u6064\u6065\u6066\u6067\u6068\u6069\u606a\u606b\u606c\u606d\u606e\u606f\u6070\u6071\u6072\u6073\u6074\u6075\u6076\u6077\u6078\u6079\u607a\u607b\u607c\u607d\u607e\u607f\u6080\u6081\u6082\u6083\u6084\u6085\u6086\u6087\u6088\u6089\u608a\u608b\u608c\u608d\u608e\u608f\u6090\u6091\u6092\u6093\u6094\u6095\u6096\u6097\u6098\u6099\u609a\u609b\u609c\u609d\u609e\u609f\u60a0\u60a1\u60a2\u60a3\u60a4\u60a5\u60a6\u60a7\u60a8\u60a9\u60aa\u60ab\u60ac\u60ad\u60ae\u60af\u60b0\u60b1\u60b2\u60b3\u60b4\u60b5\u60b6\u60b7\u60b8\u60b9\u60ba\u60bb\u60bc\u60bd\u60be\u60bf\u60c0\u60c1\u60c2\u60c3\u60c4\u60c5\u60c6\u60c7\u60c8\u60c9\u60ca\u60cb\u60cc\u60cd\u60ce\u60cf\u60d0\u60d1\u60d2\u60d3\u60d4\u60d5\u60d6\u60d7\u60d8\u60d9\u60da\u60db\u60dc\u60dd\u60de\u60df\u60e0\u60e1\u60e2\u60e3\u60e4\u60e5\u60e6\u60e7\u60e8\u60e9\u60ea\u60eb\u60ec\u60ed\u60ee\u60ef\u60f0\u60f1\u60f2\u60f3\u60f4\u60f5\u60f6\u60f7\u60f8\u60f9\u60fa\u60fb\u60fc\u60fd\u60fe\u60ff\u6100\u6101\u6102\u6103\u6104\u6105\u6106\u6107\u6108\u6109\u610a\u610b\u610c\u610d\u610e\u610f\u6110\u6111\u6112\u6113\u6114\u6115\u6116\u6117\u6118\u6119\u611a\u611b\u611c\u611d\u611e\u611f\u6120\u6121\u6122\u6123\u6124\u6125\u6126\u6127\u6128\u6129\u612a\u612b\u612c\u612d\u612e\u612f\u6130\u6131\u6132\u6133\u6134\u6135\u6136\u6137\u6138\u6139\u613a\u613b\u613c\u613d\u613e\u613f\u6140\u6141\u6142\u6143\u6144\u6145\u6146\u6147\u6148\u6149\u614a\u614b\u614c\u614d\u614e\u614f\u6150\u6151\u6152\u6153\u6154\u6155\u6156\u6157\u6158\u6159\u615a\u615b\u615c\u615d\u615e\u615f\u6160\u6161\u6162\u6163\u6164\u6165\u6166\u6167\u6168\u6169\u616a\u616b\u616c\u616d\u616e\u616f\u6170\u6171\u6172\u6173\u6174\u6175\u6176\u6177\u6178\u6179\u617a\u617b\u617c\u617d\u617e\u617f\u6180\u6181\u6182\u6183\u6184\u6185\u6186\u6187\u6188\u6189\u618a\u618b\u618c\u618d\u618e\u618f\u6190\u6191\u6192\u6193\u6194\u6195\u6196\u6197\u6198\u6199\u619a\u619b\u619c\u619d\u619e\u619f\u61a0\u61a1\u61a2\u61a3\u61a4\u61a5\u61a6\u61a7\u61a8\u61a9\u61aa\u61ab\u61ac\u61ad\u61ae\u61af\u61b0\u61b1\u61b2\u61b3\u61b4\u61b5\u61b6\u61b7\u61b8\u61b9\u61ba\u61bb\u61bc\u61bd\u61be\u61bf\u61c0\u61c1\u61c2\u61c3\u61c4\u61c5\u61c6\u61c7\u61c8\u61c9\u61
ca\u61cb\u61cc\u61cd\u61ce\u61cf\u61d0\u61d1\u61d2\u61d3\u61d4\u61d5\u61d6\u61d7\u61d8\u61d9\u61da\u61db\u61dc\u61dd\u61de\u61df\u61e0\u61e1\u61e2\u61e3\u61e4\u61e5\u61e6\u61e7\u61e8\u61e9\u61ea\u61eb\u61ec\u61ed\u61ee\u61ef\u61f0\u61f1\u61f2\u61f3\u61f4\u61f5\u61f6\u61f7\u61f8\u61f9\u61fa\u61fb\u61fc\u61fd\u61fe\u61ff\u6200\u6201\u6202\u6203\u6204\u6205\u6206\u6207\u6208\u6209\u620a\u620b\u620c\u620d\u620e\u620f\u6210\u6211\u6212\u6213\u6214\u6215\u6216\u6217\u6218\u6219\u621a\u621b\u621c\u621d\u621e\u621f\u6220\u6221\u6222\u6223\u6224\u6225\u6226\u6227\u6228\u6229\u622a\u622b\u622c\u622d\u622e\u622f\u6230\u6231\u6232\u6233\u6234\u6235\u6236\u6237\u6238\u6239\u623a\u623b\u623c\u623d\u623e\u623f\u6240\u6241\u6242\u6243\u6244\u6245\u6246\u6247\u6248\u6249\u624a\u624b\u624c\u624d\u624e\u624f\u6250\u6251\u6252\u6253\u6254\u6255\u6256\u6257\u6258\u6259\u625a\u625b\u625c\u625d\u625e\u625f\u6260\u6261\u6262\u6263\u6264\u6265\u6266\u6267\u6268\u6269\u626a\u626b\u626c\u626d\u626e\u626f\u6270\u6271\u6272\u6273\u6274\u6275\u6276\u6277\u6278\u6279\u627a\u627b\u627c\u627d\u627e\u627f\u6280\u6281\u6282\u6283\u6284\u6285\u6286\u6287\u6288\u6289\u628a\u628b\u628c\u628d\u628e\u628f\u6290\u6291\u6292\u6293\u6294\u6295\u6296\u6297\u6298\u6299\u629a\u629b\u629c\u629d\u629e\u629f\u62a0\u62a1\u62a2\u62a3\u62a4\u62a5\u62a6\u62a7\u62a8\u62a9\u62aa\u62ab\u62ac\u62ad\u62ae\u62af\u62b0\u62b1\u62b2\u62b3\u62b4\u62b5\u62b6\u62b7\u62b8\u62b9\u62ba\u62bb\u62bc\u62bd\u62be\u62bf\u62c0\u62c1\u62c2\u62c3\u62c4\u62c5\u62c6\u62c7\u62c8\u62c9\u62ca\u62cb\u62cc\u62cd\u62ce\u62cf\u62d0\u62d1\u62d2\u62d3\u62d4\u62d5\u62d6\u62d7\u62d8\u62d9\u62da\u62db\u62dc\u62dd\u62de\u62df\u62e0\u62e1\u62e2\u62e3\u62e4\u62e5\u62e6\u62e7\u62e8\u62e9\u62ea\u62eb\u62ec\u62ed\u62ee\u62ef\u62f0\u62f1\u62f2\u62f3\u62f4\u62f5\u62f6\u62f7\u62f8\u62f9\u62fa\u62fb\u62fc\u62fd\u62fe\u62ff\u6300\u6301\u6302\u6303\u6304\u6305\u6306\u6307\u6308\u6309\u630a\u630b\u630c\u630d\u630e\u630f\u6310\u6311\u6312\u6313\u6314\u6315\u6316\u6317\u6318\u6319\u631a\u631b\u631c\u631d\u631e\u631f\u6320\u6321\u6322\u6323\u6324\u6325\u6326\u6327\u6328\u6329\u632a\u632b\u632c\u632d\u632e\u632f\u6330\u6331\u6332\u6333\u6334\u6335\u6336\u6337\u6338\u6339\u633a\u633b\u633c\u633d\u633e\u633f\u6340\u6341\u6342\u6343\u6344\u6345\u6346\u6347\u6348\u6349\u634a\u634b\u634c\u634d\u634e\u634f\u6350\u6351\u6352\u6353\u6354\u6355\u6356\u6357\u6358\u6359\u635a\u635b\u635c\u635d\u635e\u635f\u6360\u6361\u6362\u6363\u6364\u6365\u6366\u6367\u6368\u6369\u636a\u636b\u636c\u636d\u636e\u636f\u6370\u6371\u6372\u6373\u6374\u6375\u6376\u6377\u6378\u6379\u637a\u637b\u637c\u637d\u637e\u637f\u6380\u6381\u6382\u6383\u6384\u6385\u6386\u6387\u6388\u6389\u638a\u638b\u638c\u638d\u638e\u638f\u6390\u6391\u6392\u6393\u6394\u6395\u6396\u6397\u6398\u6399\u639a\u639b\u639c\u639d\u639e\u639f\u63a0\u63a1\u63a2\u63a3\u63a4\u63a5\u63a6\u63a7\u63a8\u63a9\u63aa\u63ab\u63ac\u63ad\u63ae\u63af\u63b0\u63b1\u63b2\u63b3\u63b4\u63b5\u63b6\u63b7\u63b8\u63b9\u63ba\u63bb\u63bc\u63bd\u63be\u63bf\u63c0\u63c1\u63c2\u63c3\u63c4\u63c5\u63c6\u63c7\u63c8\u63c9\u63ca\u63cb\u63cc\u63cd\u63ce\u63cf\u63d0\u63d1\u63d2\u63d3\u63d4\u63d5\u63d6\u63d7\u63d8\u63d9\u63da\u63db\u63dc\u63dd\u63de\u63df\u63e0\u63e1\u63e2\u63e3\u63e4\u63e5\u63e6\u63e7\u63e8\u63e9\u63ea\u63eb\u63ec\u63ed\u63ee\u63ef\u63f0\u63f1\u63f2\u63f3\u63f4\u63f5\u63f6\u63f7\u63f8\u63f9\u63fa\u63fb\u63fc\u63fd\u63fe\u63ff\u6400\u6401\u6402\u6403\u6404\u6405\u6406\u6407\u6408\u6409\u640a\u640b\u640c\u640d\u640e\u640f\u6410\u6411\u6412\u6413\u6414\u6415\u6416\u6417\u6418\u6419\u641a\
u641b\u641c\u641d\u641e\u641f\u6420\u6421\u6422\u6423\u6424\u6425\u6426\u6427\u6428\u6429\u642a\u642b\u642c\u642d\u642e\u642f\u6430\u6431\u6432\u6433\u6434\u6435\u6436\u6437\u6438\u6439\u643a\u643b\u643c\u643d\u643e\u643f\u6440\u6441\u6442\u6443\u6444\u6445\u6446\u6447\u6448\u6449\u644a\u644b\u644c\u644d\u644e\u644f\u6450\u6451\u6452\u6453\u6454\u6455\u6456\u6457\u6458\u6459\u645a\u645b\u645c\u645d\u645e\u645f\u6460\u6461\u6462\u6463\u6464\u6465\u6466\u6467\u6468\u6469\u646a\u646b\u646c\u646d\u646e\u646f\u6470\u6471\u6472\u6473\u6474\u6475\u6476\u6477\u6478\u6479\u647a\u647b\u647c\u647d\u647e\u647f\u6480\u6481\u6482\u6483\u6484\u6485\u6486\u6487\u6488\u6489\u648a\u648b\u648c\u648d\u648e\u648f\u6490\u6491\u6492\u6493\u6494\u6495\u6496\u6497\u6498\u6499\u649a\u649b\u649c\u649d\u649e\u649f\u64a0\u64a1\u64a2\u64a3\u64a4\u64a5\u64a6\u64a7\u64a8\u64a9\u64aa\u64ab\u64ac\u64ad\u64ae\u64af\u64b0\u64b1\u64b2\u64b3\u64b4\u64b5\u64b6\u64b7\u64b8\u64b9\u64ba\u64bb\u64bc\u64bd\u64be\u64bf\u64c0\u64c1\u64c2\u64c3\u64c4\u64c5\u64c6\u64c7\u64c8\u64c9\u64ca\u64cb\u64cc\u64cd\u64ce\u64cf\u64d0\u64d1\u64d2\u64d3\u64d4\u64d5\u64d6\u64d7\u64d8\u64d9\u64da\u64db\u64dc\u64dd\u64de\u64df\u64e0\u64e1\u64e2\u64e3\u64e4\u64e5\u64e6\u64e7\u64e8\u64e9\u64ea\u64eb\u64ec\u64ed\u64ee\u64ef\u64f0\u64f1\u64f2\u64f3\u64f4\u64f5\u64f6\u64f7\u64f8\u64f9\u64fa\u64fb\u64fc\u64fd\u64fe\u64ff\u6500\u6501\u6502\u6503\u6504\u6505\u6506\u6507\u6508\u6509\u650a\u650b\u650c\u650d\u650e\u650f\u6510\u6511\u6512\u6513\u6514\u6515\u6516\u6517\u6518\u6519\u651a\u651b\u651c\u651d\u651e\u651f\u6520\u6521\u6522\u6523\u6524\u6525\u6526\u6527\u6528\u6529\u652a\u652b\u652c\u652d\u652e\u652f\u6530\u6531\u6532\u6533\u6534\u6535\u6536\u6537\u6538\u6539\u653a\u653b\u653c\u653d\u653e\u653f\u6540\u6541\u6542\u6543\u6544\u6545\u6546\u6547\u6548\u6549\u654a\u654b\u654c\u654d\u654e\u654f\u6550\u6551\u6552\u6553\u6554\u6555\u6556\u6557\u6558\u6559\u655a\u655b\u655c\u655d\u655e\u655f\u6560\u6561\u6562\u6563\u6564\u6565\u6566\u6567\u6568\u6569\u656a\u656b\u656c\u656d\u656e\u656f\u6570\u6571\u6572\u6573\u6574\u6575\u6576\u6577\u6578\u6579\u657a\u657b\u657c\u657d\u657e\u657f\u6580\u6581\u6582\u6583\u6584\u6585\u6586\u6587\u6588\u6589\u658a\u658b\u658c\u658d\u658e\u658f\u6590\u6591\u6592\u6593\u6594\u6595\u6596\u6597\u6598\u6599\u659a\u659b\u659c\u659d\u659e\u659f\u65a0\u65a1\u65a2\u65a3\u65a4\u65a5\u65a6\u65a7\u65a8\u65a9\u65aa\u65ab\u65ac\u65ad\u65ae\u65af\u65b0\u65b1\u65b2\u65b3\u65b4\u65b5\u65b6\u65b7\u65b8\u65b9\u65ba\u65bb\u65bc\u65bd\u65be\u65bf\u65c0\u65c1\u65c2\u65c3\u65c4\u65c5\u65c6\u65c7\u65c8\u65c9\u65ca\u65cb\u65cc\u65cd\u65ce\u65cf\u65d0\u65d1\u65d2\u65d3\u65d4\u65d5\u65d6\u65d7\u65d8\u65d9\u65da\u65db\u65dc\u65dd\u65de\u65df\u65e0\u65e1\u65e2\u65e3\u65e4\u65e5\u65e6\u65e7\u65e8\u65e9\u65ea\u65eb\u65ec\u65ed\u65ee\u65ef\u65f0\u65f1\u65f2\u65f3\u65f4\u65f5\u65f6\u65f7\u65f8\u65f9\u65fa\u65fb\u65fc\u65fd\u65fe\u65ff\u6600\u6601\u6602\u6603\u6604\u6605\u6606\u6607\u6608\u6609\u660a\u660b\u660c\u660d\u660e\u660f\u6610\u6611\u6612\u6613\u6614\u6615\u6616\u6617\u6618\u6619\u661a\u661b\u661c\u661d\u661e\u661f\u6620\u6621\u6622\u6623\u6624\u6625\u6626\u6627\u6628\u6629\u662a\u662b\u662c\u662d\u662e\u662f\u6630\u6631\u6632\u6633\u6634\u6635\u6636\u6637\u6638\u6639\u663a\u663b\u663c\u663d\u663e\u663f\u6640\u6641\u6642\u6643\u6644\u6645\u6646\u6647\u6648\u6649\u664a\u664b\u664c\u664d\u664e\u664f\u6650\u6651\u6652\u6653\u6654\u6655\u6656\u6657\u6658\u6659\u665a\u665b\u665c\u665d\u665e\u665f\u6660\u6661\u6662\u6663\u6664\u6665\u6666\u6667\u6668\u6669\u666a\u66
6b\u666c\u666d\u666e\u666f\u6670\u6671\u6672\u6673\u6674\u6675\u6676\u6677\u6678\u6679\u667a\u667b\u667c\u667d\u667e\u667f\u6680\u6681\u6682\u6683\u6684\u6685\u6686\u6687\u6688\u6689\u668a\u668b\u668c\u668d\u668e\u668f\u6690\u6691\u6692\u6693\u6694\u6695\u6696\u6697\u6698\u6699\u669a\u669b\u669c\u669d\u669e\u669f\u66a0\u66a1\u66a2\u66a3\u66a4\u66a5\u66a6\u66a7\u66a8\u66a9\u66aa\u66ab\u66ac\u66ad\u66ae\u66af\u66b0\u66b1\u66b2\u66b3\u66b4\u66b5\u66b6\u66b7\u66b8\u66b9\u66ba\u66bb\u66bc\u66bd\u66be\u66bf\u66c0\u66c1\u66c2\u66c3\u66c4\u66c5\u66c6\u66c7\u66c8\u66c9\u66ca\u66cb\u66cc\u66cd\u66ce\u66cf\u66d0\u66d1\u66d2\u66d3\u66d4\u66d5\u66d6\u66d7\u66d8\u66d9\u66da\u66db\u66dc\u66dd\u66de\u66df\u66e0\u66e1\u66e2\u66e3\u66e4\u66e5\u66e6\u66e7\u66e8\u66e9\u66ea\u66eb\u66ec\u66ed\u66ee\u66ef\u66f0\u66f1\u66f2\u66f3\u66f4\u66f5\u66f6\u66f7\u66f8\u66f9\u66fa\u66fb\u66fc\u66fd\u66fe\u66ff\u6700\u6701\u6702\u6703\u6704\u6705\u6706\u6707\u6708\u6709\u670a\u670b\u670c\u670d\u670e\u670f\u6710\u6711\u6712\u6713\u6714\u6715\u6716\u6717\u6718\u6719\u671a\u671b\u671c\u671d\u671e\u671f\u6720\u6721\u6722\u6723\u6724\u6725\u6726\u6727\u6728\u6729\u672a\u672b\u672c\u672d\u672e\u672f\u6730\u6731\u6732\u6733\u6734\u6735\u6736\u6737\u6738\u6739\u673a\u673b\u673c\u673d\u673e\u673f\u6740\u6741\u6742\u6743\u6744\u6745\u6746\u6747\u6748\u6749\u674a\u674b\u674c\u674d\u674e\u674f\u6750\u6751\u6752\u6753\u6754\u6755\u6756\u6757\u6758\u6759\u675a\u675b\u675c\u675d\u675e\u675f\u6760\u6761\u6762\u6763\u6764\u6765\u6766\u6767\u6768\u6769\u676a\u676b\u676c\u676d\u676e\u676f\u6770\u6771\u6772\u6773\u6774\u6775\u6776\u6777\u6778\u6779\u677a\u677b\u677c\u677d\u677e\u677f\u6780\u6781\u6782\u6783\u6784\u6785\u6786\u6787\u6788\u6789\u678a\u678b\u678c\u678d\u678e\u678f\u6790\u6791\u6792\u6793\u6794\u6795\u6796\u6797\u6798\u6799\u679a\u679b\u679c\u679d\u679e\u679f\u67a0\u67a1\u67a2\u67a3\u67a4\u67a5\u67a6\u67a7\u67a8\u67a9\u67aa\u67ab\u67ac\u67ad\u67ae\u67af\u67b0\u67b1\u67b2\u67b3\u67b4\u67b5\u67b6\u67b7\u67b8\u67b9\u67ba\u67bb\u67bc\u67bd\u67be\u67bf\u67c0\u67c1\u67c2\u67c3\u67c4\u67c5\u67c6\u67c7\u67c8\u67c9\u67ca\u67cb\u67cc\u67cd\u67ce\u67cf\u67d0\u67d1\u67d2\u67d3\u67d4\u67d5\u67d6\u67d7\u67d8\u67d9\u67da\u67db\u67dc\u67dd\u67de\u67df\u67e0\u67e1\u67e2\u67e3\u67e4\u67e5\u67e6\u67e7\u67e8\u67e9\u67ea\u67eb\u67ec\u67ed\u67ee\u67ef\u67f0\u67f1\u67f2\u67f3\u67f4\u67f5\u67f6\u67f7\u67f8\u67f9\u67fa\u67fb\u67fc\u67fd\u67fe\u67ff\u6800\u6801\u6802\u6803\u6804\u6805\u6806\u6807\u6808\u6809\u680a\u680b\u680c\u680d\u680e\u680f\u6810\u6811\u6812\u6813\u6814\u6815\u6816\u6817\u6818\u6819\u681a\u681b\u681c\u681d\u681e\u681f\u6820\u6821\u6822\u6823\u6824\u6825\u6826\u6827\u6828\u6829\u682a\u682b\u682c\u682d\u682e\u682f\u6830\u6831\u6832\u6833\u6834\u6835\u6836\u6837\u6838\u6839\u683a\u683b\u683c\u683d\u683e\u683f\u6840\u6841\u6842\u6843\u6844\u6845\u6846\u6847\u6848\u6849\u684a\u684b\u684c\u684d\u684e\u684f\u6850\u6851\u6852\u6853\u6854\u6855\u6856\u6857\u6858\u6859\u685a\u685b\u685c\u685d\u685e\u685f\u6860\u6861\u6862\u6863\u6864\u6865\u6866\u6867\u6868\u6869\u686a\u686b\u686c\u686d\u686e\u686f\u6870\u6871\u6872\u6873\u6874\u6875\u6876\u6877\u6878\u6879\u687a\u687b\u687c\u687d\u687e\u687f\u6880\u6881\u6882\u6883\u6884\u6885\u6886\u6887\u6888\u6889\u688a\u688b\u688c\u688d\u688e\u688f\u6890\u6891\u6892\u6893\u6894\u6895\u6896\u6897\u6898\u6899\u689a\u689b\u689c\u689d\u689e\u689f\u68a0\u68a1\u68a2\u68a3\u68a4\u68a5\u68a6\u68a7\u68a8\u68a9\u68aa\u68ab\u68ac\u68ad\u68ae\u68af\u68b0\u68b1\u68b2\u68b3\u68b4\u68b5\u68b6\u68b7\u68b8\u68b9\u68ba\u68bb\
u68bc\u68bd\u68be\u68bf\u68c0\u68c1\u68c2\u68c3\u68c4\u68c5\u68c6\u68c7\u68c8\u68c9\u68ca\u68cb\u68cc\u68cd\u68ce\u68cf\u68d0\u68d1\u68d2\u68d3\u68d4\u68d5\u68d6\u68d7\u68d8\u68d9\u68da\u68db\u68dc\u68dd\u68de\u68df\u68e0\u68e1\u68e2\u68e3\u68e4\u68e5\u68e6\u68e7\u68e8\u68e9\u68ea\u68eb\u68ec\u68ed\u68ee\u68ef\u68f0\u68f1\u68f2\u68f3\u68f4\u68f5\u68f6\u68f7\u68f8\u68f9\u68fa\u68fb\u68fc\u68fd\u68fe\u68ff\u6900\u6901\u6902\u6903\u6904\u6905\u6906\u6907\u6908\u6909\u690a\u690b\u690c\u690d\u690e\u690f\u6910\u6911\u6912\u6913\u6914\u6915\u6916\u6917\u6918\u6919\u691a\u691b\u691c\u691d\u691e\u691f\u6920\u6921\u6922\u6923\u6924\u6925\u6926\u6927\u6928\u6929\u692a\u692b\u692c\u692d\u692e\u692f\u6930\u6931\u6932\u6933\u6934\u6935\u6936\u6937\u6938\u6939\u693a\u693b\u693c\u693d\u693e\u693f\u6940\u6941\u6942\u6943\u6944\u6945\u6946\u6947\u6948\u6949\u694a\u694b\u694c\u694d\u694e\u694f\u6950\u6951\u6952\u6953\u6954\u6955\u6956\u6957\u6958\u6959\u695a\u695b\u695c\u695d\u695e\u695f\u6960\u6961\u6962\u6963\u6964\u6965\u6966\u6967\u6968\u6969\u696a\u696b\u696c\u696d\u696e\u696f\u6970\u6971\u6972\u6973\u6974\u6975\u6976\u6977\u6978\u6979\u697a\u697b\u697c\u697d\u697e\u697f\u6980\u6981\u6982\u6983\u6984\u6985\u6986\u6987\u6988\u6989\u698a\u698b\u698c\u698d\u698e\u698f\u6990\u6991\u6992\u6993\u6994\u6995\u6996\u6997\u6998\u6999\u699a\u699b\u699c\u699d\u699e\u699f\u69a0\u69a1\u69a2\u69a3\u69a4\u69a5\u69a6\u69a7\u69a8\u69a9\u69aa\u69ab\u69ac\u69ad\u69ae\u69af\u69b0\u69b1\u69b2\u69b3\u69b4\u69b5\u69b6\u69b7\u69b8\u69b9\u69ba\u69bb\u69bc\u69bd\u69be\u69bf\u69c0\u69c1\u69c2\u69c3\u69c4\u69c5\u69c6\u69c7\u69c8\u69c9\u69ca\u69cb\u69cc\u69cd\u69ce\u69cf\u69d0\u69d1\u69d2\u69d3\u69d4\u69d5\u69d6\u69d7\u69d8\u69d9\u69da\u69db\u69dc\u69dd\u69de\u69df\u69e0\u69e1\u69e2\u69e3\u69e4\u69e5\u69e6\u69e7\u69e8\u69e9\u69ea\u69eb\u69ec\u69ed\u69ee\u69ef\u69f0\u69f1\u69f2\u69f3\u69f4\u69f5\u69f6\u69f7\u69f8\u69f9\u69fa\u69fb\u69fc\u69fd\u69fe\u69ff\u6a00\u6a01\u6a02\u6a03\u6a04\u6a05\u6a06\u6a07\u6a08\u6a09\u6a0a\u6a0b\u6a0c\u6a0d\u6a0e\u6a0f\u6a10\u6a11\u6a12\u6a13\u6a14\u6a15\u6a16\u6a17\u6a18\u6a19\u6a1a\u6a1b\u6a1c\u6a1d\u6a1e\u6a1f\u6a20\u6a21\u6a22\u6a23\u6a24\u6a25\u6a26\u6a27\u6a28\u6a29\u6a2a\u6a2b\u6a2c\u6a2d\u6a2e\u6a2f\u6a30\u6a31\u6a32\u6a33\u6a34\u6a35\u6a36\u6a37\u6a38\u6a39\u6a3a\u6a3b\u6a3c\u6a3d\u6a3e\u6a3f\u6a40\u6a41\u6a42\u6a43\u6a44\u6a45\u6a46\u6a47\u6a48\u6a49\u6a4a\u6a4b\u6a4c\u6a4d\u6a4e\u6a4f\u6a50\u6a51\u6a52\u6a53\u6a54\u6a55\u6a56\u6a57\u6a58\u6a59\u6a5a\u6a5b\u6a5c\u6a5d\u6a5e\u6a5f\u6a60\u6a61\u6a62\u6a63\u6a64\u6a65\u6a66\u6a67\u6a68\u6a69\u6a6a\u6a6b\u6a6c\u6a6d\u6a6e\u6a6f\u6a70\u6a71\u6a72\u6a73\u6a74\u6a75\u6a76\u6a77\u6a78\u6a79\u6a7a\u6a7b\u6a7c\u6a7d\u6a7e\u6a7f\u6a80\u6a81\u6a82\u6a83\u6a84\u6a85\u6a86\u6a87\u6a88\u6a89\u6a8a\u6a8b\u6a8c\u6a8d\u6a8e\u6a8f\u6a90\u6a91\u6a92\u6a93\u6a94\u6a95\u6a96\u6a97\u6a98\u6a99\u6a9a\u6a9b\u6a9c\u6a9d\u6a9e\u6a9f\u6aa0\u6aa1\u6aa2\u6aa3\u6aa4\u6aa5\u6aa6\u6aa7\u6aa8\u6aa9\u6aaa\u6aab\u6aac\u6aad\u6aae\u6aaf\u6ab0\u6ab1\u6ab2\u6ab3\u6ab4\u6ab5\u6ab6\u6ab7\u6ab8\u6ab9\u6aba\u6abb\u6abc\u6abd\u6abe\u6abf\u6ac0\u6ac1\u6ac2\u6ac3\u6ac4\u6ac5\u6ac6\u6ac7\u6ac8\u6ac9\u6aca\u6acb\u6acc\u6acd\u6ace\u6acf\u6ad0\u6ad1\u6ad2\u6ad3\u6ad4\u6ad5\u6ad6\u6ad7\u6ad8\u6ad9\u6ada\u6adb\u6adc\u6add\u6ade\u6adf\u6ae0\u6ae1\u6ae2\u6ae3\u6ae4\u6ae5\u6ae6\u6ae7\u6ae8\u6ae9\u6aea\u6aeb\u6aec\u6aed\u6aee\u6aef\u6af0\u6af1\u6af2\u6af3\u6af4\u6af5\u6af6\u6af7\u6af8\u6af9\u6afa\u6afb\u6afc\u6afd\u6afe\u6aff\u6b00\u6b01\u6b02\u6b03\u6b04\u6b05\u6b06\u6b07\u6b08\u6b09\u6b0a\u6b0b\u6b
0c\u6b0d\u6b0e\u6b0f\u6b10\u6b11\u6b12\u6b13\u6b14\u6b15\u6b16\u6b17\u6b18\u6b19\u6b1a\u6b1b\u6b1c\u6b1d\u6b1e\u6b1f\u6b20\u6b21\u6b22\u6b23\u6b24\u6b25\u6b26\u6b27\u6b28\u6b29\u6b2a\u6b2b\u6b2c\u6b2d\u6b2e\u6b2f\u6b30\u6b31\u6b32\u6b33\u6b34\u6b35\u6b36\u6b37\u6b38\u6b39\u6b3a\u6b3b\u6b3c\u6b3d\u6b3e\u6b3f\u6b40\u6b41\u6b42\u6b43\u6b44\u6b45\u6b46\u6b47\u6b48\u6b49\u6b4a\u6b4b\u6b4c\u6b4d\u6b4e\u6b4f\u6b50\u6b51\u6b52\u6b53\u6b54\u6b55\u6b56\u6b57\u6b58\u6b59\u6b5a\u6b5b\u6b5c\u6b5d\u6b5e\u6b5f\u6b60\u6b61\u6b62\u6b63\u6b64\u6b65\u6b66\u6b67\u6b68\u6b69\u6b6a\u6b6b\u6b6c\u6b6d\u6b6e\u6b6f\u6b70\u6b71\u6b72\u6b73\u6b74\u6b75\u6b76\u6b77\u6b78\u6b79\u6b7a\u6b7b\u6b7c\u6b7d\u6b7e\u6b7f\u6b80\u6b81\u6b82\u6b83\u6b84\u6b85\u6b86\u6b87\u6b88\u6b89\u6b8a\u6b8b\u6b8c\u6b8d\u6b8e\u6b8f\u6b90\u6b91\u6b92\u6b93\u6b94\u6b95\u6b96\u6b97\u6b98\u6b99\u6b9a\u6b9b\u6b9c\u6b9d\u6b9e\u6b9f\u6ba0\u6ba1\u6ba2\u6ba3\u6ba4\u6ba5\u6ba6\u6ba7\u6ba8\u6ba9\u6baa\u6bab\u6bac\u6bad\u6bae\u6baf\u6bb0\u6bb1\u6bb2\u6bb3\u6bb4\u6bb5\u6bb6\u6bb7\u6bb8\u6bb9\u6bba\u6bbb\u6bbc\u6bbd\u6bbe\u6bbf\u6bc0\u6bc1\u6bc2\u6bc3\u6bc4\u6bc5\u6bc6\u6bc7\u6bc8\u6bc9\u6bca\u6bcb\u6bcc\u6bcd\u6bce\u6bcf\u6bd0\u6bd1\u6bd2\u6bd3\u6bd4\u6bd5\u6bd6\u6bd7\u6bd8\u6bd9\u6bda\u6bdb\u6bdc\u6bdd\u6bde\u6bdf\u6be0\u6be1\u6be2\u6be3\u6be4\u6be5\u6be6\u6be7\u6be8\u6be9\u6bea\u6beb\u6bec\u6bed\u6bee\u6bef\u6bf0\u6bf1\u6bf2\u6bf3\u6bf4\u6bf5\u6bf6\u6bf7\u6bf8\u6bf9\u6bfa\u6bfb\u6bfc\u6bfd\u6bfe\u6bff\u6c00\u6c01\u6c02\u6c03\u6c04\u6c05\u6c06\u6c07\u6c08\u6c09\u6c0a\u6c0b\u6c0c\u6c0d\u6c0e\u6c0f\u6c10\u6c11\u6c12\u6c13\u6c14\u6c15\u6c16\u6c17\u6c18\u6c19\u6c1a\u6c1b\u6c1c\u6c1d\u6c1e\u6c1f\u6c20\u6c21\u6c22\u6c23\u6c24\u6c25\u6c26\u6c27\u6c28\u6c29\u6c2a\u6c2b\u6c2c\u6c2d\u6c2e\u6c2f\u6c30\u6c31\u6c32\u6c33\u6c34\u6c35\u6c36\u6c37\u6c38\u6c39\u6c3a\u6c3b\u6c3c\u6c3d\u6c3e\u6c3f\u6c40\u6c41\u6c42\u6c43\u6c44\u6c45\u6c46\u6c47\u6c48\u6c49\u6c4a\u6c4b\u6c4c\u6c4d\u6c4e\u6c4f\u6c50\u6c51\u6c52\u6c53\u6c54\u6c55\u6c56\u6c57\u6c58\u6c59\u6c5a\u6c5b\u6c5c\u6c5d\u6c5e\u6c5f\u6c60\u6c61\u6c62\u6c63\u6c64\u6c65\u6c66\u6c67\u6c68\u6c69\u6c6a\u6c6b\u6c6c\u6c6d\u6c6e\u6c6f\u6c70\u6c71\u6c72\u6c73\u6c74\u6c75\u6c76\u6c77\u6c78\u6c79\u6c7a\u6c7b\u6c7c\u6c7d\u6c7e\u6c7f\u6c80\u6c81\u6c82\u6c83\u6c84\u6c85\u6c86\u6c87\u6c88\u6c89\u6c8a\u6c8b\u6c8c\u6c8d\u6c8e\u6c8f\u6c90\u6c91\u6c92\u6c93\u6c94\u6c95\u6c96\u6c97\u6c98\u6c99\u6c9a\u6c9b\u6c9c\u6c9d\u6c9e\u6c9f\u6ca0\u6ca1\u6ca2\u6ca3\u6ca4\u6ca5\u6ca6\u6ca7\u6ca8\u6ca9\u6caa\u6cab\u6cac\u6cad\u6cae\u6caf\u6cb0\u6cb1\u6cb2\u6cb3\u6cb4\u6cb5\u6cb6\u6cb7\u6cb8\u6cb9\u6cba\u6cbb\u6cbc\u6cbd\u6cbe\u6cbf\u6cc0\u6cc1\u6cc2\u6cc3\u6cc4\u6cc5\u6cc6\u6cc7\u6cc8\u6cc9\u6cca\u6ccb\u6ccc\u6ccd\u6cce\u6ccf\u6cd0\u6cd1\u6cd2\u6cd3\u6cd4\u6cd5\u6cd6\u6cd7\u6cd8\u6cd9\u6cda\u6cdb\u6cdc\u6cdd\u6cde\u6cdf\u6ce0\u6ce1\u6ce2\u6ce3\u6ce4\u6ce5\u6ce6\u6ce7\u6ce8\u6ce9\u6cea\u6ceb\u6cec\u6ced\u6cee\u6cef\u6cf0\u6cf1\u6cf2\u6cf3\u6cf4\u6cf5\u6cf6\u6cf7\u6cf8\u6cf9\u6cfa\u6cfb\u6cfc\u6cfd\u6cfe\u6cff\u6d00\u6d01\u6d02\u6d03\u6d04\u6d05\u6d06\u6d07\u6d08\u6d09\u6d0a\u6d0b\u6d0c\u6d0d\u6d0e\u6d0f\u6d10\u6d11\u6d12\u6d13\u6d14\u6d15\u6d16\u6d17\u6d18\u6d19\u6d1a\u6d1b\u6d1c\u6d1d\u6d1e\u6d1f\u6d20\u6d21\u6d22\u6d23\u6d24\u6d25\u6d26\u6d27\u6d28\u6d29\u6d2a\u6d2b\u6d2c\u6d2d\u6d2e\u6d2f\u6d30\u6d31\u6d32\u6d33\u6d34\u6d35\u6d36\u6d37\u6d38\u6d39\u6d3a\u6d3b\u6d3c\u6d3d\u6d3e\u6d3f\u6d40\u6d41\u6d42\u6d43\u6d44\u6d45\u6d46\u6d47\u6d48\u6d49\u6d4a\u6d4b\u6d4c\u6d4d\u6d4e\u6d4f\u6d50\u6d51\u6d52\u6d53\u6d54\u6d55\u6d56\u6d57\u6d58\u6d59\u6d5a\u6d5b\u6d5c\
u6d5d\u6d5e\u6d5f\u6d60\u6d61\u6d62\u6d63\u6d64\u6d65\u6d66\u6d67\u6d68\u6d69\u6d6a\u6d6b\u6d6c\u6d6d\u6d6e\u6d6f\u6d70\u6d71\u6d72\u6d73\u6d74\u6d75\u6d76\u6d77\u6d78\u6d79\u6d7a\u6d7b\u6d7c\u6d7d\u6d7e\u6d7f\u6d80\u6d81\u6d82\u6d83\u6d84\u6d85\u6d86\u6d87\u6d88\u6d89\u6d8a\u6d8b\u6d8c\u6d8d\u6d8e\u6d8f\u6d90\u6d91\u6d92\u6d93\u6d94\u6d95\u6d96\u6d97\u6d98\u6d99\u6d9a\u6d9b\u6d9c\u6d9d\u6d9e\u6d9f\u6da0\u6da1\u6da2\u6da3\u6da4\u6da5\u6da6\u6da7\u6da8\u6da9\u6daa\u6dab\u6dac\u6dad\u6dae\u6daf\u6db0\u6db1\u6db2\u6db3\u6db4\u6db5\u6db6\u6db7\u6db8\u6db9\u6dba\u6dbb\u6dbc\u6dbd\u6dbe\u6dbf\u6dc0\u6dc1\u6dc2\u6dc3\u6dc4\u6dc5\u6dc6\u6dc7\u6dc8\u6dc9\u6dca\u6dcb\u6dcc\u6dcd\u6dce\u6dcf\u6dd0\u6dd1\u6dd2\u6dd3\u6dd4\u6dd5\u6dd6\u6dd7\u6dd8\u6dd9\u6dda\u6ddb\u6ddc\u6ddd\u6dde\u6ddf\u6de0\u6de1\u6de2\u6de3\u6de4\u6de5\u6de6\u6de7\u6de8\u6de9\u6dea\u6deb\u6dec\u6ded\u6dee\u6def\u6df0\u6df1\u6df2\u6df3\u6df4\u6df5\u6df6\u6df7\u6df8\u6df9\u6dfa\u6dfb\u6dfc\u6dfd\u6dfe\u6dff\u6e00\u6e01\u6e02\u6e03\u6e04\u6e05\u6e06\u6e07\u6e08\u6e09\u6e0a\u6e0b\u6e0c\u6e0d\u6e0e\u6e0f\u6e10\u6e11\u6e12\u6e13\u6e14\u6e15\u6e16\u6e17\u6e18\u6e19\u6e1a\u6e1b\u6e1c\u6e1d\u6e1e\u6e1f\u6e20\u6e21\u6e22\u6e23\u6e24\u6e25\u6e26\u6e27\u6e28\u6e29\u6e2a\u6e2b\u6e2c\u6e2d\u6e2e\u6e2f\u6e30\u6e31\u6e32\u6e33\u6e34\u6e35\u6e36\u6e37\u6e38\u6e39\u6e3a\u6e3b\u6e3c\u6e3d\u6e3e\u6e3f\u6e40\u6e41\u6e42\u6e43\u6e44\u6e45\u6e46\u6e47\u6e48\u6e49\u6e4a\u6e4b\u6e4c\u6e4d\u6e4e\u6e4f\u6e50\u6e51\u6e52\u6e53\u6e54\u6e55\u6e56\u6e57\u6e58\u6e59\u6e5a\u6e5b\u6e5c\u6e5d\u6e5e\u6e5f\u6e60\u6e61\u6e62\u6e63\u6e64\u6e65\u6e66\u6e67\u6e68\u6e69\u6e6a\u6e6b\u6e6c\u6e6d\u6e6e\u6e6f\u6e70\u6e71\u6e72\u6e73\u6e74\u6e75\u6e76\u6e77\u6e78\u6e79\u6e7a\u6e7b\u6e7c\u6e7d\u6e7e\u6e7f\u6e80\u6e81\u6e82\u6e83\u6e84\u6e85\u6e86\u6e87\u6e88\u6e89\u6e8a\u6e8b\u6e8c\u6e8d\u6e8e\u6e8f\u6e90\u6e91\u6e92\u6e93\u6e94\u6e95\u6e96\u6e97\u6e98\u6e99\u6e9a\u6e9b\u6e9c\u6e9d\u6e9e\u6e9f\u6ea0\u6ea1\u6ea2\u6ea3\u6ea4\u6ea5\u6ea6\u6ea7\u6ea8\u6ea9\u6eaa\u6eab\u6eac\u6ead\u6eae\u6eaf\u6eb0\u6eb1\u6eb2\u6eb3\u6eb4\u6eb5\u6eb6\u6eb7\u6eb8\u6eb9\u6eba\u6ebb\u6ebc\u6ebd\u6ebe\u6ebf\u6ec0\u6ec1\u6ec2\u6ec3\u6ec4\u6ec5\u6ec6\u6ec7\u6ec8\u6ec9\u6eca\u6ecb\u6ecc\u6ecd\u6ece\u6ecf\u6ed0\u6ed1\u6ed2\u6ed3\u6ed4\u6ed5\u6ed6\u6ed7\u6ed8\u6ed9\u6eda\u6edb\u6edc\u6edd\u6ede\u6edf\u6ee0\u6ee1\u6ee2\u6ee3\u6ee4\u6ee5\u6ee6\u6ee7\u6ee8\u6ee9\u6eea\u6eeb\u6eec\u6eed\u6eee\u6eef\u6ef0\u6ef1\u6ef2\u6ef3\u6ef4\u6ef5\u6ef6\u6ef7\u6ef8\u6ef9\u6efa\u6efb\u6efc\u6efd\u6efe\u6eff\u6f00\u6f01\u6f02\u6f03\u6f04\u6f05\u6f06\u6f07\u6f08\u6f09\u6f0a\u6f0b\u6f0c\u6f0d\u6f0e\u6f0f\u6f10\u6f11\u6f12\u6f13\u6f14\u6f15\u6f16\u6f17\u6f18\u6f19\u6f1a\u6f1b\u6f1c\u6f1d\u6f1e\u6f1f\u6f20\u6f21\u6f22\u6f23\u6f24\u6f25\u6f26\u6f27\u6f28\u6f29\u6f2a\u6f2b\u6f2c\u6f2d\u6f2e\u6f2f\u6f30\u6f31\u6f32\u6f33\u6f34\u6f35\u6f36\u6f37\u6f38\u6f39\u6f3a\u6f3b\u6f3c\u6f3d\u6f3e\u6f3f\u6f40\u6f41\u6f42\u6f43\u6f44\u6f45\u6f46\u6f47\u6f48\u6f49\u6f4a\u6f4b\u6f4c\u6f4d\u6f4e\u6f4f\u6f50\u6f51\u6f52\u6f53\u6f54\u6f55\u6f56\u6f57\u6f58\u6f59\u6f5a\u6f5b\u6f5c\u6f5d\u6f5e\u6f5f\u6f60\u6f61\u6f62\u6f63\u6f64\u6f65\u6f66\u6f67\u6f68\u6f69\u6f6a\u6f6b\u6f6c\u6f6d\u6f6e\u6f6f\u6f70\u6f71\u6f72\u6f73\u6f74\u6f75\u6f76\u6f77\u6f78\u6f79\u6f7a\u6f7b\u6f7c\u6f7d\u6f7e\u6f7f\u6f80\u6f81\u6f82\u6f83\u6f84\u6f85\u6f86\u6f87\u6f88\u6f89\u6f8a\u6f8b\u6f8c\u6f8d\u6f8e\u6f8f\u6f90\u6f91\u6f92\u6f93\u6f94\u6f95\u6f96\u6f97\u6f98\u6f99\u6f9a\u6f9b\u6f9c\u6f9d\u6f9e\u6f9f\u6fa0\u6fa1\u6fa2\u6fa3\u6fa4\u6fa5\u6fa6\u6fa7\u6fa8\u6fa9\u6faa\u6fab\u6fac\u6f
ad\u6fae\u6faf\u6fb0\u6fb1\u6fb2\u6fb3\u6fb4\u6fb5\u6fb6\u6fb7\u6fb8\u6fb9\u6fba\u6fbb\u6fbc\u6fbd\u6fbe\u6fbf\u6fc0\u6fc1\u6fc2\u6fc3\u6fc4\u6fc5\u6fc6\u6fc7\u6fc8\u6fc9\u6fca\u6fcb\u6fcc\u6fcd\u6fce\u6fcf\u6fd0\u6fd1\u6fd2\u6fd3\u6fd4\u6fd5\u6fd6\u6fd7\u6fd8\u6fd9\u6fda\u6fdb\u6fdc\u6fdd\u6fde\u6fdf\u6fe0\u6fe1\u6fe2\u6fe3\u6fe4\u6fe5\u6fe6\u6fe7\u6fe8\u6fe9\u6fea\u6feb\u6fec\u6fed\u6fee\u6fef\u6ff0\u6ff1\u6ff2\u6ff3\u6ff4\u6ff5\u6ff6\u6ff7\u6ff8\u6ff9\u6ffa\u6ffb\u6ffc\u6ffd\u6ffe\u6fff\u7000\u7001\u7002\u7003\u7004\u7005\u7006\u7007\u7008\u7009\u700a\u700b\u700c\u700d\u700e\u700f\u7010\u7011\u7012\u7013\u7014\u7015\u7016\u7017\u7018\u7019\u701a\u701b\u701c\u701d\u701e\u701f\u7020\u7021\u7022\u7023\u7024\u7025\u7026\u7027\u7028\u7029\u702a\u702b\u702c\u702d\u702e\u702f\u7030\u7031\u7032\u7033\u7034\u7035\u7036\u7037\u7038\u7039\u703a\u703b\u703c\u703d\u703e\u703f\u7040\u7041\u7042\u7043\u7044\u7045\u7046\u7047\u7048\u7049\u704a\u704b\u704c\u704d\u704e\u704f\u7050\u7051\u7052\u7053\u7054\u7055\u7056\u7057\u7058\u7059\u705a\u705b\u705c\u705d\u705e\u705f\u7060\u7061\u7062\u7063\u7064\u7065\u7066\u7067\u7068\u7069\u706a\u706b\u706c\u706d\u706e\u706f\u7070\u7071\u7072\u7073\u7074\u7075\u7076\u7077\u7078\u7079\u707a\u707b\u707c\u707d\u707e\u707f\u7080\u7081\u7082\u7083\u7084\u7085\u7086\u7087\u7088\u7089\u708a\u708b\u708c\u708d\u708e\u708f\u7090\u7091\u7092\u7093\u7094\u7095\u7096\u7097\u7098\u7099\u709a\u709b\u709c\u709d\u709e\u709f\u70a0\u70a1\u70a2\u70a3\u70a4\u70a5\u70a6\u70a7\u70a8\u70a9\u70aa\u70ab\u70ac\u70ad\u70ae\u70af\u70b0\u70b1\u70b2\u70b3\u70b4\u70b5\u70b6\u70b7\u70b8\u70b9\u70ba\u70bb\u70bc\u70bd\u70be\u70bf\u70c0\u70c1\u70c2\u70c3\u70c4\u70c5\u70c6\u70c7\u70c8\u70c9\u70ca\u70cb\u70cc\u70cd\u70ce\u70cf\u70d0\u70d1\u70d2\u70d3\u70d4\u70d5\u70d6\u70d7\u70d8\u70d9\u70da\u70db\u70dc\u70dd\u70de\u70df\u70e0\u70e1\u70e2\u70e3\u70e4\u70e5\u70e6\u70e7\u70e8\u70e9\u70ea\u70eb\u70ec\u70ed\u70ee\u70ef\u70f0\u70f1\u70f2\u70f3\u70f4\u70f5\u70f6\u70f7\u70f8\u70f9\u70fa\u70fb\u70fc\u70fd\u70fe\u70ff\u7100\u7101\u7102\u7103\u7104\u7105\u7106\u7107\u7108\u7109\u710a\u710b\u710c\u710d\u710e\u710f\u7110\u7111\u7112\u7113\u7114\u7115\u7116\u7117\u7118\u7119\u711a\u711b\u711c\u711d\u711e\u711f\u7120\u7121\u7122\u7123\u7124\u7125\u7126\u7127\u7128\u7129\u712a\u712b\u712c\u712d\u712e\u712f\u7130\u7131\u7132\u7133\u7134\u7135\u7136\u7137\u7138\u7139\u713a\u713b\u713c\u713d\u713e\u713f\u7140\u7141\u7142\u7143\u7144\u7145\u7146\u7147\u7148\u7149\u714a\u714b\u714c\u714d\u714e\u714f\u7150\u7151\u7152\u7153\u7154\u7155\u7156\u7157\u7158\u7159\u715a\u715b\u715c\u715d\u715e\u715f\u7160\u7161\u7162\u7163\u7164\u7165\u7166\u7167\u7168\u7169\u716a\u716b\u716c\u716d\u716e\u716f\u7170\u7171\u7172\u7173\u7174\u7175\u7176\u7177\u7178\u7179\u717a\u717b\u717c\u717d\u717e\u717f\u7180\u7181\u7182\u7183\u7184\u7185\u7186\u7187\u7188\u7189\u718a\u718b\u718c\u718d\u718e\u718f\u7190\u7191\u7192\u7193\u7194\u7195\u7196\u7197\u7198\u7199\u719a\u719b\u719c\u719d\u719e\u719f\u71a0\u71a1\u71a2\u71a3\u71a4\u71a5\u71a6\u71a7\u71a8\u71a9\u71aa\u71ab\u71ac\u71ad\u71ae\u71af\u71b0\u71b1\u71b2\u71b3\u71b4\u71b5\u71b6\u71b7\u71b8\u71b9\u71ba\u71bb\u71bc\u71bd\u71be\u71bf\u71c0\u71c1\u71c2\u71c3\u71c4\u71c5\u71c6\u71c7\u71c8\u71c9\u71ca\u71cb\u71cc\u71cd\u71ce\u71cf\u71d0\u71d1\u71d2\u71d3\u71d4\u71d5\u71d6\u71d7\u71d8\u71d9\u71da\u71db\u71dc\u71dd\u71de\u71df\u71e0\u71e1\u71e2\u71e3\u71e4\u71e5\u71e6\u71e7\u71e8\u71e9\u71ea\u71eb\u71ec\u71ed\u71ee\u71ef\u71f0\u71f1\u71f2\u71f3\u71f4\u71f5\u71f6\u71f7\u71f8\u71f9\u71fa\u71fb\u71fc\u71fd\
u71fe\u71ff\u7200\u7201\u7202\u7203\u7204\u7205\u7206\u7207\u7208\u7209\u720a\u720b\u720c\u720d\u720e\u720f\u7210\u7211\u7212\u7213\u7214\u7215\u7216\u7217\u7218\u7219\u721a\u721b\u721c\u721d\u721e\u721f\u7220\u7221\u7222\u7223\u7224\u7225\u7226\u7227\u7228\u7229\u722a\u722b\u722c\u722d\u722e\u722f\u7230\u7231\u7232\u7233\u7234\u7235\u7236\u7237\u7238\u7239\u723a\u723b\u723c\u723d\u723e\u723f\u7240\u7241\u7242\u7243\u7244\u7245\u7246\u7247\u7248\u7249\u724a\u724b\u724c\u724d\u724e\u724f\u7250\u7251\u7252\u7253\u7254\u7255\u7256\u7257\u7258\u7259\u725a\u725b\u725c\u725d\u725e\u725f\u7260\u7261\u7262\u7263\u7264\u7265\u7266\u7267\u7268\u7269\u726a\u726b\u726c\u726d\u726e\u726f\u7270\u7271\u7272\u7273\u7274\u7275\u7276\u7277\u7278\u7279\u727a\u727b\u727c\u727d\u727e\u727f\u7280\u7281\u7282\u7283\u7284\u7285\u7286\u7287\u7288\u7289\u728a\u728b\u728c\u728d\u728e\u728f\u7290\u7291\u7292\u7293\u7294\u7295\u7296\u7297\u7298\u7299\u729a\u729b\u729c\u729d\u729e\u729f\u72a0\u72a1\u72a2\u72a3\u72a4\u72a5\u72a6\u72a7\u72a8\u72a9\u72aa\u72ab\u72ac\u72ad\u72ae\u72af\u72b0\u72b1\u72b2\u72b3\u72b4\u72b5\u72b6\u72b7\u72b8\u72b9\u72ba\u72bb\u72bc\u72bd\u72be\u72bf\u72c0\u72c1\u72c2\u72c3\u72c4\u72c5\u72c6\u72c7\u72c8\u72c9\u72ca\u72cb\u72cc\u72cd\u72ce\u72cf\u72d0\u72d1\u72d2\u72d3\u72d4\u72d5\u72d6\u72d7\u72d8\u72d9\u72da\u72db\u72dc\u72dd\u72de\u72df\u72e0\u72e1\u72e2\u72e3\u72e4\u72e5\u72e6\u72e7\u72e8\u72e9\u72ea\u72eb\u72ec\u72ed\u72ee\u72ef\u72f0\u72f1\u72f2\u72f3\u72f4\u72f5\u72f6\u72f7\u72f8\u72f9\u72fa\u72fb\u72fc\u72fd\u72fe\u72ff\u7300\u7301\u7302\u7303\u7304\u7305\u7306\u7307\u7308\u7309\u730a\u730b\u730c\u730d\u730e\u730f\u7310\u7311\u7312\u7313\u7314\u7315\u7316\u7317\u7318\u7319\u731a\u731b\u731c\u731d\u731e\u731f\u7320\u7321\u7322\u7323\u7324\u7325\u7326\u7327\u7328\u7329\u732a\u732b\u732c\u732d\u732e\u732f\u7330\u7331\u7332\u7333\u7334\u7335\u7336\u7337\u7338\u7339\u733a\u733b\u733c\u733d\u733e\u733f\u7340\u7341\u7342\u7343\u7344\u7345\u7346\u7347\u7348\u7349\u734a\u734b\u734c\u734d\u734e\u734f\u7350\u7351\u7352\u7353\u7354\u7355\u7356\u7357\u7358\u7359\u735a\u735b\u735c\u735d\u735e\u735f\u7360\u7361\u7362\u7363\u7364\u7365\u7366\u7367\u7368\u7369\u736a\u736b\u736c\u736d\u736e\u736f\u7370\u7371\u7372\u7373\u7374\u7375\u7376\u7377\u7378\u7379\u737a\u737b\u737c\u737d\u737e\u737f\u7380\u7381\u7382\u7383\u7384\u7385\u7386\u7387\u7388\u7389\u738a\u738b\u738c\u738d\u738e\u738f\u7390\u7391\u7392\u7393\u7394\u7395\u7396\u7397\u7398\u7399\u739a\u739b\u739c\u739d\u739e\u739f\u73a0\u73a1\u73a2\u73a3\u73a4\u73a5\u73a6\u73a7\u73a8\u73a9\u73aa\u73ab\u73ac\u73ad\u73ae\u73af\u73b0\u73b1\u73b2\u73b3\u73b4\u73b5\u73b6\u73b7\u73b8\u73b9\u73ba\u73bb\u73bc\u73bd\u73be\u73bf\u73c0\u73c1\u73c2\u73c3\u73c4\u73c5\u73c6\u73c7\u73c8\u73c9\u73ca\u73cb\u73cc\u73cd\u73ce\u73cf\u73d0\u73d1\u73d2\u73d3\u73d4\u73d5\u73d6\u73d7\u73d8\u73d9\u73da\u73db\u73dc\u73dd\u73de\u73df\u73e0\u73e1\u73e2\u73e3\u73e4\u73e5\u73e6\u73e7\u73e8\u73e9\u73ea\u73eb\u73ec\u73ed\u73ee\u73ef\u73f0\u73f1\u73f2\u73f3\u73f4\u73f5\u73f6\u73f7\u73f8\u73f9\u73fa\u73fb\u73fc\u73fd\u73fe\u73ff\u7400\u7401\u7402\u7403\u7404\u7405\u7406\u7407\u7408\u7409\u740a\u740b\u740c\u740d\u740e\u740f\u7410\u7411\u7412\u7413\u7414\u7415\u7416\u7417\u7418\u7419\u741a\u741b\u741c\u741d\u741e\u741f\u7420\u7421\u7422\u7423\u7424\u7425\u7426\u7427\u7428\u7429\u742a\u742b\u742c\u742d\u742e\u742f\u7430\u7431\u7432\u7433\u7434\u7435\u7436\u7437\u7438\u7439\u743a\u743b\u743c\u743d\u743e\u743f\u7440\u7441\u7442\u7443\u7444\u7445\u7446\u7447\u7448\u7449\u744a\u744b\u744c\u744d\u74
4e\u744f\u7450\u7451\u7452\u7453\u7454\u7455\u7456\u7457\u7458\u7459\u745a\u745b\u745c\u745d\u745e\u745f\u7460\u7461\u7462\u7463\u7464\u7465\u7466\u7467\u7468\u7469\u746a\u746b\u746c\u746d\u746e\u746f\u7470\u7471\u7472\u7473\u7474\u7475\u7476\u7477\u7478\u7479\u747a\u747b\u747c\u747d\u747e\u747f\u7480\u7481\u7482\u7483\u7484\u7485\u7486\u7487\u7488\u7489\u748a\u748b\u748c\u748d\u748e\u748f\u7490\u7491\u7492\u7493\u7494\u7495\u7496\u7497\u7498\u7499\u749a\u749b\u749c\u749d\u749e\u749f\u74a0\u74a1\u74a2\u74a3\u74a4\u74a5\u74a6\u74a7\u74a8\u74a9\u74aa\u74ab\u74ac\u74ad\u74ae\u74af\u74b0\u74b1\u74b2\u74b3\u74b4\u74b5\u74b6\u74b7\u74b8\u74b9\u74ba\u74bb\u74bc\u74bd\u74be\u74bf\u74c0\u74c1\u74c2\u74c3\u74c4\u74c5\u74c6\u74c7\u74c8\u74c9\u74ca\u74cb\u74cc\u74cd\u74ce\u74cf\u74d0\u74d1\u74d2\u74d3\u74d4\u74d5\u74d6\u74d7\u74d8\u74d9\u74da\u74db\u74dc\u74dd\u74de\u74df\u74e0\u74e1\u74e2\u74e3\u74e4\u74e5\u74e6\u74e7\u74e8\u74e9\u74ea\u74eb\u74ec\u74ed\u74ee\u74ef\u74f0\u74f1\u74f2\u74f3\u74f4\u74f5\u74f6\u74f7\u74f8\u74f9\u74fa\u74fb\u74fc\u74fd\u74fe\u74ff\u7500\u7501\u7502\u7503\u7504\u7505\u7506\u7507\u7508\u7509\u750a\u750b\u750c\u750d\u750e\u750f\u7510\u7511\u7512\u7513\u7514\u7515\u7516\u7517\u7518\u7519\u751a\u751b\u751c\u751d\u751e\u751f\u7520\u7521\u7522\u7523\u7524\u7525\u7526\u7527\u7528\u7529\u752a\u752b\u752c\u752d\u752e\u752f\u7530\u7531\u7532\u7533\u7534\u7535\u7536\u7537\u7538\u7539\u753a\u753b\u753c\u753d\u753e\u753f\u7540\u7541\u7542\u7543\u7544\u7545\u7546\u7547\u7548\u7549\u754a\u754b\u754c\u754d\u754e\u754f\u7550\u7551\u7552\u7553\u7554\u7555\u7556\u7557\u7558\u7559\u755a\u755b\u755c\u755d\u755e\u755f\u7560\u7561\u7562\u7563\u7564\u7565\u7566\u7567\u7568\u7569\u756a\u756b\u756c\u756d\u756e\u756f\u7570\u7571\u7572\u7573\u7574\u7575\u7576\u7577\u7578\u7579\u757a\u757b\u757c\u757d\u757e\u757f\u7580\u7581\u7582\u7583\u7584\u7585\u7586\u7587\u7588\u7589\u758a\u758b\u758c\u758d\u758e\u758f\u7590\u7591\u7592\u7593\u7594\u7595\u7596\u7597\u7598\u7599\u759a\u759b\u759c\u759d\u759e\u759f\u75a0\u75a1\u75a2\u75a3\u75a4\u75a5\u75a6\u75a7\u75a8\u75a9\u75aa\u75ab\u75ac\u75ad\u75ae\u75af\u75b0\u75b1\u75b2\u75b3\u75b4\u75b5\u75b6\u75b7\u75b8\u75b9\u75ba\u75bb\u75bc\u75bd\u75be\u75bf\u75c0\u75c1\u75c2\u75c3\u75c4\u75c5\u75c6\u75c7\u75c8\u75c9\u75ca\u75cb\u75cc\u75cd\u75ce\u75cf\u75d0\u75d1\u75d2\u75d3\u75d4\u75d5\u75d6\u75d7\u75d8\u75d9\u75da\u75db\u75dc\u75dd\u75de\u75df\u75e0\u75e1\u75e2\u75e3\u75e4\u75e5\u75e6\u75e7\u75e8\u75e9\u75ea\u75eb\u75ec\u75ed\u75ee\u75ef\u75f0\u75f1\u75f2\u75f3\u75f4\u75f5\u75f6\u75f7\u75f8\u75f9\u75fa\u75fb\u75fc\u75fd\u75fe\u75ff\u7600\u7601\u7602\u7603\u7604\u7605\u7606\u7607\u7608\u7609\u760a\u760b\u760c\u760d\u760e\u760f\u7610\u7611\u7612\u7613\u7614\u7615\u7616\u7617\u7618\u7619\u761a\u761b\u761c\u761d\u761e\u761f\u7620\u7621\u7622\u7623\u7624\u7625\u7626\u7627\u7628\u7629\u762a\u762b\u762c\u762d\u762e\u762f\u7630\u7631\u7632\u7633\u7634\u7635\u7636\u7637\u7638\u7639\u763a\u763b\u763c\u763d\u763e\u763f\u7640\u7641\u7642\u7643\u7644\u7645\u7646\u7647\u7648\u7649\u764a\u764b\u764c\u764d\u764e\u764f\u7650\u7651\u7652\u7653\u7654\u7655\u7656\u7657\u7658\u7659\u765a\u765b\u765c\u765d\u765e\u765f\u7660\u7661\u7662\u7663\u7664\u7665\u7666\u7667\u7668\u7669\u766a\u766b\u766c\u766d\u766e\u766f\u7670\u7671\u7672\u7673\u7674\u7675\u7676\u7677\u7678\u7679\u767a\u767b\u767c\u767d\u767e\u767f\u7680\u7681\u7682\u7683\u7684\u7685\u7686\u7687\u7688\u7689\u768a\u768b\u768c\u768d\u768e\u768f\u7690\u7691\u7692\u7693\u7694\u7695\u7696\u7697\u7698\u7699\u769a\u769b\u769c\u769d\u769e\
u769f\u76a0\u76a1\u76a2\u76a3\u76a4\u76a5\u76a6\u76a7\u76a8\u76a9\u76aa\u76ab\u76ac\u76ad\u76ae\u76af\u76b0\u76b1\u76b2\u76b3\u76b4\u76b5\u76b6\u76b7\u76b8\u76b9\u76ba\u76bb\u76bc\u76bd\u76be\u76bf\u76c0\u76c1\u76c2\u76c3\u76c4\u76c5\u76c6\u76c7\u76c8\u76c9\u76ca\u76cb\u76cc\u76cd\u76ce\u76cf\u76d0\u76d1\u76d2\u76d3\u76d4\u76d5\u76d6\u76d7\u76d8\u76d9\u76da\u76db\u76dc\u76dd\u76de\u76df\u76e0\u76e1\u76e2\u76e3\u76e4\u76e5\u76e6\u76e7\u76e8\u76e9\u76ea\u76eb\u76ec\u76ed\u76ee\u76ef\u76f0\u76f1\u76f2\u76f3\u76f4\u76f5\u76f6\u76f7\u76f8\u76f9\u76fa\u76fb\u76fc\u76fd\u76fe\u76ff\u7700\u7701\u7702\u7703\u7704\u7705\u7706\u7707\u7708\u7709\u770a\u770b\u770c\u770d\u770e\u770f\u7710\u7711\u7712\u7713\u7714\u7715\u7716\u7717\u7718\u7719\u771a\u771b\u771c\u771d\u771e\u771f\u7720\u7721\u7722\u7723\u7724\u7725\u7726\u7727\u7728\u7729\u772a\u772b\u772c\u772d\u772e\u772f\u7730\u7731\u7732\u7733\u7734\u7735\u7736\u7737\u7738\u7739\u773a\u773b\u773c\u773d\u773e\u773f\u7740\u7741\u7742\u7743\u7744\u7745\u7746\u7747\u7748\u7749\u774a\u774b\u774c\u774d\u774e\u774f\u7750\u7751\u7752\u7753\u7754\u7755\u7756\u7757\u7758\u7759\u775a\u775b\u775c\u775d\u775e\u775f\u7760\u7761\u7762\u7763\u7764\u7765\u7766\u7767\u7768\u7769\u776a\u776b\u776c\u776d\u776e\u776f\u7770\u7771\u7772\u7773\u7774\u7775\u7776\u7777\u7778\u7779\u777a\u777b\u777c\u777d\u777e\u777f\u7780\u7781\u7782\u7783\u7784\u7785\u7786\u7787\u7788\u7789\u778a\u778b\u778c\u778d\u778e\u778f\u7790\u7791\u7792\u7793\u7794\u7795\u7796\u7797\u7798\u7799\u779a\u779b\u779c\u779d\u779e\u779f\u77a0\u77a1\u77a2\u77a3\u77a4\u77a5\u77a6\u77a7\u77a8\u77a9\u77aa\u77ab\u77ac\u77ad\u77ae\u77af\u77b0\u77b1\u77b2\u77b3\u77b4\u77b5\u77b6\u77b7\u77b8\u77b9\u77ba\u77bb\u77bc\u77bd\u77be\u77bf\u77c0\u77c1\u77c2\u77c3\u77c4\u77c5\u77c6\u77c7\u77c8\u77c9\u77ca\u77cb\u77cc\u77cd\u77ce\u77cf\u77d0\u77d1\u77d2\u77d3\u77d4\u77d5\u77d6\u77d7\u77d8\u77d9\u77da\u77db\u77dc\u77dd\u77de\u77df\u77e0\u77e1\u77e2\u77e3\u77e4\u77e5\u77e6\u77e7\u77e8\u77e9\u77ea\u77eb\u77ec\u77ed\u77ee\u77ef\u77f0\u77f1\u77f2\u77f3\u77f4\u77f5\u77f6\u77f7\u77f8\u77f9\u77fa\u77fb\u77fc\u77fd\u77fe\u77ff\u7800\u7801\u7802\u7803\u7804\u7805\u7806\u7807\u7808\u7809\u780a\u780b\u780c\u780d\u780e\u780f\u7810\u7811\u7812\u7813\u7814\u7815\u7816\u7817\u7818\u7819\u781a\u781b\u781c\u781d\u781e\u781f\u7820\u7821\u7822\u7823\u7824\u7825\u7826\u7827\u7828\u7829\u782a\u782b\u782c\u782d\u782e\u782f\u7830\u7831\u7832\u7833\u7834\u7835\u7836\u7837\u7838\u7839\u783a\u783b\u783c\u783d\u783e\u783f\u7840\u7841\u7842\u7843\u7844\u7845\u7846\u7847\u7848\u7849\u784a\u784b\u784c\u784d\u784e\u784f\u7850\u7851\u7852\u7853\u7854\u7855\u7856\u7857\u7858\u7859\u785a\u785b\u785c\u785d\u785e\u785f\u7860\u7861\u7862\u7863\u7864\u7865\u7866\u7867\u7868\u7869\u786a\u786b\u786c\u786d\u786e\u786f\u7870\u7871\u7872\u7873\u7874\u7875\u7876\u7877\u7878\u7879\u787a\u787b\u787c\u787d\u787e\u787f\u7880\u7881\u7882\u7883\u7884\u7885\u7886\u7887\u7888\u7889\u788a\u788b\u788c\u788d\u788e\u788f\u7890\u7891\u7892\u7893\u7894\u7895\u7896\u7897\u7898\u7899\u789a\u789b\u789c\u789d\u789e\u789f\u78a0\u78a1\u78a2\u78a3\u78a4\u78a5\u78a6\u78a7\u78a8\u78a9\u78aa\u78ab\u78ac\u78ad\u78ae\u78af\u78b0\u78b1\u78b2\u78b3\u78b4\u78b5\u78b6\u78b7\u78b8\u78b9\u78ba\u78bb\u78bc\u78bd\u78be\u78bf\u78c0\u78c1\u78c2\u78c3\u78c4\u78c5\u78c6\u78c7\u78c8\u78c9\u78ca\u78cb\u78cc\u78cd\u78ce\u78cf\u78d0\u78d1\u78d2\u78d3\u78d4\u78d5\u78d6\u78d7\u78d8\u78d9\u78da\u78db\u78dc\u78dd\u78de\u78df\u78e0\u78e1\u78e2\u78e3\u78e4\u78e5\u78e6\u78e7\u78e8\u78e9\u78ea\u78eb\u78ec\u78ed\u78ee\u78
ef\u78f0\u78f1\u78f2\u78f3\u78f4\u78f5\u78f6\u78f7\u78f8\u78f9\u78fa\u78fb\u78fc\u78fd\u78fe\u78ff\u7900\u7901\u7902\u7903\u7904\u7905\u7906\u7907\u7908\u7909\u790a\u790b\u790c\u790d\u790e\u790f\u7910\u7911\u7912\u7913\u7914\u7915\u7916\u7917\u7918\u7919\u791a\u791b\u791c\u791d\u791e\u791f\u7920\u7921\u7922\u7923\u7924\u7925\u7926\u7927\u7928\u7929\u792a\u792b\u792c\u792d\u792e\u792f\u7930\u7931\u7932\u7933\u7934\u7935\u7936\u7937\u7938\u7939\u793a\u793b\u793c\u793d\u793e\u793f\u7940\u7941\u7942\u7943\u7944\u7945\u7946\u7947\u7948\u7949\u794a\u794b\u794c\u794d\u794e\u794f\u7950\u7951\u7952\u7953\u7954\u7955\u7956\u7957\u7958\u7959\u795a\u795b\u795c\u795d\u795e\u795f\u7960\u7961\u7962\u7963\u7964\u7965\u7966\u7967\u7968\u7969\u796a\u796b\u796c\u796d\u796e\u796f\u7970\u7971\u7972\u7973\u7974\u7975\u7976\u7977\u7978\u7979\u797a\u797b\u797c\u797d\u797e\u797f\u7980\u7981\u7982\u7983\u7984\u7985\u7986\u7987\u7988\u7989\u798a\u798b\u798c\u798d\u798e\u798f\u7990\u7991\u7992\u7993\u7994\u7995\u7996\u7997\u7998\u7999\u799a\u799b\u799c\u799d\u799e\u799f\u79a0\u79a1\u79a2\u79a3\u79a4\u79a5\u79a6\u79a7\u79a8\u79a9\u79aa\u79ab\u79ac\u79ad\u79ae\u79af\u79b0\u79b1\u79b2\u79b3\u79b4\u79b5\u79b6\u79b7\u79b8\u79b9\u79ba\u79bb\u79bc\u79bd\u79be\u79bf\u79c0\u79c1\u79c2\u79c3\u79c4\u79c5\u79c6\u79c7\u79c8\u79c9\u79ca\u79cb\u79cc\u79cd\u79ce\u79cf\u79d0\u79d1\u79d2\u79d3\u79d4\u79d5\u79d6\u79d7\u79d8\u79d9\u79da\u79db\u79dc\u79dd\u79de\u79df\u79e0\u79e1\u79e2\u79e3\u79e4\u79e5\u79e6\u79e7\u79e8\u79e9\u79ea\u79eb\u79ec\u79ed\u79ee\u79ef\u79f0\u79f1\u79f2\u79f3\u79f4\u79f5\u79f6\u79f7\u79f8\u79f9\u79fa\u79fb\u79fc\u79fd\u79fe\u79ff\u7a00\u7a01\u7a02\u7a03\u7a04\u7a05\u7a06\u7a07\u7a08\u7a09\u7a0a\u7a0b\u7a0c\u7a0d\u7a0e\u7a0f\u7a10\u7a11\u7a12\u7a13\u7a14\u7a15\u7a16\u7a17\u7a18\u7a19\u7a1a\u7a1b\u7a1c\u7a1d\u7a1e\u7a1f\u7a20\u7a21\u7a22\u7a23\u7a24\u7a25\u7a26\u7a27\u7a28\u7a29\u7a2a\u7a2b\u7a2c\u7a2d\u7a2e\u7a2f\u7a30\u7a31\u7a32\u7a33\u7a34\u7a35\u7a36\u7a37\u7a38\u7a39\u7a3a\u7a3b\u7a3c\u7a3d\u7a3e\u7a3f\u7a40\u7a41\u7a42\u7a43\u7a44\u7a45\u7a46\u7a47\u7a48\u7a49\u7a4a\u7a4b\u7a4c\u7a4d\u7a4e\u7a4f\u7a50\u7a51\u7a52\u7a53\u7a54\u7a55\u7a56\u7a57\u7a58\u7a59\u7a5a\u7a5b\u7a5c\u7a5d\u7a5e\u7a5f\u7a60\u7a61\u7a62\u7a63\u7a64\u7a65\u7a66\u7a67\u7a68\u7a69\u7a6a\u7a6b\u7a6c\u7a6d\u7a6e\u7a6f\u7a70\u7a71\u7a72\u7a73\u7a74\u7a75\u7a76\u7a77\u7a78\u7a79\u7a7a\u7a7b\u7a7c\u7a7d\u7a7e\u7a7f\u7a80\u7a81\u7a82\u7a83\u7a84\u7a85\u7a86\u7a87\u7a88\u7a89\u7a8a\u7a8b\u7a8c\u7a8d\u7a8e\u7a8f\u7a90\u7a91\u7a92\u7a93\u7a94\u7a95\u7a96\u7a97\u7a98\u7a99\u7a9a\u7a9b\u7a9c\u7a9d\u7a9e\u7a9f\u7aa0\u7aa1\u7aa2\u7aa3\u7aa4\u7aa5\u7aa6\u7aa7\u7aa8\u7aa9\u7aaa\u7aab\u7aac\u7aad\u7aae\u7aaf\u7ab0\u7ab1\u7ab2\u7ab3\u7ab4\u7ab5\u7ab6\u7ab7\u7ab8\u7ab9\u7aba\u7abb\u7abc\u7abd\u7abe\u7abf\u7ac0\u7ac1\u7ac2\u7ac3\u7ac4\u7ac5\u7ac6\u7ac7\u7ac8\u7ac9\u7aca\u7acb\u7acc\u7acd\u7ace\u7acf\u7ad0\u7ad1\u7ad2\u7ad3\u7ad4\u7ad5\u7ad6\u7ad7\u7ad8\u7ad9\u7ada\u7adb\u7adc\u7add\u7ade\u7adf\u7ae0\u7ae1\u7ae2\u7ae3\u7ae4\u7ae5\u7ae6\u7ae7\u7ae8\u7ae9\u7aea\u7aeb\u7aec\u7aed\u7aee\u7aef\u7af0\u7af1\u7af2\u7af3\u7af4\u7af5\u7af6\u7af7\u7af8\u7af9\u7afa\u7afb\u7afc\u7afd\u7afe\u7aff\u7b00\u7b01\u7b02\u7b03\u7b04\u7b05\u7b06\u7b07\u7b08\u7b09\u7b0a\u7b0b\u7b0c\u7b0d\u7b0e\u7b0f\u7b10\u7b11\u7b12\u7b13\u7b14\u7b15\u7b16\u7b17\u7b18\u7b19\u7b1a\u7b1b\u7b1c\u7b1d\u7b1e\u7b1f\u7b20\u7b21\u7b22\u7b23\u7b24\u7b25\u7b26\u7b27\u7b28\u7b29\u7b2a\u7b2b\u7b2c\u7b2d\u7b2e\u7b2f\u7b30\u7b31\u7b32\u7b33\u7b34\u7b35\u7b36\u7b37\u7b38\u7b39\u7b3a\u7b3b\u7b3c\u7b3d\u7b3e\u7b3f\
u7b40\u7b41\u7b42\u7b43\u7b44\u7b45\u7b46\u7b47\u7b48\u7b49\u7b4a\u7b4b\u7b4c\u7b4d\u7b4e\u7b4f\u7b50\u7b51\u7b52\u7b53\u7b54\u7b55\u7b56\u7b57\u7b58\u7b59\u7b5a\u7b5b\u7b5c\u7b5d\u7b5e\u7b5f\u7b60\u7b61\u7b62\u7b63\u7b64\u7b65\u7b66\u7b67\u7b68\u7b69\u7b6a\u7b6b\u7b6c\u7b6d\u7b6e\u7b6f\u7b70\u7b71\u7b72\u7b73\u7b74\u7b75\u7b76\u7b77\u7b78\u7b79\u7b7a\u7b7b\u7b7c\u7b7d\u7b7e\u7b7f\u7b80\u7b81\u7b82\u7b83\u7b84\u7b85\u7b86\u7b87\u7b88\u7b89\u7b8a\u7b8b\u7b8c\u7b8d\u7b8e\u7b8f\u7b90\u7b91\u7b92\u7b93\u7b94\u7b95\u7b96\u7b97\u7b98\u7b99\u7b9a\u7b9b\u7b9c\u7b9d\u7b9e\u7b9f\u7ba0\u7ba1\u7ba2\u7ba3\u7ba4\u7ba5\u7ba6\u7ba7\u7ba8\u7ba9\u7baa\u7bab\u7bac\u7bad\u7bae\u7baf\u7bb0\u7bb1\u7bb2\u7bb3\u7bb4\u7bb5\u7bb6\u7bb7\u7bb8\u7bb9\u7bba\u7bbb\u7bbc\u7bbd\u7bbe\u7bbf\u7bc0\u7bc1\u7bc2\u7bc3\u7bc4\u7bc5\u7bc6\u7bc7\u7bc8\u7bc9\u7bca\u7bcb\u7bcc\u7bcd\u7bce\u7bcf\u7bd0\u7bd1\u7bd2\u7bd3\u7bd4\u7bd5\u7bd6\u7bd7\u7bd8\u7bd9\u7bda\u7bdb\u7bdc\u7bdd\u7bde\u7bdf\u7be0\u7be1\u7be2\u7be3\u7be4\u7be5\u7be6\u7be7\u7be8\u7be9\u7bea\u7beb\u7bec\u7bed\u7bee\u7bef\u7bf0\u7bf1\u7bf2\u7bf3\u7bf4\u7bf5\u7bf6\u7bf7\u7bf8\u7bf9\u7bfa\u7bfb\u7bfc\u7bfd\u7bfe\u7bff\u7c00\u7c01\u7c02\u7c03\u7c04\u7c05\u7c06\u7c07\u7c08\u7c09\u7c0a\u7c0b\u7c0c\u7c0d\u7c0e\u7c0f\u7c10\u7c11\u7c12\u7c13\u7c14\u7c15\u7c16\u7c17\u7c18\u7c19\u7c1a\u7c1b\u7c1c\u7c1d\u7c1e\u7c1f\u7c20\u7c21\u7c22\u7c23\u7c24\u7c25\u7c26\u7c27\u7c28\u7c29\u7c2a\u7c2b\u7c2c\u7c2d\u7c2e\u7c2f\u7c30\u7c31\u7c32\u7c33\u7c34\u7c35\u7c36\u7c37\u7c38\u7c39\u7c3a\u7c3b\u7c3c\u7c3d\u7c3e\u7c3f\u7c40\u7c41\u7c42\u7c43\u7c44\u7c45\u7c46\u7c47\u7c48\u7c49\u7c4a\u7c4b\u7c4c\u7c4d\u7c4e\u7c4f\u7c50\u7c51\u7c52\u7c53\u7c54\u7c55\u7c56\u7c57\u7c58\u7c59\u7c5a\u7c5b\u7c5c\u7c5d\u7c5e\u7c5f\u7c60\u7c61\u7c62\u7c63\u7c64\u7c65\u7c66\u7c67\u7c68\u7c69\u7c6a\u7c6b\u7c6c\u7c6d\u7c6e\u7c6f\u7c70\u7c71\u7c72\u7c73\u7c74\u7c75\u7c76\u7c77\u7c78\u7c79\u7c7a\u7c7b\u7c7c\u7c7d\u7c7e\u7c7f\u7c80\u7c81\u7c82\u7c83\u7c84\u7c85\u7c86\u7c87\u7c88\u7c89\u7c8a\u7c8b\u7c8c\u7c8d\u7c8e\u7c8f\u7c90\u7c91\u7c92\u7c93\u7c94\u7c95\u7c96\u7c97\u7c98\u7c99\u7c9a\u7c9b\u7c9c\u7c9d\u7c9e\u7c9f\u7ca0\u7ca1\u7ca2\u7ca3\u7ca4\u7ca5\u7ca6\u7ca7\u7ca8\u7ca9\u7caa\u7cab\u7cac\u7cad\u7cae\u7caf\u7cb0\u7cb1\u7cb2\u7cb3\u7cb4\u7cb5\u7cb6\u7cb7\u7cb8\u7cb9\u7cba\u7cbb\u7cbc\u7cbd\u7cbe\u7cbf\u7cc0\u7cc1\u7cc2\u7cc3\u7cc4\u7cc5\u7cc6\u7cc7\u7cc8\u7cc9\u7cca\u7ccb\u7ccc\u7ccd\u7cce\u7ccf\u7cd0\u7cd1\u7cd2\u7cd3\u7cd4\u7cd5\u7cd6\u7cd7\u7cd8\u7cd9\u7cda\u7cdb\u7cdc\u7cdd\u7cde\u7cdf\u7ce0\u7ce1\u7ce2\u7ce3\u7ce4\u7ce5\u7ce6\u7ce7\u7ce8\u7ce9\u7cea\u7ceb\u7cec\u7ced\u7cee\u7cef\u7cf0\u7cf1\u7cf2\u7cf3\u7cf4\u7cf5\u7cf6\u7cf7\u7cf8\u7cf9\u7cfa\u7cfb\u7cfc\u7cfd\u7cfe\u7cff\u7d00\u7d01\u7d02\u7d03\u7d04\u7d05\u7d06\u7d07\u7d08\u7d09\u7d0a\u7d0b\u7d0c\u7d0d\u7d0e\u7d0f\u7d10\u7d11\u7d12\u7d13\u7d14\u7d15\u7d16\u7d17\u7d18\u7d19\u7d1a\u7d1b\u7d1c\u7d1d\u7d1e\u7d1f\u7d20\u7d21\u7d22\u7d23\u7d24\u7d25\u7d26\u7d27\u7d28\u7d29\u7d2a\u7d2b\u7d2c\u7d2d\u7d2e\u7d2f\u7d30\u7d31\u7d32\u7d33\u7d34\u7d35\u7d36\u7d37\u7d38\u7d39\u7d3a\u7d3b\u7d3c\u7d3d\u7d3e\u7d3f\u7d40\u7d41\u7d42\u7d43\u7d44\u7d45\u7d46\u7d47\u7d48\u7d49\u7d4a\u7d4b\u7d4c\u7d4d\u7d4e\u7d4f\u7d50\u7d51\u7d52\u7d53\u7d54\u7d55\u7d56\u7d57\u7d58\u7d59\u7d5a\u7d5b\u7d5c\u7d5d\u7d5e\u7d5f\u7d60\u7d61\u7d62\u7d63\u7d64\u7d65\u7d66\u7d67\u7d68\u7d69\u7d6a\u7d6b\u7d6c\u7d6d\u7d6e\u7d6f\u7d70\u7d71\u7d72\u7d73\u7d74\u7d75\u7d76\u7d77\u7d78\u7d79\u7d7a\u7d7b\u7d7c\u7d7d\u7d7e\u7d7f\u7d80\u7d81\u7d82\u7d83\u7d84\u7d85\u7d86\u7d87\u7d88\u7d89\u7d8a\u7d8b\u7d8c\u7d8d\u7d8e\u7d8f\u7d
90\u7d91\u7d92\u7d93\u7d94\u7d95\u7d96\u7d97\u7d98\u7d99\u7d9a\u7d9b\u7d9c\u7d9d\u7d9e\u7d9f\u7da0\u7da1\u7da2\u7da3\u7da4\u7da5\u7da6\u7da7\u7da8\u7da9\u7daa\u7dab\u7dac\u7dad\u7dae\u7daf\u7db0\u7db1\u7db2\u7db3\u7db4\u7db5\u7db6\u7db7\u7db8\u7db9\u7dba\u7dbb\u7dbc\u7dbd\u7dbe\u7dbf\u7dc0\u7dc1\u7dc2\u7dc3\u7dc4\u7dc5\u7dc6\u7dc7\u7dc8\u7dc9\u7dca\u7dcb\u7dcc\u7dcd\u7dce\u7dcf\u7dd0\u7dd1\u7dd2\u7dd3\u7dd4\u7dd5\u7dd6\u7dd7\u7dd8\u7dd9\u7dda\u7ddb\u7ddc\u7ddd\u7dde\u7ddf\u7de0\u7de1\u7de2\u7de3\u7de4\u7de5\u7de6\u7de7\u7de8\u7de9\u7dea\u7deb\u7dec\u7ded\u7dee\u7def\u7df0\u7df1\u7df2\u7df3\u7df4\u7df5\u7df6\u7df7\u7df8\u7df9\u7dfa\u7dfb\u7dfc\u7dfd\u7dfe\u7dff\u7e00\u7e01\u7e02\u7e03\u7e04\u7e05\u7e06\u7e07\u7e08\u7e09\u7e0a\u7e0b\u7e0c\u7e0d\u7e0e\u7e0f\u7e10\u7e11\u7e12\u7e13\u7e14\u7e15\u7e16\u7e17\u7e18\u7e19\u7e1a\u7e1b\u7e1c\u7e1d\u7e1e\u7e1f\u7e20\u7e21\u7e22\u7e23\u7e24\u7e25\u7e26\u7e27\u7e28\u7e29\u7e2a\u7e2b\u7e2c\u7e2d\u7e2e\u7e2f\u7e30\u7e31\u7e32\u7e33\u7e34\u7e35\u7e36\u7e37\u7e38\u7e39\u7e3a\u7e3b\u7e3c\u7e3d\u7e3e\u7e3f\u7e40\u7e41\u7e42\u7e43\u7e44\u7e45\u7e46\u7e47\u7e48\u7e49\u7e4a\u7e4b\u7e4c\u7e4d\u7e4e\u7e4f\u7e50\u7e51\u7e52\u7e53\u7e54\u7e55\u7e56\u7e57\u7e58\u7e59\u7e5a\u7e5b\u7e5c\u7e5d\u7e5e\u7e5f\u7e60\u7e61\u7e62\u7e63\u7e64\u7e65\u7e66\u7e67\u7e68\u7e69\u7e6a\u7e6b\u7e6c\u7e6d\u7e6e\u7e6f\u7e70\u7e71\u7e72\u7e73\u7e74\u7e75\u7e76\u7e77\u7e78\u7e79\u7e7a\u7e7b\u7e7c\u7e7d\u7e7e\u7e7f\u7e80\u7e81\u7e82\u7e83\u7e84\u7e85\u7e86\u7e87\u7e88\u7e89\u7e8a\u7e8b\u7e8c\u7e8d\u7e8e\u7e8f\u7e90\u7e91\u7e92\u7e93\u7e94\u7e95\u7e96\u7e97\u7e98\u7e99\u7e9a\u7e9b\u7e9c\u7e9d\u7e9e\u7e9f\u7ea0\u7ea1\u7ea2\u7ea3\u7ea4\u7ea5\u7ea6\u7ea7\u7ea8\u7ea9\u7eaa\u7eab\u7eac\u7ead\u7eae\u7eaf\u7eb0\u7eb1\u7eb2\u7eb3\u7eb4\u7eb5\u7eb6\u7eb7\u7eb8\u7eb9\u7eba\u7ebb\u7ebc\u7ebd\u7ebe\u7ebf\u7ec0\u7ec1\u7ec2\u7ec3\u7ec4\u7ec5\u7ec6\u7ec7\u7ec8\u7ec9\u7eca\u7ecb\u7ecc\u7ecd\u7ece\u7ecf\u7ed0\u7ed1\u7ed2\u7ed3\u7ed4\u7ed5\u7ed6\u7ed7\u7ed8\u7ed9\u7eda\u7edb\u7edc\u7edd\u7ede\u7edf\u7ee0\u7ee1\u7ee2\u7ee3\u7ee4\u7ee5\u7ee6\u7ee7\u7ee8\u7ee9\u7eea\u7eeb\u7eec\u7eed\u7eee\u7eef\u7ef0\u7ef1\u7ef2\u7ef3\u7ef4\u7ef5\u7ef6\u7ef7\u7ef8\u7ef9\u7efa\u7efb\u7efc\u7efd\u7efe\u7eff\u7f00\u7f01\u7f02\u7f03\u7f04\u7f05\u7f06\u7f07\u7f08\u7f09\u7f0a\u7f0b\u7f0c\u7f0d\u7f0e\u7f0f\u7f10\u7f11\u7f12\u7f13\u7f14\u7f15\u7f16\u7f17\u7f18\u7f19\u7f1a\u7f1b\u7f1c\u7f1d\u7f1e\u7f1f\u7f20\u7f21\u7f22\u7f23\u7f24\u7f25\u7f26\u7f27\u7f28\u7f29\u7f2a\u7f2b\u7f2c\u7f2d\u7f2e\u7f2f\u7f30\u7f31\u7f32\u7f33\u7f34\u7f35\u7f36\u7f37\u7f38\u7f39\u7f3a\u7f3b\u7f3c\u7f3d\u7f3e\u7f3f\u7f40\u7f41\u7f42\u7f43\u7f44\u7f45\u7f46\u7f47\u7f48\u7f49\u7f4a\u7f4b\u7f4c\u7f4d\u7f4e\u7f4f\u7f50\u7f51\u7f52\u7f53\u7f54\u7f55\u7f56\u7f57\u7f58\u7f59\u7f5a\u7f5b\u7f5c\u7f5d\u7f5e\u7f5f\u7f60\u7f61\u7f62\u7f63\u7f64\u7f65\u7f66\u7f67\u7f68\u7f69\u7f6a\u7f6b\u7f6c\u7f6d\u7f6e\u7f6f\u7f70\u7f71\u7f72\u7f73\u7f74\u7f75\u7f76\u7f77\u7f78\u7f79\u7f7a\u7f7b\u7f7c\u7f7d\u7f7e\u7f7f\u7f80\u7f81\u7f82\u7f83\u7f84\u7f85\u7f86\u7f87\u7f88\u7f89\u7f8a\u7f8b\u7f8c\u7f8d\u7f8e\u7f8f\u7f90\u7f91\u7f92\u7f93\u7f94\u7f95\u7f96\u7f97\u7f98\u7f99\u7f9a\u7f9b\u7f9c\u7f9d\u7f9e\u7f9f\u7fa0\u7fa1\u7fa2\u7fa3\u7fa4\u7fa5\u7fa6\u7fa7\u7fa8\u7fa9\u7faa\u7fab\u7fac\u7fad\u7fae\u7faf\u7fb0\u7fb1\u7fb2\u7fb3\u7fb4\u7fb5\u7fb6\u7fb7\u7fb8\u7fb9\u7fba\u7fbb\u7fbc\u7fbd\u7fbe\u7fbf\u7fc0\u7fc1\u7fc2\u7fc3\u7fc4\u7fc5\u7fc6\u7fc7\u7fc8\u7fc9\u7fca\u7fcb\u7fcc\u7fcd\u7fce\u7fcf\u7fd0\u7fd1\u7fd2\u7fd3\u7fd4\u7fd5\u7fd6\u7fd7\u7fd8\u7fd9\u7fda\u7fdb\u7fdc\u7fdd\u7fde\u7fdf\u7fe0\
u7fe1\u7fe2\u7fe3\u7fe4\u7fe5\u7fe6\u7fe7\u7fe8\u7fe9\u7fea\u7feb\u7fec\u7fed\u7fee\u7fef\u7ff0\u7ff1\u7ff2\u7ff3\u7ff4\u7ff5\u7ff6\u7ff7\u7ff8\u7ff9\u7ffa\u7ffb\u7ffc\u7ffd\u7ffe\u7fff\u8000\u8001\u8002\u8003\u8004\u8005\u8006\u8007\u8008\u8009\u800a\u800b\u800c\u800d\u800e\u800f\u8010\u8011\u8012\u8013\u8014\u8015\u8016\u8017\u8018\u8019\u801a\u801b\u801c\u801d\u801e\u801f\u8020\u8021\u8022\u8023\u8024\u8025\u8026\u8027\u8028\u8029\u802a\u802b\u802c\u802d\u802e\u802f\u8030\u8031\u8032\u8033\u8034\u8035\u8036\u8037\u8038\u8039\u803a\u803b\u803c\u803d\u803e\u803f\u8040\u8041\u8042\u8043\u8044\u8045\u8046\u8047\u8048\u8049\u804a\u804b\u804c\u804d\u804e\u804f\u8050\u8051\u8052\u8053\u8054\u8055\u8056\u8057\u8058\u8059\u805a\u805b\u805c\u805d\u805e\u805f\u8060\u8061\u8062\u8063\u8064\u8065\u8066\u8067\u8068\u8069\u806a\u806b\u806c\u806d\u806e\u806f\u8070\u8071\u8072\u8073\u8074\u8075\u8076\u8077\u8078\u8079\u807a\u807b\u807c\u807d\u807e\u807f\u8080\u8081\u8082\u8083\u8084\u8085\u8086\u8087\u8088\u8089\u808a\u808b\u808c\u808d\u808e\u808f\u8090\u8091\u8092\u8093\u8094\u8095\u8096\u8097\u8098\u8099\u809a\u809b\u809c\u809d\u809e\u809f\u80a0\u80a1\u80a2\u80a3\u80a4\u80a5\u80a6\u80a7\u80a8\u80a9\u80aa\u80ab\u80ac\u80ad\u80ae\u80af\u80b0\u80b1\u80b2\u80b3\u80b4\u80b5\u80b6\u80b7\u80b8\u80b9\u80ba\u80bb\u80bc\u80bd\u80be\u80bf\u80c0\u80c1\u80c2\u80c3\u80c4\u80c5\u80c6\u80c7\u80c8\u80c9\u80ca\u80cb\u80cc\u80cd\u80ce\u80cf\u80d0\u80d1\u80d2\u80d3\u80d4\u80d5\u80d6\u80d7\u80d8\u80d9\u80da\u80db\u80dc\u80dd\u80de\u80df\u80e0\u80e1\u80e2\u80e3\u80e4\u80e5\u80e6\u80e7\u80e8\u80e9\u80ea\u80eb\u80ec\u80ed\u80ee\u80ef\u80f0\u80f1\u80f2\u80f3\u80f4\u80f5\u80f6\u80f7\u80f8\u80f9\u80fa\u80fb\u80fc\u80fd\u80fe\u80ff\u8100\u8101\u8102\u8103\u8104\u8105\u8106\u8107\u8108\u8109\u810a\u810b\u810c\u810d\u810e\u810f\u8110\u8111\u8112\u8113\u8114\u8115\u8116\u8117\u8118\u8119\u811a\u811b\u811c\u811d\u811e\u811f\u8120\u8121\u8122\u8123\u8124\u8125\u8126\u8127\u8128\u8129\u812a\u812b\u812c\u812d\u812e\u812f\u8130\u8131\u8132\u8133\u8134\u8135\u8136\u8137\u8138\u8139\u813a\u813b\u813c\u813d\u813e\u813f\u8140\u8141\u8142\u8143\u8144\u8145\u8146\u8147\u8148\u8149\u814a\u814b\u814c\u814d\u814e\u814f\u8150\u8151\u8152\u8153\u8154\u8155\u8156\u8157\u8158\u8159\u815a\u815b\u815c\u815d\u815e\u815f\u8160\u8161\u8162\u8163\u8164\u8165\u8166\u8167\u8168\u8169\u816a\u816b\u816c\u816d\u816e\u816f\u8170\u8171\u8172\u8173\u8174\u8175\u8176\u8177\u8178\u8179\u817a\u817b\u817c\u817d\u817e\u817f\u8180\u8181\u8182\u8183\u8184\u8185\u8186\u8187\u8188\u8189\u818a\u818b\u818c\u818d\u818e\u818f\u8190\u8191\u8192\u8193\u8194\u8195\u8196\u8197\u8198\u8199\u819a\u819b\u819c\u819d\u819e\u819f\u81a0\u81a1\u81a2\u81a3\u81a4\u81a5\u81a6\u81a7\u81a8\u81a9\u81aa\u81ab\u81ac\u81ad\u81ae\u81af\u81b0\u81b1\u81b2\u81b3\u81b4\u81b5\u81b6\u81b7\u81b8\u81b9\u81ba\u81bb\u81bc\u81bd\u81be\u81bf\u81c0\u81c1\u81c2\u81c3\u81c4\u81c5\u81c6\u81c7\u81c8\u81c9\u81ca\u81cb\u81cc\u81cd\u81ce\u81cf\u81d0\u81d1\u81d2\u81d3\u81d4\u81d5\u81d6\u81d7\u81d8\u81d9\u81da\u81db\u81dc\u81dd\u81de\u81df\u81e0\u81e1\u81e2\u81e3\u81e4\u81e5\u81e6\u81e7\u81e8\u81e9\u81ea\u81eb\u81ec\u81ed\u81ee\u81ef\u81f0\u81f1\u81f2\u81f3\u81f4\u81f5\u81f6\u81f7\u81f8\u81f9\u81fa\u81fb\u81fc\u81fd\u81fe\u81ff\u8200\u8201\u8202\u8203\u8204\u8205\u8206\u8207\u8208\u8209\u820a\u820b\u820c\u820d\u820e\u820f\u8210\u8211\u8212\u8213\u8214\u8215\u8216\u8217\u8218\u8219\u821a\u821b\u821c\u821d\u821e\u821f\u8220\u8221\u8222\u8223\u8224\u8225\u8226\u8227\u8228\u8229\u822a\u822b\u822c\u822d\u822e\u822f\u8230\u82
31\u8232\u8233\u8234\u8235\u8236\u8237\u8238\u8239\u823a\u823b\u823c\u823d\u823e\u823f\u8240\u8241\u8242\u8243\u8244\u8245\u8246\u8247\u8248\u8249\u824a\u824b\u824c\u824d\u824e\u824f\u8250\u8251\u8252\u8253\u8254\u8255\u8256\u8257\u8258\u8259\u825a\u825b\u825c\u825d\u825e\u825f\u8260\u8261\u8262\u8263\u8264\u8265\u8266\u8267\u8268\u8269\u826a\u826b\u826c\u826d\u826e\u826f\u8270\u8271\u8272\u8273\u8274\u8275\u8276\u8277\u8278\u8279\u827a\u827b\u827c\u827d\u827e\u827f\u8280\u8281\u8282\u8283\u8284\u8285\u8286\u8287\u8288\u8289\u828a\u828b\u828c\u828d\u828e\u828f\u8290\u8291\u8292\u8293\u8294\u8295\u8296\u8297\u8298\u8299\u829a\u829b\u829c\u829d\u829e\u829f\u82a0\u82a1\u82a2\u82a3\u82a4\u82a5\u82a6\u82a7\u82a8\u82a9\u82aa\u82ab\u82ac\u82ad\u82ae\u82af\u82b0\u82b1\u82b2\u82b3\u82b4\u82b5\u82b6\u82b7\u82b8\u82b9\u82ba\u82bb\u82bc\u82bd\u82be\u82bf\u82c0\u82c1\u82c2\u82c3\u82c4\u82c5\u82c6\u82c7\u82c8\u82c9\u82ca\u82cb\u82cc\u82cd\u82ce\u82cf\u82d0\u82d1\u82d2\u82d3\u82d4\u82d5\u82d6\u82d7\u82d8\u82d9\u82da\u82db\u82dc\u82dd\u82de\u82df\u82e0\u82e1\u82e2\u82e3\u82e4\u82e5\u82e6\u82e7\u82e8\u82e9\u82ea\u82eb\u82ec\u82ed\u82ee\u82ef\u82f0\u82f1\u82f2\u82f3\u82f4\u82f5\u82f6\u82f7\u82f8\u82f9\u82fa\u82fb\u82fc\u82fd\u82fe\u82ff\u8300\u8301\u8302\u8303\u8304\u8305\u8306\u8307\u8308\u8309\u830a\u830b\u830c\u830d\u830e\u830f\u8310\u8311\u8312\u8313\u8314\u8315\u8316\u8317\u8318\u8319\u831a\u831b\u831c\u831d\u831e\u831f\u8320\u8321\u8322\u8323\u8324\u8325\u8326\u8327\u8328\u8329\u832a\u832b\u832c\u832d\u832e\u832f\u8330\u8331\u8332\u8333\u8334\u8335\u8336\u8337\u8338\u8339\u833a\u833b\u833c\u833d\u833e\u833f\u8340\u8341\u8342\u8343\u8344\u8345\u8346\u8347\u8348\u8349\u834a\u834b\u834c\u834d\u834e\u834f\u8350\u8351\u8352\u8353\u8354\u8355\u8356\u8357\u8358\u8359\u835a\u835b\u835c\u835d\u835e\u835f\u8360\u8361\u8362\u8363\u8364\u8365\u8366\u8367\u8368\u8369\u836a\u836b\u836c\u836d\u836e\u836f\u8370\u8371\u8372\u8373\u8374\u8375\u8376\u8377\u8378\u8379\u837a\u837b\u837c\u837d\u837e\u837f\u8380\u8381\u8382\u8383\u8384\u8385\u8386\u8387\u8388\u8389\u838a\u838b\u838c\u838d\u838e\u838f\u8390\u8391\u8392\u8393\u8394\u8395\u8396\u8397\u8398\u8399\u839a\u839b\u839c\u839d\u839e\u839f\u83a0\u83a1\u83a2\u83a3\u83a4\u83a5\u83a6\u83a7\u83a8\u83a9\u83aa\u83ab\u83ac\u83ad\u83ae\u83af\u83b0\u83b1\u83b2\u83b3\u83b4\u83b5\u83b6\u83b7\u83b8\u83b9\u83ba\u83bb\u83bc\u83bd\u83be\u83bf\u83c0\u83c1\u83c2\u83c3\u83c4\u83c5\u83c6\u83c7\u83c8\u83c9\u83ca\u83cb\u83cc\u83cd\u83ce\u83cf\u83d0\u83d1\u83d2\u83d3\u83d4\u83d5\u83d6\u83d7\u83d8\u83d9\u83da\u83db\u83dc\u83dd\u83de\u83df\u83e0\u83e1\u83e2\u83e3\u83e4\u83e5\u83e6\u83e7\u83e8\u83e9\u83ea\u83eb\u83ec\u83ed\u83ee\u83ef\u83f0\u83f1\u83f2\u83f3\u83f4\u83f5\u83f6\u83f7\u83f8\u83f9\u83fa\u83fb\u83fc\u83fd\u83fe\u83ff\u8400\u8401\u8402\u8403\u8404\u8405\u8406\u8407\u8408\u8409\u840a\u840b\u840c\u840d\u840e\u840f\u8410\u8411\u8412\u8413\u8414\u8415\u8416\u8417\u8418\u8419\u841a\u841b\u841c\u841d\u841e\u841f\u8420\u8421\u8422\u8423\u8424\u8425\u8426\u8427\u8428\u8429\u842a\u842b\u842c\u842d\u842e\u842f\u8430\u8431\u8432\u8433\u8434\u8435\u8436\u8437\u8438\u8439\u843a\u843b\u843c\u843d\u843e\u843f\u8440\u8441\u8442\u8443\u8444\u8445\u8446\u8447\u8448\u8449\u844a\u844b\u844c\u844d\u844e\u844f\u8450\u8451\u8452\u8453\u8454\u8455\u8456\u8457\u8458\u8459\u845a\u845b\u845c\u845d\u845e\u845f\u8460\u8461\u8462\u8463\u8464\u8465\u8466\u8467\u8468\u8469\u846a\u846b\u846c\u846d\u846e\u846f\u8470\u8471\u8472\u8473\u8474\u8475\u8476\u8477\u8478\u8479\u847a\u847b\u847c\u847d\u847e\u847f\u8480\u8481\
u8482\u8483\u8484\u8485\u8486\u8487\u8488\u8489\u848a\u848b\u848c\u848d\u848e\u848f\u8490\u8491\u8492\u8493\u8494\u8495\u8496\u8497\u8498\u8499\u849a\u849b\u849c\u849d\u849e\u849f\u84a0\u84a1\u84a2\u84a3\u84a4\u84a5\u84a6\u84a7\u84a8\u84a9\u84aa\u84ab\u84ac\u84ad\u84ae\u84af\u84b0\u84b1\u84b2\u84b3\u84b4\u84b5\u84b6\u84b7\u84b8\u84b9\u84ba\u84bb\u84bc\u84bd\u84be\u84bf\u84c0\u84c1\u84c2\u84c3\u84c4\u84c5\u84c6\u84c7\u84c8\u84c9\u84ca\u84cb\u84cc\u84cd\u84ce\u84cf\u84d0\u84d1\u84d2\u84d3\u84d4\u84d5\u84d6\u84d7\u84d8\u84d9\u84da\u84db\u84dc\u84dd\u84de\u84df\u84e0\u84e1\u84e2\u84e3\u84e4\u84e5\u84e6\u84e7\u84e8\u84e9\u84ea\u84eb\u84ec\u84ed\u84ee\u84ef\u84f0\u84f1\u84f2\u84f3\u84f4\u84f5\u84f6\u84f7\u84f8\u84f9\u84fa\u84fb\u84fc\u84fd\u84fe\u84ff\u8500\u8501\u8502\u8503\u8504\u8505\u8506\u8507\u8508\u8509\u850a\u850b\u850c\u850d\u850e\u850f\u8510\u8511\u8512\u8513\u8514\u8515\u8516\u8517\u8518\u8519\u851a\u851b\u851c\u851d\u851e\u851f\u8520\u8521\u8522\u8523\u8524\u8525\u8526\u8527\u8528\u8529\u852a\u852b\u852c\u852d\u852e\u852f\u8530\u8531\u8532\u8533\u8534\u8535\u8536\u8537\u8538\u8539\u853a\u853b\u853c\u853d\u853e\u853f\u8540\u8541\u8542\u8543\u8544\u8545\u8546\u8547\u8548\u8549\u854a\u854b\u854c\u854d\u854e\u854f\u8550\u8551\u8552\u8553\u8554\u8555\u8556\u8557\u8558\u8559\u855a\u855b\u855c\u855d\u855e\u855f\u8560\u8561\u8562\u8563\u8564\u8565\u8566\u8567\u8568\u8569\u856a\u856b\u856c\u856d\u856e\u856f\u8570\u8571\u8572\u8573\u8574\u8575\u8576\u8577\u8578\u8579\u857a\u857b\u857c\u857d\u857e\u857f\u8580\u8581\u8582\u8583\u8584\u8585\u8586\u8587\u8588\u8589\u858a\u858b\u858c\u858d\u858e\u858f\u8590\u8591\u8592\u8593\u8594\u8595\u8596\u8597\u8598\u8599\u859a\u859b\u859c\u859d\u859e\u859f\u85a0\u85a1\u85a2\u85a3\u85a4\u85a5\u85a6\u85a7\u85a8\u85a9\u85aa\u85ab\u85ac\u85ad\u85ae\u85af\u85b0\u85b1\u85b2\u85b3\u85b4\u85b5\u85b6\u85b7\u85b8\u85b9\u85ba\u85bb\u85bc\u85bd\u85be\u85bf\u85c0\u85c1\u85c2\u85c3\u85c4\u85c5\u85c6\u85c7\u85c8\u85c9\u85ca\u85cb\u85cc\u85cd\u85ce\u85cf\u85d0\u85d1\u85d2\u85d3\u85d4\u85d5\u85d6\u85d7\u85d8\u85d9\u85da\u85db\u85dc\u85dd\u85de\u85df\u85e0\u85e1\u85e2\u85e3\u85e4\u85e5\u85e6\u85e7\u85e8\u85e9\u85ea\u85eb\u85ec\u85ed\u85ee\u85ef\u85f0\u85f1\u85f2\u85f3\u85f4\u85f5\u85f6\u85f7\u85f8\u85f9\u85fa\u85fb\u85fc\u85fd\u85fe\u85ff\u8600\u8601\u8602\u8603\u8604\u8605\u8606\u8607\u8608\u8609\u860a\u860b\u860c\u860d\u860e\u860f\u8610\u8611\u8612\u8613\u8614\u8615\u8616\u8617\u8618\u8619\u861a\u861b\u861c\u861d\u861e\u861f\u8620\u8621\u8622\u8623\u8624\u8625\u8626\u8627\u8628\u8629\u862a\u862b\u862c\u862d\u862e\u862f\u8630\u8631\u8632\u8633\u8634\u8635\u8636\u8637\u8638\u8639\u863a\u863b\u863c\u863d\u863e\u863f\u8640\u8641\u8642\u8643\u8644\u8645\u8646\u8647\u8648\u8649\u864a\u864b\u864c\u864d\u864e\u864f\u8650\u8651\u8652\u8653\u8654\u8655\u8656\u8657\u8658\u8659\u865a\u865b\u865c\u865d\u865e\u865f\u8660\u8661\u8662\u8663\u8664\u8665\u8666\u8667\u8668\u8669\u866a\u866b\u866c\u866d\u866e\u866f\u8670\u8671\u8672\u8673\u8674\u8675\u8676\u8677\u8678\u8679\u867a\u867b\u867c\u867d\u867e\u867f\u8680\u8681\u8682\u8683\u8684\u8685\u8686\u8687\u8688\u8689\u868a\u868b\u868c\u868d\u868e\u868f\u8690\u8691\u8692\u8693\u8694\u8695\u8696\u8697\u8698\u8699\u869a\u869b\u869c\u869d\u869e\u869f\u86a0\u86a1\u86a2\u86a3\u86a4\u86a5\u86a6\u86a7\u86a8\u86a9\u86aa\u86ab\u86ac\u86ad\u86ae\u86af\u86b0\u86b1\u86b2\u86b3\u86b4\u86b5\u86b6\u86b7\u86b8\u86b9\u86ba\u86bb\u86bc\u86bd\u86be\u86bf\u86c0\u86c1\u86c2\u86c3\u86c4\u86c5\u86c6\u86c7\u86c8\u86c9\u86ca\u86cb\u86cc\u86cd\u86ce\u86cf\u86d0\u86d1\u86
d2\u86d3\u86d4\u86d5\u86d6\u86d7\u86d8\u86d9\u86da\u86db\u86dc\u86dd\u86de\u86df\u86e0\u86e1\u86e2\u86e3\u86e4\u86e5\u86e6\u86e7\u86e8\u86e9\u86ea\u86eb\u86ec\u86ed\u86ee\u86ef\u86f0\u86f1\u86f2\u86f3\u86f4\u86f5\u86f6\u86f7\u86f8\u86f9\u86fa\u86fb\u86fc\u86fd\u86fe\u86ff\u8700\u8701\u8702\u8703\u8704\u8705\u8706\u8707\u8708\u8709\u870a\u870b\u870c\u870d\u870e\u870f\u8710\u8711\u8712\u8713\u8714\u8715\u8716\u8717\u8718\u8719\u871a\u871b\u871c\u871d\u871e\u871f\u8720\u8721\u8722\u8723\u8724\u8725\u8726\u8727\u8728\u8729\u872a\u872b\u872c\u872d\u872e\u872f\u8730\u8731\u8732\u8733\u8734\u8735\u8736\u8737\u8738\u8739\u873a\u873b\u873c\u873d\u873e\u873f\u8740\u8741\u8742\u8743\u8744\u8745\u8746\u8747\u8748\u8749\u874a\u874b\u874c\u874d\u874e\u874f\u8750\u8751\u8752\u8753\u8754\u8755\u8756\u8757\u8758\u8759\u875a\u875b\u875c\u875d\u875e\u875f\u8760\u8761\u8762\u8763\u8764\u8765\u8766\u8767\u8768\u8769\u876a\u876b\u876c\u876d\u876e\u876f\u8770\u8771\u8772\u8773\u8774\u8775\u8776\u8777\u8778\u8779\u877a\u877b\u877c\u877d\u877e\u877f\u8780\u8781\u8782\u8783\u8784\u8785\u8786\u8787\u8788\u8789\u878a\u878b\u878c\u878d\u878e\u878f\u8790\u8791\u8792\u8793\u8794\u8795\u8796\u8797\u8798\u8799\u879a\u879b\u879c\u879d\u879e\u879f\u87a0\u87a1\u87a2\u87a3\u87a4\u87a5\u87a6\u87a7\u87a8\u87a9\u87aa\u87ab\u87ac\u87ad\u87ae\u87af\u87b0\u87b1\u87b2\u87b3\u87b4\u87b5\u87b6\u87b7\u87b8\u87b9\u87ba\u87bb\u87bc\u87bd\u87be\u87bf\u87c0\u87c1\u87c2\u87c3\u87c4\u87c5\u87c6\u87c7\u87c8\u87c9\u87ca\u87cb\u87cc\u87cd\u87ce\u87cf\u87d0\u87d1\u87d2\u87d3\u87d4\u87d5\u87d6\u87d7\u87d8\u87d9\u87da\u87db\u87dc\u87dd\u87de\u87df\u87e0\u87e1\u87e2\u87e3\u87e4\u87e5\u87e6\u87e7\u87e8\u87e9\u87ea\u87eb\u87ec\u87ed\u87ee\u87ef\u87f0\u87f1\u87f2\u87f3\u87f4\u87f5\u87f6\u87f7\u87f8\u87f9\u87fa\u87fb\u87fc\u87fd\u87fe\u87ff\u8800\u8801\u8802\u8803\u8804\u8805\u8806\u8807\u8808\u8809\u880a\u880b\u880c\u880d\u880e\u880f\u8810\u8811\u8812\u8813\u8814\u8815\u8816\u8817\u8818\u8819\u881a\u881b\u881c\u881d\u881e\u881f\u8820\u8821\u8822\u8823\u8824\u8825\u8826\u8827\u8828\u8829\u882a\u882b\u882c\u882d\u882e\u882f\u8830\u8831\u8832\u8833\u8834\u8835\u8836\u8837\u8838\u8839\u883a\u883b\u883c\u883d\u883e\u883f\u8840\u8841\u8842\u8843\u8844\u8845\u8846\u8847\u8848\u8849\u884a\u884b\u884c\u884d\u884e\u884f\u8850\u8851\u8852\u8853\u8854\u8855\u8856\u8857\u8858\u8859\u885a\u885b\u885c\u885d\u885e\u885f\u8860\u8861\u8862\u8863\u8864\u8865\u8866\u8867\u8868\u8869\u886a\u886b\u886c\u886d\u886e\u886f\u8870\u8871\u8872\u8873\u8874\u8875\u8876\u8877\u8878\u8879\u887a\u887b\u887c\u887d\u887e\u887f\u8880\u8881\u8882\u8883\u8884\u8885\u8886\u8887\u8888\u8889\u888a\u888b\u888c\u888d\u888e\u888f\u8890\u8891\u8892\u8893\u8894\u8895\u8896\u8897\u8898\u8899\u889a\u889b\u889c\u889d\u889e\u889f\u88a0\u88a1\u88a2\u88a3\u88a4\u88a5\u88a6\u88a7\u88a8\u88a9\u88aa\u88ab\u88ac\u88ad\u88ae\u88af\u88b0\u88b1\u88b2\u88b3\u88b4\u88b5\u88b6\u88b7\u88b8\u88b9\u88ba\u88bb\u88bc\u88bd\u88be\u88bf\u88c0\u88c1\u88c2\u88c3\u88c4\u88c5\u88c6\u88c7\u88c8\u88c9\u88ca\u88cb\u88cc\u88cd\u88ce\u88cf\u88d0\u88d1\u88d2\u88d3\u88d4\u88d5\u88d6\u88d7\u88d8\u88d9\u88da\u88db\u88dc\u88dd\u88de\u88df\u88e0\u88e1\u88e2\u88e3\u88e4\u88e5\u88e6\u88e7\u88e8\u88e9\u88ea\u88eb\u88ec\u88ed\u88ee\u88ef\u88f0\u88f1\u88f2\u88f3\u88f4\u88f5\u88f6\u88f7\u88f8\u88f9\u88fa\u88fb\u88fc\u88fd\u88fe\u88ff\u8900\u8901\u8902\u8903\u8904\u8905\u8906\u8907\u8908\u8909\u890a\u890b\u890c\u890d\u890e\u890f\u8910\u8911\u8912\u8913\u8914\u8915\u8916\u8917\u8918\u8919\u891a\u891b\u891c\u891d\u891e\u891f\u8920\u8921\u8922\
u8923\u8924\u8925\u8926\u8927\u8928\u8929\u892a\u892b\u892c\u892d\u892e\u892f\u8930\u8931\u8932\u8933\u8934\u8935\u8936\u8937\u8938\u8939\u893a\u893b\u893c\u893d\u893e\u893f\u8940\u8941\u8942\u8943\u8944\u8945\u8946\u8947\u8948\u8949\u894a\u894b\u894c\u894d\u894e\u894f\u8950\u8951\u8952\u8953\u8954\u8955\u8956\u8957\u8958\u8959\u895a\u895b\u895c\u895d\u895e\u895f\u8960\u8961\u8962\u8963\u8964\u8965\u8966\u8967\u8968\u8969\u896a\u896b\u896c\u896d\u896e\u896f\u8970\u8971\u8972\u8973\u8974\u8975\u8976\u8977\u8978\u8979\u897a\u897b\u897c\u897d\u897e\u897f\u8980\u8981\u8982\u8983\u8984\u8985\u8986\u8987\u8988\u8989\u898a\u898b\u898c\u898d\u898e\u898f\u8990\u8991\u8992\u8993\u8994\u8995\u8996\u8997\u8998\u8999\u899a\u899b\u899c\u899d\u899e\u899f\u89a0\u89a1\u89a2\u89a3\u89a4\u89a5\u89a6\u89a7\u89a8\u89a9\u89aa\u89ab\u89ac\u89ad\u89ae\u89af\u89b0\u89b1\u89b2\u89b3\u89b4\u89b5\u89b6\u89b7\u89b8\u89b9\u89ba\u89bb\u89bc\u89bd\u89be\u89bf\u89c0\u89c1\u89c2\u89c3\u89c4\u89c5\u89c6\u89c7\u89c8\u89c9\u89ca\u89cb\u89cc\u89cd\u89ce\u89cf\u89d0\u89d1\u89d2\u89d3\u89d4\u89d5\u89d6\u89d7\u89d8\u89d9\u89da\u89db\u89dc\u89dd\u89de\u89df\u89e0\u89e1\u89e2\u89e3\u89e4\u89e5\u89e6\u89e7\u89e8\u89e9\u89ea\u89eb\u89ec\u89ed\u89ee\u89ef\u89f0\u89f1\u89f2\u89f3\u89f4\u89f5\u89f6\u89f7\u89f8\u89f9\u89fa\u89fb\u89fc\u89fd\u89fe\u89ff\u8a00\u8a01\u8a02\u8a03\u8a04\u8a05\u8a06\u8a07\u8a08\u8a09\u8a0a\u8a0b\u8a0c\u8a0d\u8a0e\u8a0f\u8a10\u8a11\u8a12\u8a13\u8a14\u8a15\u8a16\u8a17\u8a18\u8a19\u8a1a\u8a1b\u8a1c\u8a1d\u8a1e\u8a1f\u8a20\u8a21\u8a22\u8a23\u8a24\u8a25\u8a26\u8a27\u8a28\u8a29\u8a2a\u8a2b\u8a2c\u8a2d\u8a2e\u8a2f\u8a30\u8a31\u8a32\u8a33\u8a34\u8a35\u8a36\u8a37\u8a38\u8a39\u8a3a\u8a3b\u8a3c\u8a3d\u8a3e\u8a3f\u8a40\u8a41\u8a42\u8a43\u8a44\u8a45\u8a46\u8a47\u8a48\u8a49\u8a4a\u8a4b\u8a4c\u8a4d\u8a4e\u8a4f\u8a50\u8a51\u8a52\u8a53\u8a54\u8a55\u8a56\u8a57\u8a58\u8a59\u8a5a\u8a5b\u8a5c\u8a5d\u8a5e\u8a5f\u8a60\u8a61\u8a62\u8a63\u8a64\u8a65\u8a66\u8a67\u8a68\u8a69\u8a6a\u8a6b\u8a6c\u8a6d\u8a6e\u8a6f\u8a70\u8a71\u8a72\u8a73\u8a74\u8a75\u8a76\u8a77\u8a78\u8a79\u8a7a\u8a7b\u8a7c\u8a7d\u8a7e\u8a7f\u8a80\u8a81\u8a82\u8a83\u8a84\u8a85\u8a86\u8a87\u8a88\u8a89\u8a8a\u8a8b\u8a8c\u8a8d\u8a8e\u8a8f\u8a90\u8a91\u8a92\u8a93\u8a94\u8a95\u8a96\u8a97\u8a98\u8a99\u8a9a\u8a9b\u8a9c\u8a9d\u8a9e\u8a9f\u8aa0\u8aa1\u8aa2\u8aa3\u8aa4\u8aa5\u8aa6\u8aa7\u8aa8\u8aa9\u8aaa\u8aab\u8aac\u8aad\u8aae\u8aaf\u8ab0\u8ab1\u8ab2\u8ab3\u8ab4\u8ab5\u8ab6\u8ab7\u8ab8\u8ab9\u8aba\u8abb\u8abc\u8abd\u8abe\u8abf\u8ac0\u8ac1\u8ac2\u8ac3\u8ac4\u8ac5\u8ac6\u8ac7\u8ac8\u8ac9\u8aca\u8acb\u8acc\u8acd\u8ace\u8acf\u8ad0\u8ad1\u8ad2\u8ad3\u8ad4\u8ad5\u8ad6\u8ad7\u8ad8\u8ad9\u8ada\u8adb\u8adc\u8add\u8ade\u8adf\u8ae0\u8ae1\u8ae2\u8ae3\u8ae4\u8ae5\u8ae6\u8ae7\u8ae8\u8ae9\u8aea\u8aeb\u8aec\u8aed\u8aee\u8aef\u8af0\u8af1\u8af2\u8af3\u8af4\u8af5\u8af6\u8af7\u8af8\u8af9\u8afa\u8afb\u8afc\u8afd\u8afe\u8aff\u8b00\u8b01\u8b02\u8b03\u8b04\u8b05\u8b06\u8b07\u8b08\u8b09\u8b0a\u8b0b\u8b0c\u8b0d\u8b0e\u8b0f\u8b10\u8b11\u8b12\u8b13\u8b14\u8b15\u8b16\u8b17\u8b18\u8b19\u8b1a\u8b1b\u8b1c\u8b1d\u8b1e\u8b1f\u8b20\u8b21\u8b22\u8b23\u8b24\u8b25\u8b26\u8b27\u8b28\u8b29\u8b2a\u8b2b\u8b2c\u8b2d\u8b2e\u8b2f\u8b30\u8b31\u8b32\u8b33\u8b34\u8b35\u8b36\u8b37\u8b38\u8b39\u8b3a\u8b3b\u8b3c\u8b3d\u8b3e\u8b3f\u8b40\u8b41\u8b42\u8b43\u8b44\u8b45\u8b46\u8b47\u8b48\u8b49\u8b4a\u8b4b\u8b4c\u8b4d\u8b4e\u8b4f\u8b50\u8b51\u8b52\u8b53\u8b54\u8b55\u8b56\u8b57\u8b58\u8b59\u8b5a\u8b5b\u8b5c\u8b5d\u8b5e\u8b5f\u8b60\u8b61\u8b62\u8b63\u8b64\u8b65\u8b66\u8b67\u8b68\u8b69\u8b6a\u8b6b\u8b6c\u8b6d\u8b6e\u8b6f\u8b70\u8b71\u8b72\u8b
73\u8b74\u8b75\u8b76\u8b77\u8b78\u8b79\u8b7a\u8b7b\u8b7c\u8b7d\u8b7e\u8b7f\u8b80\u8b81\u8b82\u8b83\u8b84\u8b85\u8b86\u8b87\u8b88\u8b89\u8b8a\u8b8b\u8b8c\u8b8d\u8b8e\u8b8f\u8b90\u8b91\u8b92\u8b93\u8b94\u8b95\u8b96\u8b97\u8b98\u8b99\u8b9a\u8b9b\u8b9c\u8b9d\u8b9e\u8b9f\u8ba0\u8ba1\u8ba2\u8ba3\u8ba4\u8ba5\u8ba6\u8ba7\u8ba8\u8ba9\u8baa\u8bab\u8bac\u8bad\u8bae\u8baf\u8bb0\u8bb1\u8bb2\u8bb3\u8bb4\u8bb5\u8bb6\u8bb7\u8bb8\u8bb9\u8bba\u8bbb\u8bbc\u8bbd\u8bbe\u8bbf\u8bc0\u8bc1\u8bc2\u8bc3\u8bc4\u8bc5\u8bc6\u8bc7\u8bc8\u8bc9\u8bca\u8bcb\u8bcc\u8bcd\u8bce\u8bcf\u8bd0\u8bd1\u8bd2\u8bd3\u8bd4\u8bd5\u8bd6\u8bd7\u8bd8\u8bd9\u8bda\u8bdb\u8bdc\u8bdd\u8bde\u8bdf\u8be0\u8be1\u8be2\u8be3\u8be4\u8be5\u8be6\u8be7\u8be8\u8be9\u8bea\u8beb\u8bec\u8bed\u8bee\u8bef\u8bf0\u8bf1\u8bf2\u8bf3\u8bf4\u8bf5\u8bf6\u8bf7\u8bf8\u8bf9\u8bfa\u8bfb\u8bfc\u8bfd\u8bfe\u8bff\u8c00\u8c01\u8c02\u8c03\u8c04\u8c05\u8c06\u8c07\u8c08\u8c09\u8c0a\u8c0b\u8c0c\u8c0d\u8c0e\u8c0f\u8c10\u8c11\u8c12\u8c13\u8c14\u8c15\u8c16\u8c17\u8c18\u8c19\u8c1a\u8c1b\u8c1c\u8c1d\u8c1e\u8c1f\u8c20\u8c21\u8c22\u8c23\u8c24\u8c25\u8c26\u8c27\u8c28\u8c29\u8c2a\u8c2b\u8c2c\u8c2d\u8c2e\u8c2f\u8c30\u8c31\u8c32\u8c33\u8c34\u8c35\u8c36\u8c37\u8c38\u8c39\u8c3a\u8c3b\u8c3c\u8c3d\u8c3e\u8c3f\u8c40\u8c41\u8c42\u8c43\u8c44\u8c45\u8c46\u8c47\u8c48\u8c49\u8c4a\u8c4b\u8c4c\u8c4d\u8c4e\u8c4f\u8c50\u8c51\u8c52\u8c53\u8c54\u8c55\u8c56\u8c57\u8c58\u8c59\u8c5a\u8c5b\u8c5c\u8c5d\u8c5e\u8c5f\u8c60\u8c61\u8c62\u8c63\u8c64\u8c65\u8c66\u8c67\u8c68\u8c69\u8c6a\u8c6b\u8c6c\u8c6d\u8c6e\u8c6f\u8c70\u8c71\u8c72\u8c73\u8c74\u8c75\u8c76\u8c77\u8c78\u8c79\u8c7a\u8c7b\u8c7c\u8c7d\u8c7e\u8c7f\u8c80\u8c81\u8c82\u8c83\u8c84\u8c85\u8c86\u8c87\u8c88\u8c89\u8c8a\u8c8b\u8c8c\u8c8d\u8c8e\u8c8f\u8c90\u8c91\u8c92\u8c93\u8c94\u8c95\u8c96\u8c97\u8c98\u8c99\u8c9a\u8c9b\u8c9c\u8c9d\u8c9e\u8c9f\u8ca0\u8ca1\u8ca2\u8ca3\u8ca4\u8ca5\u8ca6\u8ca7\u8ca8\u8ca9\u8caa\u8cab\u8cac\u8cad\u8cae\u8caf\u8cb0\u8cb1\u8cb2\u8cb3\u8cb4\u8cb5\u8cb6\u8cb7\u8cb8\u8cb9\u8cba\u8cbb\u8cbc\u8cbd\u8cbe\u8cbf\u8cc0\u8cc1\u8cc2\u8cc3\u8cc4\u8cc5\u8cc6\u8cc7\u8cc8\u8cc9\u8cca\u8ccb\u8ccc\u8ccd\u8cce\u8ccf\u8cd0\u8cd1\u8cd2\u8cd3\u8cd4\u8cd5\u8cd6\u8cd7\u8cd8\u8cd9\u8cda\u8cdb\u8cdc\u8cdd\u8cde\u8cdf\u8ce0\u8ce1\u8ce2\u8ce3\u8ce4\u8ce5\u8ce6\u8ce7\u8ce8\u8ce9\u8cea\u8ceb\u8cec\u8ced\u8cee\u8cef\u8cf0\u8cf1\u8cf2\u8cf3\u8cf4\u8cf5\u8cf6\u8cf7\u8cf8\u8cf9\u8cfa\u8cfb\u8cfc\u8cfd\u8cfe\u8cff\u8d00\u8d01\u8d02\u8d03\u8d04\u8d05\u8d06\u8d07\u8d08\u8d09\u8d0a\u8d0b\u8d0c\u8d0d\u8d0e\u8d0f\u8d10\u8d11\u8d12\u8d13\u8d14\u8d15\u8d16\u8d17\u8d18\u8d19\u8d1a\u8d1b\u8d1c\u8d1d\u8d1e\u8d1f\u8d20\u8d21\u8d22\u8d23\u8d24\u8d25\u8d26\u8d27\u8d28\u8d29\u8d2a\u8d2b\u8d2c\u8d2d\u8d2e\u8d2f\u8d30\u8d31\u8d32\u8d33\u8d34\u8d35\u8d36\u8d37\u8d38\u8d39\u8d3a\u8d3b\u8d3c\u8d3d\u8d3e\u8d3f\u8d40\u8d41\u8d42\u8d43\u8d44\u8d45\u8d46\u8d47\u8d48\u8d49\u8d4a\u8d4b\u8d4c\u8d4d\u8d4e\u8d4f\u8d50\u8d51\u8d52\u8d53\u8d54\u8d55\u8d56\u8d57\u8d58\u8d59\u8d5a\u8d5b\u8d5c\u8d5d\u8d5e\u8d5f\u8d60\u8d61\u8d62\u8d63\u8d64\u8d65\u8d66\u8d67\u8d68\u8d69\u8d6a\u8d6b\u8d6c\u8d6d\u8d6e\u8d6f\u8d70\u8d71\u8d72\u8d73\u8d74\u8d75\u8d76\u8d77\u8d78\u8d79\u8d7a\u8d7b\u8d7c\u8d7d\u8d7e\u8d7f\u8d80\u8d81\u8d82\u8d83\u8d84\u8d85\u8d86\u8d87\u8d88\u8d89\u8d8a\u8d8b\u8d8c\u8d8d\u8d8e\u8d8f\u8d90\u8d91\u8d92\u8d93\u8d94\u8d95\u8d96\u8d97\u8d98\u8d99\u8d9a\u8d9b\u8d9c\u8d9d\u8d9e\u8d9f\u8da0\u8da1\u8da2\u8da3\u8da4\u8da5\u8da6\u8da7\u8da8\u8da9\u8daa\u8dab\u8dac\u8dad\u8dae\u8daf\u8db0\u8db1\u8db2\u8db3\u8db4\u8db5\u8db6\u8db7\u8db8\u8db9\u8dba\u8dbb\u8dbc\u8dbd\u8dbe\u8dbf\u8dc0\u8dc1\u8dc2\u8dc3\
u8dc4\u8dc5\u8dc6\u8dc7\u8dc8\u8dc9\u8dca\u8dcb\u8dcc\u8dcd\u8dce\u8dcf\u8dd0\u8dd1\u8dd2\u8dd3\u8dd4\u8dd5\u8dd6\u8dd7\u8dd8\u8dd9\u8dda\u8ddb\u8ddc\u8ddd\u8dde\u8ddf\u8de0\u8de1\u8de2\u8de3\u8de4\u8de5\u8de6\u8de7\u8de8\u8de9\u8dea\u8deb\u8dec\u8ded\u8dee\u8def\u8df0\u8df1\u8df2\u8df3\u8df4\u8df5\u8df6\u8df7\u8df8\u8df9\u8dfa\u8dfb\u8dfc\u8dfd\u8dfe\u8dff\u8e00\u8e01\u8e02\u8e03\u8e04\u8e05\u8e06\u8e07\u8e08\u8e09\u8e0a\u8e0b\u8e0c\u8e0d\u8e0e\u8e0f\u8e10\u8e11\u8e12\u8e13\u8e14\u8e15\u8e16\u8e17\u8e18\u8e19\u8e1a\u8e1b\u8e1c\u8e1d\u8e1e\u8e1f\u8e20\u8e21\u8e22\u8e23\u8e24\u8e25\u8e26\u8e27\u8e28\u8e29\u8e2a\u8e2b\u8e2c\u8e2d\u8e2e\u8e2f\u8e30\u8e31\u8e32\u8e33\u8e34\u8e35\u8e36\u8e37\u8e38\u8e39\u8e3a\u8e3b\u8e3c\u8e3d\u8e3e\u8e3f\u8e40\u8e41\u8e42\u8e43\u8e44\u8e45\u8e46\u8e47\u8e48\u8e49\u8e4a\u8e4b\u8e4c\u8e4d\u8e4e\u8e4f\u8e50\u8e51\u8e52\u8e53\u8e54\u8e55\u8e56\u8e57\u8e58\u8e59\u8e5a\u8e5b\u8e5c\u8e5d\u8e5e\u8e5f\u8e60\u8e61\u8e62\u8e63\u8e64\u8e65\u8e66\u8e67\u8e68\u8e69\u8e6a\u8e6b\u8e6c\u8e6d\u8e6e\u8e6f\u8e70\u8e71\u8e72\u8e73\u8e74\u8e75\u8e76\u8e77\u8e78\u8e79\u8e7a\u8e7b\u8e7c\u8e7d\u8e7e\u8e7f\u8e80\u8e81\u8e82\u8e83\u8e84\u8e85\u8e86\u8e87\u8e88\u8e89\u8e8a\u8e8b\u8e8c\u8e8d\u8e8e\u8e8f\u8e90\u8e91\u8e92\u8e93\u8e94\u8e95\u8e96\u8e97\u8e98\u8e99\u8e9a\u8e9b\u8e9c\u8e9d\u8e9e\u8e9f\u8ea0\u8ea1\u8ea2\u8ea3\u8ea4\u8ea5\u8ea6\u8ea7\u8ea8\u8ea9\u8eaa\u8eab\u8eac\u8ead\u8eae\u8eaf\u8eb0\u8eb1\u8eb2\u8eb3\u8eb4\u8eb5\u8eb6\u8eb7\u8eb8\u8eb9\u8eba\u8ebb\u8ebc\u8ebd\u8ebe\u8ebf\u8ec0\u8ec1\u8ec2\u8ec3\u8ec4\u8ec5\u8ec6\u8ec7\u8ec8\u8ec9\u8eca\u8ecb\u8ecc\u8ecd\u8ece\u8ecf\u8ed0\u8ed1\u8ed2\u8ed3\u8ed4\u8ed5\u8ed6\u8ed7\u8ed8\u8ed9\u8eda\u8edb\u8edc\u8edd\u8ede\u8edf\u8ee0\u8ee1\u8ee2\u8ee3\u8ee4\u8ee5\u8ee6\u8ee7\u8ee8\u8ee9\u8eea\u8eeb\u8eec\u8eed\u8eee\u8eef\u8ef0\u8ef1\u8ef2\u8ef3\u8ef4\u8ef5\u8ef6\u8ef7\u8ef8\u8ef9\u8efa\u8efb\u8efc\u8efd\u8efe\u8eff\u8f00\u8f01\u8f02\u8f03\u8f04\u8f05\u8f06\u8f07\u8f08\u8f09\u8f0a\u8f0b\u8f0c\u8f0d\u8f0e\u8f0f\u8f10\u8f11\u8f12\u8f13\u8f14\u8f15\u8f16\u8f17\u8f18\u8f19\u8f1a\u8f1b\u8f1c\u8f1d\u8f1e\u8f1f\u8f20\u8f21\u8f22\u8f23\u8f24\u8f25\u8f26\u8f27\u8f28\u8f29\u8f2a\u8f2b\u8f2c\u8f2d\u8f2e\u8f2f\u8f30\u8f31\u8f32\u8f33\u8f34\u8f35\u8f36\u8f37\u8f38\u8f39\u8f3a\u8f3b\u8f3c\u8f3d\u8f3e\u8f3f\u8f40\u8f41\u8f42\u8f43\u8f44\u8f45\u8f46\u8f47\u8f48\u8f49\u8f4a\u8f4b\u8f4c\u8f4d\u8f4e\u8f4f\u8f50\u8f51\u8f52\u8f53\u8f54\u8f55\u8f56\u8f57\u8f58\u8f59\u8f5a\u8f5b\u8f5c\u8f5d\u8f5e\u8f5f\u8f60\u8f61\u8f62\u8f63\u8f64\u8f65\u8f66\u8f67\u8f68\u8f69\u8f6a\u8f6b\u8f6c\u8f6d\u8f6e\u8f6f\u8f70\u8f71\u8f72\u8f73\u8f74\u8f75\u8f76\u8f77\u8f78\u8f79\u8f7a\u8f7b\u8f7c\u8f7d\u8f7e\u8f7f\u8f80\u8f81\u8f82\u8f83\u8f84\u8f85\u8f86\u8f87\u8f88\u8f89\u8f8a\u8f8b\u8f8c\u8f8d\u8f8e\u8f8f\u8f90\u8f91\u8f92\u8f93\u8f94\u8f95\u8f96\u8f97\u8f98\u8f99\u8f9a\u8f9b\u8f9c\u8f9d\u8f9e\u8f9f\u8fa0\u8fa1\u8fa2\u8fa3\u8fa4\u8fa5\u8fa6\u8fa7\u8fa8\u8fa9\u8faa\u8fab\u8fac\u8fad\u8fae\u8faf\u8fb0\u8fb1\u8fb2\u8fb3\u8fb4\u8fb5\u8fb6\u8fb7\u8fb8\u8fb9\u8fba\u8fbb\u8fbc\u8fbd\u8fbe\u8fbf\u8fc0\u8fc1\u8fc2\u8fc3\u8fc4\u8fc5\u8fc6\u8fc7\u8fc8\u8fc9\u8fca\u8fcb\u8fcc\u8fcd\u8fce\u8fcf\u8fd0\u8fd1\u8fd2\u8fd3\u8fd4\u8fd5\u8fd6\u8fd7\u8fd8\u8fd9\u8fda\u8fdb\u8fdc\u8fdd\u8fde\u8fdf\u8fe0\u8fe1\u8fe2\u8fe3\u8fe4\u8fe5\u8fe6\u8fe7\u8fe8\u8fe9\u8fea\u8feb\u8fec\u8fed\u8fee\u8fef\u8ff0\u8ff1\u8ff2\u8ff3\u8ff4\u8ff5\u8ff6\u8ff7\u8ff8\u8ff9\u8ffa\u8ffb\u8ffc\u8ffd\u8ffe\u8fff\u9000\u9001\u9002\u9003\u9004\u9005\u9006\u9007\u9008\u9009\u900a\u900b\u900c\u900d\u900e\u900f\u9010\u9011\u9012\u9013\u90
14\u9015\u9016\u9017\u9018\u9019\u901a\u901b\u901c\u901d\u901e\u901f\u9020\u9021\u9022\u9023\u9024\u9025\u9026\u9027\u9028\u9029\u902a\u902b\u902c\u902d\u902e\u902f\u9030\u9031\u9032\u9033\u9034\u9035\u9036\u9037\u9038\u9039\u903a\u903b\u903c\u903d\u903e\u903f\u9040\u9041\u9042\u9043\u9044\u9045\u9046\u9047\u9048\u9049\u904a\u904b\u904c\u904d\u904e\u904f\u9050\u9051\u9052\u9053\u9054\u9055\u9056\u9057\u9058\u9059\u905a\u905b\u905c\u905d\u905e\u905f\u9060\u9061\u9062\u9063\u9064\u9065\u9066\u9067\u9068\u9069\u906a\u906b\u906c\u906d\u906e\u906f\u9070\u9071\u9072\u9073\u9074\u9075\u9076\u9077\u9078\u9079\u907a\u907b\u907c\u907d\u907e\u907f\u9080\u9081\u9082\u9083\u9084\u9085\u9086\u9087\u9088\u9089\u908a\u908b\u908c\u908d\u908e\u908f\u9090\u9091\u9092\u9093\u9094\u9095\u9096\u9097\u9098\u9099\u909a\u909b\u909c\u909d\u909e\u909f\u90a0\u90a1\u90a2\u90a3\u90a4\u90a5\u90a6\u90a7\u90a8\u90a9\u90aa\u90ab\u90ac\u90ad\u90ae\u90af\u90b0\u90b1\u90b2\u90b3\u90b4\u90b5\u90b6\u90b7\u90b8\u90b9\u90ba\u90bb\u90bc\u90bd\u90be\u90bf\u90c0\u90c1\u90c2\u90c3\u90c4\u90c5\u90c6\u90c7\u90c8\u90c9\u90ca\u90cb\u90cc\u90cd\u90ce\u90cf\u90d0\u90d1\u90d2\u90d3\u90d4\u90d5\u90d6\u90d7\u90d8\u90d9\u90da\u90db\u90dc\u90dd\u90de\u90df\u90e0\u90e1\u90e2\u90e3\u90e4\u90e5\u90e6\u90e7\u90e8\u90e9\u90ea\u90eb\u90ec\u90ed\u90ee\u90ef\u90f0\u90f1\u90f2\u90f3\u90f4\u90f5\u90f6\u90f7\u90f8\u90f9\u90fa\u90fb\u90fc\u90fd\u90fe\u90ff\u9100\u9101\u9102\u9103\u9104\u9105\u9106\u9107\u9108\u9109\u910a\u910b\u910c\u910d\u910e\u910f\u9110\u9111\u9112\u9113\u9114\u9115\u9116\u9117\u9118\u9119\u911a\u911b\u911c\u911d\u911e\u911f\u9120\u9121\u9122\u9123\u9124\u9125\u9126\u9127\u9128\u9129\u912a\u912b\u912c\u912d\u912e\u912f\u9130\u9131\u9132\u9133\u9134\u9135\u9136\u9137\u9138\u9139\u913a\u913b\u913c\u913d\u913e\u913f\u9140\u9141\u9142\u9143\u9144\u9145\u9146\u9147\u9148\u9149\u914a\u914b\u914c\u914d\u914e\u914f\u9150\u9151\u9152\u9153\u9154\u9155\u9156\u9157\u9158\u9159\u915a\u915b\u915c\u915d\u915e\u915f\u9160\u9161\u9162\u9163\u9164\u9165\u9166\u9167\u9168\u9169\u916a\u916b\u916c\u916d\u916e\u916f\u9170\u9171\u9172\u9173\u9174\u9175\u9176\u9177\u9178\u9179\u917a\u917b\u917c\u917d\u917e\u917f\u9180\u9181\u9182\u9183\u9184\u9185\u9186\u9187\u9188\u9189\u918a\u918b\u918c\u918d\u918e\u918f\u9190\u9191\u9192\u9193\u9194\u9195\u9196\u9197\u9198\u9199\u919a\u919b\u919c\u919d\u919e\u919f\u91a0\u91a1\u91a2\u91a3\u91a4\u91a5\u91a6\u91a7\u91a8\u91a9\u91aa\u91ab\u91ac\u91ad\u91ae\u91af\u91b0\u91b1\u91b2\u91b3\u91b4\u91b5\u91b6\u91b7\u91b8\u91b9\u91ba\u91bb\u91bc\u91bd\u91be\u91bf\u91c0\u91c1\u91c2\u91c3\u91c4\u91c5\u91c6\u91c7\u91c8\u91c9\u91ca\u91cb\u91cc\u91cd\u91ce\u91cf\u91d0\u91d1\u91d2\u91d3\u91d4\u91d5\u91d6\u91d7\u91d8\u91d9\u91da\u91db\u91dc\u91dd\u91de\u91df\u91e0\u91e1\u91e2\u91e3\u91e4\u91e5\u91e6\u91e7\u91e8\u91e9\u91ea\u91eb\u91ec\u91ed\u91ee\u91ef\u91f0\u91f1\u91f2\u91f3\u91f4\u91f5\u91f6\u91f7\u91f8\u91f9\u91fa\u91fb\u91fc\u91fd\u91fe\u91ff\u9200\u9201\u9202\u9203\u9204\u9205\u9206\u9207\u9208\u9209\u920a\u920b\u920c\u920d\u920e\u920f\u9210\u9211\u9212\u9213\u9214\u9215\u9216\u9217\u9218\u9219\u921a\u921b\u921c\u921d\u921e\u921f\u9220\u9221\u9222\u9223\u9224\u9225\u9226\u9227\u9228\u9229\u922a\u922b\u922c\u922d\u922e\u922f\u9230\u9231\u9232\u9233\u9234\u9235\u9236\u9237\u9238\u9239\u923a\u923b\u923c\u923d\u923e\u923f\u9240\u9241\u9242\u9243\u9244\u9245\u9246\u9247\u9248\u9249\u924a\u924b\u924c\u924d\u924e\u924f\u9250\u9251\u9252\u9253\u9254\u9255\u9256\u9257\u9258\u9259\u925a\u925b\u925c\u925d\u925e\u925f\u9260\u9261\u9262\u9263\u9264\
u9265\u9266\u9267\u9268\u9269\u926a\u926b\u926c\u926d\u926e\u926f\u9270\u9271\u9272\u9273\u9274\u9275\u9276\u9277\u9278\u9279\u927a\u927b\u927c\u927d\u927e\u927f\u9280\u9281\u9282\u9283\u9284\u9285\u9286\u9287\u9288\u9289\u928a\u928b\u928c\u928d\u928e\u928f\u9290\u9291\u9292\u9293\u9294\u9295\u9296\u9297\u9298\u9299\u929a\u929b\u929c\u929d\u929e\u929f\u92a0\u92a1\u92a2\u92a3\u92a4\u92a5\u92a6\u92a7\u92a8\u92a9\u92aa\u92ab\u92ac\u92ad\u92ae\u92af\u92b0\u92b1\u92b2\u92b3\u92b4\u92b5\u92b6\u92b7\u92b8\u92b9\u92ba\u92bb\u92bc\u92bd\u92be\u92bf\u92c0\u92c1\u92c2\u92c3\u92c4\u92c5\u92c6\u92c7\u92c8\u92c9\u92ca\u92cb\u92cc\u92cd\u92ce\u92cf\u92d0\u92d1\u92d2\u92d3\u92d4\u92d5\u92d6\u92d7\u92d8\u92d9\u92da\u92db\u92dc\u92dd\u92de\u92df\u92e0\u92e1\u92e2\u92e3\u92e4\u92e5\u92e6\u92e7\u92e8\u92e9\u92ea\u92eb\u92ec\u92ed\u92ee\u92ef\u92f0\u92f1\u92f2\u92f3\u92f4\u92f5\u92f6\u92f7\u92f8\u92f9\u92fa\u92fb\u92fc\u92fd\u92fe\u92ff\u9300\u9301\u9302\u9303\u9304\u9305\u9306\u9307\u9308\u9309\u930a\u930b\u930c\u930d\u930e\u930f\u9310\u9311\u9312\u9313\u9314\u9315\u9316\u9317\u9318\u9319\u931a\u931b\u931c\u931d\u931e\u931f\u9320\u9321\u9322\u9323\u9324\u9325\u9326\u9327\u9328\u9329\u932a\u932b\u932c\u932d\u932e\u932f\u9330\u9331\u9332\u9333\u9334\u9335\u9336\u9337\u9338\u9339\u933a\u933b\u933c\u933d\u933e\u933f\u9340\u9341\u9342\u9343\u9344\u9345\u9346\u9347\u9348\u9349\u934a\u934b\u934c\u934d\u934e\u934f\u9350\u9351\u9352\u9353\u9354\u9355\u9356\u9357\u9358\u9359\u935a\u935b\u935c\u935d\u935e\u935f\u9360\u9361\u9362\u9363\u9364\u9365\u9366\u9367\u9368\u9369\u936a\u936b\u936c\u936d\u936e\u936f\u9370\u9371\u9372\u9373\u9374\u9375\u9376\u9377\u9378\u9379\u937a\u937b\u937c\u937d\u937e\u937f\u9380\u9381\u9382\u9383\u9384\u9385\u9386\u9387\u9388\u9389\u938a\u938b\u938c\u938d\u938e\u938f\u9390\u9391\u9392\u9393\u9394\u9395\u9396\u9397\u9398\u9399\u939a\u939b\u939c\u939d\u939e\u939f\u93a0\u93a1\u93a2\u93a3\u93a4\u93a5\u93a6\u93a7\u93a8\u93a9\u93aa\u93ab\u93ac\u93ad\u93ae\u93af\u93b0\u93b1\u93b2\u93b3\u93b4\u93b5\u93b6\u93b7\u93b8\u93b9\u93ba\u93bb\u93bc\u93bd\u93be\u93bf\u93c0\u93c1\u93c2\u93c3\u93c4\u93c5\u93c6\u93c7\u93c8\u93c9\u93ca\u93cb\u93cc\u93cd\u93ce\u93cf\u93d0\u93d1\u93d2\u93d3\u93d4\u93d5\u93d6\u93d7\u93d8\u93d9\u93da\u93db\u93dc\u93dd\u93de\u93df\u93e0\u93e1\u93e2\u93e3\u93e4\u93e5\u93e6\u93e7\u93e8\u93e9\u93ea\u93eb\u93ec\u93ed\u93ee\u93ef\u93f0\u93f1\u93f2\u93f3\u93f4\u93f5\u93f6\u93f7\u93f8\u93f9\u93fa\u93fb\u93fc\u93fd\u93fe\u93ff\u9400\u9401\u9402\u9403\u9404\u9405\u9406\u9407\u9408\u9409\u940a\u940b\u940c\u940d\u940e\u940f\u9410\u9411\u9412\u9413\u9414\u9415\u9416\u9417\u9418\u9419\u941a\u941b\u941c\u941d\u941e\u941f\u9420\u9421\u9422\u9423\u9424\u9425\u9426\u9427\u9428\u9429\u942a\u942b\u942c\u942d\u942e\u942f\u9430\u9431\u9432\u9433\u9434\u9435\u9436\u9437\u9438\u9439\u943a\u943b\u943c\u943d\u943e\u943f\u9440\u9441\u9442\u9443\u9444\u9445\u9446\u9447\u9448\u9449\u944a\u944b\u944c\u944d\u944e\u944f\u9450\u9451\u9452\u9453\u9454\u9455\u9456\u9457\u9458\u9459\u945a\u945b\u945c\u945d\u945e\u945f\u9460\u9461\u9462\u9463\u9464\u9465\u9466\u9467\u9468\u9469\u946a\u946b\u946c\u946d\u946e\u946f\u9470\u9471\u9472\u9473\u9474\u9475\u9476\u9477\u9478\u9479\u947a\u947b\u947c\u947d\u947e\u947f\u9480\u9481\u9482\u9483\u9484\u9485\u9486\u9487\u9488\u9489\u948a\u948b\u948c\u948d\u948e\u948f\u9490\u9491\u9492\u9493\u9494\u9495\u9496\u9497\u9498\u9499\u949a\u949b\u949c\u949d\u949e\u949f\u94a0\u94a1\u94a2\u94a3\u94a4\u94a5\u94a6\u94a7\u94a8\u94a9\u94aa\u94ab\u94ac\u94ad\u94ae\u94af\u94b0\u94b1\u94b2\u94b3\u94b4\u94
b5\u94b6\u94b7\u94b8\u94b9\u94ba\u94bb\u94bc\u94bd\u94be\u94bf\u94c0\u94c1\u94c2\u94c3\u94c4\u94c5\u94c6\u94c7\u94c8\u94c9\u94ca\u94cb\u94cc\u94cd\u94ce\u94cf\u94d0\u94d1\u94d2\u94d3\u94d4\u94d5\u94d6\u94d7\u94d8\u94d9\u94da\u94db\u94dc\u94dd\u94de\u94df\u94e0\u94e1\u94e2\u94e3\u94e4\u94e5\u94e6\u94e7\u94e8\u94e9\u94ea\u94eb\u94ec\u94ed\u94ee\u94ef\u94f0\u94f1\u94f2\u94f3\u94f4\u94f5\u94f6\u94f7\u94f8\u94f9\u94fa\u94fb\u94fc\u94fd\u94fe\u94ff\u9500\u9501\u9502\u9503\u9504\u9505\u9506\u9507\u9508\u9509\u950a\u950b\u950c\u950d\u950e\u950f\u9510\u9511\u9512\u9513\u9514\u9515\u9516\u9517\u9518\u9519\u951a\u951b\u951c\u951d\u951e\u951f\u9520\u9521\u9522\u9523\u9524\u9525\u9526\u9527\u9528\u9529\u952a\u952b\u952c\u952d\u952e\u952f\u9530\u9531\u9532\u9533\u9534\u9535\u9536\u9537\u9538\u9539\u953a\u953b\u953c\u953d\u953e\u953f\u9540\u9541\u9542\u9543\u9544\u9545\u9546\u9547\u9548\u9549\u954a\u954b\u954c\u954d\u954e\u954f\u9550\u9551\u9552\u9553\u9554\u9555\u9556\u9557\u9558\u9559\u955a\u955b\u955c\u955d\u955e\u955f\u9560\u9561\u9562\u9563\u9564\u9565\u9566\u9567\u9568\u9569\u956a\u956b\u956c\u956d\u956e\u956f\u9570\u9571\u9572\u9573\u9574\u9575\u9576\u9577\u9578\u9579\u957a\u957b\u957c\u957d\u957e\u957f\u9580\u9581\u9582\u9583\u9584\u9585\u9586\u9587\u9588\u9589\u958a\u958b\u958c\u958d\u958e\u958f\u9590\u9591\u9592\u9593\u9594\u9595\u9596\u9597\u9598\u9599\u959a\u959b\u959c\u959d\u959e\u959f\u95a0\u95a1\u95a2\u95a3\u95a4\u95a5\u95a6\u95a7\u95a8\u95a9\u95aa\u95ab\u95ac\u95ad\u95ae\u95af\u95b0\u95b1\u95b2\u95b3\u95b4\u95b5\u95b6\u95b7\u95b8\u95b9\u95ba\u95bb\u95bc\u95bd\u95be\u95bf\u95c0\u95c1\u95c2\u95c3\u95c4\u95c5\u95c6\u95c7\u95c8\u95c9\u95ca\u95cb\u95cc\u95cd\u95ce\u95cf\u95d0\u95d1\u95d2\u95d3\u95d4\u95d5\u95d6\u95d7\u95d8\u95d9\u95da\u95db\u95dc\u95dd\u95de\u95df\u95e0\u95e1\u95e2\u95e3\u95e4\u95e5\u95e6\u95e7\u95e8\u95e9\u95ea\u95eb\u95ec\u95ed\u95ee\u95ef\u95f0\u95f1\u95f2\u95f3\u95f4\u95f5\u95f6\u95f7\u95f8\u95f9\u95fa\u95fb\u95fc\u95fd\u95fe\u95ff\u9600\u9601\u9602\u9603\u9604\u9605\u9606\u9607\u9608\u9609\u960a\u960b\u960c\u960d\u960e\u960f\u9610\u9611\u9612\u9613\u9614\u9615\u9616\u9617\u9618\u9619\u961a\u961b\u961c\u961d\u961e\u961f\u9620\u9621\u9622\u9623\u9624\u9625\u9626\u9627\u9628\u9629\u962a\u962b\u962c\u962d\u962e\u962f\u9630\u9631\u9632\u9633\u9634\u9635\u9636\u9637\u9638\u9639\u963a\u963b\u963c\u963d\u963e\u963f\u9640\u9641\u9642\u9643\u9644\u9645\u9646\u9647\u9648\u9649\u964a\u964b\u964c\u964d\u964e\u964f\u9650\u9651\u9652\u9653\u9654\u9655\u9656\u9657\u9658\u9659\u965a\u965b\u965c\u965d\u965e\u965f\u9660\u9661\u9662\u9663\u9664\u9665\u9666\u9667\u9668\u9669\u966a\u966b\u966c\u966d\u966e\u966f\u9670\u9671\u9672\u9673\u9674\u9675\u9676\u9677\u9678\u9679\u967a\u967b\u967c\u967d\u967e\u967f\u9680\u9681\u9682\u9683\u9684\u9685\u9686\u9687\u9688\u9689\u968a\u968b\u968c\u968d\u968e\u968f\u9690\u9691\u9692\u9693\u9694\u9695\u9696\u9697\u9698\u9699\u969a\u969b\u969c\u969d\u969e\u969f\u96a0\u96a1\u96a2\u96a3\u96a4\u96a5\u96a6\u96a7\u96a8\u96a9\u96aa\u96ab\u96ac\u96ad\u96ae\u96af\u96b0\u96b1\u96b2\u96b3\u96b4\u96b5\u96b6\u96b7\u96b8\u96b9\u96ba\u96bb\u96bc\u96bd\u96be\u96bf\u96c0\u96c1\u96c2\u96c3\u96c4\u96c5\u96c6\u96c7\u96c8\u96c9\u96ca\u96cb\u96cc\u96cd\u96ce\u96cf\u96d0\u96d1\u96d2\u96d3\u96d4\u96d5\u96d6\u96d7\u96d8\u96d9\u96da\u96db\u96dc\u96dd\u96de\u96df\u96e0\u96e1\u96e2\u96e3\u96e4\u96e5\u96e6\u96e7\u96e8\u96e9\u96ea\u96eb\u96ec\u96ed\u96ee\u96ef\u96f0\u96f1\u96f2\u96f3\u96f4\u96f5\u96f6\u96f7\u96f8\u96f9\u96fa\u96fb\u96fc\u96fd\u96fe\u96ff\u9700\u9701\u9702\u9703\u9704\u9705\
u9706\u9707\u9708\u9709\u970a\u970b\u970c\u970d\u970e\u970f\u9710\u9711\u9712\u9713\u9714\u9715\u9716\u9717\u9718\u9719\u971a\u971b\u971c\u971d\u971e\u971f\u9720\u9721\u9722\u9723\u9724\u9725\u9726\u9727\u9728\u9729\u972a\u972b\u972c\u972d\u972e\u972f\u9730\u9731\u9732\u9733\u9734\u9735\u9736\u9737\u9738\u9739\u973a\u973b\u973c\u973d\u973e\u973f\u9740\u9741\u9742\u9743\u9744\u9745\u9746\u9747\u9748\u9749\u974a\u974b\u974c\u974d\u974e\u974f\u9750\u9751\u9752\u9753\u9754\u9755\u9756\u9757\u9758\u9759\u975a\u975b\u975c\u975d\u975e\u975f\u9760\u9761\u9762\u9763\u9764\u9765\u9766\u9767\u9768\u9769\u976a\u976b\u976c\u976d\u976e\u976f\u9770\u9771\u9772\u9773\u9774\u9775\u9776\u9777\u9778\u9779\u977a\u977b\u977c\u977d\u977e\u977f\u9780\u9781\u9782\u9783\u9784\u9785\u9786\u9787\u9788\u9789\u978a\u978b\u978c\u978d\u978e\u978f\u9790\u9791\u9792\u9793\u9794\u9795\u9796\u9797\u9798\u9799\u979a\u979b\u979c\u979d\u979e\u979f\u97a0\u97a1\u97a2\u97a3\u97a4\u97a5\u97a6\u97a7\u97a8\u97a9\u97aa\u97ab\u97ac\u97ad\u97ae\u97af\u97b0\u97b1\u97b2\u97b3\u97b4\u97b5\u97b6\u97b7\u97b8\u97b9\u97ba\u97bb\u97bc\u97bd\u97be\u97bf\u97c0\u97c1\u97c2\u97c3\u97c4\u97c5\u97c6\u97c7\u97c8\u97c9\u97ca\u97cb\u97cc\u97cd\u97ce\u97cf\u97d0\u97d1\u97d2\u97d3\u97d4\u97d5\u97d6\u97d7\u97d8\u97d9\u97da\u97db\u97dc\u97dd\u97de\u97df\u97e0\u97e1\u97e2\u97e3\u97e4\u97e5\u97e6\u97e7\u97e8\u97e9\u97ea\u97eb\u97ec\u97ed\u97ee\u97ef\u97f0\u97f1\u97f2\u97f3\u97f4\u97f5\u97f6\u97f7\u97f8\u97f9\u97fa\u97fb\u97fc\u97fd\u97fe\u97ff\u9800\u9801\u9802\u9803\u9804\u9805\u9806\u9807\u9808\u9809\u980a\u980b\u980c\u980d\u980e\u980f\u9810\u9811\u9812\u9813\u9814\u9815\u9816\u9817\u9818\u9819\u981a\u981b\u981c\u981d\u981e\u981f\u9820\u9821\u9822\u9823\u9824\u9825\u9826\u9827\u9828\u9829\u982a\u982b\u982c\u982d\u982e\u982f\u9830\u9831\u9832\u9833\u9834\u9835\u9836\u9837\u9838\u9839\u983a\u983b\u983c\u983d\u983e\u983f\u9840\u9841\u9842\u9843\u9844\u9845\u9846\u9847\u9848\u9849\u984a\u984b\u984c\u984d\u984e\u984f\u9850\u9851\u9852\u9853\u9854\u9855\u9856\u9857\u9858\u9859\u985a\u985b\u985c\u985d\u985e\u985f\u9860\u9861\u9862\u9863\u9864\u9865\u9866\u9867\u9868\u9869\u986a\u986b\u986c\u986d\u986e\u986f\u9870\u9871\u9872\u9873\u9874\u9875\u9876\u9877\u9878\u9879\u987a\u987b\u987c\u987d\u987e\u987f\u9880\u9881\u9882\u9883\u9884\u9885\u9886\u9887\u9888\u9889\u988a\u988b\u988c\u988d\u988e\u988f\u9890\u9891\u9892\u9893\u9894\u9895\u9896\u9897\u9898\u9899\u989a\u989b\u989c\u989d\u989e\u989f\u98a0\u98a1\u98a2\u98a3\u98a4\u98a5\u98a6\u98a7\u98a8\u98a9\u98aa\u98ab\u98ac\u98ad\u98ae\u98af\u98b0\u98b1\u98b2\u98b3\u98b4\u98b5\u98b6\u98b7\u98b8\u98b9\u98ba\u98bb\u98bc\u98bd\u98be\u98bf\u98c0\u98c1\u98c2\u98c3\u98c4\u98c5\u98c6\u98c7\u98c8\u98c9\u98ca\u98cb\u98cc\u98cd\u98ce\u98cf\u98d0\u98d1\u98d2\u98d3\u98d4\u98d5\u98d6\u98d7\u98d8\u98d9\u98da\u98db\u98dc\u98dd\u98de\u98df\u98e0\u98e1\u98e2\u98e3\u98e4\u98e5\u98e6\u98e7\u98e8\u98e9\u98ea\u98eb\u98ec\u98ed\u98ee\u98ef\u98f0\u98f1\u98f2\u98f3\u98f4\u98f5\u98f6\u98f7\u98f8\u98f9\u98fa\u98fb\u98fc\u98fd\u98fe\u98ff\u9900\u9901\u9902\u9903\u9904\u9905\u9906\u9907\u9908\u9909\u990a\u990b\u990c\u990d\u990e\u990f\u9910\u9911\u9912\u9913\u9914\u9915\u9916\u9917\u9918\u9919\u991a\u991b\u991c\u991d\u991e\u991f\u9920\u9921\u9922\u9923\u9924\u9925\u9926\u9927\u9928\u9929\u992a\u992b\u992c\u992d\u992e\u992f\u9930\u9931\u9932\u9933\u9934\u9935\u9936\u9937\u9938\u9939\u993a\u993b\u993c\u993d\u993e\u993f\u9940\u9941\u9942\u9943\u9944\u9945\u9946\u9947\u9948\u9949\u994a\u994b\u994c\u994d\u994e\u994f\u9950\u9951\u9952\u9953\u9954\u9955\u99
56\u9957\u9958\u9959\u995a\u995b\u995c\u995d\u995e\u995f\u9960\u9961\u9962\u9963\u9964\u9965\u9966\u9967\u9968\u9969\u996a\u996b\u996c\u996d\u996e\u996f\u9970\u9971\u9972\u9973\u9974\u9975\u9976\u9977\u9978\u9979\u997a\u997b\u997c\u997d\u997e\u997f\u9980\u9981\u9982\u9983\u9984\u9985\u9986\u9987\u9988\u9989\u998a\u998b\u998c\u998d\u998e\u998f\u9990\u9991\u9992\u9993\u9994\u9995\u9996\u9997\u9998\u9999\u999a\u999b\u999c\u999d\u999e\u999f\u99a0\u99a1\u99a2\u99a3\u99a4\u99a5\u99a6\u99a7\u99a8\u99a9\u99aa\u99ab\u99ac\u99ad\u99ae\u99af\u99b0\u99b1\u99b2\u99b3\u99b4\u99b5\u99b6\u99b7\u99b8\u99b9\u99ba\u99bb\u99bc\u99bd\u99be\u99bf\u99c0\u99c1\u99c2\u99c3\u99c4\u99c5\u99c6\u99c7\u99c8\u99c9\u99ca\u99cb\u99cc\u99cd\u99ce\u99cf\u99d0\u99d1\u99d2\u99d3\u99d4\u99d5\u99d6\u99d7\u99d8\u99d9\u99da\u99db\u99dc\u99dd\u99de\u99df\u99e0\u99e1\u99e2\u99e3\u99e4\u99e5\u99e6\u99e7\u99e8\u99e9\u99ea\u99eb\u99ec\u99ed\u99ee\u99ef\u99f0\u99f1\u99f2\u99f3\u99f4\u99f5\u99f6\u99f7\u99f8\u99f9\u99fa\u99fb\u99fc\u99fd\u99fe\u99ff\u9a00\u9a01\u9a02\u9a03\u9a04\u9a05\u9a06\u9a07\u9a08\u9a09\u9a0a\u9a0b\u9a0c\u9a0d\u9a0e\u9a0f\u9a10\u9a11\u9a12\u9a13\u9a14\u9a15\u9a16\u9a17\u9a18\u9a19\u9a1a\u9a1b\u9a1c\u9a1d\u9a1e\u9a1f\u9a20\u9a21\u9a22\u9a23\u9a24\u9a25\u9a26\u9a27\u9a28\u9a29\u9a2a\u9a2b\u9a2c\u9a2d\u9a2e\u9a2f\u9a30\u9a31\u9a32\u9a33\u9a34\u9a35\u9a36\u9a37\u9a38\u9a39\u9a3a\u9a3b\u9a3c\u9a3d\u9a3e\u9a3f\u9a40\u9a41\u9a42\u9a43\u9a44\u9a45\u9a46\u9a47\u9a48\u9a49\u9a4a\u9a4b\u9a4c\u9a4d\u9a4e\u9a4f\u9a50\u9a51\u9a52\u9a53\u9a54\u9a55\u9a56\u9a57\u9a58\u9a59\u9a5a\u9a5b\u9a5c\u9a5d\u9a5e\u9a5f\u9a60\u9a61\u9a62\u9a63\u9a64\u9a65\u9a66\u9a67\u9a68\u9a69\u9a6a\u9a6b\u9a6c\u9a6d\u9a6e\u9a6f\u9a70\u9a71\u9a72\u9a73\u9a74\u9a75\u9a76\u9a77\u9a78\u9a79\u9a7a\u9a7b\u9a7c\u9a7d\u9a7e\u9a7f\u9a80\u9a81\u9a82\u9a83\u9a84\u9a85\u9a86\u9a87\u9a88\u9a89\u9a8a\u9a8b\u9a8c\u9a8d\u9a8e\u9a8f\u9a90\u9a91\u9a92\u9a93\u9a94\u9a95\u9a96\u9a97\u9a98\u9a99\u9a9a\u9a9b\u9a9c\u9a9d\u9a9e\u9a9f\u9aa0\u9aa1\u9aa2\u9aa3\u9aa4\u9aa5\u9aa6\u9aa7\u9aa8\u9aa9\u9aaa\u9aab\u9aac\u9aad\u9aae\u9aaf\u9ab0\u9ab1\u9ab2\u9ab3\u9ab4\u9ab5\u9ab6\u9ab7\u9ab8\u9ab9\u9aba\u9abb\u9abc\u9abd\u9abe\u9abf\u9ac0\u9ac1\u9ac2\u9ac3\u9ac4\u9ac5\u9ac6\u9ac7\u9ac8\u9ac9\u9aca\u9acb\u9acc\u9acd\u9ace\u9acf\u9ad0\u9ad1\u9ad2\u9ad3\u9ad4\u9ad5\u9ad6\u9ad7\u9ad8\u9ad9\u9ada\u9adb\u9adc\u9add\u9ade\u9adf\u9ae0\u9ae1\u9ae2\u9ae3\u9ae4\u9ae5\u9ae6\u9ae7\u9ae8\u9ae9\u9aea\u9aeb\u9aec\u9aed\u9aee\u9aef\u9af0\u9af1\u9af2\u9af3\u9af4\u9af5\u9af6\u9af7\u9af8\u9af9\u9afa\u9afb\u9afc\u9afd\u9afe\u9aff\u9b00\u9b01\u9b02\u9b03\u9b04\u9b05\u9b06\u9b07\u9b08\u9b09\u9b0a\u9b0b\u9b0c\u9b0d\u9b0e\u9b0f\u9b10\u9b11\u9b12\u9b13\u9b14\u9b15\u9b16\u9b17\u9b18\u9b19\u9b1a\u9b1b\u9b1c\u9b1d\u9b1e\u9b1f\u9b20\u9b21\u9b22\u9b23\u9b24\u9b25\u9b26\u9b27\u9b28\u9b29\u9b2a\u9b2b\u9b2c\u9b2d\u9b2e\u9b2f\u9b30\u9b31\u9b32\u9b33\u9b34\u9b35\u9b36\u9b37\u9b38\u9b39\u9b3a\u9b3b\u9b3c\u9b3d\u9b3e\u9b3f\u9b40\u9b41\u9b42\u9b43\u9b44\u9b45\u9b46\u9b47\u9b48\u9b49\u9b4a\u9b4b\u9b4c\u9b4d\u9b4e\u9b4f\u9b50\u9b51\u9b52\u9b53\u9b54\u9b55\u9b56\u9b57\u9b58\u9b59\u9b5a\u9b5b\u9b5c\u9b5d\u9b5e\u9b5f\u9b60\u9b61\u9b62\u9b63\u9b64\u9b65\u9b66\u9b67\u9b68\u9b69\u9b6a\u9b6b\u9b6c\u9b6d\u9b6e\u9b6f\u9b70\u9b71\u9b72\u9b73\u9b74\u9b75\u9b76\u9b77\u9b78\u9b79\u9b7a\u9b7b\u9b7c\u9b7d\u9b7e\u9b7f\u9b80\u9b81\u9b82\u9b83\u9b84\u9b85\u9b86\u9b87\u9b88\u9b89\u9b8a\u9b8b\u9b8c\u9b8d\u9b8e\u9b8f\u9b90\u9b91\u9b92\u9b93\u9b94\u9b95\u9b96\u9b97\u9b98\u9b99\u9b9a\u9b9b\u9b9c\u9b9d\u9b9e\u9b9f\u9ba0\u9ba1\u9ba2\u9ba3\u9ba4\u9ba5\u9ba6\
u9ba7\u9ba8\u9ba9\u9baa\u9bab\u9bac\u9bad\u9bae\u9baf\u9bb0\u9bb1\u9bb2\u9bb3\u9bb4\u9bb5\u9bb6\u9bb7\u9bb8\u9bb9\u9bba\u9bbb\u9bbc\u9bbd\u9bbe\u9bbf\u9bc0\u9bc1\u9bc2\u9bc3\u9bc4\u9bc5\u9bc6\u9bc7\u9bc8\u9bc9\u9bca\u9bcb\u9bcc\u9bcd\u9bce\u9bcf\u9bd0\u9bd1\u9bd2\u9bd3\u9bd4\u9bd5\u9bd6\u9bd7\u9bd8\u9bd9\u9bda\u9bdb\u9bdc\u9bdd\u9bde\u9bdf\u9be0\u9be1\u9be2\u9be3\u9be4\u9be5\u9be6\u9be7\u9be8\u9be9\u9bea\u9beb\u9bec\u9bed\u9bee\u9bef\u9bf0\u9bf1\u9bf2\u9bf3\u9bf4\u9bf5\u9bf6\u9bf7\u9bf8\u9bf9\u9bfa\u9bfb\u9bfc\u9bfd\u9bfe\u9bff\u9c00\u9c01\u9c02\u9c03\u9c04\u9c05\u9c06\u9c07\u9c08\u9c09\u9c0a\u9c0b\u9c0c\u9c0d\u9c0e\u9c0f\u9c10\u9c11\u9c12\u9c13\u9c14\u9c15\u9c16\u9c17\u9c18\u9c19\u9c1a\u9c1b\u9c1c\u9c1d\u9c1e\u9c1f\u9c20\u9c21\u9c22\u9c23\u9c24\u9c25\u9c26\u9c27\u9c28\u9c29\u9c2a\u9c2b\u9c2c\u9c2d\u9c2e\u9c2f\u9c30\u9c31\u9c32\u9c33\u9c34\u9c35\u9c36\u9c37\u9c38\u9c39\u9c3a\u9c3b\u9c3c\u9c3d\u9c3e\u9c3f\u9c40\u9c41\u9c42\u9c43\u9c44\u9c45\u9c46\u9c47\u9c48\u9c49\u9c4a\u9c4b\u9c4c\u9c4d\u9c4e\u9c4f\u9c50\u9c51\u9c52\u9c53\u9c54\u9c55\u9c56\u9c57\u9c58\u9c59\u9c5a\u9c5b\u9c5c\u9c5d\u9c5e\u9c5f\u9c60\u9c61\u9c62\u9c63\u9c64\u9c65\u9c66\u9c67\u9c68\u9c69\u9c6a\u9c6b\u9c6c\u9c6d\u9c6e\u9c6f\u9c70\u9c71\u9c72\u9c73\u9c74\u9c75\u9c76\u9c77\u9c78\u9c79\u9c7a\u9c7b\u9c7c\u9c7d\u9c7e\u9c7f\u9c80\u9c81\u9c82\u9c83\u9c84\u9c85\u9c86\u9c87\u9c88\u9c89\u9c8a\u9c8b\u9c8c\u9c8d\u9c8e\u9c8f\u9c90\u9c91\u9c92\u9c93\u9c94\u9c95\u9c96\u9c97\u9c98\u9c99\u9c9a\u9c9b\u9c9c\u9c9d\u9c9e\u9c9f\u9ca0\u9ca1\u9ca2\u9ca3\u9ca4\u9ca5\u9ca6\u9ca7\u9ca8\u9ca9\u9caa\u9cab\u9cac\u9cad\u9cae\u9caf\u9cb0\u9cb1\u9cb2\u9cb3\u9cb4\u9cb5\u9cb6\u9cb7\u9cb8\u9cb9\u9cba\u9cbb\u9cbc\u9cbd\u9cbe\u9cbf\u9cc0\u9cc1\u9cc2\u9cc3\u9cc4\u9cc5\u9cc6\u9cc7\u9cc8\u9cc9\u9cca\u9ccb\u9ccc\u9ccd\u9cce\u9ccf\u9cd0\u9cd1\u9cd2\u9cd3\u9cd4\u9cd5\u9cd6\u9cd7\u9cd8\u9cd9\u9cda\u9cdb\u9cdc\u9cdd\u9cde\u9cdf\u9ce0\u9ce1\u9ce2\u9ce3\u9ce4\u9ce5\u9ce6\u9ce7\u9ce8\u9ce9\u9cea\u9ceb\u9cec\u9ced\u9cee\u9cef\u9cf0\u9cf1\u9cf2\u9cf3\u9cf4\u9cf5\u9cf6\u9cf7\u9cf8\u9cf9\u9cfa\u9cfb\u9cfc\u9cfd\u9cfe\u9cff\u9d00\u9d01\u9d02\u9d03\u9d04\u9d05\u9d06\u9d07\u9d08\u9d09\u9d0a\u9d0b\u9d0c\u9d0d\u9d0e\u9d0f\u9d10\u9d11\u9d12\u9d13\u9d14\u9d15\u9d16\u9d17\u9d18\u9d19\u9d1a\u9d1b\u9d1c\u9d1d\u9d1e\u9d1f\u9d20\u9d21\u9d22\u9d23\u9d24\u9d25\u9d26\u9d27\u9d28\u9d29\u9d2a\u9d2b\u9d2c\u9d2d\u9d2e\u9d2f\u9d30\u9d31\u9d32\u9d33\u9d34\u9d35\u9d36\u9d37\u9d38\u9d39\u9d3a\u9d3b\u9d3c\u9d3d\u9d3e\u9d3f\u9d40\u9d41\u9d42\u9d43\u9d44\u9d45\u9d46\u9d47\u9d48\u9d49\u9d4a\u9d4b\u9d4c\u9d4d\u9d4e\u9d4f\u9d50\u9d51\u9d52\u9d53\u9d54\u9d55\u9d56\u9d57\u9d58\u9d59\u9d5a\u9d5b\u9d5c\u9d5d\u9d5e\u9d5f\u9d60\u9d61\u9d62\u9d63\u9d64\u9d65\u9d66\u9d67\u9d68\u9d69\u9d6a\u9d6b\u9d6c\u9d6d\u9d6e\u9d6f\u9d70\u9d71\u9d72\u9d73\u9d74\u9d75\u9d76\u9d77\u9d78\u9d79\u9d7a\u9d7b\u9d7c\u9d7d\u9d7e\u9d7f\u9d80\u9d81\u9d82\u9d83\u9d84\u9d85\u9d86\u9d87\u9d88\u9d89\u9d8a\u9d8b\u9d8c\u9d8d\u9d8e\u9d8f\u9d90\u9d91\u9d92\u9d93\u9d94\u9d95\u9d96\u9d97\u9d98\u9d99\u9d9a\u9d9b\u9d9c\u9d9d\u9d9e\u9d9f\u9da0\u9da1\u9da2\u9da3\u9da4\u9da5\u9da6\u9da7\u9da8\u9da9\u9daa\u9dab\u9dac\u9dad\u9dae\u9daf\u9db0\u9db1\u9db2\u9db3\u9db4\u9db5\u9db6\u9db7\u9db8\u9db9\u9dba\u9dbb\u9dbc\u9dbd\u9dbe\u9dbf\u9dc0\u9dc1\u9dc2\u9dc3\u9dc4\u9dc5\u9dc6\u9dc7\u9dc8\u9dc9\u9dca\u9dcb\u9dcc\u9dcd\u9dce\u9dcf\u9dd0\u9dd1\u9dd2\u9dd3\u9dd4\u9dd5\u9dd6\u9dd7\u9dd8\u9dd9\u9dda\u9ddb\u9ddc\u9ddd\u9dde\u9ddf\u9de0\u9de1\u9de2\u9de3\u9de4\u9de5\u9de6\u9de7\u9de8\u9de9\u9dea\u9deb\u9dec\u9ded\u9dee\u9def\u9df0\u9df1\u9df2\u9df3\u9df4\u9df5\u9df6\u9d
f7\u9df8\u9df9\u9dfa\u9dfb\u9dfc\u9dfd\u9dfe\u9dff\u9e00\u9e01\u9e02\u9e03\u9e04\u9e05\u9e06\u9e07\u9e08\u9e09\u9e0a\u9e0b\u9e0c\u9e0d\u9e0e\u9e0f\u9e10\u9e11\u9e12\u9e13\u9e14\u9e15\u9e16\u9e17\u9e18\u9e19\u9e1a\u9e1b\u9e1c\u9e1d\u9e1e\u9e1f\u9e20\u9e21\u9e22\u9e23\u9e24\u9e25\u9e26\u9e27\u9e28\u9e29\u9e2a\u9e2b\u9e2c\u9e2d\u9e2e\u9e2f\u9e30\u9e31\u9e32\u9e33\u9e34\u9e35\u9e36\u9e37\u9e38\u9e39\u9e3a\u9e3b\u9e3c\u9e3d\u9e3e\u9e3f\u9e40\u9e41\u9e42\u9e43\u9e44\u9e45\u9e46\u9e47\u9e48\u9e49\u9e4a\u9e4b\u9e4c\u9e4d\u9e4e\u9e4f\u9e50\u9e51\u9e52\u9e53\u9e54\u9e55\u9e56\u9e57\u9e58\u9e59\u9e5a\u9e5b\u9e5c\u9e5d\u9e5e\u9e5f\u9e60\u9e61\u9e62\u9e63\u9e64\u9e65\u9e66\u9e67\u9e68\u9e69\u9e6a\u9e6b\u9e6c\u9e6d\u9e6e\u9e6f\u9e70\u9e71\u9e72\u9e73\u9e74\u9e75\u9e76\u9e77\u9e78\u9e79\u9e7a\u9e7b\u9e7c\u9e7d\u9e7e\u9e7f\u9e80\u9e81\u9e82\u9e83\u9e84\u9e85\u9e86\u9e87\u9e88\u9e89\u9e8a\u9e8b\u9e8c\u9e8d\u9e8e\u9e8f\u9e90\u9e91\u9e92\u9e93\u9e94\u9e95\u9e96\u9e97\u9e98\u9e99\u9e9a\u9e9b\u9e9c\u9e9d\u9e9e\u9e9f\u9ea0\u9ea1\u9ea2\u9ea3\u9ea4\u9ea5\u9ea6\u9ea7\u9ea8\u9ea9\u9eaa\u9eab\u9eac\u9ead\u9eae\u9eaf\u9eb0\u9eb1\u9eb2\u9eb3\u9eb4\u9eb5\u9eb6\u9eb7\u9eb8\u9eb9\u9eba\u9ebb\u9ebc\u9ebd\u9ebe\u9ebf\u9ec0\u9ec1\u9ec2\u9ec3\u9ec4\u9ec5\u9ec6\u9ec7\u9ec8\u9ec9\u9eca\u9ecb\u9ecc\u9ecd\u9ece\u9ecf\u9ed0\u9ed1\u9ed2\u9ed3\u9ed4\u9ed5\u9ed6\u9ed7\u9ed8\u9ed9\u9eda\u9edb\u9edc\u9edd\u9ede\u9edf\u9ee0\u9ee1\u9ee2\u9ee3\u9ee4\u9ee5\u9ee6\u9ee7\u9ee8\u9ee9\u9eea\u9eeb\u9eec\u9eed\u9eee\u9eef\u9ef0\u9ef1\u9ef2\u9ef3\u9ef4\u9ef5\u9ef6\u9ef7\u9ef8\u9ef9\u9efa\u9efb\u9efc\u9efd\u9efe\u9eff\u9f00\u9f01\u9f02\u9f03\u9f04\u9f05\u9f06\u9f07\u9f08\u9f09\u9f0a\u9f0b\u9f0c\u9f0d\u9f0e\u9f0f\u9f10\u9f11\u9f12\u9f13\u9f14\u9f15\u9f16\u9f17\u9f18\u9f19\u9f1a\u9f1b\u9f1c\u9f1d\u9f1e\u9f1f\u9f20\u9f21\u9f22\u9f23\u9f24\u9f25\u9f26\u9f27\u9f28\u9f29\u9f2a\u9f2b\u9f2c\u9f2d\u9f2e\u9f2f\u9f30\u9f31\u9f32\u9f33\u9f34\u9f35\u9f36\u9f37\u9f38\u9f39\u9f3a\u9f3b\u9f3c\u9f3d\u9f3e\u9f3f\u9f40\u9f41\u9f42\u9f43\u9f44\u9f45\u9f46\u9f47\u9f48\u9f49\u9f4a\u9f4b\u9f4c\u9f4d\u9f4e\u9f4f\u9f50\u9f51\u9f52\u9f53\u9f54\u9f55\u9f56\u9f57\u9f58\u9f59\u9f5a\u9f5b\u9f5c\u9f5d\u9f5e\u9f5f\u9f60\u9f61\u9f62\u9f63\u9f64\u9f65\u9f66\u9f67\u9f68\u9f69\u9f6a\u9f6b\u9f6c\u9f6d\u9f6e\u9f6f\u9f70\u9f71\u9f72\u9f73\u9f74\u9f75\u9f76\u9f77\u9f78\u9f79\u9f7a\u9f7b\u9f7c\u9f7d\u9f7e\u9f7f\u9f80\u9f81\u9f82\u9f83\u9f84\u9f85\u9f86\u9f87\u9f88\u9f89\u9f8a\u9f8b\u9f8c\u9f8d\u9f8e\u9f8f\u9f90\u9f91\u9f92\u9f93\u9f94\u9f95\u9f96\u9f97\u9f98\u9f99\u9f9a\u9f9b\u9f9c\u9f9d\u9f9e\u9f9f\u9fa0\u9fa1\u9fa2\u9fa3\u9fa4\u9fa5\u9fa6\u9fa7\u9fa8\u9fa9\u9faa\u9fab\u9fac\u9fad\u9fae\u9faf\u9fb0\u9fb1\u9fb2\u9fb3\u9fb4\u9fb5\u9fb6\u9fb7\u9fb8\u9fb9\u9fba\u9fbb\u9fbc\u9fbd\u9fbe\u9fbf\u9fc0\u9fc1\u9fc2\u9fc3\u9fc4\u9fc5\u9fc6\u9fc7\u9fc8\u9fc9\u9fca\u9fcb\ua000\ua001\ua002\ua003\ua004\ua005\ua006\ua007\ua008\ua009\ua00a\ua00b\ua00c\ua00d\ua00e\ua00f\ua010\ua011\ua012\ua013\ua014\ua016\ua017\ua018\ua019\ua01a\ua01b\ua01c\ua01d\ua01e\ua01f\ua020\ua021\ua022\ua023\ua024\ua025\ua026\ua027\ua028\ua029\ua02a\ua02b\ua02c\ua02d\ua02e\ua02f\ua030\ua031\ua032\ua033\ua034\ua035\ua036\ua037\ua038\ua039\ua03a\ua03b\ua03c\ua03d\ua03e\ua03f\ua040\ua041\ua042\ua043\ua044\ua045\ua046\ua047\ua048\ua049\ua04a\ua04b\ua04c\ua04d\ua04e\ua04f\ua050\ua051\ua052\ua053\ua054\ua055\ua056\ua057\ua058\ua059\ua05a\ua05b\ua05c\ua05d\ua05e\ua05f\ua060\ua061\ua062\ua063\ua064\ua065\ua066\ua067\ua068\ua069\ua06a\ua06b\ua06c\ua06d\ua06e\ua06f\ua070\ua071\ua072\ua073\ua074\ua075\ua076\ua077\ua078\ua079\ua07a\ua07b\ua07c\
ua07d\ua07e\ua07f\ua080\ua081\ua082\ua083\ua084\ua085\ua086\ua087\ua088\ua089\ua08a\ua08b\ua08c\ua08d\ua08e\ua08f\ua090\ua091\ua092\ua093\ua094\ua095\ua096\ua097\ua098\ua099\ua09a\ua09b\ua09c\ua09d\ua09e\ua09f\ua0a0\ua0a1\ua0a2\ua0a3\ua0a4\ua0a5\ua0a6\ua0a7\ua0a8\ua0a9\ua0aa\ua0ab\ua0ac\ua0ad\ua0ae\ua0af\ua0b0\ua0b1\ua0b2\ua0b3\ua0b4\ua0b5\ua0b6\ua0b7\ua0b8\ua0b9\ua0ba\ua0bb\ua0bc\ua0bd\ua0be\ua0bf\ua0c0\ua0c1\ua0c2\ua0c3\ua0c4\ua0c5\ua0c6\ua0c7\ua0c8\ua0c9\ua0ca\ua0cb\ua0cc\ua0cd\ua0ce\ua0cf\ua0d0\ua0d1\ua0d2\ua0d3\ua0d4\ua0d5\ua0d6\ua0d7\ua0d8\ua0d9\ua0da\ua0db\ua0dc\ua0dd\ua0de\ua0df\ua0e0\ua0e1\ua0e2\ua0e3\ua0e4\ua0e5\ua0e6\ua0e7\ua0e8\ua0e9\ua0ea\ua0eb\ua0ec\ua0ed\ua0ee\ua0ef\ua0f0\ua0f1\ua0f2\ua0f3\ua0f4\ua0f5\ua0f6\ua0f7\ua0f8\ua0f9\ua0fa\ua0fb\ua0fc\ua0fd\ua0fe\ua0ff\ua100\ua101\ua102\ua103\ua104\ua105\ua106\ua107\ua108\ua109\ua10a\ua10b\ua10c\ua10d\ua10e\ua10f\ua110\ua111\ua112\ua113\ua114\ua115\ua116\ua117\ua118\ua119\ua11a\ua11b\ua11c\ua11d\ua11e\ua11f\ua120\ua121\ua122\ua123\ua124\ua125\ua126\ua127\ua128\ua129\ua12a\ua12b\ua12c\ua12d\ua12e\ua12f\ua130\ua131\ua132\ua133\ua134\ua135\ua136\ua137\ua138\ua139\ua13a\ua13b\ua13c\ua13d\ua13e\ua13f\ua140\ua141\ua142\ua143\ua144\ua145\ua146\ua147\ua148\ua149\ua14a\ua14b\ua14c\ua14d\ua14e\ua14f\ua150\ua151\ua152\ua153\ua154\ua155\ua156\ua157\ua158\ua159\ua15a\ua15b\ua15c\ua15d\ua15e\ua15f\ua160\ua161\ua162\ua163\ua164\ua165\ua166\ua167\ua168\ua169\ua16a\ua16b\ua16c\ua16d\ua16e\ua16f\ua170\ua171\ua172\ua173\ua174\ua175\ua176\ua177\ua178\ua179\ua17a\ua17b\ua17c\ua17d\ua17e\ua17f\ua180\ua181\ua182\ua183\ua184\ua185\ua186\ua187\ua188\ua189\ua18a\ua18b\ua18c\ua18d\ua18e\ua18f\ua190\ua191\ua192\ua193\ua194\ua195\ua196\ua197\ua198\ua199\ua19a\ua19b\ua19c\ua19d\ua19e\ua19f\ua1a0\ua1a1\ua1a2\ua1a3\ua1a4\ua1a5\ua1a6\ua1a7\ua1a8\ua1a9\ua1aa\ua1ab\ua1ac\ua1ad\ua1ae\ua1af\ua1b0\ua1b1\ua1b2\ua1b3\ua1b4\ua1b5\ua1b6\ua1b7\ua1b8\ua1b9\ua1ba\ua1bb\ua1bc\ua1bd\ua1be\ua1bf\ua1c0\ua1c1\ua1c2\ua1c3\ua1c4\ua1c5\ua1c6\ua1c7\ua1c8\ua1c9\ua1ca\ua1cb\ua1cc\ua1cd\ua1ce\ua1cf\ua1d0\ua1d1\ua1d2\ua1d3\ua1d4\ua1d5\ua1d6\ua1d7\ua1d8\ua1d9\ua1da\ua1db\ua1dc\ua1dd\ua1de\ua1df\ua1e0\ua1e1\ua1e2\ua1e3\ua1e4\ua1e5\ua1e6\ua1e7\ua1e8\ua1e9\ua1ea\ua1eb\ua1ec\ua1ed\ua1ee\ua1ef\ua1f0\ua1f1\ua1f2\ua1f3\ua1f4\ua1f5\ua1f6\ua1f7\ua1f8\ua1f9\ua1fa\ua1fb\ua1fc\ua1fd\ua1fe\ua1ff\ua200\ua201\ua202\ua203\ua204\ua205\ua206\ua207\ua208\ua209\ua20a\ua20b\ua20c\ua20d\ua20e\ua20f\ua210\ua211\ua212\ua213\ua214\ua215\ua216\ua217\ua218\ua219\ua21a\ua21b\ua21c\ua21d\ua21e\ua21f\ua220\ua221\ua222\ua223\ua224\ua225\ua226\ua227\ua228\ua229\ua22a\ua22b\ua22c\ua22d\ua22e\ua22f\ua230\ua231\ua232\ua233\ua234\ua235\ua236\ua237\ua238\ua239\ua23a\ua23b\ua23c\ua23d\ua23e\ua23f\ua240\ua241\ua242\ua243\ua244\ua245\ua246\ua247\ua248\ua249\ua24a\ua24b\ua24c\ua24d\ua24e\ua24f\ua250\ua251\ua252\ua253\ua254\ua255\ua256\ua257\ua258\ua259\ua25a\ua25b\ua25c\ua25d\ua25e\ua25f\ua260\ua261\ua262\ua263\ua264\ua265\ua266\ua267\ua268\ua269\ua26a\ua26b\ua26c\ua26d\ua26e\ua26f\ua270\ua271\ua272\ua273\ua274\ua275\ua276\ua277\ua278\ua279\ua27a\ua27b\ua27c\ua27d\ua27e\ua27f\ua280\ua281\ua282\ua283\ua284\ua285\ua286\ua287\ua288\ua289\ua28a\ua28b\ua28c\ua28d\ua28e\ua28f\ua290\ua291\ua292\ua293\ua294\ua295\ua296\ua297\ua298\ua299\ua29a\ua29b\ua29c\ua29d\ua29e\ua29f\ua2a0\ua2a1\ua2a2\ua2a3\ua2a4\ua2a5\ua2a6\ua2a7\ua2a8\ua2a9\ua2aa\ua2ab\ua2ac\ua2ad\ua2ae\ua2af\ua2b0\ua2b1\ua2b2\ua2b3\ua2b4\ua2b5\ua2b6\ua2b7\ua2b8\ua2b9\ua2ba\ua2bb\ua2bc\ua2bd\ua2be\ua2bf\ua2c0\ua2c1\ua2c2\ua2c3\ua2c4\ua2c5\ua2c6\ua2c7\ua2c8\ua2c9\ua2ca\ua2cb\ua2cc\ua2
cd\ua2ce\ua2cf\ua2d0\ua2d1\ua2d2\ua2d3\ua2d4\ua2d5\ua2d6\ua2d7\ua2d8\ua2d9\ua2da\ua2db\ua2dc\ua2dd\ua2de\ua2df\ua2e0\ua2e1\ua2e2\ua2e3\ua2e4\ua2e5\ua2e6\ua2e7\ua2e8\ua2e9\ua2ea\ua2eb\ua2ec\ua2ed\ua2ee\ua2ef\ua2f0\ua2f1\ua2f2\ua2f3\ua2f4\ua2f5\ua2f6\ua2f7\ua2f8\ua2f9\ua2fa\ua2fb\ua2fc\ua2fd\ua2fe\ua2ff\ua300\ua301\ua302\ua303\ua304\ua305\ua306\ua307\ua308\ua309\ua30a\ua30b\ua30c\ua30d\ua30e\ua30f\ua310\ua311\ua312\ua313\ua314\ua315\ua316\ua317\ua318\ua319\ua31a\ua31b\ua31c\ua31d\ua31e\ua31f\ua320\ua321\ua322\ua323\ua324\ua325\ua326\ua327\ua328\ua329\ua32a\ua32b\ua32c\ua32d\ua32e\ua32f\ua330\ua331\ua332\ua333\ua334\ua335\ua336\ua337\ua338\ua339\ua33a\ua33b\ua33c\ua33d\ua33e\ua33f\ua340\ua341\ua342\ua343\ua344\ua345\ua346\ua347\ua348\ua349\ua34a\ua34b\ua34c\ua34d\ua34e\ua34f\ua350\ua351\ua352\ua353\ua354\ua355\ua356\ua357\ua358\ua359\ua35a\ua35b\ua35c\ua35d\ua35e\ua35f\ua360\ua361\ua362\ua363\ua364\ua365\ua366\ua367\ua368\ua369\ua36a\ua36b\ua36c\ua36d\ua36e\ua36f\ua370\ua371\ua372\ua373\ua374\ua375\ua376\ua377\ua378\ua379\ua37a\ua37b\ua37c\ua37d\ua37e\ua37f\ua380\ua381\ua382\ua383\ua384\ua385\ua386\ua387\ua388\ua389\ua38a\ua38b\ua38c\ua38d\ua38e\ua38f\ua390\ua391\ua392\ua393\ua394\ua395\ua396\ua397\ua398\ua399\ua39a\ua39b\ua39c\ua39d\ua39e\ua39f\ua3a0\ua3a1\ua3a2\ua3a3\ua3a4\ua3a5\ua3a6\ua3a7\ua3a8\ua3a9\ua3aa\ua3ab\ua3ac\ua3ad\ua3ae\ua3af\ua3b0\ua3b1\ua3b2\ua3b3\ua3b4\ua3b5\ua3b6\ua3b7\ua3b8\ua3b9\ua3ba\ua3bb\ua3bc\ua3bd\ua3be\ua3bf\ua3c0\ua3c1\ua3c2\ua3c3\ua3c4\ua3c5\ua3c6\ua3c7\ua3c8\ua3c9\ua3ca\ua3cb\ua3cc\ua3cd\ua3ce\ua3cf\ua3d0\ua3d1\ua3d2\ua3d3\ua3d4\ua3d5\ua3d6\ua3d7\ua3d8\ua3d9\ua3da\ua3db\ua3dc\ua3dd\ua3de\ua3df\ua3e0\ua3e1\ua3e2\ua3e3\ua3e4\ua3e5\ua3e6\ua3e7\ua3e8\ua3e9\ua3ea\ua3eb\ua3ec\ua3ed\ua3ee\ua3ef\ua3f0\ua3f1\ua3f2\ua3f3\ua3f4\ua3f5\ua3f6\ua3f7\ua3f8\ua3f9\ua3fa\ua3fb\ua3fc\ua3fd\ua3fe\ua3ff\ua400\ua401\ua402\ua403\ua404\ua405\ua406\ua407\ua408\ua409\ua40a\ua40b\ua40c\ua40d\ua40e\ua40f\ua410\ua411\ua412\ua413\ua414\ua415\ua416\ua417\ua418\ua419\ua41a\ua41b\ua41c\ua41d\ua41e\ua41f\ua420\ua421\ua422\ua423\ua424\ua425\ua426\ua427\ua428\ua429\ua42a\ua42b\ua42c\ua42d\ua42e\ua42f\ua430\ua431\ua432\ua433\ua434\ua435\ua436\ua437\ua438\ua439\ua43a\ua43b\ua43c\ua43d\ua43e\ua43f\ua440\ua441\ua442\ua443\ua444\ua445\ua446\ua447\ua448\ua449\ua44a\ua44b\ua44c\ua44d\ua44e\ua44f\ua450\ua451\ua452\ua453\ua454\ua455\ua456\ua457\ua458\ua459\ua45a\ua45b\ua45c\ua45d\ua45e\ua45f\ua460\ua461\ua462\ua463\ua464\ua465\ua466\ua467\ua468\ua469\ua46a\ua46b\ua46c\ua46d\ua46e\ua46f\ua470\ua471\ua472\ua473\ua474\ua475\ua476\ua477\ua478\ua479\ua47a\ua47b\ua47c\ua47d\ua47e\ua47f\ua480\ua481\ua482\ua483\ua484\ua485\ua486\ua487\ua488\ua489\ua48a\ua48b\ua48c\ua4d0\ua4d1\ua4d2\ua4d3\ua4d4\ua4d5\ua4d6\ua4d7\ua4d8\ua4d9\ua4da\ua4db\ua4dc\ua4dd\ua4de\ua4df\ua4e0\ua4e1\ua4e2\ua4e3\ua4e4\ua4e5\ua4e6\ua4e7\ua4e8\ua4e9\ua4ea\ua4eb\ua4ec\ua4ed\ua4ee\ua4ef\ua4f0\ua4f1\ua4f2\ua4f3\ua4f4\ua4f5\ua4f6\ua4f7\ua500\ua501\ua502\ua503\ua504\ua505\ua506\ua507\ua508\ua509\ua50a\ua50b\ua50c\ua50d\ua50e\ua50f\ua510\ua511\ua512\ua513\ua514\ua515\ua516\ua517\ua518\ua519\ua51a\ua51b\ua51c\ua51d\ua51e\ua51f\ua520\ua521\ua522\ua523\ua524\ua525\ua526\ua527\ua528\ua529\ua52a\ua52b\ua52c\ua52d\ua52e\ua52f\ua530\ua531\ua532\ua533\ua534\ua535\ua536\ua537\ua538\ua539\ua53a\ua53b\ua53c\ua53d\ua53e\ua53f\ua540\ua541\ua542\ua543\ua544\ua545\ua546\ua547\ua548\ua549\ua54a\ua54b\ua54c\ua54d\ua54e\ua54f\ua550\ua551\ua552\ua553\ua554\ua555\ua556\ua557\ua558\ua559\ua55a\ua55b\ua55c\ua55d\ua55e\ua55f\ua560\ua561\ua562\ua563\ua564\ua565\ua566\ua567\ua568\
ua569\ua56a\ua56b\ua56c\ua56d\ua56e\ua56f\ua570\ua571\ua572\ua573\ua574\ua575\ua576\ua577\ua578\ua579\ua57a\ua57b\ua57c\ua57d\ua57e\ua57f\ua580\ua581\ua582\ua583\ua584\ua585\ua586\ua587\ua588\ua589\ua58a\ua58b\ua58c\ua58d\ua58e\ua58f\ua590\ua591\ua592\ua593\ua594\ua595\ua596\ua597\ua598\ua599\ua59a\ua59b\ua59c\ua59d\ua59e\ua59f\ua5a0\ua5a1\ua5a2\ua5a3\ua5a4\ua5a5\ua5a6\ua5a7\ua5a8\ua5a9\ua5aa\ua5ab\ua5ac\ua5ad\ua5ae\ua5af\ua5b0\ua5b1\ua5b2\ua5b3\ua5b4\ua5b5\ua5b6\ua5b7\ua5b8\ua5b9\ua5ba\ua5bb\ua5bc\ua5bd\ua5be\ua5bf\ua5c0\ua5c1\ua5c2\ua5c3\ua5c4\ua5c5\ua5c6\ua5c7\ua5c8\ua5c9\ua5ca\ua5cb\ua5cc\ua5cd\ua5ce\ua5cf\ua5d0\ua5d1\ua5d2\ua5d3\ua5d4\ua5d5\ua5d6\ua5d7\ua5d8\ua5d9\ua5da\ua5db\ua5dc\ua5dd\ua5de\ua5df\ua5e0\ua5e1\ua5e2\ua5e3\ua5e4\ua5e5\ua5e6\ua5e7\ua5e8\ua5e9\ua5ea\ua5eb\ua5ec\ua5ed\ua5ee\ua5ef\ua5f0\ua5f1\ua5f2\ua5f3\ua5f4\ua5f5\ua5f6\ua5f7\ua5f8\ua5f9\ua5fa\ua5fb\ua5fc\ua5fd\ua5fe\ua5ff\ua600\ua601\ua602\ua603\ua604\ua605\ua606\ua607\ua608\ua609\ua60a\ua60b\ua610\ua611\ua612\ua613\ua614\ua615\ua616\ua617\ua618\ua619\ua61a\ua61b\ua61c\ua61d\ua61e\ua61f\ua62a\ua62b\ua66e\ua6a0\ua6a1\ua6a2\ua6a3\ua6a4\ua6a5\ua6a6\ua6a7\ua6a8\ua6a9\ua6aa\ua6ab\ua6ac\ua6ad\ua6ae\ua6af\ua6b0\ua6b1\ua6b2\ua6b3\ua6b4\ua6b5\ua6b6\ua6b7\ua6b8\ua6b9\ua6ba\ua6bb\ua6bc\ua6bd\ua6be\ua6bf\ua6c0\ua6c1\ua6c2\ua6c3\ua6c4\ua6c5\ua6c6\ua6c7\ua6c8\ua6c9\ua6ca\ua6cb\ua6cc\ua6cd\ua6ce\ua6cf\ua6d0\ua6d1\ua6d2\ua6d3\ua6d4\ua6d5\ua6d6\ua6d7\ua6d8\ua6d9\ua6da\ua6db\ua6dc\ua6dd\ua6de\ua6df\ua6e0\ua6e1\ua6e2\ua6e3\ua6e4\ua6e5\ua7fb\ua7fc\ua7fd\ua7fe\ua7ff\ua800\ua801\ua803\ua804\ua805\ua807\ua808\ua809\ua80a\ua80c\ua80d\ua80e\ua80f\ua810\ua811\ua812\ua813\ua814\ua815\ua816\ua817\ua818\ua819\ua81a\ua81b\ua81c\ua81d\ua81e\ua81f\ua820\ua821\ua822\ua840\ua841\ua842\ua843\ua844\ua845\ua846\ua847\ua848\ua849\ua84a\ua84b\ua84c\ua84d\ua84e\ua84f\ua850\ua851\ua852\ua853\ua854\ua855\ua856\ua857\ua858\ua859\ua85a\ua85b\ua85c\ua85d\ua85e\ua85f\ua860\ua861\ua862\ua863\ua864\ua865\ua866\ua867\ua868\ua869\ua86a\ua86b\ua86c\ua86d\ua86e\ua86f\ua870\ua871\ua872\ua873\ua882\ua883\ua884\ua885\ua886\ua887\ua888\ua889\ua88a\ua88b\ua88c\ua88d\ua88e\ua88f\ua890\ua891\ua892\ua893\ua894\ua895\ua896\ua897\ua898\ua899\ua89a\ua89b\ua89c\ua89d\ua89e\ua89f\ua8a0\ua8a1\ua8a2\ua8a3\ua8a4\ua8a5\ua8a6\ua8a7\ua8a8\ua8a9\ua8aa\ua8ab\ua8ac\ua8ad\ua8ae\ua8af\ua8b0\ua8b1\ua8b2\ua8b3\ua8f2\ua8f3\ua8f4\ua8f5\ua8f6\ua8f7\ua8fb\ua90a\ua90b\ua90c\ua90d\ua90e\ua90f\ua910\ua911\ua912\ua913\ua914\ua915\ua916\ua917\ua918\ua919\ua91a\ua91b\ua91c\ua91d\ua91e\ua91f\ua920\ua921\ua922\ua923\ua924\ua925\ua930\ua931\ua932\ua933\ua934\ua935\ua936\ua937\ua938\ua939\ua93a\ua93b\ua93c\ua93d\ua93e\ua93f\ua940\ua941\ua942\ua943\ua944\ua945\ua946\ua960\ua961\ua962\ua963\ua964\ua965\ua966\ua967\ua968\ua969\ua96a\ua96b\ua96c\ua96d\ua96e\ua96f\ua970\ua971\ua972\ua973\ua974\ua975\ua976\ua977\ua978\ua979\ua97a\ua97b\ua97c\ua984\ua985\ua986\ua987\ua988\ua989\ua98a\ua98b\ua98c\ua98d\ua98e\ua98f\ua990\ua991\ua992\ua993\ua994\ua995\ua996\ua997\ua998\ua999\ua99a\ua99b\ua99c\ua99d\ua99e\ua99f\ua9a0\ua9a1\ua9a2\ua9a3\ua9a4\ua9a5\ua9a6\ua9a7\ua9a8\ua9a9\ua9aa\ua9ab\ua9ac\ua9ad\ua9ae\ua9af\ua9b0\ua9b1\ua9b2\uaa00\uaa01\uaa02\uaa03\uaa04\uaa05\uaa06\uaa07\uaa08\uaa09\uaa0a\uaa0b\uaa0c\uaa0d\uaa0e\uaa0f\uaa10\uaa11\uaa12\uaa13\uaa14\uaa15\uaa16\uaa17\uaa18\uaa19\uaa1a\uaa1b\uaa1c\uaa1d\uaa1e\uaa1f\uaa20\uaa21\uaa22\uaa23\uaa24\uaa25\uaa26\uaa27\uaa28\uaa40\uaa41\uaa42\uaa44\uaa45\uaa46\uaa47\uaa48\uaa49\uaa4a\uaa4b\uaa60\uaa61\uaa62\uaa63\uaa64\uaa65\uaa66\uaa67\uaa68\uaa69\uaa6a\uaa6b\uaa6c\uaa6d\uaa6e\uaa
6f\uaa71\uaa72\uaa73\uaa74\uaa75\uaa76\uaa7a\uaa80\uaa81\uaa82\uaa83\uaa84\uaa85\uaa86\uaa87\uaa88\uaa89\uaa8a\uaa8b\uaa8c\uaa8d\uaa8e\uaa8f\uaa90\uaa91\uaa92\uaa93\uaa94\uaa95\uaa96\uaa97\uaa98\uaa99\uaa9a\uaa9b\uaa9c\uaa9d\uaa9e\uaa9f\uaaa0\uaaa1\uaaa2\uaaa3\uaaa4\uaaa5\uaaa6\uaaa7\uaaa8\uaaa9\uaaaa\uaaab\uaaac\uaaad\uaaae\uaaaf\uaab1\uaab5\uaab6\uaab9\uaaba\uaabb\uaabc\uaabd\uaac0\uaac2\uaadb\uaadc\uabc0\uabc1\uabc2\uabc3\uabc4\uabc5\uabc6\uabc7\uabc8\uabc9\uabca\uabcb\uabcc\uabcd\uabce\uabcf\uabd0\uabd1\uabd2\uabd3\uabd4\uabd5\uabd6\uabd7\uabd8\uabd9\uabda\uabdb\uabdc\uabdd\uabde\uabdf\uabe0\uabe1\uabe2\uac00\uac01\uac02\uac03\uac04\uac05\uac06\uac07\uac08\uac09\uac0a\uac0b\uac0c\uac0d\uac0e\uac0f\uac10\uac11\uac12\uac13\uac14\uac15\uac16\uac17\uac18\uac19\uac1a\uac1b\uac1c\uac1d\uac1e\uac1f\uac20\uac21\uac22\uac23\uac24\uac25\uac26\uac27\uac28\uac29\uac2a\uac2b\uac2c\uac2d\uac2e\uac2f\uac30\uac31\uac32\uac33\uac34\uac35\uac36\uac37\uac38\uac39\uac3a\uac3b\uac3c\uac3d\uac3e\uac3f\uac40\uac41\uac42\uac43\uac44\uac45\uac46\uac47\uac48\uac49\uac4a\uac4b\uac4c\uac4d\uac4e\uac4f\uac50\uac51\uac52\uac53\uac54\uac55\uac56\uac57\uac58\uac59\uac5a\uac5b\uac5c\uac5d\uac5e\uac5f\uac60\uac61\uac62\uac63\uac64\uac65\uac66\uac67\uac68\uac69\uac6a\uac6b\uac6c\uac6d\uac6e\uac6f\uac70\uac71\uac72\uac73\uac74\uac75\uac76\uac77\uac78\uac79\uac7a\uac7b\uac7c\uac7d\uac7e\uac7f\uac80\uac81\uac82\uac83\uac84\uac85\uac86\uac87\uac88\uac89\uac8a\uac8b\uac8c\uac8d\uac8e\uac8f\uac90\uac91\uac92\uac93\uac94\uac95\uac96\uac97\uac98\uac99\uac9a\uac9b\uac9c\uac9d\uac9e\uac9f\uaca0\uaca1\uaca2\uaca3\uaca4\uaca5\uaca6\uaca7\uaca8\uaca9\uacaa\uacab\uacac\uacad\uacae\uacaf\uacb0\uacb1\uacb2\uacb3\uacb4\uacb5\uacb6\uacb7\uacb8\uacb9\uacba\uacbb\uacbc\uacbd\uacbe\uacbf\uacc0\uacc1\uacc2\uacc3\uacc4\uacc5\uacc6\uacc7\uacc8\uacc9\uacca\uaccb\uaccc\uaccd\uacce\uaccf\uacd0\uacd1\uacd2\uacd3\uacd4\uacd5\uacd6\uacd7\uacd8\uacd9\uacda\uacdb\uacdc\uacdd\uacde\uacdf\uace0\uace1\uace2\uace3\uace4\uace5\uace6\uace7\uace8\uace9\uacea\uaceb\uacec\uaced\uacee\uacef\uacf0\uacf1\uacf2\uacf3\uacf4\uacf5\uacf6\uacf7\uacf8\uacf9\uacfa\uacfb\uacfc\uacfd\uacfe\uacff\uad00\uad01\uad02\uad03\uad04\uad05\uad06\uad07\uad08\uad09\uad0a\uad0b\uad0c\uad0d\uad0e\uad0f\uad10\uad11\uad12\uad13\uad14\uad15\uad16\uad17\uad18\uad19\uad1a\uad1b\uad1c\uad1d\uad1e\uad1f\uad20\uad21\uad22\uad23\uad24\uad25\uad26\uad27\uad28\uad29\uad2a\uad2b\uad2c\uad2d\uad2e\uad2f\uad30\uad31\uad32\uad33\uad34\uad35\uad36\uad37\uad38\uad39\uad3a\uad3b\uad3c\uad3d\uad3e\uad3f\uad40\uad41\uad42\uad43\uad44\uad45\uad46\uad47\uad48\uad49\uad4a\uad4b\uad4c\uad4d\uad4e\uad4f\uad50\uad51\uad52\uad53\uad54\uad55\uad56\uad57\uad58\uad59\uad5a\uad5b\uad5c\uad5d\uad5e\uad5f\uad60\uad61\uad62\uad63\uad64\uad65\uad66\uad67\uad68\uad69\uad6a\uad6b\uad6c\uad6d\uad6e\uad6f\uad70\uad71\uad72\uad73\uad74\uad75\uad76\uad77\uad78\uad79\uad7a\uad7b\uad7c\uad7d\uad7e\uad7f\uad80\uad81\uad82\uad83\uad84\uad85\uad86\uad87\uad88\uad89\uad8a\uad8b\uad8c\uad8d\uad8e\uad8f\uad90\uad91\uad92\uad93\uad94\uad95\uad96\uad97\uad98\uad99\uad9a\uad9b\uad9c\uad9d\uad9e\uad9f\uada0\uada1\uada2\uada3\uada4\uada5\uada6\uada7\uada8\uada9\uadaa\uadab\uadac\uadad\uadae\uadaf\uadb0\uadb1\uadb2\uadb3\uadb4\uadb5\uadb6\uadb7\uadb8\uadb9\uadba\uadbb\uadbc\uadbd\uadbe\uadbf\uadc0\uadc1\uadc2\uadc3\uadc4\uadc5\uadc6\uadc7\uadc8\uadc9\uadca\uadcb\uadcc\uadcd\uadce\uadcf\uadd0\uadd1\uadd2\uadd3\uadd4\uadd5\uadd6\uadd7\uadd8\uadd9\uadda\uaddb\uaddc\uaddd\uadde\uaddf\uade0\uade1\uade2\uade3\uade4\uade5\uade6\uade7\uade8\uade9\
uadea\uadeb\uadec\uaded\uadee\uadef\uadf0\uadf1\uadf2\uadf3\uadf4\uadf5\uadf6\uadf7\uadf8\uadf9\uadfa\uadfb\uadfc\uadfd\uadfe\uadff\uae00\uae01\uae02\uae03\uae04\uae05\uae06\uae07\uae08\uae09\uae0a\uae0b\uae0c\uae0d\uae0e\uae0f\uae10\uae11\uae12\uae13\uae14\uae15\uae16\uae17\uae18\uae19\uae1a\uae1b\uae1c\uae1d\uae1e\uae1f\uae20\uae21\uae22\uae23\uae24\uae25\uae26\uae27\uae28\uae29\uae2a\uae2b\uae2c\uae2d\uae2e\uae2f\uae30\uae31\uae32\uae33\uae34\uae35\uae36\uae37\uae38\uae39\uae3a\uae3b\uae3c\uae3d\uae3e\uae3f\uae40\uae41\uae42\uae43\uae44\uae45\uae46\uae47\uae48\uae49\uae4a\uae4b\uae4c\uae4d\uae4e\uae4f\uae50\uae51\uae52\uae53\uae54\uae55\uae56\uae57\uae58\uae59\uae5a\uae5b\uae5c\uae5d\uae5e\uae5f\uae60\uae61\uae62\uae63\uae64\uae65\uae66\uae67\uae68\uae69\uae6a\uae6b\uae6c\uae6d\uae6e\uae6f\uae70\uae71\uae72\uae73\uae74\uae75\uae76\uae77\uae78\uae79\uae7a\uae7b\uae7c\uae7d\uae7e\uae7f\uae80\uae81\uae82\uae83\uae84\uae85\uae86\uae87\uae88\uae89\uae8a\uae8b\uae8c\uae8d\uae8e\uae8f\uae90\uae91\uae92\uae93\uae94\uae95\uae96\uae97\uae98\uae99\uae9a\uae9b\uae9c\uae9d\uae9e\uae9f\uaea0\uaea1\uaea2\uaea3\uaea4\uaea5\uaea6\uaea7\uaea8\uaea9\uaeaa\uaeab\uaeac\uaead\uaeae\uaeaf\uaeb0\uaeb1\uaeb2\uaeb3\uaeb4\uaeb5\uaeb6\uaeb7\uaeb8\uaeb9\uaeba\uaebb\uaebc\uaebd\uaebe\uaebf\uaec0\uaec1\uaec2\uaec3\uaec4\uaec5\uaec6\uaec7\uaec8\uaec9\uaeca\uaecb\uaecc\uaecd\uaece\uaecf\uaed0\uaed1\uaed2\uaed3\uaed4\uaed5\uaed6\uaed7\uaed8\uaed9\uaeda\uaedb\uaedc\uaedd\uaede\uaedf\uaee0\uaee1\uaee2\uaee3\uaee4\uaee5\uaee6\uaee7\uaee8\uaee9\uaeea\uaeeb\uaeec\uaeed\uaeee\uaeef\uaef0\uaef1\uaef2\uaef3\uaef4\uaef5\uaef6\uaef7\uaef8\uaef9\uaefa\uaefb\uaefc\uaefd\uaefe\uaeff\uaf00\uaf01\uaf02\uaf03\uaf04\uaf05\uaf06\uaf07\uaf08\uaf09\uaf0a\uaf0b\uaf0c\uaf0d\uaf0e\uaf0f\uaf10\uaf11\uaf12\uaf13\uaf14\uaf15\uaf16\uaf17\uaf18\uaf19\uaf1a\uaf1b\uaf1c\uaf1d\uaf1e\uaf1f\uaf20\uaf21\uaf22\uaf23\uaf24\uaf25\uaf26\uaf27\uaf28\uaf29\uaf2a\uaf2b\uaf2c\uaf2d\uaf2e\uaf2f\uaf30\uaf31\uaf32\uaf33\uaf34\uaf35\uaf36\uaf37\uaf38\uaf39\uaf3a\uaf3b\uaf3c\uaf3d\uaf3e\uaf3f\uaf40\uaf41\uaf42\uaf43\uaf44\uaf45\uaf46\uaf47\uaf48\uaf49\uaf4a\uaf4b\uaf4c\uaf4d\uaf4e\uaf4f\uaf50\uaf51\uaf52\uaf53\uaf54\uaf55\uaf56\uaf57\uaf58\uaf59\uaf5a\uaf5b\uaf5c\uaf5d\uaf5e\uaf5f\uaf60\uaf61\uaf62\uaf63\uaf64\uaf65\uaf66\uaf67\uaf68\uaf69\uaf6a\uaf6b\uaf6c\uaf6d\uaf6e\uaf6f\uaf70\uaf71\uaf72\uaf73\uaf74\uaf75\uaf76\uaf77\uaf78\uaf79\uaf7a\uaf7b\uaf7c\uaf7d\uaf7e\uaf7f\uaf80\uaf81\uaf82\uaf83\uaf84\uaf85\uaf86\uaf87\uaf88\uaf89\uaf8a\uaf8b\uaf8c\uaf8d\uaf8e\uaf8f\uaf90\uaf91\uaf92\uaf93\uaf94\uaf95\uaf96\uaf97\uaf98\uaf99\uaf9a\uaf9b\uaf9c\uaf9d\uaf9e\uaf9f\uafa0\uafa1\uafa2\uafa3\uafa4\uafa5\uafa6\uafa7\uafa8\uafa9\uafaa\uafab\uafac\uafad\uafae\uafaf\uafb0\uafb1\uafb2\uafb3\uafb4\uafb5\uafb6\uafb7\uafb8\uafb9\uafba\uafbb\uafbc\uafbd\uafbe\uafbf\uafc0\uafc1\uafc2\uafc3\uafc4\uafc5\uafc6\uafc7\uafc8\uafc9\uafca\uafcb\uafcc\uafcd\uafce\uafcf\uafd0\uafd1\uafd2\uafd3\uafd4\uafd5\uafd6\uafd7\uafd8\uafd9\uafda\uafdb\uafdc\uafdd\uafde\uafdf\uafe0\uafe1\uafe2\uafe3\uafe4\uafe5\uafe6\uafe7\uafe8\uafe9\uafea\uafeb\uafec\uafed\uafee\uafef\uaff0\uaff1\uaff2\uaff3\uaff4\uaff5\uaff6\uaff7\uaff8\uaff9\uaffa\uaffb\uaffc\uaffd\uaffe\uafff\ub000\ub001\ub002\ub003\ub004\ub005\ub006\ub007\ub008\ub009\ub00a\ub00b\ub00c\ub00d\ub00e\ub00f\ub010\ub011\ub012\ub013\ub014\ub015\ub016\ub017\ub018\ub019\ub01a\ub01b\ub01c\ub01d\ub01e\ub01f\ub020\ub021\ub022\ub023\ub024\ub025\ub026\ub027\ub028\ub029\ub02a\ub02b\ub02c\ub02d\ub02e\ub02f\ub030\ub031\ub032\ub033\ub034\ub035\ub036\ub037\ub038\ub039\ub0
3a\ub03b\ub03c\ub03d\ub03e\ub03f\ub040\ub041\ub042\ub043\ub044\ub045\ub046\ub047\ub048\ub049\ub04a\ub04b\ub04c\ub04d\ub04e\ub04f\ub050\ub051\ub052\ub053\ub054\ub055\ub056\ub057\ub058\ub059\ub05a\ub05b\ub05c\ub05d\ub05e\ub05f\ub060\ub061\ub062\ub063\ub064\ub065\ub066\ub067\ub068\ub069\ub06a\ub06b\ub06c\ub06d\ub06e\ub06f\ub070\ub071\ub072\ub073\ub074\ub075\ub076\ub077\ub078\ub079\ub07a\ub07b\ub07c\ub07d\ub07e\ub07f\ub080\ub081\ub082\ub083\ub084\ub085\ub086\ub087\ub088\ub089\ub08a\ub08b\ub08c\ub08d\ub08e\ub08f\ub090\ub091\ub092\ub093\ub094\ub095\ub096\ub097\ub098\ub099\ub09a\ub09b\ub09c\ub09d\ub09e\ub09f\ub0a0\ub0a1\ub0a2\ub0a3\ub0a4\ub0a5\ub0a6\ub0a7\ub0a8\ub0a9\ub0aa\ub0ab\ub0ac\ub0ad\ub0ae\ub0af\ub0b0\ub0b1\ub0b2\ub0b3\ub0b4\ub0b5\ub0b6\ub0b7\ub0b8\ub0b9\ub0ba\ub0bb\ub0bc\ub0bd\ub0be\ub0bf\ub0c0\ub0c1\ub0c2\ub0c3\ub0c4\ub0c5\ub0c6\ub0c7\ub0c8\ub0c9\ub0ca\ub0cb\ub0cc\ub0cd\ub0ce\ub0cf\ub0d0\ub0d1\ub0d2\ub0d3\ub0d4\ub0d5\ub0d6\ub0d7\ub0d8\ub0d9\ub0da\ub0db\ub0dc\ub0dd\ub0de\ub0df\ub0e0\ub0e1\ub0e2\ub0e3\ub0e4\ub0e5\ub0e6\ub0e7\ub0e8\ub0e9\ub0ea\ub0eb\ub0ec\ub0ed\ub0ee\ub0ef\ub0f0\ub0f1\ub0f2\ub0f3\ub0f4\ub0f5\ub0f6\ub0f7\ub0f8\ub0f9\ub0fa\ub0fb\ub0fc\ub0fd\ub0fe\ub0ff\ub100\ub101\ub102\ub103\ub104\ub105\ub106\ub107\ub108\ub109\ub10a\ub10b\ub10c\ub10d\ub10e\ub10f\ub110\ub111\ub112\ub113\ub114\ub115\ub116\ub117\ub118\ub119\ub11a\ub11b\ub11c\ub11d\ub11e\ub11f\ub120\ub121\ub122\ub123\ub124\ub125\ub126\ub127\ub128\ub129\ub12a\ub12b\ub12c\ub12d\ub12e\ub12f\ub130\ub131\ub132\ub133\ub134\ub135\ub136\ub137\ub138\ub139\ub13a\ub13b\ub13c\ub13d\ub13e\ub13f\ub140\ub141\ub142\ub143\ub144\ub145\ub146\ub147\ub148\ub149\ub14a\ub14b\ub14c\ub14d\ub14e\ub14f\ub150\ub151\ub152\ub153\ub154\ub155\ub156\ub157\ub158\ub159\ub15a\ub15b\ub15c\ub15d\ub15e\ub15f\ub160\ub161\ub162\ub163\ub164\ub165\ub166\ub167\ub168\ub169\ub16a\ub16b\ub16c\ub16d\ub16e\ub16f\ub170\ub171\ub172\ub173\ub174\ub175\ub176\ub177\ub178\ub179\ub17a\ub17b\ub17c\ub17d\ub17e\ub17f\ub180\ub181\ub182\ub183\ub184\ub185\ub186\ub187\ub188\ub189\ub18a\ub18b\ub18c\ub18d\ub18e\ub18f\ub190\ub191\ub192\ub193\ub194\ub195\ub196\ub197\ub198\ub199\ub19a\ub19b\ub19c\ub19d\ub19e\ub19f\ub1a0\ub1a1\ub1a2\ub1a3\ub1a4\ub1a5\ub1a6\ub1a7\ub1a8\ub1a9\ub1aa\ub1ab\ub1ac\ub1ad\ub1ae\ub1af\ub1b0\ub1b1\ub1b2\ub1b3\ub1b4\ub1b5\ub1b6\ub1b7\ub1b8\ub1b9\ub1ba\ub1bb\ub1bc\ub1bd\ub1be\ub1bf\ub1c0\ub1c1\ub1c2\ub1c3\ub1c4\ub1c5\ub1c6\ub1c7\ub1c8\ub1c9\ub1ca\ub1cb\ub1cc\ub1cd\ub1ce\ub1cf\ub1d0\ub1d1\ub1d2\ub1d3\ub1d4\ub1d5\ub1d6\ub1d7\ub1d8\ub1d9\ub1da\ub1db\ub1dc\ub1dd\ub1de\ub1df\ub1e0\ub1e1\ub1e2\ub1e3\ub1e4\ub1e5\ub1e6\ub1e7\ub1e8\ub1e9\ub1ea\ub1eb\ub1ec\ub1ed\ub1ee\ub1ef\ub1f0\ub1f1\ub1f2\ub1f3\ub1f4\ub1f5\ub1f6\ub1f7\ub1f8\ub1f9\ub1fa\ub1fb\ub1fc\ub1fd\ub1fe\ub1ff\ub200\ub201\ub202\ub203\ub204\ub205\ub206\ub207\ub208\ub209\ub20a\ub20b\ub20c\ub20d\ub20e\ub20f\ub210\ub211\ub212\ub213\ub214\ub215\ub216\ub217\ub218\ub219\ub21a\ub21b\ub21c\ub21d\ub21e\ub21f\ub220\ub221\ub222\ub223\ub224\ub225\ub226\ub227\ub228\ub229\ub22a\ub22b\ub22c\ub22d\ub22e\ub22f\ub230\ub231\ub232\ub233\ub234\ub235\ub236\ub237\ub238\ub239\ub23a\ub23b\ub23c\ub23d\ub23e\ub23f\ub240\ub241\ub242\ub243\ub244\ub245\ub246\ub247\ub248\ub249\ub24a\ub24b\ub24c\ub24d\ub24e\ub24f\ub250\ub251\ub252\ub253\ub254\ub255\ub256\ub257\ub258\ub259\ub25a\ub25b\ub25c\ub25d\ub25e\ub25f\ub260\ub261\ub262\ub263\ub264\ub265\ub266\ub267\ub268\ub269\ub26a\ub26b\ub26c\ub26d\ub26e\ub26f\ub270\ub271\ub272\ub273\ub274\ub275\ub276\ub277\ub278\ub279\ub27a\ub27b\ub27c\ub27d\ub27e\ub27f\ub280\ub281\ub282\ub283\ub284\ub285\ub286\ub287\ub288\ub289\ub28a\
ub28b\ub28c\ub28d\ub28e\ub28f\ub290\ub291\ub292\ub293\ub294\ub295\ub296\ub297\ub298\ub299\ub29a\ub29b\ub29c\ub29d\ub29e\ub29f\ub2a0\ub2a1\ub2a2\ub2a3\ub2a4\ub2a5\ub2a6\ub2a7\ub2a8\ub2a9\ub2aa\ub2ab\ub2ac\ub2ad\ub2ae\ub2af\ub2b0\ub2b1\ub2b2\ub2b3\ub2b4\ub2b5\ub2b6\ub2b7\ub2b8\ub2b9\ub2ba\ub2bb\ub2bc\ub2bd\ub2be\ub2bf\ub2c0\ub2c1\ub2c2\ub2c3\ub2c4\ub2c5\ub2c6\ub2c7\ub2c8\ub2c9\ub2ca\ub2cb\ub2cc\ub2cd\ub2ce\ub2cf\ub2d0\ub2d1\ub2d2\ub2d3\ub2d4\ub2d5\ub2d6\ub2d7\ub2d8\ub2d9\ub2da\ub2db\ub2dc\ub2dd\ub2de\ub2df\ub2e0\ub2e1\ub2e2\ub2e3\ub2e4\ub2e5\ub2e6\ub2e7\ub2e8\ub2e9\ub2ea\ub2eb\ub2ec\ub2ed\ub2ee\ub2ef\ub2f0\ub2f1\ub2f2\ub2f3\ub2f4\ub2f5\ub2f6\ub2f7\ub2f8\ub2f9\ub2fa\ub2fb\ub2fc\ub2fd\ub2fe\ub2ff\ub300\ub301\ub302\ub303\ub304\ub305\ub306\ub307\ub308\ub309\ub30a\ub30b\ub30c\ub30d\ub30e\ub30f\ub310\ub311\ub312\ub313\ub314\ub315\ub316\ub317\ub318\ub319\ub31a\ub31b\ub31c\ub31d\ub31e\ub31f\ub320\ub321\ub322\ub323\ub324\ub325\ub326\ub327\ub328\ub329\ub32a\ub32b\ub32c\ub32d\ub32e\ub32f\ub330\ub331\ub332\ub333\ub334\ub335\ub336\ub337\ub338\ub339\ub33a\ub33b\ub33c\ub33d\ub33e\ub33f\ub340\ub341\ub342\ub343\ub344\ub345\ub346\ub347\ub348\ub349\ub34a\ub34b\ub34c\ub34d\ub34e\ub34f\ub350\ub351\ub352\ub353\ub354\ub355\ub356\ub357\ub358\ub359\ub35a\ub35b\ub35c\ub35d\ub35e\ub35f\ub360\ub361\ub362\ub363\ub364\ub365\ub366\ub367\ub368\ub369\ub36a\ub36b\ub36c\ub36d\ub36e\ub36f\ub370\ub371\ub372\ub373\ub374\ub375\ub376\ub377\ub378\ub379\ub37a\ub37b\ub37c\ub37d\ub37e\ub37f\ub380\ub381\ub382\ub383\ub384\ub385\ub386\ub387\ub388\ub389\ub38a\ub38b\ub38c\ub38d\ub38e\ub38f\ub390\ub391\ub392\ub393\ub394\ub395\ub396\ub397\ub398\ub399\ub39a\ub39b\ub39c\ub39d\ub39e\ub39f\ub3a0\ub3a1\ub3a2\ub3a3\ub3a4\ub3a5\ub3a6\ub3a7\ub3a8\ub3a9\ub3aa\ub3ab\ub3ac\ub3ad\ub3ae\ub3af\ub3b0\ub3b1\ub3b2\ub3b3\ub3b4\ub3b5\ub3b6\ub3b7\ub3b8\ub3b9\ub3ba\ub3bb\ub3bc\ub3bd\ub3be\ub3bf\ub3c0\ub3c1\ub3c2\ub3c3\ub3c4\ub3c5\ub3c6\ub3c7\ub3c8\ub3c9\ub3ca\ub3cb\ub3cc\ub3cd\ub3ce\ub3cf\ub3d0\ub3d1\ub3d2\ub3d3\ub3d4\ub3d5\ub3d6\ub3d7\ub3d8\ub3d9\ub3da\ub3db\ub3dc\ub3dd\ub3de\ub3df\ub3e0\ub3e1\ub3e2\ub3e3\ub3e4\ub3e5\ub3e6\ub3e7\ub3e8\ub3e9\ub3ea\ub3eb\ub3ec\ub3ed\ub3ee\ub3ef\ub3f0\ub3f1\ub3f2\ub3f3\ub3f4\ub3f5\ub3f6\ub3f7\ub3f8\ub3f9\ub3fa\ub3fb\ub3fc\ub3fd\ub3fe\ub3ff\ub400\ub401\ub402\ub403\ub404\ub405\ub406\ub407\ub408\ub409\ub40a\ub40b\ub40c\ub40d\ub40e\ub40f\ub410\ub411\ub412\ub413\ub414\ub415\ub416\ub417\ub418\ub419\ub41a\ub41b\ub41c\ub41d\ub41e\ub41f\ub420\ub421\ub422\ub423\ub424\ub425\ub426\ub427\ub428\ub429\ub42a\ub42b\ub42c\ub42d\ub42e\ub42f\ub430\ub431\ub432\ub433\ub434\ub435\ub436\ub437\ub438\ub439\ub43a\ub43b\ub43c\ub43d\ub43e\ub43f\ub440\ub441\ub442\ub443\ub444\ub445\ub446\ub447\ub448\ub449\ub44a\ub44b\ub44c\ub44d\ub44e\ub44f\ub450\ub451\ub452\ub453\ub454\ub455\ub456\ub457\ub458\ub459\ub45a\ub45b\ub45c\ub45d\ub45e\ub45f\ub460\ub461\ub462\ub463\ub464\ub465\ub466\ub467\ub468\ub469\ub46a\ub46b\ub46c\ub46d\ub46e\ub46f\ub470\ub471\ub472\ub473\ub474\ub475\ub476\ub477\ub478\ub479\ub47a\ub47b\ub47c\ub47d\ub47e\ub47f\ub480\ub481\ub482\ub483\ub484\ub485\ub486\ub487\ub488\ub489\ub48a\ub48b\ub48c\ub48d\ub48e\ub48f\ub490\ub491\ub492\ub493\ub494\ub495\ub496\ub497\ub498\ub499\ub49a\ub49b\ub49c\ub49d\ub49e\ub49f\ub4a0\ub4a1\ub4a2\ub4a3\ub4a4\ub4a5\ub4a6\ub4a7\ub4a8\ub4a9\ub4aa\ub4ab\ub4ac\ub4ad\ub4ae\ub4af\ub4b0\ub4b1\ub4b2\ub4b3\ub4b4\ub4b5\ub4b6\ub4b7\ub4b8\ub4b9\ub4ba\ub4bb\ub4bc\ub4bd\ub4be\ub4bf\ub4c0\ub4c1\ub4c2\ub4c3\ub4c4\ub4c5\ub4c6\ub4c7\ub4c8\ub4c9\ub4ca\ub4cb\ub4cc\ub4cd\ub4ce\ub4cf\ub4d0\ub4d1\ub4d2\ub4d3\ub4d4\ub4d5\ub4d6\ub4d7\ub4d8\ub4d9\ub4da\ub4
db\ub4dc\ub4dd\ub4de\ub4df\ub4e0\ub4e1\ub4e2\ub4e3\ub4e4\ub4e5\ub4e6\ub4e7\ub4e8\ub4e9\ub4ea\ub4eb\ub4ec\ub4ed\ub4ee\ub4ef\ub4f0\ub4f1\ub4f2\ub4f3\ub4f4\ub4f5\ub4f6\ub4f7\ub4f8\ub4f9\ub4fa\ub4fb\ub4fc\ub4fd\ub4fe\ub4ff\ub500\ub501\ub502\ub503\ub504\ub505\ub506\ub507\ub508\ub509\ub50a\ub50b\ub50c\ub50d\ub50e\ub50f\ub510\ub511\ub512\ub513\ub514\ub515\ub516\ub517\ub518\ub519\ub51a\ub51b\ub51c\ub51d\ub51e\ub51f\ub520\ub521\ub522\ub523\ub524\ub525\ub526\ub527\ub528\ub529\ub52a\ub52b\ub52c\ub52d\ub52e\ub52f\ub530\ub531\ub532\ub533\ub534\ub535\ub536\ub537\ub538\ub539\ub53a\ub53b\ub53c\ub53d\ub53e\ub53f\ub540\ub541\ub542\ub543\ub544\ub545\ub546\ub547\ub548\ub549\ub54a\ub54b\ub54c\ub54d\ub54e\ub54f\ub550\ub551\ub552\ub553\ub554\ub555\ub556\ub557\ub558\ub559\ub55a\ub55b\ub55c\ub55d\ub55e\ub55f\ub560\ub561\ub562\ub563\ub564\ub565\ub566\ub567\ub568\ub569\ub56a\ub56b\ub56c\ub56d\ub56e\ub56f\ub570\ub571\ub572\ub573\ub574\ub575\ub576\ub577\ub578\ub579\ub57a\ub57b\ub57c\ub57d\ub57e\ub57f\ub580\ub581\ub582\ub583\ub584\ub585\ub586\ub587\ub588\ub589\ub58a\ub58b\ub58c\ub58d\ub58e\ub58f\ub590\ub591\ub592\ub593\ub594\ub595\ub596\ub597\ub598\ub599\ub59a\ub59b\ub59c\ub59d\ub59e\ub59f\ub5a0\ub5a1\ub5a2\ub5a3\ub5a4\ub5a5\ub5a6\ub5a7\ub5a8\ub5a9\ub5aa\ub5ab\ub5ac\ub5ad\ub5ae\ub5af\ub5b0\ub5b1\ub5b2\ub5b3\ub5b4\ub5b5\ub5b6\ub5b7\ub5b8\ub5b9\ub5ba\ub5bb\ub5bc\ub5bd\ub5be\ub5bf\ub5c0\ub5c1\ub5c2\ub5c3\ub5c4\ub5c5\ub5c6\ub5c7\ub5c8\ub5c9\ub5ca\ub5cb\ub5cc\ub5cd\ub5ce\ub5cf\ub5d0\ub5d1\ub5d2\ub5d3\ub5d4\ub5d5\ub5d6\ub5d7\ub5d8\ub5d9\ub5da\ub5db\ub5dc\ub5dd\ub5de\ub5df\ub5e0\ub5e1\ub5e2\ub5e3\ub5e4\ub5e5\ub5e6\ub5e7\ub5e8\ub5e9\ub5ea\ub5eb\ub5ec\ub5ed\ub5ee\ub5ef\ub5f0\ub5f1\ub5f2\ub5f3\ub5f4\ub5f5\ub5f6\ub5f7\ub5f8\ub5f9\ub5fa\ub5fb\ub5fc\ub5fd\ub5fe\ub5ff\ub600\ub601\ub602\ub603\ub604\ub605\ub606\ub607\ub608\ub609\ub60a\ub60b\ub60c\ub60d\ub60e\ub60f\ub610\ub611\ub612\ub613\ub614\ub615\ub616\ub617\ub618\ub619\ub61a\ub61b\ub61c\ub61d\ub61e\ub61f\ub620\ub621\ub622\ub623\ub624\ub625\ub626\ub627\ub628\ub629\ub62a\ub62b\ub62c\ub62d\ub62e\ub62f\ub630\ub631\ub632\ub633\ub634\ub635\ub636\ub637\ub638\ub639\ub63a\ub63b\ub63c\ub63d\ub63e\ub63f\ub640\ub641\ub642\ub643\ub644\ub645\ub646\ub647\ub648\ub649\ub64a\ub64b\ub64c\ub64d\ub64e\ub64f\ub650\ub651\ub652\ub653\ub654\ub655\ub656\ub657\ub658\ub659\ub65a\ub65b\ub65c\ub65d\ub65e\ub65f\ub660\ub661\ub662\ub663\ub664\ub665\ub666\ub667\ub668\ub669\ub66a\ub66b\ub66c\ub66d\ub66e\ub66f\ub670\ub671\ub672\ub673\ub674\ub675\ub676\ub677\ub678\ub679\ub67a\ub67b\ub67c\ub67d\ub67e\ub67f\ub680\ub681\ub682\ub683\ub684\ub685\ub686\ub687\ub688\ub689\ub68a\ub68b\ub68c\ub68d\ub68e\ub68f\ub690\ub691\ub692\ub693\ub694\ub695\ub696\ub697\ub698\ub699\ub69a\ub69b\ub69c\ub69d\ub69e\ub69f\ub6a0\ub6a1\ub6a2\ub6a3\ub6a4\ub6a5\ub6a6\ub6a7\ub6a8\ub6a9\ub6aa\ub6ab\ub6ac\ub6ad\ub6ae\ub6af\ub6b0\ub6b1\ub6b2\ub6b3\ub6b4\ub6b5\ub6b6\ub6b7\ub6b8\ub6b9\ub6ba\ub6bb\ub6bc\ub6bd\ub6be\ub6bf\ub6c0\ub6c1\ub6c2\ub6c3\ub6c4\ub6c5\ub6c6\ub6c7\ub6c8\ub6c9\ub6ca\ub6cb\ub6cc\ub6cd\ub6ce\ub6cf\ub6d0\ub6d1\ub6d2\ub6d3\ub6d4\ub6d5\ub6d6\ub6d7\ub6d8\ub6d9\ub6da\ub6db\ub6dc\ub6dd\ub6de\ub6df\ub6e0\ub6e1\ub6e2\ub6e3\ub6e4\ub6e5\ub6e6\ub6e7\ub6e8\ub6e9\ub6ea\ub6eb\ub6ec\ub6ed\ub6ee\ub6ef\ub6f0\ub6f1\ub6f2\ub6f3\ub6f4\ub6f5\ub6f6\ub6f7\ub6f8\ub6f9\ub6fa\ub6fb\ub6fc\ub6fd\ub6fe\ub6ff\ub700\ub701\ub702\ub703\ub704\ub705\ub706\ub707\ub708\ub709\ub70a\ub70b\ub70c\ub70d\ub70e\ub70f\ub710\ub711\ub712\ub713\ub714\ub715\ub716\ub717\ub718\ub719\ub71a\ub71b\ub71c\ub71d\ub71e\ub71f\ub720\ub721\ub722\ub723\ub724\ub725\ub726\ub727\ub728\ub729\ub72a\ub72b\
ub72c\ub72d\ub72e\ub72f\ub730\ub731\ub732\ub733\ub734\ub735\ub736\ub737\ub738\ub739\ub73a\ub73b\ub73c\ub73d\ub73e\ub73f\ub740\ub741\ub742\ub743\ub744\ub745\ub746\ub747\ub748\ub749\ub74a\ub74b\ub74c\ub74d\ub74e\ub74f\ub750\ub751\ub752\ub753\ub754\ub755\ub756\ub757\ub758\ub759\ub75a\ub75b\ub75c\ub75d\ub75e\ub75f\ub760\ub761\ub762\ub763\ub764\ub765\ub766\ub767\ub768\ub769\ub76a\ub76b\ub76c\ub76d\ub76e\ub76f\ub770\ub771\ub772\ub773\ub774\ub775\ub776\ub777\ub778\ub779\ub77a\ub77b\ub77c\ub77d\ub77e\ub77f\ub780\ub781\ub782\ub783\ub784\ub785\ub786\ub787\ub788\ub789\ub78a\ub78b\ub78c\ub78d\ub78e\ub78f\ub790\ub791\ub792\ub793\ub794\ub795\ub796\ub797\ub798\ub799\ub79a\ub79b\ub79c\ub79d\ub79e\ub79f\ub7a0\ub7a1\ub7a2\ub7a3\ub7a4\ub7a5\ub7a6\ub7a7\ub7a8\ub7a9\ub7aa\ub7ab\ub7ac\ub7ad\ub7ae\ub7af\ub7b0\ub7b1\ub7b2\ub7b3\ub7b4\ub7b5\ub7b6\ub7b7\ub7b8\ub7b9\ub7ba\ub7bb\ub7bc\ub7bd\ub7be\ub7bf\ub7c0\ub7c1\ub7c2\ub7c3\ub7c4\ub7c5\ub7c6\ub7c7\ub7c8\ub7c9\ub7ca\ub7cb\ub7cc\ub7cd\ub7ce\ub7cf\ub7d0\ub7d1\ub7d2\ub7d3\ub7d4\ub7d5\ub7d6\ub7d7\ub7d8\ub7d9\ub7da\ub7db\ub7dc\ub7dd\ub7de\ub7df\ub7e0\ub7e1\ub7e2\ub7e3\ub7e4\ub7e5\ub7e6\ub7e7\ub7e8\ub7e9\ub7ea\ub7eb\ub7ec\ub7ed\ub7ee\ub7ef\ub7f0\ub7f1\ub7f2\ub7f3\ub7f4\ub7f5\ub7f6\ub7f7\ub7f8\ub7f9\ub7fa\ub7fb\ub7fc\ub7fd\ub7fe\ub7ff\ub800\ub801\ub802\ub803\ub804\ub805\ub806\ub807\ub808\ub809\ub80a\ub80b\ub80c\ub80d\ub80e\ub80f\ub810\ub811\ub812\ub813\ub814\ub815\ub816\ub817\ub818\ub819\ub81a\ub81b\ub81c\ub81d\ub81e\ub81f\ub820\ub821\ub822\ub823\ub824\ub825\ub826\ub827\ub828\ub829\ub82a\ub82b\ub82c\ub82d\ub82e\ub82f\ub830\ub831\ub832\ub833\ub834\ub835\ub836\ub837\ub838\ub839\ub83a\ub83b\ub83c\ub83d\ub83e\ub83f\ub840\ub841\ub842\ub843\ub844\ub845\ub846\ub847\ub848\ub849\ub84a\ub84b\ub84c\ub84d\ub84e\ub84f\ub850\ub851\ub852\ub853\ub854\ub855\ub856\ub857\ub858\ub859\ub85a\ub85b\ub85c\ub85d\ub85e\ub85f\ub860\ub861\ub862\ub863\ub864\ub865\ub866\ub867\ub868\ub869\ub86a\ub86b\ub86c\ub86d\ub86e\ub86f\ub870\ub871\ub872\ub873\ub874\ub875\ub876\ub877\ub878\ub879\ub87a\ub87b\ub87c\ub87d\ub87e\ub87f\ub880\ub881\ub882\ub883\ub884\ub885\ub886\ub887\ub888\ub889\ub88a\ub88b\ub88c\ub88d\ub88e\ub88f\ub890\ub891\ub892\ub893\ub894\ub895\ub896\ub897\ub898\ub899\ub89a\ub89b\ub89c\ub89d\ub89e\ub89f\ub8a0\ub8a1\ub8a2\ub8a3\ub8a4\ub8a5\ub8a6\ub8a7\ub8a8\ub8a9\ub8aa\ub8ab\ub8ac\ub8ad\ub8ae\ub8af\ub8b0\ub8b1\ub8b2\ub8b3\ub8b4\ub8b5\ub8b6\ub8b7\ub8b8\ub8b9\ub8ba\ub8bb\ub8bc\ub8bd\ub8be\ub8bf\ub8c0\ub8c1\ub8c2\ub8c3\ub8c4\ub8c5\ub8c6\ub8c7\ub8c8\ub8c9\ub8ca\ub8cb\ub8cc\ub8cd\ub8ce\ub8cf\ub8d0\ub8d1\ub8d2\ub8d3\ub8d4\ub8d5\ub8d6\ub8d7\ub8d8\ub8d9\ub8da\ub8db\ub8dc\ub8dd\ub8de\ub8df\ub8e0\ub8e1\ub8e2\ub8e3\ub8e4\ub8e5\ub8e6\ub8e7\ub8e8\ub8e9\ub8ea\ub8eb\ub8ec\ub8ed\ub8ee\ub8ef\ub8f0\ub8f1\ub8f2\ub8f3\ub8f4\ub8f5\ub8f6\ub8f7\ub8f8\ub8f9\ub8fa\ub8fb\ub8fc\ub8fd\ub8fe\ub8ff\ub900\ub901\ub902\ub903\ub904\ub905\ub906\ub907\ub908\ub909\ub90a\ub90b\ub90c\ub90d\ub90e\ub90f\ub910\ub911\ub912\ub913\ub914\ub915\ub916\ub917\ub918\ub919\ub91a\ub91b\ub91c\ub91d\ub91e\ub91f\ub920\ub921\ub922\ub923\ub924\ub925\ub926\ub927\ub928\ub929\ub92a\ub92b\ub92c\ub92d\ub92e\ub92f\ub930\ub931\ub932\ub933\ub934\ub935\ub936\ub937\ub938\ub939\ub93a\ub93b\ub93c\ub93d\ub93e\ub93f\ub940\ub941\ub942\ub943\ub944\ub945\ub946\ub947\ub948\ub949\ub94a\ub94b\ub94c\ub94d\ub94e\ub94f\ub950\ub951\ub952\ub953\ub954\ub955\ub956\ub957\ub958\ub959\ub95a\ub95b\ub95c\ub95d\ub95e\ub95f\ub960\ub961\ub962\ub963\ub964\ub965\ub966\ub967\ub968\ub969\ub96a\ub96b\ub96c\ub96d\ub96e\ub96f\ub970\ub971\ub972\ub973\ub974\ub975\ub976\ub977\ub978\ub979\ub97a\ub97b\ub9
7c\ub97d\ub97e\ub97f\ub980\ub981\ub982\ub983\ub984\ub985\ub986\ub987\ub988\ub989\ub98a\ub98b\ub98c\ub98d\ub98e\ub98f\ub990\ub991\ub992\ub993\ub994\ub995\ub996\ub997\ub998\ub999\ub99a\ub99b\ub99c\ub99d\ub99e\ub99f\ub9a0\ub9a1\ub9a2\ub9a3\ub9a4\ub9a5\ub9a6\ub9a7\ub9a8\ub9a9\ub9aa\ub9ab\ub9ac\ub9ad\ub9ae\ub9af\ub9b0\ub9b1\ub9b2\ub9b3\ub9b4\ub9b5\ub9b6\ub9b7\ub9b8\ub9b9\ub9ba\ub9bb\ub9bc\ub9bd\ub9be\ub9bf\ub9c0\ub9c1\ub9c2\ub9c3\ub9c4\ub9c5\ub9c6\ub9c7\ub9c8\ub9c9\ub9ca\ub9cb\ub9cc\ub9cd\ub9ce\ub9cf\ub9d0\ub9d1\ub9d2\ub9d3\ub9d4\ub9d5\ub9d6\ub9d7\ub9d8\ub9d9\ub9da\ub9db\ub9dc\ub9dd\ub9de\ub9df\ub9e0\ub9e1\ub9e2\ub9e3\ub9e4\ub9e5\ub9e6\ub9e7\ub9e8\ub9e9\ub9ea\ub9eb\ub9ec\ub9ed\ub9ee\ub9ef\ub9f0\ub9f1\ub9f2\ub9f3\ub9f4\ub9f5\ub9f6\ub9f7\ub9f8\ub9f9\ub9fa\ub9fb\ub9fc\ub9fd\ub9fe\ub9ff\uba00\uba01\uba02\uba03\uba04\uba05\uba06\uba07\uba08\uba09\uba0a\uba0b\uba0c\uba0d\uba0e\uba0f\uba10\uba11\uba12\uba13\uba14\uba15\uba16\uba17\uba18\uba19\uba1a\uba1b\uba1c\uba1d\uba1e\uba1f\uba20\uba21\uba22\uba23\uba24\uba25\uba26\uba27\uba28\uba29\uba2a\uba2b\uba2c\uba2d\uba2e\uba2f\uba30\uba31\uba32\uba33\uba34\uba35\uba36\uba37\uba38\uba39\uba3a\uba3b\uba3c\uba3d\uba3e\uba3f\uba40\uba41\uba42\uba43\uba44\uba45\uba46\uba47\uba48\uba49\uba4a\uba4b\uba4c\uba4d\uba4e\uba4f\uba50\uba51\uba52\uba53\uba54\uba55\uba56\uba57\uba58\uba59\uba5a\uba5b\uba5c\uba5d\uba5e\uba5f\uba60\uba61\uba62\uba63\uba64\uba65\uba66\uba67\uba68\uba69\uba6a\uba6b\uba6c\uba6d\uba6e\uba6f\uba70\uba71\uba72\uba73\uba74\uba75\uba76\uba77\uba78\uba79\uba7a\uba7b\uba7c\uba7d\uba7e\uba7f\uba80\uba81\uba82\uba83\uba84\uba85\uba86\uba87\uba88\uba89\uba8a\uba8b\uba8c\uba8d\uba8e\uba8f\uba90\uba91\uba92\uba93\uba94\uba95\uba96\uba97\uba98\uba99\uba9a\uba9b\uba9c\uba9d\uba9e\uba9f\ubaa0\ubaa1\ubaa2\ubaa3\ubaa4\ubaa5\ubaa6\ubaa7\ubaa8\ubaa9\ubaaa\ubaab\ubaac\ubaad\ubaae\ubaaf\ubab0\ubab1\ubab2\ubab3\ubab4\ubab5\ubab6\ubab7\ubab8\ubab9\ubaba\ubabb\ubabc\ubabd\ubabe\ubabf\ubac0\ubac1\ubac2\ubac3\ubac4\ubac5\ubac6\ubac7\ubac8\ubac9\ubaca\ubacb\ubacc\ubacd\ubace\ubacf\ubad0\ubad1\ubad2\ubad3\ubad4\ubad5\ubad6\ubad7\ubad8\ubad9\ubada\ubadb\ubadc\ubadd\ubade\ubadf\ubae0\ubae1\ubae2\ubae3\ubae4\ubae5\ubae6\ubae7\ubae8\ubae9\ubaea\ubaeb\ubaec\ubaed\ubaee\ubaef\ubaf0\ubaf1\ubaf2\ubaf3\ubaf4\ubaf5\ubaf6\ubaf7\ubaf8\ubaf9\ubafa\ubafb\ubafc\ubafd\ubafe\ubaff\ubb00\ubb01\ubb02\ubb03\ubb04\ubb05\ubb06\ubb07\ubb08\ubb09\ubb0a\ubb0b\ubb0c\ubb0d\ubb0e\ubb0f\ubb10\ubb11\ubb12\ubb13\ubb14\ubb15\ubb16\ubb17\ubb18\ubb19\ubb1a\ubb1b\ubb1c\ubb1d\ubb1e\ubb1f\ubb20\ubb21\ubb22\ubb23\ubb24\ubb25\ubb26\ubb27\ubb28\ubb29\ubb2a\ubb2b\ubb2c\ubb2d\ubb2e\ubb2f\ubb30\ubb31\ubb32\ubb33\ubb34\ubb35\ubb36\ubb37\ubb38\ubb39\ubb3a\ubb3b\ubb3c\ubb3d\ubb3e\ubb3f\ubb40\ubb41\ubb42\ubb43\ubb44\ubb45\ubb46\ubb47\ubb48\ubb49\ubb4a\ubb4b\ubb4c\ubb4d\ubb4e\ubb4f\ubb50\ubb51\ubb52\ubb53\ubb54\ubb55\ubb56\ubb57\ubb58\ubb59\ubb5a\ubb5b\ubb5c\ubb5d\ubb5e\ubb5f\ubb60\ubb61\ubb62\ubb63\ubb64\ubb65\ubb66\ubb67\ubb68\ubb69\ubb6a\ubb6b\ubb6c\ubb6d\ubb6e\ubb6f\ubb70\ubb71\ubb72\ubb73\ubb74\ubb75\ubb76\ubb77\ubb78\ubb79\ubb7a\ubb7b\ubb7c\ubb7d\ubb7e\ubb7f\ubb80\ubb81\ubb82\ubb83\ubb84\ubb85\ubb86\ubb87\ubb88\ubb89\ubb8a\ubb8b\ubb8c\ubb8d\ubb8e\ubb8f\ubb90\ubb91\ubb92\ubb93\ubb94\ubb95\ubb96\ubb97\ubb98\ubb99\ubb9a\ubb9b\ubb9c\ubb9d\ubb9e\ubb9f\ubba0\ubba1\ubba2\ubba3\ubba4\ubba5\ubba6\ubba7\ubba8\ubba9\ubbaa\ubbab\ubbac\ubbad\ubbae\ubbaf\ubbb0\ubbb1\ubbb2\ubbb3\ubbb4\ubbb5\ubbb6\ubbb7\ubbb8\ubbb9\ubbba\ubbbb\ubbbc\ubbbd\ubbbe\ubbbf\ubbc0\ubbc1\ubbc2\ubbc3\ubbc4\ubbc5\ubbc6\ubbc7\ubbc8\ubbc9\ubbca\ubbcb\ubbcc\
ubbcd\ubbce\ubbcf\ubbd0\ubbd1\ubbd2\ubbd3\ubbd4\ubbd5\ubbd6\ubbd7\ubbd8\ubbd9\ubbda\ubbdb\ubbdc\ubbdd\ubbde\ubbdf\ubbe0\ubbe1\ubbe2\ubbe3\ubbe4\ubbe5\ubbe6\ubbe7\ubbe8\ubbe9\ubbea\ubbeb\ubbec\ubbed\ubbee\ubbef\ubbf0\ubbf1\ubbf2\ubbf3\ubbf4\ubbf5\ubbf6\ubbf7\ubbf8\ubbf9\ubbfa\ubbfb\ubbfc\ubbfd\ubbfe\ubbff\ubc00\ubc01\ubc02\ubc03\ubc04\ubc05\ubc06\ubc07\ubc08\ubc09\ubc0a\ubc0b\ubc0c\ubc0d\ubc0e\ubc0f\ubc10\ubc11\ubc12\ubc13\ubc14\ubc15\ubc16\ubc17\ubc18\ubc19\ubc1a\ubc1b\ubc1c\ubc1d\ubc1e\ubc1f\ubc20\ubc21\ubc22\ubc23\ubc24\ubc25\ubc26\ubc27\ubc28\ubc29\ubc2a\ubc2b\ubc2c\ubc2d\ubc2e\ubc2f\ubc30\ubc31\ubc32\ubc33\ubc34\ubc35\ubc36\ubc37\ubc38\ubc39\ubc3a\ubc3b\ubc3c\ubc3d\ubc3e\ubc3f\ubc40\ubc41\ubc42\ubc43\ubc44\ubc45\ubc46\ubc47\ubc48\ubc49\ubc4a\ubc4b\ubc4c\ubc4d\ubc4e\ubc4f\ubc50\ubc51\ubc52\ubc53\ubc54\ubc55\ubc56\ubc57\ubc58\ubc59\ubc5a\ubc5b\ubc5c\ubc5d\ubc5e\ubc5f\ubc60\ubc61\ubc62\ubc63\ubc64\ubc65\ubc66\ubc67\ubc68\ubc69\ubc6a\ubc6b\ubc6c\ubc6d\ubc6e\ubc6f\ubc70\ubc71\ubc72\ubc73\ubc74\ubc75\ubc76\ubc77\ubc78\ubc79\ubc7a\ubc7b\ubc7c\ubc7d\ubc7e\ubc7f\ubc80\ubc81\ubc82\ubc83\ubc84\ubc85\ubc86\ubc87\ubc88\ubc89\ubc8a\ubc8b\ubc8c\ubc8d\ubc8e\ubc8f\ubc90\ubc91\ubc92\ubc93\ubc94\ubc95\ubc96\ubc97\ubc98\ubc99\ubc9a\ubc9b\ubc9c\ubc9d\ubc9e\ubc9f\ubca0\ubca1\ubca2\ubca3\ubca4\ubca5\ubca6\ubca7\ubca8\ubca9\ubcaa\ubcab\ubcac\ubcad\ubcae\ubcaf\ubcb0\ubcb1\ubcb2\ubcb3\ubcb4\ubcb5\ubcb6\ubcb7\ubcb8\ubcb9\ubcba\ubcbb\ubcbc\ubcbd\ubcbe\ubcbf\ubcc0\ubcc1\ubcc2\ubcc3\ubcc4\ubcc5\ubcc6\ubcc7\ubcc8\ubcc9\ubcca\ubccb\ubccc\ubccd\ubcce\ubccf\ubcd0\ubcd1\ubcd2\ubcd3\ubcd4\ubcd5\ubcd6\ubcd7\ubcd8\ubcd9\ubcda\ubcdb\ubcdc\ubcdd\ubcde\ubcdf\ubce0\ubce1\ubce2\ubce3\ubce4\ubce5\ubce6\ubce7\ubce8\ubce9\ubcea\ubceb\ubcec\ubced\ubcee\ubcef\ubcf0\ubcf1\ubcf2\ubcf3\ubcf4\ubcf5\ubcf6\ubcf7\ubcf8\ubcf9\ubcfa\ubcfb\ubcfc\ubcfd\ubcfe\ubcff\ubd00\ubd01\ubd02\ubd03\ubd04\ubd05\ubd06\ubd07\ubd08\ubd09\ubd0a\ubd0b\ubd0c\ubd0d\ubd0e\ubd0f\ubd10\ubd11\ubd12\ubd13\ubd14\ubd15\ubd16\ubd17\ubd18\ubd19\ubd1a\ubd1b\ubd1c\ubd1d\ubd1e\ubd1f\ubd20\ubd21\ubd22\ubd23\ubd24\ubd25\ubd26\ubd27\ubd28\ubd29\ubd2a\ubd2b\ubd2c\ubd2d\ubd2e\ubd2f\ubd30\ubd31\ubd32\ubd33\ubd34\ubd35\ubd36\ubd37\ubd38\ubd39\ubd3a\ubd3b\ubd3c\ubd3d\ubd3e\ubd3f\ubd40\ubd41\ubd42\ubd43\ubd44\ubd45\ubd46\ubd47\ubd48\ubd49\ubd4a\ubd4b\ubd4c\ubd4d\ubd4e\ubd4f\ubd50\ubd51\ubd52\ubd53\ubd54\ubd55\ubd56\ubd57\ubd58\ubd59\ubd5a\ubd5b\ubd5c\ubd5d\ubd5e\ubd5f\ubd60\ubd61\ubd62\ubd63\ubd64\ubd65\ubd66\ubd67\ubd68\ubd69\ubd6a\ubd6b\ubd6c\ubd6d\ubd6e\ubd6f\ubd70\ubd71\ubd72\ubd73\ubd74\ubd75\ubd76\ubd77\ubd78\ubd79\ubd7a\ubd7b\ubd7c\ubd7d\ubd7e\ubd7f\ubd80\ubd81\ubd82\ubd83\ubd84\ubd85\ubd86\ubd87\ubd88\ubd89\ubd8a\ubd8b\ubd8c\ubd8d\ubd8e\ubd8f\ubd90\ubd91\ubd92\ubd93\ubd94\ubd95\ubd96\ubd97\ubd98\ubd99\ubd9a\ubd9b\ubd9c\ubd9d\ubd9e\ubd9f\ubda0\ubda1\ubda2\ubda3\ubda4\ubda5\ubda6\ubda7\ubda8\ubda9\ubdaa\ubdab\ubdac\ubdad\ubdae\ubdaf\ubdb0\ubdb1\ubdb2\ubdb3\ubdb4\ubdb5\ubdb6\ubdb7\ubdb8\ubdb9\ubdba\ubdbb\ubdbc\ubdbd\ubdbe\ubdbf\ubdc0\ubdc1\ubdc2\ubdc3\ubdc4\ubdc5\ubdc6\ubdc7\ubdc8\ubdc9\ubdca\ubdcb\ubdcc\ubdcd\ubdce\ubdcf\ubdd0\ubdd1\ubdd2\ubdd3\ubdd4\ubdd5\ubdd6\ubdd7\ubdd8\ubdd9\ubdda\ubddb\ubddc\ubddd\ubdde\ubddf\ubde0\ubde1\ubde2\ubde3\ubde4\ubde5\ubde6\ubde7\ubde8\ubde9\ubdea\ubdeb\ubdec\ubded\ubdee\ubdef\ubdf0\ubdf1\ubdf2\ubdf3\ubdf4\ubdf5\ubdf6\ubdf7\ubdf8\ubdf9\ubdfa\ubdfb\ubdfc\ubdfd\ubdfe\ubdff\ube00\ube01\ube02\ube03\ube04\ube05\ube06\ube07\ube08\ube09\ube0a\ube0b\ube0c\ube0d\ube0e\ube0f\ube10\ube11\ube12\ube13\ube14\ube15\ube16\ube17\ube18\ube19\ube1a\ube1b\ube1c\ube
1d\ube1e\ube1f\ube20\ube21\ube22\ube23\ube24\ube25\ube26\ube27\ube28\ube29\ube2a\ube2b\ube2c\ube2d\ube2e\ube2f\ube30\ube31\ube32\ube33\ube34\ube35\ube36\ube37\ube38\ube39\ube3a\ube3b\ube3c\ube3d\ube3e\ube3f\ube40\ube41\ube42\ube43\ube44\ube45\ube46\ube47\ube48\ube49\ube4a\ube4b\ube4c\ube4d\ube4e\ube4f\ube50\ube51\ube52\ube53\ube54\ube55\ube56\ube57\ube58\ube59\ube5a\ube5b\ube5c\ube5d\ube5e\ube5f\ube60\ube61\ube62\ube63\ube64\ube65\ube66\ube67\ube68\ube69\ube6a\ube6b\ube6c\ube6d\ube6e\ube6f\ube70\ube71\ube72\ube73\ube74\ube75\ube76\ube77\ube78\ube79\ube7a\ube7b\ube7c\ube7d\ube7e\ube7f\ube80\ube81\ube82\ube83\ube84\ube85\ube86\ube87\ube88\ube89\ube8a\ube8b\ube8c\ube8d\ube8e\ube8f\ube90\ube91\ube92\ube93\ube94\ube95\ube96\ube97\ube98\ube99\ube9a\ube9b\ube9c\ube9d\ube9e\ube9f\ubea0\ubea1\ubea2\ubea3\ubea4\ubea5\ubea6\ubea7\ubea8\ubea9\ubeaa\ubeab\ubeac\ubead\ubeae\ubeaf\ubeb0\ubeb1\ubeb2\ubeb3\ubeb4\ubeb5\ubeb6\ubeb7\ubeb8\ubeb9\ubeba\ubebb\ubebc\ubebd\ubebe\ubebf\ubec0\ubec1\ubec2\ubec3\ubec4\ubec5\ubec6\ubec7\ubec8\ubec9\ubeca\ubecb\ubecc\ubecd\ubece\ubecf\ubed0\ubed1\ubed2\ubed3\ubed4\ubed5\ubed6\ubed7\ubed8\ubed9\ubeda\ubedb\ubedc\ubedd\ubede\ubedf\ubee0\ubee1\ubee2\ubee3\ubee4\ubee5\ubee6\ubee7\ubee8\ubee9\ubeea\ubeeb\ubeec\ubeed\ubeee\ubeef\ubef0\ubef1\ubef2\ubef3\ubef4\ubef5\ubef6\ubef7\ubef8\ubef9\ubefa\ubefb\ubefc\ubefd\ubefe\ubeff\ubf00\ubf01\ubf02\ubf03\ubf04\ubf05\ubf06\ubf07\ubf08\ubf09\ubf0a\ubf0b\ubf0c\ubf0d\ubf0e\ubf0f\ubf10\ubf11\ubf12\ubf13\ubf14\ubf15\ubf16\ubf17\ubf18\ubf19\ubf1a\ubf1b\ubf1c\ubf1d\ubf1e\ubf1f\ubf20\ubf21\ubf22\ubf23\ubf24\ubf25\ubf26\ubf27\ubf28\ubf29\ubf2a\ubf2b\ubf2c\ubf2d\ubf2e\ubf2f\ubf30\ubf31\ubf32\ubf33\ubf34\ubf35\ubf36\ubf37\ubf38\ubf39\ubf3a\ubf3b\ubf3c\ubf3d\ubf3e\ubf3f\ubf40\ubf41\ubf42\ubf43\ubf44\ubf45\ubf46\ubf47\ubf48\ubf49\ubf4a\ubf4b\ubf4c\ubf4d\ubf4e\ubf4f\ubf50\ubf51\ubf52\ubf53\ubf54\ubf55\ubf56\ubf57\ubf58\ubf59\ubf5a\ubf5b\ubf5c\ubf5d\ubf5e\ubf5f\ubf60\ubf61\ubf62\ubf63\ubf64\ubf65\ubf66\ubf67\ubf68\ubf69\ubf6a\ubf6b\ubf6c\ubf6d\ubf6e\ubf6f\ubf70\ubf71\ubf72\ubf73\ubf74\ubf75\ubf76\ubf77\ubf78\ubf79\ubf7a\ubf7b\ubf7c\ubf7d\ubf7e\ubf7f\ubf80\ubf81\ubf82\ubf83\ubf84\ubf85\ubf86\ubf87\ubf88\ubf89\ubf8a\ubf8b\ubf8c\ubf8d\ubf8e\ubf8f\ubf90\ubf91\ubf92\ubf93\ubf94\ubf95\ubf96\ubf97\ubf98\ubf99\ubf9a\ubf9b\ubf9c\ubf9d\ubf9e\ubf9f\ubfa0\ubfa1\ubfa2\ubfa3\ubfa4\ubfa5\ubfa6\ubfa7\ubfa8\ubfa9\ubfaa\ubfab\ubfac\ubfad\ubfae\ubfaf\ubfb0\ubfb1\ubfb2\ubfb3\ubfb4\ubfb5\ubfb6\ubfb7\ubfb8\ubfb9\ubfba\ubfbb\ubfbc\ubfbd\ubfbe\ubfbf\ubfc0\ubfc1\ubfc2\ubfc3\ubfc4\ubfc5\ubfc6\ubfc7\ubfc8\ubfc9\ubfca\ubfcb\ubfcc\ubfcd\ubfce\ubfcf\ubfd0\ubfd1\ubfd2\ubfd3\ubfd4\ubfd5\ubfd6\ubfd7\ubfd8\ubfd9\ubfda\ubfdb\ubfdc\ubfdd\ubfde\ubfdf\ubfe0\ubfe1\ubfe2\ubfe3\ubfe4\ubfe5\ubfe6\ubfe7\ubfe8\ubfe9\ubfea\ubfeb\ubfec\ubfed\ubfee\ubfef\ubff0\ubff1\ubff2\ubff3\ubff4\ubff5\ubff6\ubff7\ubff8\ubff9\ubffa\ubffb\ubffc\ubffd\ubffe\ubfff\uc000\uc001\uc002\uc003\uc004\uc005\uc006\uc007\uc008\uc009\uc00a\uc00b\uc00c\uc00d\uc00e\uc00f\uc010\uc011\uc012\uc013\uc014\uc015\uc016\uc017\uc018\uc019\uc01a\uc01b\uc01c\uc01d\uc01e\uc01f\uc020\uc021\uc022\uc023\uc024\uc025\uc026\uc027\uc028\uc029\uc02a\uc02b\uc02c\uc02d\uc02e\uc02f\uc030\uc031\uc032\uc033\uc034\uc035\uc036\uc037\uc038\uc039\uc03a\uc03b\uc03c\uc03d\uc03e\uc03f\uc040\uc041\uc042\uc043\uc044\uc045\uc046\uc047\uc048\uc049\uc04a\uc04b\uc04c\uc04d\uc04e\uc04f\uc050\uc051\uc052\uc053\uc054\uc055\uc056\uc057\uc058\uc059\uc05a\uc05b\uc05c\uc05d\uc05e\uc05f\uc060\uc061\uc062\uc063\uc064\uc065\uc066\uc067\uc068\uc069\uc06a\uc06b\uc06c\uc06d\
uc06e\uc06f\uc070\uc071\uc072\uc073\uc074\uc075\uc076\uc077\uc078\uc079\uc07a\uc07b\uc07c\uc07d\uc07e\uc07f\uc080\uc081\uc082\uc083\uc084\uc085\uc086\uc087\uc088\uc089\uc08a\uc08b\uc08c\uc08d\uc08e\uc08f\uc090\uc091\uc092\uc093\uc094\uc095\uc096\uc097\uc098\uc099\uc09a\uc09b\uc09c\uc09d\uc09e\uc09f\uc0a0\uc0a1\uc0a2\uc0a3\uc0a4\uc0a5\uc0a6\uc0a7\uc0a8\uc0a9\uc0aa\uc0ab\uc0ac\uc0ad\uc0ae\uc0af\uc0b0\uc0b1\uc0b2\uc0b3\uc0b4\uc0b5\uc0b6\uc0b7\uc0b8\uc0b9\uc0ba\uc0bb\uc0bc\uc0bd\uc0be\uc0bf\uc0c0\uc0c1\uc0c2\uc0c3\uc0c4\uc0c5\uc0c6\uc0c7\uc0c8\uc0c9\uc0ca\uc0cb\uc0cc\uc0cd\uc0ce\uc0cf\uc0d0\uc0d1\uc0d2\uc0d3\uc0d4\uc0d5\uc0d6\uc0d7\uc0d8\uc0d9\uc0da\uc0db\uc0dc\uc0dd\uc0de\uc0df\uc0e0\uc0e1\uc0e2\uc0e3\uc0e4\uc0e5\uc0e6\uc0e7\uc0e8\uc0e9\uc0ea\uc0eb\uc0ec\uc0ed\uc0ee\uc0ef\uc0f0\uc0f1\uc0f2\uc0f3\uc0f4\uc0f5\uc0f6\uc0f7\uc0f8\uc0f9\uc0fa\uc0fb\uc0fc\uc0fd\uc0fe\uc0ff\uc100\uc101\uc102\uc103\uc104\uc105\uc106\uc107\uc108\uc109\uc10a\uc10b\uc10c\uc10d\uc10e\uc10f\uc110\uc111\uc112\uc113\uc114\uc115\uc116\uc117\uc118\uc119\uc11a\uc11b\uc11c\uc11d\uc11e\uc11f\uc120\uc121\uc122\uc123\uc124\uc125\uc126\uc127\uc128\uc129\uc12a\uc12b\uc12c\uc12d\uc12e\uc12f\uc130\uc131\uc132\uc133\uc134\uc135\uc136\uc137\uc138\uc139\uc13a\uc13b\uc13c\uc13d\uc13e\uc13f\uc140\uc141\uc142\uc143\uc144\uc145\uc146\uc147\uc148\uc149\uc14a\uc14b\uc14c\uc14d\uc14e\uc14f\uc150\uc151\uc152\uc153\uc154\uc155\uc156\uc157\uc158\uc159\uc15a\uc15b\uc15c\uc15d\uc15e\uc15f\uc160\uc161\uc162\uc163\uc164\uc165\uc166\uc167\uc168\uc169\uc16a\uc16b\uc16c\uc16d\uc16e\uc16f\uc170\uc171\uc172\uc173\uc174\uc175\uc176\uc177\uc178\uc179\uc17a\uc17b\uc17c\uc17d\uc17e\uc17f\uc180\uc181\uc182\uc183\uc184\uc185\uc186\uc187\uc188\uc189\uc18a\uc18b\uc18c\uc18d\uc18e\uc18f\uc190\uc191\uc192\uc193\uc194\uc195\uc196\uc197\uc198\uc199\uc19a\uc19b\uc19c\uc19d\uc19e\uc19f\uc1a0\uc1a1\uc1a2\uc1a3\uc1a4\uc1a5\uc1a6\uc1a7\uc1a8\uc1a9\uc1aa\uc1ab\uc1ac\uc1ad\uc1ae\uc1af\uc1b0\uc1b1\uc1b2\uc1b3\uc1b4\uc1b5\uc1b6\uc1b7\uc1b8\uc1b9\uc1ba\uc1bb\uc1bc\uc1bd\uc1be\uc1bf\uc1c0\uc1c1\uc1c2\uc1c3\uc1c4\uc1c5\uc1c6\uc1c7\uc1c8\uc1c9\uc1ca\uc1cb\uc1cc\uc1cd\uc1ce\uc1cf\uc1d0\uc1d1\uc1d2\uc1d3\uc1d4\uc1d5\uc1d6\uc1d7\uc1d8\uc1d9\uc1da\uc1db\uc1dc\uc1dd\uc1de\uc1df\uc1e0\uc1e1\uc1e2\uc1e3\uc1e4\uc1e5\uc1e6\uc1e7\uc1e8\uc1e9\uc1ea\uc1eb\uc1ec\uc1ed\uc1ee\uc1ef\uc1f0\uc1f1\uc1f2\uc1f3\uc1f4\uc1f5\uc1f6\uc1f7\uc1f8\uc1f9\uc1fa\uc1fb\uc1fc\uc1fd\uc1fe\uc1ff\uc200\uc201\uc202\uc203\uc204\uc205\uc206\uc207\uc208\uc209\uc20a\uc20b\uc20c\uc20d\uc20e\uc20f\uc210\uc211\uc212\uc213\uc214\uc215\uc216\uc217\uc218\uc219\uc21a\uc21b\uc21c\uc21d\uc21e\uc21f\uc220\uc221\uc222\uc223\uc224\uc225\uc226\uc227\uc228\uc229\uc22a\uc22b\uc22c\uc22d\uc22e\uc22f\uc230\uc231\uc232\uc233\uc234\uc235\uc236\uc237\uc238\uc239\uc23a\uc23b\uc23c\uc23d\uc23e\uc23f\uc240\uc241\uc242\uc243\uc244\uc245\uc246\uc247\uc248\uc249\uc24a\uc24b\uc24c\uc24d\uc24e\uc24f\uc250\uc251\uc252\uc253\uc254\uc255\uc256\uc257\uc258\uc259\uc25a\uc25b\uc25c\uc25d\uc25e\uc25f\uc260\uc261\uc262\uc263\uc264\uc265\uc266\uc267\uc268\uc269\uc26a\uc26b\uc26c\uc26d\uc26e\uc26f\uc270\uc271\uc272\uc273\uc274\uc275\uc276\uc277\uc278\uc279\uc27a\uc27b\uc27c\uc27d\uc27e\uc27f\uc280\uc281\uc282\uc283\uc284\uc285\uc286\uc287\uc288\uc289\uc28a\uc28b\uc28c\uc28d\uc28e\uc28f\uc290\uc291\uc292\uc293\uc294\uc295\uc296\uc297\uc298\uc299\uc29a\uc29b\uc29c\uc29d\uc29e\uc29f\uc2a0\uc2a1\uc2a2\uc2a3\uc2a4\uc2a5\uc2a6\uc2a7\uc2a8\uc2a9\uc2aa\uc2ab\uc2ac\uc2ad\uc2ae\uc2af\uc2b0\uc2b1\uc2b2\uc2b3\uc2b4\uc2b5\uc2b6\uc2b7\uc2b8\uc2b9\uc2ba\uc2bb\uc2bc\uc2bd\uc2
be\uc2bf\uc2c0\uc2c1\uc2c2\uc2c3\uc2c4\uc2c5\uc2c6\uc2c7\uc2c8\uc2c9\uc2ca\uc2cb\uc2cc\uc2cd\uc2ce\uc2cf\uc2d0\uc2d1\uc2d2\uc2d3\uc2d4\uc2d5\uc2d6\uc2d7\uc2d8\uc2d9\uc2da\uc2db\uc2dc\uc2dd\uc2de\uc2df\uc2e0\uc2e1\uc2e2\uc2e3\uc2e4\uc2e5\uc2e6\uc2e7\uc2e8\uc2e9\uc2ea\uc2eb\uc2ec\uc2ed\uc2ee\uc2ef\uc2f0\uc2f1\uc2f2\uc2f3\uc2f4\uc2f5\uc2f6\uc2f7\uc2f8\uc2f9\uc2fa\uc2fb\uc2fc\uc2fd\uc2fe\uc2ff\uc300\uc301\uc302\uc303\uc304\uc305\uc306\uc307\uc308\uc309\uc30a\uc30b\uc30c\uc30d\uc30e\uc30f\uc310\uc311\uc312\uc313\uc314\uc315\uc316\uc317\uc318\uc319\uc31a\uc31b\uc31c\uc31d\uc31e\uc31f\uc320\uc321\uc322\uc323\uc324\uc325\uc326\uc327\uc328\uc329\uc32a\uc32b\uc32c\uc32d\uc32e\uc32f\uc330\uc331\uc332\uc333\uc334\uc335\uc336\uc337\uc338\uc339\uc33a\uc33b\uc33c\uc33d\uc33e\uc33f\uc340\uc341\uc342\uc343\uc344\uc345\uc346\uc347\uc348\uc349\uc34a\uc34b\uc34c\uc34d\uc34e\uc34f\uc350\uc351\uc352\uc353\uc354\uc355\uc356\uc357\uc358\uc359\uc35a\uc35b\uc35c\uc35d\uc35e\uc35f\uc360\uc361\uc362\uc363\uc364\uc365\uc366\uc367\uc368\uc369\uc36a\uc36b\uc36c\uc36d\uc36e\uc36f\uc370\uc371\uc372\uc373\uc374\uc375\uc376\uc377\uc378\uc379\uc37a\uc37b\uc37c\uc37d\uc37e\uc37f\uc380\uc381\uc382\uc383\uc384\uc385\uc386\uc387\uc388\uc389\uc38a\uc38b\uc38c\uc38d\uc38e\uc38f\uc390\uc391\uc392\uc393\uc394\uc395\uc396\uc397\uc398\uc399\uc39a\uc39b\uc39c\uc39d\uc39e\uc39f\uc3a0\uc3a1\uc3a2\uc3a3\uc3a4\uc3a5\uc3a6\uc3a7\uc3a8\uc3a9\uc3aa\uc3ab\uc3ac\uc3ad\uc3ae\uc3af\uc3b0\uc3b1\uc3b2\uc3b3\uc3b4\uc3b5\uc3b6\uc3b7\uc3b8\uc3b9\uc3ba\uc3bb\uc3bc\uc3bd\uc3be\uc3bf\uc3c0\uc3c1\uc3c2\uc3c3\uc3c4\uc3c5\uc3c6\uc3c7\uc3c8\uc3c9\uc3ca\uc3cb\uc3cc\uc3cd\uc3ce\uc3cf\uc3d0\uc3d1\uc3d2\uc3d3\uc3d4\uc3d5\uc3d6\uc3d7\uc3d8\uc3d9\uc3da\uc3db\uc3dc\uc3dd\uc3de\uc3df\uc3e0\uc3e1\uc3e2\uc3e3\uc3e4\uc3e5\uc3e6\uc3e7\uc3e8\uc3e9\uc3ea\uc3eb\uc3ec\uc3ed\uc3ee\uc3ef\uc3f0\uc3f1\uc3f2\uc3f3\uc3f4\uc3f5\uc3f6\uc3f7\uc3f8\uc3f9\uc3fa\uc3fb\uc3fc\uc3fd\uc3fe\uc3ff\uc400\uc401\uc402\uc403\uc404\uc405\uc406\uc407\uc408\uc409\uc40a\uc40b\uc40c\uc40d\uc40e\uc40f\uc410\uc411\uc412\uc413\uc414\uc415\uc416\uc417\uc418\uc419\uc41a\uc41b\uc41c\uc41d\uc41e\uc41f\uc420\uc421\uc422\uc423\uc424\uc425\uc426\uc427\uc428\uc429\uc42a\uc42b\uc42c\uc42d\uc42e\uc42f\uc430\uc431\uc432\uc433\uc434\uc435\uc436\uc437\uc438\uc439\uc43a\uc43b\uc43c\uc43d\uc43e\uc43f\uc440\uc441\uc442\uc443\uc444\uc445\uc446\uc447\uc448\uc449\uc44a\uc44b\uc44c\uc44d\uc44e\uc44f\uc450\uc451\uc452\uc453\uc454\uc455\uc456\uc457\uc458\uc459\uc45a\uc45b\uc45c\uc45d\uc45e\uc45f\uc460\uc461\uc462\uc463\uc464\uc465\uc466\uc467\uc468\uc469\uc46a\uc46b\uc46c\uc46d\uc46e\uc46f\uc470\uc471\uc472\uc473\uc474\uc475\uc476\uc477\uc478\uc479\uc47a\uc47b\uc47c\uc47d\uc47e\uc47f\uc480\uc481\uc482\uc483\uc484\uc485\uc486\uc487\uc488\uc489\uc48a\uc48b\uc48c\uc48d\uc48e\uc48f\uc490\uc491\uc492\uc493\uc494\uc495\uc496\uc497\uc498\uc499\uc49a\uc49b\uc49c\uc49d\uc49e\uc49f\uc4a0\uc4a1\uc4a2\uc4a3\uc4a4\uc4a5\uc4a6\uc4a7\uc4a8\uc4a9\uc4aa\uc4ab\uc4ac\uc4ad\uc4ae\uc4af\uc4b0\uc4b1\uc4b2\uc4b3\uc4b4\uc4b5\uc4b6\uc4b7\uc4b8\uc4b9\uc4ba\uc4bb\uc4bc\uc4bd\uc4be\uc4bf\uc4c0\uc4c1\uc4c2\uc4c3\uc4c4\uc4c5\uc4c6\uc4c7\uc4c8\uc4c9\uc4ca\uc4cb\uc4cc\uc4cd\uc4ce\uc4cf\uc4d0\uc4d1\uc4d2\uc4d3\uc4d4\uc4d5\uc4d6\uc4d7\uc4d8\uc4d9\uc4da\uc4db\uc4dc\uc4dd\uc4de\uc4df\uc4e0\uc4e1\uc4e2\uc4e3\uc4e4\uc4e5\uc4e6\uc4e7\uc4e8\uc4e9\uc4ea\uc4eb\uc4ec\uc4ed\uc4ee\uc4ef\uc4f0\uc4f1\uc4f2\uc4f3\uc4f4\uc4f5\uc4f6\uc4f7\uc4f8\uc4f9\uc4fa\uc4fb\uc4fc\uc4fd\uc4fe\uc4ff\uc500\uc501\uc502\uc503\uc504\uc505\uc506\uc507\uc508\uc509\uc50a\uc50b\uc50c\uc50d\uc50e\
uc50f\uc510\uc511\uc512\uc513\uc514\uc515\uc516\uc517\uc518\uc519\uc51a\uc51b\uc51c\uc51d\uc51e\uc51f\uc520\uc521\uc522\uc523\uc524\uc525\uc526\uc527\uc528\uc529\uc52a\uc52b\uc52c\uc52d\uc52e\uc52f\uc530\uc531\uc532\uc533\uc534\uc535\uc536\uc537\uc538\uc539\uc53a\uc53b\uc53c\uc53d\uc53e\uc53f\uc540\uc541\uc542\uc543\uc544\uc545\uc546\uc547\uc548\uc549\uc54a\uc54b\uc54c\uc54d\uc54e\uc54f\uc550\uc551\uc552\uc553\uc554\uc555\uc556\uc557\uc558\uc559\uc55a\uc55b\uc55c\uc55d\uc55e\uc55f\uc560\uc561\uc562\uc563\uc564\uc565\uc566\uc567\uc568\uc569\uc56a\uc56b\uc56c\uc56d\uc56e\uc56f\uc570\uc571\uc572\uc573\uc574\uc575\uc576\uc577\uc578\uc579\uc57a\uc57b\uc57c\uc57d\uc57e\uc57f\uc580\uc581\uc582\uc583\uc584\uc585\uc586\uc587\uc588\uc589\uc58a\uc58b\uc58c\uc58d\uc58e\uc58f\uc590\uc591\uc592\uc593\uc594\uc595\uc596\uc597\uc598\uc599\uc59a\uc59b\uc59c\uc59d\uc59e\uc59f\uc5a0\uc5a1\uc5a2\uc5a3\uc5a4\uc5a5\uc5a6\uc5a7\uc5a8\uc5a9\uc5aa\uc5ab\uc5ac\uc5ad\uc5ae\uc5af\uc5b0\uc5b1\uc5b2\uc5b3\uc5b4\uc5b5\uc5b6\uc5b7\uc5b8\uc5b9\uc5ba\uc5bb\uc5bc\uc5bd\uc5be\uc5bf\uc5c0\uc5c1\uc5c2\uc5c3\uc5c4\uc5c5\uc5c6\uc5c7\uc5c8\uc5c9\uc5ca\uc5cb\uc5cc\uc5cd\uc5ce\uc5cf\uc5d0\uc5d1\uc5d2\uc5d3\uc5d4\uc5d5\uc5d6\uc5d7\uc5d8\uc5d9\uc5da\uc5db\uc5dc\uc5dd\uc5de\uc5df\uc5e0\uc5e1\uc5e2\uc5e3\uc5e4\uc5e5\uc5e6\uc5e7\uc5e8\uc5e9\uc5ea\uc5eb\uc5ec\uc5ed\uc5ee\uc5ef\uc5f0\uc5f1\uc5f2\uc5f3\uc5f4\uc5f5\uc5f6\uc5f7\uc5f8\uc5f9\uc5fa\uc5fb\uc5fc\uc5fd\uc5fe\uc5ff\uc600\uc601\uc602\uc603\uc604\uc605\uc606\uc607\uc608\uc609\uc60a\uc60b\uc60c\uc60d\uc60e\uc60f\uc610\uc611\uc612\uc613\uc614\uc615\uc616\uc617\uc618\uc619\uc61a\uc61b\uc61c\uc61d\uc61e\uc61f\uc620\uc621\uc622\uc623\uc624\uc625\uc626\uc627\uc628\uc629\uc62a\uc62b\uc62c\uc62d\uc62e\uc62f\uc630\uc631\uc632\uc633\uc634\uc635\uc636\uc637\uc638\uc639\uc63a\uc63b\uc63c\uc63d\uc63e\uc63f\uc640\uc641\uc642\uc643\uc644\uc645\uc646\uc647\uc648\uc649\uc64a\uc64b\uc64c\uc64d\uc64e\uc64f\uc650\uc651\uc652\uc653\uc654\uc655\uc656\uc657\uc658\uc659\uc65a\uc65b\uc65c\uc65d\uc65e\uc65f\uc660\uc661\uc662\uc663\uc664\uc665\uc666\uc667\uc668\uc669\uc66a\uc66b\uc66c\uc66d\uc66e\uc66f\uc670\uc671\uc672\uc673\uc674\uc675\uc676\uc677\uc678\uc679\uc67a\uc67b\uc67c\uc67d\uc67e\uc67f\uc680\uc681\uc682\uc683\uc684\uc685\uc686\uc687\uc688\uc689\uc68a\uc68b\uc68c\uc68d\uc68e\uc68f\uc690\uc691\uc692\uc693\uc694\uc695\uc696\uc697\uc698\uc699\uc69a\uc69b\uc69c\uc69d\uc69e\uc69f\uc6a0\uc6a1\uc6a2\uc6a3\uc6a4\uc6a5\uc6a6\uc6a7\uc6a8\uc6a9\uc6aa\uc6ab\uc6ac\uc6ad\uc6ae\uc6af\uc6b0\uc6b1\uc6b2\uc6b3\uc6b4\uc6b5\uc6b6\uc6b7\uc6b8\uc6b9\uc6ba\uc6bb\uc6bc\uc6bd\uc6be\uc6bf\uc6c0\uc6c1\uc6c2\uc6c3\uc6c4\uc6c5\uc6c6\uc6c7\uc6c8\uc6c9\uc6ca\uc6cb\uc6cc\uc6cd\uc6ce\uc6cf\uc6d0\uc6d1\uc6d2\uc6d3\uc6d4\uc6d5\uc6d6\uc6d7\uc6d8\uc6d9\uc6da\uc6db\uc6dc\uc6dd\uc6de\uc6df\uc6e0\uc6e1\uc6e2\uc6e3\uc6e4\uc6e5\uc6e6\uc6e7\uc6e8\uc6e9\uc6ea\uc6eb\uc6ec\uc6ed\uc6ee\uc6ef\uc6f0\uc6f1\uc6f2\uc6f3\uc6f4\uc6f5\uc6f6\uc6f7\uc6f8\uc6f9\uc6fa\uc6fb\uc6fc\uc6fd\uc6fe\uc6ff\uc700\uc701\uc702\uc703\uc704\uc705\uc706\uc707\uc708\uc709\uc70a\uc70b\uc70c\uc70d\uc70e\uc70f\uc710\uc711\uc712\uc713\uc714\uc715\uc716\uc717\uc718\uc719\uc71a\uc71b\uc71c\uc71d\uc71e\uc71f\uc720\uc721\uc722\uc723\uc724\uc725\uc726\uc727\uc728\uc729\uc72a\uc72b\uc72c\uc72d\uc72e\uc72f\uc730\uc731\uc732\uc733\uc734\uc735\uc736\uc737\uc738\uc739\uc73a\uc73b\uc73c\uc73d\uc73e\uc73f\uc740\uc741\uc742\uc743\uc744\uc745\uc746\uc747\uc748\uc749\uc74a\uc74b\uc74c\uc74d\uc74e\uc74f\uc750\uc751\uc752\uc753\uc754\uc755\uc756\uc757\uc758\uc759\uc75a\uc75b\uc75c\uc75d\uc75e\uc7
5f\uc760\uc761\uc762\uc763\uc764\uc765\uc766\uc767\uc768\uc769\uc76a\uc76b\uc76c\uc76d\uc76e\uc76f\uc770\uc771\uc772\uc773\uc774\uc775\uc776\uc777\uc778\uc779\uc77a\uc77b\uc77c\uc77d\uc77e\uc77f\uc780\uc781\uc782\uc783\uc784\uc785\uc786\uc787\uc788\uc789\uc78a\uc78b\uc78c\uc78d\uc78e\uc78f\uc790\uc791\uc792\uc793\uc794\uc795\uc796\uc797\uc798\uc799\uc79a\uc79b\uc79c\uc79d\uc79e\uc79f\uc7a0\uc7a1\uc7a2\uc7a3\uc7a4\uc7a5\uc7a6\uc7a7\uc7a8\uc7a9\uc7aa\uc7ab\uc7ac\uc7ad\uc7ae\uc7af\uc7b0\uc7b1\uc7b2\uc7b3\uc7b4\uc7b5\uc7b6\uc7b7\uc7b8\uc7b9\uc7ba\uc7bb\uc7bc\uc7bd\uc7be\uc7bf\uc7c0\uc7c1\uc7c2\uc7c3\uc7c4\uc7c5\uc7c6\uc7c7\uc7c8\uc7c9\uc7ca\uc7cb\uc7cc\uc7cd\uc7ce\uc7cf\uc7d0\uc7d1\uc7d2\uc7d3\uc7d4\uc7d5\uc7d6\uc7d7\uc7d8\uc7d9\uc7da\uc7db\uc7dc\uc7dd\uc7de\uc7df\uc7e0\uc7e1\uc7e2\uc7e3\uc7e4\uc7e5\uc7e6\uc7e7\uc7e8\uc7e9\uc7ea\uc7eb\uc7ec\uc7ed\uc7ee\uc7ef\uc7f0\uc7f1\uc7f2\uc7f3\uc7f4\uc7f5\uc7f6\uc7f7\uc7f8\uc7f9\uc7fa\uc7fb\uc7fc\uc7fd\uc7fe\uc7ff\uc800\uc801\uc802\uc803\uc804\uc805\uc806\uc807\uc808\uc809\uc80a\uc80b\uc80c\uc80d\uc80e\uc80f\uc810\uc811\uc812\uc813\uc814\uc815\uc816\uc817\uc818\uc819\uc81a\uc81b\uc81c\uc81d\uc81e\uc81f\uc820\uc821\uc822\uc823\uc824\uc825\uc826\uc827\uc828\uc829\uc82a\uc82b\uc82c\uc82d\uc82e\uc82f\uc830\uc831\uc832\uc833\uc834\uc835\uc836\uc837\uc838\uc839\uc83a\uc83b\uc83c\uc83d\uc83e\uc83f\uc840\uc841\uc842\uc843\uc844\uc845\uc846\uc847\uc848\uc849\uc84a\uc84b\uc84c\uc84d\uc84e\uc84f\uc850\uc851\uc852\uc853\uc854\uc855\uc856\uc857\uc858\uc859\uc85a\uc85b\uc85c\uc85d\uc85e\uc85f\uc860\uc861\uc862\uc863\uc864\uc865\uc866\uc867\uc868\uc869\uc86a\uc86b\uc86c\uc86d\uc86e\uc86f\uc870\uc871\uc872\uc873\uc874\uc875\uc876\uc877\uc878\uc879\uc87a\uc87b\uc87c\uc87d\uc87e\uc87f\uc880\uc881\uc882\uc883\uc884\uc885\uc886\uc887\uc888\uc889\uc88a\uc88b\uc88c\uc88d\uc88e\uc88f\uc890\uc891\uc892\uc893\uc894\uc895\uc896\uc897\uc898\uc899\uc89a\uc89b\uc89c\uc89d\uc89e\uc89f\uc8a0\uc8a1\uc8a2\uc8a3\uc8a4\uc8a5\uc8a6\uc8a7\uc8a8\uc8a9\uc8aa\uc8ab\uc8ac\uc8ad\uc8ae\uc8af\uc8b0\uc8b1\uc8b2\uc8b3\uc8b4\uc8b5\uc8b6\uc8b7\uc8b8\uc8b9\uc8ba\uc8bb\uc8bc\uc8bd\uc8be\uc8bf\uc8c0\uc8c1\uc8c2\uc8c3\uc8c4\uc8c5\uc8c6\uc8c7\uc8c8\uc8c9\uc8ca\uc8cb\uc8cc\uc8cd\uc8ce\uc8cf\uc8d0\uc8d1\uc8d2\uc8d3\uc8d4\uc8d5\uc8d6\uc8d7\uc8d8\uc8d9\uc8da\uc8db\uc8dc\uc8dd\uc8de\uc8df\uc8e0\uc8e1\uc8e2\uc8e3\uc8e4\uc8e5\uc8e6\uc8e7\uc8e8\uc8e9\uc8ea\uc8eb\uc8ec\uc8ed\uc8ee\uc8ef\uc8f0\uc8f1\uc8f2\uc8f3\uc8f4\uc8f5\uc8f6\uc8f7\uc8f8\uc8f9\uc8fa\uc8fb\uc8fc\uc8fd\uc8fe\uc8ff\uc900\uc901\uc902\uc903\uc904\uc905\uc906\uc907\uc908\uc909\uc90a\uc90b\uc90c\uc90d\uc90e\uc90f\uc910\uc911\uc912\uc913\uc914\uc915\uc916\uc917\uc918\uc919\uc91a\uc91b\uc91c\uc91d\uc91e\uc91f\uc920\uc921\uc922\uc923\uc924\uc925\uc926\uc927\uc928\uc929\uc92a\uc92b\uc92c\uc92d\uc92e\uc92f\uc930\uc931\uc932\uc933\uc934\uc935\uc936\uc937\uc938\uc939\uc93a\uc93b\uc93c\uc93d\uc93e\uc93f\uc940\uc941\uc942\uc943\uc944\uc945\uc946\uc947\uc948\uc949\uc94a\uc94b\uc94c\uc94d\uc94e\uc94f\uc950\uc951\uc952\uc953\uc954\uc955\uc956\uc957\uc958\uc959\uc95a\uc95b\uc95c\uc95d\uc95e\uc95f\uc960\uc961\uc962\uc963\uc964\uc965\uc966\uc967\uc968\uc969\uc96a\uc96b\uc96c\uc96d\uc96e\uc96f\uc970\uc971\uc972\uc973\uc974\uc975\uc976\uc977\uc978\uc979\uc97a\uc97b\uc97c\uc97d\uc97e\uc97f\uc980\uc981\uc982\uc983\uc984\uc985\uc986\uc987\uc988\uc989\uc98a\uc98b\uc98c\uc98d\uc98e\uc98f\uc990\uc991\uc992\uc993\uc994\uc995\uc996\uc997\uc998\uc999\uc99a\uc99b\uc99c\uc99d\uc99e\uc99f\uc9a0\uc9a1\uc9a2\uc9a3\uc9a4\uc9a5\uc9a6\uc9a7\uc9a8\uc9a9\uc9aa\uc9ab\uc9ac\uc9ad\uc9ae\uc9af\
uc9b0\uc9b1\uc9b2\uc9b3\uc9b4\uc9b5\uc9b6\uc9b7\uc9b8\uc9b9\uc9ba\uc9bb\uc9bc\uc9bd\uc9be\uc9bf\uc9c0\uc9c1\uc9c2\uc9c3\uc9c4\uc9c5\uc9c6\uc9c7\uc9c8\uc9c9\uc9ca\uc9cb\uc9cc\uc9cd\uc9ce\uc9cf\uc9d0\uc9d1\uc9d2\uc9d3\uc9d4\uc9d5\uc9d6\uc9d7\uc9d8\uc9d9\uc9da\uc9db\uc9dc\uc9dd\uc9de\uc9df\uc9e0\uc9e1\uc9e2\uc9e3\uc9e4\uc9e5\uc9e6\uc9e7\uc9e8\uc9e9\uc9ea\uc9eb\uc9ec\uc9ed\uc9ee\uc9ef\uc9f0\uc9f1\uc9f2\uc9f3\uc9f4\uc9f5\uc9f6\uc9f7\uc9f8\uc9f9\uc9fa\uc9fb\uc9fc\uc9fd\uc9fe\uc9ff\uca00\uca01\uca02\uca03\uca04\uca05\uca06\uca07\uca08\uca09\uca0a\uca0b\uca0c\uca0d\uca0e\uca0f\uca10\uca11\uca12\uca13\uca14\uca15\uca16\uca17\uca18\uca19\uca1a\uca1b\uca1c\uca1d\uca1e\uca1f\uca20\uca21\uca22\uca23\uca24\uca25\uca26\uca27\uca28\uca29\uca2a\uca2b\uca2c\uca2d\uca2e\uca2f\uca30\uca31\uca32\uca33\uca34\uca35\uca36\uca37\uca38\uca39\uca3a\uca3b\uca3c\uca3d\uca3e\uca3f\uca40\uca41\uca42\uca43\uca44\uca45\uca46\uca47\uca48\uca49\uca4a\uca4b\uca4c\uca4d\uca4e\uca4f\uca50\uca51\uca52\uca53\uca54\uca55\uca56\uca57\uca58\uca59\uca5a\uca5b\uca5c\uca5d\uca5e\uca5f\uca60\uca61\uca62\uca63\uca64\uca65\uca66\uca67\uca68\uca69\uca6a\uca6b\uca6c\uca6d\uca6e\uca6f\uca70\uca71\uca72\uca73\uca74\uca75\uca76\uca77\uca78\uca79\uca7a\uca7b\uca7c\uca7d\uca7e\uca7f\uca80\uca81\uca82\uca83\uca84\uca85\uca86\uca87\uca88\uca89\uca8a\uca8b\uca8c\uca8d\uca8e\uca8f\uca90\uca91\uca92\uca93\uca94\uca95\uca96\uca97\uca98\uca99\uca9a\uca9b\uca9c\uca9d\uca9e\uca9f\ucaa0\ucaa1\ucaa2\ucaa3\ucaa4\ucaa5\ucaa6\ucaa7\ucaa8\ucaa9\ucaaa\ucaab\ucaac\ucaad\ucaae\ucaaf\ucab0\ucab1\ucab2\ucab3\ucab4\ucab5\ucab6\ucab7\ucab8\ucab9\ucaba\ucabb\ucabc\ucabd\ucabe\ucabf\ucac0\ucac1\ucac2\ucac3\ucac4\ucac5\ucac6\ucac7\ucac8\ucac9\ucaca\ucacb\ucacc\ucacd\ucace\ucacf\ucad0\ucad1\ucad2\ucad3\ucad4\ucad5\ucad6\ucad7\ucad8\ucad9\ucada\ucadb\ucadc\ucadd\ucade\ucadf\ucae0\ucae1\ucae2\ucae3\ucae4\ucae5\ucae6\ucae7\ucae8\ucae9\ucaea\ucaeb\ucaec\ucaed\ucaee\ucaef\ucaf0\ucaf1\ucaf2\ucaf3\ucaf4\ucaf5\ucaf6\ucaf7\ucaf8\ucaf9\ucafa\ucafb\ucafc\ucafd\ucafe\ucaff\ucb00\ucb01\ucb02\ucb03\ucb04\ucb05\ucb06\ucb07\ucb08\ucb09\ucb0a\ucb0b\ucb0c\ucb0d\ucb0e\ucb0f\ucb10\ucb11\ucb12\ucb13\ucb14\ucb15\ucb16\ucb17\ucb18\ucb19\ucb1a\ucb1b\ucb1c\ucb1d\ucb1e\ucb1f\ucb20\ucb21\ucb22\ucb23\ucb24\ucb25\ucb26\ucb27\ucb28\ucb29\ucb2a\ucb2b\ucb2c\ucb2d\ucb2e\ucb2f\ucb30\ucb31\ucb32\ucb33\ucb34\ucb35\ucb36\ucb37\ucb38\ucb39\ucb3a\ucb3b\ucb3c\ucb3d\ucb3e\ucb3f\ucb40\ucb41\ucb42\ucb43\ucb44\ucb45\ucb46\ucb47\ucb48\ucb49\ucb4a\ucb4b\ucb4c\ucb4d\ucb4e\ucb4f\ucb50\ucb51\ucb52\ucb53\ucb54\ucb55\ucb56\ucb57\ucb58\ucb59\ucb5a\ucb5b\ucb5c\ucb5d\ucb5e\ucb5f\ucb60\ucb61\ucb62\ucb63\ucb64\ucb65\ucb66\ucb67\ucb68\ucb69\ucb6a\ucb6b\ucb6c\ucb6d\ucb6e\ucb6f\ucb70\ucb71\ucb72\ucb73\ucb74\ucb75\ucb76\ucb77\ucb78\ucb79\ucb7a\ucb7b\ucb7c\ucb7d\ucb7e\ucb7f\ucb80\ucb81\ucb82\ucb83\ucb84\ucb85\ucb86\ucb87\ucb88\ucb89\ucb8a\ucb8b\ucb8c\ucb8d\ucb8e\ucb8f\ucb90\ucb91\ucb92\ucb93\ucb94\ucb95\ucb96\ucb97\ucb98\ucb99\ucb9a\ucb9b\ucb9c\ucb9d\ucb9e\ucb9f\ucba0\ucba1\ucba2\ucba3\ucba4\ucba5\ucba6\ucba7\ucba8\ucba9\ucbaa\ucbab\ucbac\ucbad\ucbae\ucbaf\ucbb0\ucbb1\ucbb2\ucbb3\ucbb4\ucbb5\ucbb6\ucbb7\ucbb8\ucbb9\ucbba\ucbbb\ucbbc\ucbbd\ucbbe\ucbbf\ucbc0\ucbc1\ucbc2\ucbc3\ucbc4\ucbc5\ucbc6\ucbc7\ucbc8\ucbc9\ucbca\ucbcb\ucbcc\ucbcd\ucbce\ucbcf\ucbd0\ucbd1\ucbd2\ucbd3\ucbd4\ucbd5\ucbd6\ucbd7\ucbd8\ucbd9\ucbda\ucbdb\ucbdc\ucbdd\ucbde\ucbdf\ucbe0\ucbe1\ucbe2\ucbe3\ucbe4\ucbe5\ucbe6\ucbe7\ucbe8\ucbe9\ucbea\ucbeb\ucbec\ucbed\ucbee\ucbef\ucbf0\ucbf1\ucbf2\ucbf3\ucbf4\ucbf5\ucbf6\ucbf7\ucbf8\ucbf9\ucbfa\ucbfb\ucbfc\ucbfd\ucbfe\ucbff\ucc
00\ucc01\ucc02\ucc03\ucc04\ucc05\ucc06\ucc07\ucc08\ucc09\ucc0a\ucc0b\ucc0c\ucc0d\ucc0e\ucc0f\ucc10\ucc11\ucc12\ucc13\ucc14\ucc15\ucc16\ucc17\ucc18\ucc19\ucc1a\ucc1b\ucc1c\ucc1d\ucc1e\ucc1f\ucc20\ucc21\ucc22\ucc23\ucc24\ucc25\ucc26\ucc27\ucc28\ucc29\ucc2a\ucc2b\ucc2c\ucc2d\ucc2e\ucc2f\ucc30\ucc31\ucc32\ucc33\ucc34\ucc35\ucc36\ucc37\ucc38\ucc39\ucc3a\ucc3b\ucc3c\ucc3d\ucc3e\ucc3f\ucc40\ucc41\ucc42\ucc43\ucc44\ucc45\ucc46\ucc47\ucc48\ucc49\ucc4a\ucc4b\ucc4c\ucc4d\ucc4e\ucc4f\ucc50\ucc51\ucc52\ucc53\ucc54\ucc55\ucc56\ucc57\ucc58\ucc59\ucc5a\ucc5b\ucc5c\ucc5d\ucc5e\ucc5f\ucc60\ucc61\ucc62\ucc63\ucc64\ucc65\ucc66\ucc67\ucc68\ucc69\ucc6a\ucc6b\ucc6c\ucc6d\ucc6e\ucc6f\ucc70\ucc71\ucc72\ucc73\ucc74\ucc75\ucc76\ucc77\ucc78\ucc79\ucc7a\ucc7b\ucc7c\ucc7d\ucc7e\ucc7f\ucc80\ucc81\ucc82\ucc83\ucc84\ucc85\ucc86\ucc87\ucc88\ucc89\ucc8a\ucc8b\ucc8c\ucc8d\ucc8e\ucc8f\ucc90\ucc91\ucc92\ucc93\ucc94\ucc95\ucc96\ucc97\ucc98\ucc99\ucc9a\ucc9b\ucc9c\ucc9d\ucc9e\ucc9f\ucca0\ucca1\ucca2\ucca3\ucca4\ucca5\ucca6\ucca7\ucca8\ucca9\uccaa\uccab\uccac\uccad\uccae\uccaf\uccb0\uccb1\uccb2\uccb3\uccb4\uccb5\uccb6\uccb7\uccb8\uccb9\uccba\uccbb\uccbc\uccbd\uccbe\uccbf\uccc0\uccc1\uccc2\uccc3\uccc4\uccc5\uccc6\uccc7\uccc8\uccc9\uccca\ucccb\ucccc\ucccd\uccce\ucccf\uccd0\uccd1\uccd2\uccd3\uccd4\uccd5\uccd6\uccd7\uccd8\uccd9\uccda\uccdb\uccdc\uccdd\uccde\uccdf\ucce0\ucce1\ucce2\ucce3\ucce4\ucce5\ucce6\ucce7\ucce8\ucce9\uccea\ucceb\uccec\ucced\uccee\uccef\uccf0\uccf1\uccf2\uccf3\uccf4\uccf5\uccf6\uccf7\uccf8\uccf9\uccfa\uccfb\uccfc\uccfd\uccfe\uccff\ucd00\ucd01\ucd02\ucd03\ucd04\ucd05\ucd06\ucd07\ucd08\ucd09\ucd0a\ucd0b\ucd0c\ucd0d\ucd0e\ucd0f\ucd10\ucd11\ucd12\ucd13\ucd14\ucd15\ucd16\ucd17\ucd18\ucd19\ucd1a\ucd1b\ucd1c\ucd1d\ucd1e\ucd1f\ucd20\ucd21\ucd22\ucd23\ucd24\ucd25\ucd26\ucd27\ucd28\ucd29\ucd2a\ucd2b\ucd2c\ucd2d\ucd2e\ucd2f\ucd30\ucd31\ucd32\ucd33\ucd34\ucd35\ucd36\ucd37\ucd38\ucd39\ucd3a\ucd3b\ucd3c\ucd3d\ucd3e\ucd3f\ucd40\ucd41\ucd42\ucd43\ucd44\ucd45\ucd46\ucd47\ucd48\ucd49\ucd4a\ucd4b\ucd4c\ucd4d\ucd4e\ucd4f\ucd50\ucd51\ucd52\ucd53\ucd54\ucd55\ucd56\ucd57\ucd58\ucd59\ucd5a\ucd5b\ucd5c\ucd5d\ucd5e\ucd5f\ucd60\ucd61\ucd62\ucd63\ucd64\ucd65\ucd66\ucd67\ucd68\ucd69\ucd6a\ucd6b\ucd6c\ucd6d\ucd6e\ucd6f\ucd70\ucd71\ucd72\ucd73\ucd74\ucd75\ucd76\ucd77\ucd78\ucd79\ucd7a\ucd7b\ucd7c\ucd7d\ucd7e\ucd7f\ucd80\ucd81\ucd82\ucd83\ucd84\ucd85\ucd86\ucd87\ucd88\ucd89\ucd8a\ucd8b\ucd8c\ucd8d\ucd8e\ucd8f\ucd90\ucd91\ucd92\ucd93\ucd94\ucd95\ucd96\ucd97\ucd98\ucd99\ucd9a\ucd9b\ucd9c\ucd9d\ucd9e\ucd9f\ucda0\ucda1\ucda2\ucda3\ucda4\ucda5\ucda6\ucda7\ucda8\ucda9\ucdaa\ucdab\ucdac\ucdad\ucdae\ucdaf\ucdb0\ucdb1\ucdb2\ucdb3\ucdb4\ucdb5\ucdb6\ucdb7\ucdb8\ucdb9\ucdba\ucdbb\ucdbc\ucdbd\ucdbe\ucdbf\ucdc0\ucdc1\ucdc2\ucdc3\ucdc4\ucdc5\ucdc6\ucdc7\ucdc8\ucdc9\ucdca\ucdcb\ucdcc\ucdcd\ucdce\ucdcf\ucdd0\ucdd1\ucdd2\ucdd3\ucdd4\ucdd5\ucdd6\ucdd7\ucdd8\ucdd9\ucdda\ucddb\ucddc\ucddd\ucdde\ucddf\ucde0\ucde1\ucde2\ucde3\ucde4\ucde5\ucde6\ucde7\ucde8\ucde9\ucdea\ucdeb\ucdec\ucded\ucdee\ucdef\ucdf0\ucdf1\ucdf2\ucdf3\ucdf4\ucdf5\ucdf6\ucdf7\ucdf8\ucdf9\ucdfa\ucdfb\ucdfc\ucdfd\ucdfe\ucdff\uce00\uce01\uce02\uce03\uce04\uce05\uce06\uce07\uce08\uce09\uce0a\uce0b\uce0c\uce0d\uce0e\uce0f\uce10\uce11\uce12\uce13\uce14\uce15\uce16\uce17\uce18\uce19\uce1a\uce1b\uce1c\uce1d\uce1e\uce1f\uce20\uce21\uce22\uce23\uce24\uce25\uce26\uce27\uce28\uce29\uce2a\uce2b\uce2c\uce2d\uce2e\uce2f\uce30\uce31\uce32\uce33\uce34\uce35\uce36\uce37\uce38\uce39\uce3a\uce3b\uce3c\uce3d\uce3e\uce3f\uce40\uce41\uce42\uce43\uce44\uce45\uce46\uce47\uce48\uce49\uce4a\uce4b\uce4c\uce4d\uce4e\uce4f\uce50\
uce51\uce52\uce53\uce54\uce55\uce56\uce57\uce58\uce59\uce5a\uce5b\uce5c\uce5d\uce5e\uce5f\uce60\uce61\uce62\uce63\uce64\uce65\uce66\uce67\uce68\uce69\uce6a\uce6b\uce6c\uce6d\uce6e\uce6f\uce70\uce71\uce72\uce73\uce74\uce75\uce76\uce77\uce78\uce79\uce7a\uce7b\uce7c\uce7d\uce7e\uce7f\uce80\uce81\uce82\uce83\uce84\uce85\uce86\uce87\uce88\uce89\uce8a\uce8b\uce8c\uce8d\uce8e\uce8f\uce90\uce91\uce92\uce93\uce94\uce95\uce96\uce97\uce98\uce99\uce9a\uce9b\uce9c\uce9d\uce9e\uce9f\ucea0\ucea1\ucea2\ucea3\ucea4\ucea5\ucea6\ucea7\ucea8\ucea9\uceaa\uceab\uceac\ucead\uceae\uceaf\uceb0\uceb1\uceb2\uceb3\uceb4\uceb5\uceb6\uceb7\uceb8\uceb9\uceba\ucebb\ucebc\ucebd\ucebe\ucebf\ucec0\ucec1\ucec2\ucec3\ucec4\ucec5\ucec6\ucec7\ucec8\ucec9\uceca\ucecb\ucecc\ucecd\ucece\ucecf\uced0\uced1\uced2\uced3\uced4\uced5\uced6\uced7\uced8\uced9\uceda\ucedb\ucedc\ucedd\ucede\ucedf\ucee0\ucee1\ucee2\ucee3\ucee4\ucee5\ucee6\ucee7\ucee8\ucee9\uceea\uceeb\uceec\uceed\uceee\uceef\ucef0\ucef1\ucef2\ucef3\ucef4\ucef5\ucef6\ucef7\ucef8\ucef9\ucefa\ucefb\ucefc\ucefd\ucefe\uceff\ucf00\ucf01\ucf02\ucf03\ucf04\ucf05\ucf06\ucf07\ucf08\ucf09\ucf0a\ucf0b\ucf0c\ucf0d\ucf0e\ucf0f\ucf10\ucf11\ucf12\ucf13\ucf14\ucf15\ucf16\ucf17\ucf18\ucf19\ucf1a\ucf1b\ucf1c\ucf1d\ucf1e\ucf1f\ucf20\ucf21\ucf22\ucf23\ucf24\ucf25\ucf26\ucf27\ucf28\ucf29\ucf2a\ucf2b\ucf2c\ucf2d\ucf2e\ucf2f\ucf30\ucf31\ucf32\ucf33\ucf34\ucf35\ucf36\ucf37\ucf38\ucf39\ucf3a\ucf3b\ucf3c\ucf3d\ucf3e\ucf3f\ucf40\ucf41\ucf42\ucf43\ucf44\ucf45\ucf46\ucf47\ucf48\ucf49\ucf4a\ucf4b\ucf4c\ucf4d\ucf4e\ucf4f\ucf50\ucf51\ucf52\ucf53\ucf54\ucf55\ucf56\ucf57\ucf58\ucf59\ucf5a\ucf5b\ucf5c\ucf5d\ucf5e\ucf5f\ucf60\ucf61\ucf62\ucf63\ucf64\ucf65\ucf66\ucf67\ucf68\ucf69\ucf6a\ucf6b\ucf6c\ucf6d\ucf6e\ucf6f\ucf70\ucf71\ucf72\ucf73\ucf74\ucf75\ucf76\ucf77\ucf78\ucf79\ucf7a\ucf7b\ucf7c\ucf7d\ucf7e\ucf7f\ucf80\ucf81\ucf82\ucf83\ucf84\ucf85\ucf86\ucf87\ucf88\ucf89\ucf8a\ucf8b\ucf8c\ucf8d\ucf8e\ucf8f\ucf90\ucf91\ucf92\ucf93\ucf94\ucf95\ucf96\ucf97\ucf98\ucf99\ucf9a\ucf9b\ucf9c\ucf9d\ucf9e\ucf9f\ucfa0\ucfa1\ucfa2\ucfa3\ucfa4\ucfa5\ucfa6\ucfa7\ucfa8\ucfa9\ucfaa\ucfab\ucfac\ucfad\ucfae\ucfaf\ucfb0\ucfb1\ucfb2\ucfb3\ucfb4\ucfb5\ucfb6\ucfb7\ucfb8\ucfb9\ucfba\ucfbb\ucfbc\ucfbd\ucfbe\ucfbf\ucfc0\ucfc1\ucfc2\ucfc3\ucfc4\ucfc5\ucfc6\ucfc7\ucfc8\ucfc9\ucfca\ucfcb\ucfcc\ucfcd\ucfce\ucfcf\ucfd0\ucfd1\ucfd2\ucfd3\ucfd4\ucfd5\ucfd6\ucfd7\ucfd8\ucfd9\ucfda\ucfdb\ucfdc\ucfdd\ucfde\ucfdf\ucfe0\ucfe1\ucfe2\ucfe3\ucfe4\ucfe5\ucfe6\ucfe7\ucfe8\ucfe9\ucfea\ucfeb\ucfec\ucfed\ucfee\ucfef\ucff0\ucff1\ucff2\ucff3\ucff4\ucff5\ucff6\ucff7\ucff8\ucff9\ucffa\ucffb\ucffc\ucffd\ucffe\ucfff\ud000\ud001\ud002\ud003\ud004\ud005\ud006\ud007\ud008\ud009\ud00a\ud00b\ud00c\ud00d\ud00e\ud00f\ud010\ud011\ud012\ud013\ud014\ud015\ud016\ud017\ud018\ud019\ud01a\ud01b\ud01c\ud01d\ud01e\ud01f\ud020\ud021\ud022\ud023\ud024\ud025\ud026\ud027\ud028\ud029\ud02a\ud02b\ud02c\ud02d\ud02e\ud02f\ud030\ud031\ud032\ud033\ud034\ud035\ud036\ud037\ud038\ud039\ud03a\ud03b\ud03c\ud03d\ud03e\ud03f\ud040\ud041\ud042\ud043\ud044\ud045\ud046\ud047\ud048\ud049\ud04a\ud04b\ud04c\ud04d\ud04e\ud04f\ud050\ud051\ud052\ud053\ud054\ud055\ud056\ud057\ud058\ud059\ud05a\ud05b\ud05c\ud05d\ud05e\ud05f\ud060\ud061\ud062\ud063\ud064\ud065\ud066\ud067\ud068\ud069\ud06a\ud06b\ud06c\ud06d\ud06e\ud06f\ud070\ud071\ud072\ud073\ud074\ud075\ud076\ud077\ud078\ud079\ud07a\ud07b\ud07c\ud07d\ud07e\ud07f\ud080\ud081\ud082\ud083\ud084\ud085\ud086\ud087\ud088\ud089\ud08a\ud08b\ud08c\ud08d\ud08e\ud08f\ud090\ud091\ud092\ud093\ud094\ud095\ud096\ud097\ud098\ud099\ud09a\ud09b\ud09c\ud09d\ud09e\ud09f\ud0a0\ud0
a1\ud0a2\ud0a3\ud0a4\ud0a5\ud0a6\ud0a7\ud0a8\ud0a9\ud0aa\ud0ab\ud0ac\ud0ad\ud0ae\ud0af\ud0b0\ud0b1\ud0b2\ud0b3\ud0b4\ud0b5\ud0b6\ud0b7\ud0b8\ud0b9\ud0ba\ud0bb\ud0bc\ud0bd\ud0be\ud0bf\ud0c0\ud0c1\ud0c2\ud0c3\ud0c4\ud0c5\ud0c6\ud0c7\ud0c8\ud0c9\ud0ca\ud0cb\ud0cc\ud0cd\ud0ce\ud0cf\ud0d0\ud0d1\ud0d2\ud0d3\ud0d4\ud0d5\ud0d6\ud0d7\ud0d8\ud0d9\ud0da\ud0db\ud0dc\ud0dd\ud0de\ud0df\ud0e0\ud0e1\ud0e2\ud0e3\ud0e4\ud0e5\ud0e6\ud0e7\ud0e8\ud0e9\ud0ea\ud0eb\ud0ec\ud0ed\ud0ee\ud0ef\ud0f0\ud0f1\ud0f2\ud0f3\ud0f4\ud0f5\ud0f6\ud0f7\ud0f8\ud0f9\ud0fa\ud0fb\ud0fc\ud0fd\ud0fe\ud0ff\ud100\ud101\ud102\ud103\ud104\ud105\ud106\ud107\ud108\ud109\ud10a\ud10b\ud10c\ud10d\ud10e\ud10f\ud110\ud111\ud112\ud113\ud114\ud115\ud116\ud117\ud118\ud119\ud11a\ud11b\ud11c\ud11d\ud11e\ud11f\ud120\ud121\ud122\ud123\ud124\ud125\ud126\ud127\ud128\ud129\ud12a\ud12b\ud12c\ud12d\ud12e\ud12f\ud130\ud131\ud132\ud133\ud134\ud135\ud136\ud137\ud138\ud139\ud13a\ud13b\ud13c\ud13d\ud13e\ud13f\ud140\ud141\ud142\ud143\ud144\ud145\ud146\ud147\ud148\ud149\ud14a\ud14b\ud14c\ud14d\ud14e\ud14f\ud150\ud151\ud152\ud153\ud154\ud155\ud156\ud157\ud158\ud159\ud15a\ud15b\ud15c\ud15d\ud15e\ud15f\ud160\ud161\ud162\ud163\ud164\ud165\ud166\ud167\ud168\ud169\ud16a\ud16b\ud16c\ud16d\ud16e\ud16f\ud170\ud171\ud172\ud173\ud174\ud175\ud176\ud177\ud178\ud179\ud17a\ud17b\ud17c\ud17d\ud17e\ud17f\ud180\ud181\ud182\ud183\ud184\ud185\ud186\ud187\ud188\ud189\ud18a\ud18b\ud18c\ud18d\ud18e\ud18f\ud190\ud191\ud192\ud193\ud194\ud195\ud196\ud197\ud198\ud199\ud19a\ud19b\ud19c\ud19d\ud19e\ud19f\ud1a0\ud1a1\ud1a2\ud1a3\ud1a4\ud1a5\ud1a6\ud1a7\ud1a8\ud1a9\ud1aa\ud1ab\ud1ac\ud1ad\ud1ae\ud1af\ud1b0\ud1b1\ud1b2\ud1b3\ud1b4\ud1b5\ud1b6\ud1b7\ud1b8\ud1b9\ud1ba\ud1bb\ud1bc\ud1bd\ud1be\ud1bf\ud1c0\ud1c1\ud1c2\ud1c3\ud1c4\ud1c5\ud1c6\ud1c7\ud1c8\ud1c9\ud1ca\ud1cb\ud1cc\ud1cd\ud1ce\ud1cf\ud1d0\ud1d1\ud1d2\ud1d3\ud1d4\ud1d5\ud1d6\ud1d7\ud1d8\ud1d9\ud1da\ud1db\ud1dc\ud1dd\ud1de\ud1df\ud1e0\ud1e1\ud1e2\ud1e3\ud1e4\ud1e5\ud1e6\ud1e7\ud1e8\ud1e9\ud1ea\ud1eb\ud1ec\ud1ed\ud1ee\ud1ef\ud1f0\ud1f1\ud1f2\ud1f3\ud1f4\ud1f5\ud1f6\ud1f7\ud1f8\ud1f9\ud1fa\ud1fb\ud1fc\ud1fd\ud1fe\ud1ff\ud200\ud201\ud202\ud203\ud204\ud205\ud206\ud207\ud208\ud209\ud20a\ud20b\ud20c\ud20d\ud20e\ud20f\ud210\ud211\ud212\ud213\ud214\ud215\ud216\ud217\ud218\ud219\ud21a\ud21b\ud21c\ud21d\ud21e\ud21f\ud220\ud221\ud222\ud223\ud224\ud225\ud226\ud227\ud228\ud229\ud22a\ud22b\ud22c\ud22d\ud22e\ud22f\ud230\ud231\ud232\ud233\ud234\ud235\ud236\ud237\ud238\ud239\ud23a\ud23b\ud23c\ud23d\ud23e\ud23f\ud240\ud241\ud242\ud243\ud244\ud245\ud246\ud247\ud248\ud249\ud24a\ud24b\ud24c\ud24d\ud24e\ud24f\ud250\ud251\ud252\ud253\ud254\ud255\ud256\ud257\ud258\ud259\ud25a\ud25b\ud25c\ud25d\ud25e\ud25f\ud260\ud261\ud262\ud263\ud264\ud265\ud266\ud267\ud268\ud269\ud26a\ud26b\ud26c\ud26d\ud26e\ud26f\ud270\ud271\ud272\ud273\ud274\ud275\ud276\ud277\ud278\ud279\ud27a\ud27b\ud27c\ud27d\ud27e\ud27f\ud280\ud281\ud282\ud283\ud284\ud285\ud286\ud287\ud288\ud289\ud28a\ud28b\ud28c\ud28d\ud28e\ud28f\ud290\ud291\ud292\ud293\ud294\ud295\ud296\ud297\ud298\ud299\ud29a\ud29b\ud29c\ud29d\ud29e\ud29f\ud2a0\ud2a1\ud2a2\ud2a3\ud2a4\ud2a5\ud2a6\ud2a7\ud2a8\ud2a9\ud2aa\ud2ab\ud2ac\ud2ad\ud2ae\ud2af\ud2b0\ud2b1\ud2b2\ud2b3\ud2b4\ud2b5\ud2b6\ud2b7\ud2b8\ud2b9\ud2ba\ud2bb\ud2bc\ud2bd\ud2be\ud2bf\ud2c0\ud2c1\ud2c2\ud2c3\ud2c4\ud2c5\ud2c6\ud2c7\ud2c8\ud2c9\ud2ca\ud2cb\ud2cc\ud2cd\ud2ce\ud2cf\ud2d0\ud2d1\ud2d2\ud2d3\ud2d4\ud2d5\ud2d6\ud2d7\ud2d8\ud2d9\ud2da\ud2db\ud2dc\ud2dd\ud2de\ud2df\ud2e0\ud2e1\ud2e2\ud2e3\ud2e4\ud2e5\ud2e6\ud2e7\ud2e8\ud2e9\ud2ea\ud2eb\ud2ec\ud2ed\ud2ee\ud2ef\ud2f0\ud2f1\
ud2f2\ud2f3\ud2f4\ud2f5\ud2f6\ud2f7\ud2f8\ud2f9\ud2fa\ud2fb\ud2fc\ud2fd\ud2fe\ud2ff\ud300\ud301\ud302\ud303\ud304\ud305\ud306\ud307\ud308\ud309\ud30a\ud30b\ud30c\ud30d\ud30e\ud30f\ud310\ud311\ud312\ud313\ud314\ud315\ud316\ud317\ud318\ud319\ud31a\ud31b\ud31c\ud31d\ud31e\ud31f\ud320\ud321\ud322\ud323\ud324\ud325\ud326\ud327\ud328\ud329\ud32a\ud32b\ud32c\ud32d\ud32e\ud32f\ud330\ud331\ud332\ud333\ud334\ud335\ud336\ud337\ud338\ud339\ud33a\ud33b\ud33c\ud33d\ud33e\ud33f\ud340\ud341\ud342\ud343\ud344\ud345\ud346\ud347\ud348\ud349\ud34a\ud34b\ud34c\ud34d\ud34e\ud34f\ud350\ud351\ud352\ud353\ud354\ud355\ud356\ud357\ud358\ud359\ud35a\ud35b\ud35c\ud35d\ud35e\ud35f\ud360\ud361\ud362\ud363\ud364\ud365\ud366\ud367\ud368\ud369\ud36a\ud36b\ud36c\ud36d\ud36e\ud36f\ud370\ud371\ud372\ud373\ud374\ud375\ud376\ud377\ud378\ud379\ud37a\ud37b\ud37c\ud37d\ud37e\ud37f\ud380\ud381\ud382\ud383\ud384\ud385\ud386\ud387\ud388\ud389\ud38a\ud38b\ud38c\ud38d\ud38e\ud38f\ud390\ud391\ud392\ud393\ud394\ud395\ud396\ud397\ud398\ud399\ud39a\ud39b\ud39c\ud39d\ud39e\ud39f\ud3a0\ud3a1\ud3a2\ud3a3\ud3a4\ud3a5\ud3a6\ud3a7\ud3a8\ud3a9\ud3aa\ud3ab\ud3ac\ud3ad\ud3ae\ud3af\ud3b0\ud3b1\ud3b2\ud3b3\ud3b4\ud3b5\ud3b6\ud3b7\ud3b8\ud3b9\ud3ba\ud3bb\ud3bc\ud3bd\ud3be\ud3bf\ud3c0\ud3c1\ud3c2\ud3c3\ud3c4\ud3c5\ud3c6\ud3c7\ud3c8\ud3c9\ud3ca\ud3cb\ud3cc\ud3cd\ud3ce\ud3cf\ud3d0\ud3d1\ud3d2\ud3d3\ud3d4\ud3d5\ud3d6\ud3d7\ud3d8\ud3d9\ud3da\ud3db\ud3dc\ud3dd\ud3de\ud3df\ud3e0\ud3e1\ud3e2\ud3e3\ud3e4\ud3e5\ud3e6\ud3e7\ud3e8\ud3e9\ud3ea\ud3eb\ud3ec\ud3ed\ud3ee\ud3ef\ud3f0\ud3f1\ud3f2\ud3f3\ud3f4\ud3f5\ud3f6\ud3f7\ud3f8\ud3f9\ud3fa\ud3fb\ud3fc\ud3fd\ud3fe\ud3ff\ud400\ud401\ud402\ud403\ud404\ud405\ud406\ud407\ud408\ud409\ud40a\ud40b\ud40c\ud40d\ud40e\ud40f\ud410\ud411\ud412\ud413\ud414\ud415\ud416\ud417\ud418\ud419\ud41a\ud41b\ud41c\ud41d\ud41e\ud41f\ud420\ud421\ud422\ud423\ud424\ud425\ud426\ud427\ud428\ud429\ud42a\ud42b\ud42c\ud42d\ud42e\ud42f\ud430\ud431\ud432\ud433\ud434\ud435\ud436\ud437\ud438\ud439\ud43a\ud43b\ud43c\ud43d\ud43e\ud43f\ud440\ud441\ud442\ud443\ud444\ud445\ud446\ud447\ud448\ud449\ud44a\ud44b\ud44c\ud44d\ud44e\ud44f\ud450\ud451\ud452\ud453\ud454\ud455\ud456\ud457\ud458\ud459\ud45a\ud45b\ud45c\ud45d\ud45e\ud45f\ud460\ud461\ud462\ud463\ud464\ud465\ud466\ud467\ud468\ud469\ud46a\ud46b\ud46c\ud46d\ud46e\ud46f\ud470\ud471\ud472\ud473\ud474\ud475\ud476\ud477\ud478\ud479\ud47a\ud47b\ud47c\ud47d\ud47e\ud47f\ud480\ud481\ud482\ud483\ud484\ud485\ud486\ud487\ud488\ud489\ud48a\ud48b\ud48c\ud48d\ud48e\ud48f\ud490\ud491\ud492\ud493\ud494\ud495\ud496\ud497\ud498\ud499\ud49a\ud49b\ud49c\ud49d\ud49e\ud49f\ud4a0\ud4a1\ud4a2\ud4a3\ud4a4\ud4a5\ud4a6\ud4a7\ud4a8\ud4a9\ud4aa\ud4ab\ud4ac\ud4ad\ud4ae\ud4af\ud4b0\ud4b1\ud4b2\ud4b3\ud4b4\ud4b5\ud4b6\ud4b7\ud4b8\ud4b9\ud4ba\ud4bb\ud4bc\ud4bd\ud4be\ud4bf\ud4c0\ud4c1\ud4c2\ud4c3\ud4c4\ud4c5\ud4c6\ud4c7\ud4c8\ud4c9\ud4ca\ud4cb\ud4cc\ud4cd\ud4ce\ud4cf\ud4d0\ud4d1\ud4d2\ud4d3\ud4d4\ud4d5\ud4d6\ud4d7\ud4d8\ud4d9\ud4da\ud4db\ud4dc\ud4dd\ud4de\ud4df\ud4e0\ud4e1\ud4e2\ud4e3\ud4e4\ud4e5\ud4e6\ud4e7\ud4e8\ud4e9\ud4ea\ud4eb\ud4ec\ud4ed\ud4ee\ud4ef\ud4f0\ud4f1\ud4f2\ud4f3\ud4f4\ud4f5\ud4f6\ud4f7\ud4f8\ud4f9\ud4fa\ud4fb\ud4fc\ud4fd\ud4fe\ud4ff\ud500\ud501\ud502\ud503\ud504\ud505\ud506\ud507\ud508\ud509\ud50a\ud50b\ud50c\ud50d\ud50e\ud50f\ud510\ud511\ud512\ud513\ud514\ud515\ud516\ud517\ud518\ud519\ud51a\ud51b\ud51c\ud51d\ud51e\ud51f\ud520\ud521\ud522\ud523\ud524\ud525\ud526\ud527\ud528\ud529\ud52a\ud52b\ud52c\ud52d\ud52e\ud52f\ud530\ud531\ud532\ud533\ud534\ud535\ud536\ud537\ud538\ud539\ud53a\ud53b\ud53c\ud53d\ud53e\ud53f\ud540\ud541\ud5
42\ud543\ud544\ud545\ud546\ud547\ud548\ud549\ud54a\ud54b\ud54c\ud54d\ud54e\ud54f\ud550\ud551\ud552\ud553\ud554\ud555\ud556\ud557\ud558\ud559\ud55a\ud55b\ud55c\ud55d\ud55e\ud55f\ud560\ud561\ud562\ud563\ud564\ud565\ud566\ud567\ud568\ud569\ud56a\ud56b\ud56c\ud56d\ud56e\ud56f\ud570\ud571\ud572\ud573\ud574\ud575\ud576\ud577\ud578\ud579\ud57a\ud57b\ud57c\ud57d\ud57e\ud57f\ud580\ud581\ud582\ud583\ud584\ud585\ud586\ud587\ud588\ud589\ud58a\ud58b\ud58c\ud58d\ud58e\ud58f\ud590\ud591\ud592\ud593\ud594\ud595\ud596\ud597\ud598\ud599\ud59a\ud59b\ud59c\ud59d\ud59e\ud59f\ud5a0\ud5a1\ud5a2\ud5a3\ud5a4\ud5a5\ud5a6\ud5a7\ud5a8\ud5a9\ud5aa\ud5ab\ud5ac\ud5ad\ud5ae\ud5af\ud5b0\ud5b1\ud5b2\ud5b3\ud5b4\ud5b5\ud5b6\ud5b7\ud5b8\ud5b9\ud5ba\ud5bb\ud5bc\ud5bd\ud5be\ud5bf\ud5c0\ud5c1\ud5c2\ud5c3\ud5c4\ud5c5\ud5c6\ud5c7\ud5c8\ud5c9\ud5ca\ud5cb\ud5cc\ud5cd\ud5ce\ud5cf\ud5d0\ud5d1\ud5d2\ud5d3\ud5d4\ud5d5\ud5d6\ud5d7\ud5d8\ud5d9\ud5da\ud5db\ud5dc\ud5dd\ud5de\ud5df\ud5e0\ud5e1\ud5e2\ud5e3\ud5e4\ud5e5\ud5e6\ud5e7\ud5e8\ud5e9\ud5ea\ud5eb\ud5ec\ud5ed\ud5ee\ud5ef\ud5f0\ud5f1\ud5f2\ud5f3\ud5f4\ud5f5\ud5f6\ud5f7\ud5f8\ud5f9\ud5fa\ud5fb\ud5fc\ud5fd\ud5fe\ud5ff\ud600\ud601\ud602\ud603\ud604\ud605\ud606\ud607\ud608\ud609\ud60a\ud60b\ud60c\ud60d\ud60e\ud60f\ud610\ud611\ud612\ud613\ud614\ud615\ud616\ud617\ud618\ud619\ud61a\ud61b\ud61c\ud61d\ud61e\ud61f\ud620\ud621\ud622\ud623\ud624\ud625\ud626\ud627\ud628\ud629\ud62a\ud62b\ud62c\ud62d\ud62e\ud62f\ud630\ud631\ud632\ud633\ud634\ud635\ud636\ud637\ud638\ud639\ud63a\ud63b\ud63c\ud63d\ud63e\ud63f\ud640\ud641\ud642\ud643\ud644\ud645\ud646\ud647\ud648\ud649\ud64a\ud64b\ud64c\ud64d\ud64e\ud64f\ud650\ud651\ud652\ud653\ud654\ud655\ud656\ud657\ud658\ud659\ud65a\ud65b\ud65c\ud65d\ud65e\ud65f\ud660\ud661\ud662\ud663\ud664\ud665\ud666\ud667\ud668\ud669\ud66a\ud66b\ud66c\ud66d\ud66e\ud66f\ud670\ud671\ud672\ud673\ud674\ud675\ud676\ud677\ud678\ud679\ud67a\ud67b\ud67c\ud67d\ud67e\ud67f\ud680\ud681\ud682\ud683\ud684\ud685\ud686\ud687\ud688\ud689\ud68a\ud68b\ud68c\ud68d\ud68e\ud68f\ud690\ud691\ud692\ud693\ud694\ud695\ud696\ud697\ud698\ud699\ud69a\ud69b\ud69c\ud69d\ud69e\ud69f\ud6a0\ud6a1\ud6a2\ud6a3\ud6a4\ud6a5\ud6a6\ud6a7\ud6a8\ud6a9\ud6aa\ud6ab\ud6ac\ud6ad\ud6ae\ud6af\ud6b0\ud6b1\ud6b2\ud6b3\ud6b4\ud6b5\ud6b6\ud6b7\ud6b8\ud6b9\ud6ba\ud6bb\ud6bc\ud6bd\ud6be\ud6bf\ud6c0\ud6c1\ud6c2\ud6c3\ud6c4\ud6c5\ud6c6\ud6c7\ud6c8\ud6c9\ud6ca\ud6cb\ud6cc\ud6cd\ud6ce\ud6cf\ud6d0\ud6d1\ud6d2\ud6d3\ud6d4\ud6d5\ud6d6\ud6d7\ud6d8\ud6d9\ud6da\ud6db\ud6dc\ud6dd\ud6de\ud6df\ud6e0\ud6e1\ud6e2\ud6e3\ud6e4\ud6e5\ud6e6\ud6e7\ud6e8\ud6e9\ud6ea\ud6eb\ud6ec\ud6ed\ud6ee\ud6ef\ud6f0\ud6f1\ud6f2\ud6f3\ud6f4\ud6f5\ud6f6\ud6f7\ud6f8\ud6f9\ud6fa\ud6fb\ud6fc\ud6fd\ud6fe\ud6ff\ud700\ud701\ud702\ud703\ud704\ud705\ud706\ud707\ud708\ud709\ud70a\ud70b\ud70c\ud70d\ud70e\ud70f\ud710\ud711\ud712\ud713\ud714\ud715\ud716\ud717\ud718\ud719\ud71a\ud71b\ud71c\ud71d\ud71e\ud71f\ud720\ud721\ud722\ud723\ud724\ud725\ud726\ud727\ud728\ud729\ud72a\ud72b\ud72c\ud72d\ud72e\ud72f\ud730\ud731\ud732\ud733\ud734\ud735\ud736\ud737\ud738\ud739\ud73a\ud73b\ud73c\ud73d\ud73e\ud73f\ud740\ud741\ud742\ud743\ud744\ud745\ud746\ud747\ud748\ud749\ud74a\ud74b\ud74c\ud74d\ud74e\ud74f\ud750\ud751\ud752\ud753\ud754\ud755\ud756\ud757\ud758\ud759\ud75a\ud75b\ud75c\ud75d\ud75e\ud75f\ud760\ud761\ud762\ud763\ud764\ud765\ud766\ud767\ud768\ud769\ud76a\ud76b\ud76c\ud76d\ud76e\ud76f\ud770\ud771\ud772\ud773\ud774\ud775\ud776\ud777\ud778\ud779\ud77a\ud77b\ud77c\ud77d\ud77e\ud77f\ud780\ud781\ud782\ud783\ud784\ud785\ud786\ud787\ud788\ud789\ud78a\ud78b\ud78c\ud78d\ud78e\ud78f\ud790\ud791\ud792\
ud793\ud794\ud795\ud796\ud797\ud798\ud799\ud79a\ud79b\ud79c\ud79d\ud79e\ud79f\ud7a0\ud7a1\ud7a2\ud7a3\ud7b0\ud7b1\ud7b2\ud7b3\ud7b4\ud7b5\ud7b6\ud7b7\ud7b8\ud7b9\ud7ba\ud7bb\ud7bc\ud7bd\ud7be\ud7bf\ud7c0\ud7c1\ud7c2\ud7c3\ud7c4\ud7c5\ud7c6\ud7cb\ud7cc\ud7cd\ud7ce\ud7cf\ud7d0\ud7d1\ud7d2\ud7d3\ud7d4\ud7d5\ud7d6\ud7d7\ud7d8\ud7d9\ud7da\ud7db\ud7dc\ud7dd\ud7de\ud7df\ud7e0\ud7e1\ud7e2\ud7e3\ud7e4\ud7e5\ud7e6\ud7e7\ud7e8\ud7e9\ud7ea\ud7eb\ud7ec\ud7ed\ud7ee\ud7ef\ud7f0\ud7f1\ud7f2\ud7f3\ud7f4\ud7f5\ud7f6\ud7f7\ud7f8\ud7f9\ud7fa\ud7fb\uf900\uf901\uf902\uf903\uf904\uf905\uf906\uf907\uf908\uf909\uf90a\uf90b\uf90c\uf90d\uf90e\uf90f\uf910\uf911\uf912\uf913\uf914\uf915\uf916\uf917\uf918\uf919\uf91a\uf91b\uf91c\uf91d\uf91e\uf91f\uf920\uf921\uf922\uf923\uf924\uf925\uf926\uf927\uf928\uf929\uf92a\uf92b\uf92c\uf92d\uf92e\uf92f\uf930\uf931\uf932\uf933\uf934\uf935\uf936\uf937\uf938\uf939\uf93a\uf93b\uf93c\uf93d\uf93e\uf93f\uf940\uf941\uf942\uf943\uf944\uf945\uf946\uf947\uf948\uf949\uf94a\uf94b\uf94c\uf94d\uf94e\uf94f\uf950\uf951\uf952\uf953\uf954\uf955\uf956\uf957\uf958\uf959\uf95a\uf95b\uf95c\uf95d\uf95e\uf95f\uf960\uf961\uf962\uf963\uf964\uf965\uf966\uf967\uf968\uf969\uf96a\uf96b\uf96c\uf96d\uf96e\uf96f\uf970\uf971\uf972\uf973\uf974\uf975\uf976\uf977\uf978\uf979\uf97a\uf97b\uf97c\uf97d\uf97e\uf97f\uf980\uf981\uf982\uf983\uf984\uf985\uf986\uf987\uf988\uf989\uf98a\uf98b\uf98c\uf98d\uf98e\uf98f\uf990\uf991\uf992\uf993\uf994\uf995\uf996\uf997\uf998\uf999\uf99a\uf99b\uf99c\uf99d\uf99e\uf99f\uf9a0\uf9a1\uf9a2\uf9a3\uf9a4\uf9a5\uf9a6\uf9a7\uf9a8\uf9a9\uf9aa\uf9ab\uf9ac\uf9ad\uf9ae\uf9af\uf9b0\uf9b1\uf9b2\uf9b3\uf9b4\uf9b5\uf9b6\uf9b7\uf9b8\uf9b9\uf9ba\uf9bb\uf9bc\uf9bd\uf9be\uf9bf\uf9c0\uf9c1\uf9c2\uf9c3\uf9c4\uf9c5\uf9c6\uf9c7\uf9c8\uf9c9\uf9ca\uf9cb\uf9cc\uf9cd\uf9ce\uf9cf\uf9d0\uf9d1\uf9d2\uf9d3\uf9d4\uf9d5\uf9d6\uf9d7\uf9d8\uf9d9\uf9da\uf9db\uf9dc\uf9dd\uf9de\uf9df\uf9e0\uf9e1\uf9e2\uf9e3\uf9e4\uf9e5\uf9e6\uf9e7\uf9e8\uf9e9\uf9ea\uf9eb\uf9ec\uf9ed\uf9ee\uf9ef\uf9f0\uf9f1\uf9f2\uf9f3\uf9f4\uf9f5\uf9f6\uf9f7\uf9f8\uf9f9\uf9fa\uf9fb\uf9fc\uf9fd\uf9fe\uf9ff\ufa00\ufa01\ufa02\ufa03\ufa04\ufa05\ufa06\ufa07\ufa08\ufa09\ufa0a\ufa0b\ufa0c\ufa0d\ufa0e\ufa0f\ufa10\ufa11\ufa12\ufa13\ufa14\ufa15\ufa16\ufa17\ufa18\ufa19\ufa1a\ufa1b\ufa1c\ufa1d\ufa1e\ufa1f\ufa20\ufa21\ufa22\ufa23\ufa24\ufa25\ufa26\ufa27\ufa28\ufa29\ufa2a\ufa2b\ufa2c\ufa2d\ufa30\ufa31\ufa32\ufa33\ufa34\ufa35\ufa36\ufa37\ufa38\ufa39\ufa3a\ufa3b\ufa3c\ufa3d\ufa3e\ufa3f\ufa40\ufa41\ufa42\ufa43\ufa44\ufa45\ufa46\ufa47\ufa48\ufa49\ufa4a\ufa4b\ufa4c\ufa4d\ufa4e\ufa4f\ufa50\ufa51\ufa52\ufa53\ufa54\ufa55\ufa56\ufa57\ufa58\ufa59\ufa5a\ufa5b\ufa5c\ufa5d\ufa5e\ufa5f\ufa60\ufa61\ufa62\ufa63\ufa64\ufa65\ufa66\ufa67\ufa68\ufa69\ufa6a\ufa6b\ufa6c\ufa6d\ufa70\ufa71\ufa72\ufa73\ufa74\ufa75\ufa76\ufa77\ufa78\ufa79\ufa7a\ufa7b\ufa7c\ufa7d\ufa7e\ufa7f\ufa80\ufa81\ufa82\ufa83\ufa84\ufa85\ufa86\ufa87\ufa88\ufa89\ufa8a\ufa8b\ufa8c\ufa8d\ufa8e\ufa8f\ufa90\ufa91\ufa92\ufa93\ufa94\ufa95\ufa96\ufa97\ufa98\ufa99\ufa9a\ufa9b\ufa9c\ufa9d\ufa9e\ufa9f\ufaa0\ufaa1\ufaa2\ufaa3\ufaa4\ufaa5\ufaa6\ufaa7\ufaa8\ufaa9\ufaaa\ufaab\ufaac\ufaad\ufaae\ufaaf\ufab0\ufab1\ufab2\ufab3\ufab4\ufab5\ufab6\ufab7\ufab8\ufab9\ufaba\ufabb\ufabc\ufabd\ufabe\ufabf\ufac0\ufac1\ufac2\ufac3\ufac4\ufac5\ufac6\ufac7\ufac8\ufac9\ufaca\ufacb\ufacc\ufacd\uface\ufacf\ufad0\ufad1\ufad2\ufad3\ufad4\ufad5\ufad6\ufad7\ufad8\ufad9\ufb1d\ufb1f\ufb20\ufb21\ufb22\ufb23\ufb24\ufb25\ufb26\ufb27\ufb28\ufb2a\ufb2b\ufb2c\ufb2d\ufb2e\ufb2f\ufb30\ufb31\ufb32\ufb33\ufb34\ufb35\ufb36\ufb38\ufb39\ufb3a\ufb3b\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb
44\ufb46\ufb47\ufb48\ufb49\ufb4a\ufb4b\ufb4c\ufb4d\ufb4e\ufb4f\ufb50\ufb51\ufb52\ufb53\ufb54\ufb55\ufb56\ufb57\ufb58\ufb59\ufb5a\ufb5b\ufb5c\ufb5d\ufb5e\ufb5f\ufb60\ufb61\ufb62\ufb63\ufb64\ufb65\ufb66\ufb67\ufb68\ufb69\ufb6a\ufb6b\ufb6c\ufb6d\ufb6e\ufb6f\ufb70\ufb71\ufb72\ufb73\ufb74\ufb75\ufb76\ufb77\ufb78\ufb79\ufb7a\ufb7b\ufb7c\ufb7d\ufb7e\ufb7f\ufb80\ufb81\ufb82\ufb83\ufb84\ufb85\ufb86\ufb87\ufb88\ufb89\ufb8a\ufb8b\ufb8c\ufb8d\ufb8e\ufb8f\ufb90\ufb91\ufb92\ufb93\ufb94\ufb95\ufb96\ufb97\ufb98\ufb99\ufb9a\ufb9b\ufb9c\ufb9d\ufb9e\ufb9f\ufba0\ufba1\ufba2\ufba3\ufba4\ufba5\ufba6\ufba7\ufba8\ufba9\ufbaa\ufbab\ufbac\ufbad\ufbae\ufbaf\ufbb0\ufbb1\ufbd3\ufbd4\ufbd5\ufbd6\ufbd7\ufbd8\ufbd9\ufbda\ufbdb\ufbdc\ufbdd\ufbde\ufbdf\ufbe0\ufbe1\ufbe2\ufbe3\ufbe4\ufbe5\ufbe6\ufbe7\ufbe8\ufbe9\ufbea\ufbeb\ufbec\ufbed\ufbee\ufbef\ufbf0\ufbf1\ufbf2\ufbf3\ufbf4\ufbf5\ufbf6\ufbf7\ufbf8\ufbf9\ufbfa\ufbfb\ufbfc\ufbfd\ufbfe\ufbff\ufc00\ufc01\ufc02\ufc03\ufc04\ufc05\ufc06\ufc07\ufc08\ufc09\ufc0a\ufc0b\ufc0c\ufc0d\ufc0e\ufc0f\ufc10\ufc11\ufc12\ufc13\ufc14\ufc15\ufc16\ufc17\ufc18\ufc19\ufc1a\ufc1b\ufc1c\ufc1d\ufc1e\ufc1f\ufc20\ufc21\ufc22\ufc23\ufc24\ufc25\ufc26\ufc27\ufc28\ufc29\ufc2a\ufc2b\ufc2c\ufc2d\ufc2e\ufc2f\ufc30\ufc31\ufc32\ufc33\ufc34\ufc35\ufc36\ufc37\ufc38\ufc39\ufc3a\ufc3b\ufc3c\ufc3d\ufc3e\ufc3f\ufc40\ufc41\ufc42\ufc43\ufc44\ufc45\ufc46\ufc47\ufc48\ufc49\ufc4a\ufc4b\ufc4c\ufc4d\ufc4e\ufc4f\ufc50\ufc51\ufc52\ufc53\ufc54\ufc55\ufc56\ufc57\ufc58\ufc59\ufc5a\ufc5b\ufc5c\ufc5d\ufc5e\ufc5f\ufc60\ufc61\ufc62\ufc63\ufc64\ufc65\ufc66\ufc67\ufc68\ufc69\ufc6a\ufc6b\ufc6c\ufc6d\ufc6e\ufc6f\ufc70\ufc71\ufc72\ufc73\ufc74\ufc75\ufc76\ufc77\ufc78\ufc79\ufc7a\ufc7b\ufc7c\ufc7d\ufc7e\ufc7f\ufc80\ufc81\ufc82\ufc83\ufc84\ufc85\ufc86\ufc87\ufc88\ufc89\ufc8a\ufc8b\ufc8c\ufc8d\ufc8e\ufc8f\ufc90\ufc91\ufc92\ufc93\ufc94\ufc95\ufc96\ufc97\ufc98\ufc99\ufc9a\ufc9b\ufc9c\ufc9d\ufc9e\ufc9f\ufca0\ufca1\ufca2\ufca3\ufca4\ufca5\ufca6\ufca7\ufca8\ufca9\ufcaa\ufcab\ufcac\ufcad\ufcae\ufcaf\ufcb0\ufcb1\ufcb2\ufcb3\ufcb4\ufcb5\ufcb6\ufcb7\ufcb8\ufcb9\ufcba\ufcbb\ufcbc\ufcbd\ufcbe\ufcbf\ufcc0\ufcc1\ufcc2\ufcc3\ufcc4\ufcc5\ufcc6\ufcc7\ufcc8\ufcc9\ufcca\ufccb\ufccc\ufccd\ufcce\ufccf\ufcd0\ufcd1\ufcd2\ufcd3\ufcd4\ufcd5\ufcd6\ufcd7\ufcd8\ufcd9\ufcda\ufcdb\ufcdc\ufcdd\ufcde\ufcdf\ufce0\ufce1\ufce2\ufce3\ufce4\ufce5\ufce6\ufce7\ufce8\ufce9\ufcea\ufceb\ufcec\ufced\ufcee\ufcef\ufcf0\ufcf1\ufcf2\ufcf3\ufcf4\ufcf5\ufcf6\ufcf7\ufcf8\ufcf9\ufcfa\ufcfb\ufcfc\ufcfd\ufcfe\ufcff\ufd00\ufd01\ufd02\ufd03\ufd04\ufd05\ufd06\ufd07\ufd08\ufd09\ufd0a\ufd0b\ufd0c\ufd0d\ufd0e\ufd0f\ufd10\ufd11\ufd12\ufd13\ufd14\ufd15\ufd16\ufd17\ufd18\ufd19\ufd1a\ufd1b\ufd1c\ufd1d\ufd1e\ufd1f\ufd20\ufd21\ufd22\ufd23\ufd24\ufd25\ufd26\ufd27\ufd28\ufd29\ufd2a\ufd2b\ufd2c\ufd2d\ufd2e\ufd2f\ufd30\ufd31\ufd32\ufd33\ufd34\ufd35\ufd36\ufd37\ufd38\ufd39\ufd3a\ufd3b\ufd3c\ufd3d\ufd50\ufd51\ufd52\ufd53\ufd54\ufd55\ufd56\ufd57\ufd58\ufd59\ufd5a\ufd5b\ufd5c\ufd5d\ufd5e\ufd5f\ufd60\ufd61\ufd62\ufd63\ufd64\ufd65\ufd66\ufd67\ufd68\ufd69\ufd6a\ufd6b\ufd6c\ufd6d\ufd6e\ufd6f\ufd70\ufd71\ufd72\ufd73\ufd74\ufd75\ufd76\ufd77\ufd78\ufd79\ufd7a\ufd7b\ufd7c\ufd7d\ufd7e\ufd7f\ufd80\ufd81\ufd82\ufd83\ufd84\ufd85\ufd86\ufd87\ufd88\ufd89\ufd8a\ufd8b\ufd8c\ufd8d\ufd8e\ufd8f\ufd92\ufd93\ufd94\ufd95\ufd96\ufd97\ufd98\ufd99\ufd9a\ufd9b\ufd9c\ufd9d\ufd9e\ufd9f\ufda0\ufda1\ufda2\ufda3\ufda4\ufda5\ufda6\ufda7\ufda8\ufda9\ufdaa\ufdab\ufdac\ufdad\ufdae\ufdaf\ufdb0\ufdb1\ufdb2\ufdb3\ufdb4\ufdb5\ufdb6\ufdb7\ufdb8\ufdb9\ufdba\ufdbb\ufdbc\ufdbd\ufdbe\ufdbf\ufdc0\ufdc1\ufdc2\ufdc3\ufdc4\ufdc5\ufdc6\ufdc7\ufdf0\ufdf1\ufdf2\
ufdf3\ufdf4\ufdf5\ufdf6\ufdf7\ufdf8\ufdf9\ufdfa\ufdfb\ufe70\ufe71\ufe72\ufe73\ufe74\ufe76\ufe77\ufe78\ufe79\ufe7a\ufe7b\ufe7c\ufe7d\ufe7e\ufe7f\ufe80\ufe81\ufe82\ufe83\ufe84\ufe85\ufe86\ufe87\ufe88\ufe89\ufe8a\ufe8b\ufe8c\ufe8d\ufe8e\ufe8f\ufe90\ufe91\ufe92\ufe93\ufe94\ufe95\ufe96\ufe97\ufe98\ufe99\ufe9a\ufe9b\ufe9c\ufe9d\ufe9e\ufe9f\ufea0\ufea1\ufea2\ufea3\ufea4\ufea5\ufea6\ufea7\ufea8\ufea9\ufeaa\ufeab\ufeac\ufead\ufeae\ufeaf\ufeb0\ufeb1\ufeb2\ufeb3\ufeb4\ufeb5\ufeb6\ufeb7\ufeb8\ufeb9\ufeba\ufebb\ufebc\ufebd\ufebe\ufebf\ufec0\ufec1\ufec2\ufec3\ufec4\ufec5\ufec6\ufec7\ufec8\ufec9\ufeca\ufecb\ufecc\ufecd\ufece\ufecf\ufed0\ufed1\ufed2\ufed3\ufed4\ufed5\ufed6\ufed7\ufed8\ufed9\ufeda\ufedb\ufedc\ufedd\ufede\ufedf\ufee0\ufee1\ufee2\ufee3\ufee4\ufee5\ufee6\ufee7\ufee8\ufee9\ufeea\ufeeb\ufeec\ufeed\ufeee\ufeef\ufef0\ufef1\ufef2\ufef3\ufef4\ufef5\ufef6\ufef7\ufef8\ufef9\ufefa\ufefb\ufefc\uff66\uff67\uff68\uff69\uff6a\uff6b\uff6c\uff6d\uff6e\uff6f\uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79\uff7a\uff7b\uff7c\uff7d\uff7e\uff7f\uff80\uff81\uff82\uff83\uff84\uff85\uff86\uff87\uff88\uff89\uff8a\uff8b\uff8c\uff8d\uff8e\uff8f\uff90\uff91\uff92\uff93\uff94\uff95\uff96\uff97\uff98\uff99\uff9a\uff9b\uff9c\uff9d\uffa0\uffa1\uffa2\uffa3\uffa4\uffa5\uffa6\uffa7\uffa8\uffa9\uffaa\uffab\uffac\uffad\uffae\uffaf\uffb0\uffb1\uffb2\uffb3\uffb4\uffb5\uffb6\uffb7\uffb8\uffb9\uffba\uffbb\uffbc\uffbd\uffbe\uffc2\uffc3\uffc4\uffc5\uffc6\uffc7\uffca\uffcb\uffcc\uffcd\uffce\uffcf\uffd2\uffd3\uffd4\uffd5\uffd6\uffd7\uffda\uffdb\uffdc'
+Lo = u'\xaa\xba\u01bb\u01c0-\u01c3\u0294\u05d0-\u05ea\u05f0-\u05f2\u0620-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u0800-\u0815\u0840-\u0858\u08a0\u08a2-\u08ac\u0904-\u0939\u093d\u0950\u0958-\u0961\u0972-\u0977\u0979-\u097f\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c33\u0c35-\u0c39\u0c3d\u0c58-\u0c59\u0c60-\u0c61\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d60-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2-\u0eb3\u0ebd\u0ec0-\u0ec4\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u10d0-\u10fa\u10fd-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f4\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17dc\u1820-\u1842\u1844-\u1877\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191c\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19c1-\u19c7\u1a00-\u1a16\u1a20-\u1a54\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u2135-\u2138\u2d30-\u2d67\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3006\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff\u3105-\u312d\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fcc\ua000-\ua014\ua016-\ua48c\ua4d0-\ua4f7\ua500-\ua60b\ua610-\ua61f\ua62a-\ua62b\ua66e\ua6a0-\ua6e5\ua7fb-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa6f\uaa71-\uaa76\uaa7a\uaa80-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadc\uaae0-\uaaea\uaaf2\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff66-\uff6f\uff71-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc'
-Lt = u'\u01c5\u01c8\u01cb\u01f2\u1f88\u1f89\u1f8a\u1f8b\u1f8c\u1f8d\u1f8e\u1f8f\u1f98\u1f99\u1f9a\u1f9b\u1f9c\u1f9d\u1f9e\u1f9f\u1fa8\u1fa9\u1faa\u1fab\u1fac\u1fad\u1fae\u1faf\u1fbc\u1fcc\u1ffc'
+Lt = u'\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fbc\u1fcc\u1ffc'
-Lu = u'ABCDEFGHIJKLMNOPQRSTUVWXYZ\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd8\xd9\xda\xdb\xdc\xdd\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178\u0179\u017b\u017d\u0181\u0182\u0184\u0186\u0187\u0189\u018a\u018b\u018e\u018f\u0190\u0191\u0193\u0194\u0196\u0197\u0198\u019c\u019d\u019f\u01a0\u01a2\u01a4\u01a6\u01a7\u01a9\u01ac\u01ae\u01af\u01b1\u01b2\u01b3\u01b5\u01b7\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6\u01f7\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a\u023b\u023d\u023e\u0241\u0243\u0244\u0245\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388\u0389\u038a\u038c\u038e\u038f\u0391\u0392\u0393\u0394\u0395\u0396\u0397\u0398\u0399\u039a\u039b\u039c\u039d\u039e\u039f\u03a0\u03a1\u03a3\u03a4\u03a5\u03a6\u03a7\u03a8\u03a9\u03aa\u03ab\u03cf\u03d2\u03d3\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9\u03fa\u03fd\u03fe\u03ff\u0400\u0401\u0402\u0403\u0404\u0405\u0406\u0407\u0408\u0409\u040a\u040b\u040c\u040d\u040e\u040f\u0410\u0411\u0412\u0413\u0414\u0415\u0416\u0417\u0418\u0419\u041a\u041b\u041c\u041d\u041e\u041f\u0420\u0421\u0422\u0423\u0424\u0425\u0426\u0427\u0428\u0429\u042a\u042b\u042c\u042d\u042e\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0531\u0532\u0533\u0534\u0535\u0536\u0537\u0538\u0539\u053a\u053b\u053c\u053d\u053e\u053f\u0540\u0541\u0542\u0543\u0544\u0545\u0546\u0547\u0548\u0549\u054a\u054b\u054c\u054d\u054e\u054f\u0550\u0551\u0552\u0553\u0554\u0555\u0556\u10a0\u10a1\u10a2\u10a3\u10a4\u10a5\u10a6\u10a7\u10a8\u10a9\u10aa\u10ab\u10ac\u10ad\u10ae\u10af\u10b0\u10b1\u10b2\u10b3\u10b4\u10b5\u10b6\u10b7\u10b8\u10b9\u10ba\u10bb\u10bc\u10bd\u10be\u10bf\u10c0\u10c1\u10c2\u10c3\u10c4\u10c5\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1ef
c\u1efe\u1f08\u1f09\u1f0a\u1f0b\u1f0c\u1f0d\u1f0e\u1f0f\u1f18\u1f19\u1f1a\u1f1b\u1f1c\u1f1d\u1f28\u1f29\u1f2a\u1f2b\u1f2c\u1f2d\u1f2e\u1f2f\u1f38\u1f39\u1f3a\u1f3b\u1f3c\u1f3d\u1f3e\u1f3f\u1f48\u1f49\u1f4a\u1f4b\u1f4c\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68\u1f69\u1f6a\u1f6b\u1f6c\u1f6d\u1f6e\u1f6f\u1fb8\u1fb9\u1fba\u1fbb\u1fc8\u1fc9\u1fca\u1fcb\u1fd8\u1fd9\u1fda\u1fdb\u1fe8\u1fe9\u1fea\u1feb\u1fec\u1ff8\u1ff9\u1ffa\u1ffb\u2102\u2107\u210b\u210c\u210d\u2110\u2111\u2112\u2115\u2119\u211a\u211b\u211c\u211d\u2124\u2126\u2128\u212a\u212b\u212c\u212d\u2130\u2131\u2132\u2133\u213e\u213f\u2145\u2183\u2c00\u2c01\u2c02\u2c03\u2c04\u2c05\u2c06\u2c07\u2c08\u2c09\u2c0a\u2c0b\u2c0c\u2c0d\u2c0e\u2c0f\u2c10\u2c11\u2c12\u2c13\u2c14\u2c15\u2c16\u2c17\u2c18\u2c19\u2c1a\u2c1b\u2c1c\u2c1d\u2c1e\u2c1f\u2c20\u2c21\u2c22\u2c23\u2c24\u2c25\u2c26\u2c27\u2c28\u2c29\u2c2a\u2c2b\u2c2c\u2c2d\u2c2e\u2c60\u2c62\u2c63\u2c64\u2c67\u2c69\u2c6b\u2c6d\u2c6e\u2c6f\u2c70\u2c72\u2c75\u2c7e\u2c7f\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21\uff22\uff23\uff24\uff25\uff26\uff27\uff28\uff29\uff2a\uff2b\uff2c\uff2d\uff2e\uff2f\uff30\uff31\uff32\uff33\uff34\uff35\uff36\uff37\uff38\uff39\uff3a'
+Lu = u'A-Z\xc0-\xd6\xd8-\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038a\u038c\u038e-\u038f\u0391-\u03a1\u03a3-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0531-\u0556\u10a0-\u10c5\u10c7\u10cd\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7e-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\u2cf2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua7
40\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\ua78d\ua790\ua792\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa\uff21-\uff3a'
-Mc = u'\u0903\u093e\u093f\u0940\u0949\u094a\u094b\u094c\u094e\u0982\u0983\u09be\u09bf\u09c0\u09c7\u09c8\u09cb\u09cc\u09d7\u0a03\u0a3e\u0a3f\u0a40\u0a83\u0abe\u0abf\u0ac0\u0ac9\u0acb\u0acc\u0b02\u0b03\u0b3e\u0b40\u0b47\u0b48\u0b4b\u0b4c\u0b57\u0bbe\u0bbf\u0bc1\u0bc2\u0bc6\u0bc7\u0bc8\u0bca\u0bcb\u0bcc\u0bd7\u0c01\u0c02\u0c03\u0c41\u0c42\u0c43\u0c44\u0c82\u0c83\u0cbe\u0cc0\u0cc1\u0cc2\u0cc3\u0cc4\u0cc7\u0cc8\u0cca\u0ccb\u0cd5\u0cd6\u0d02\u0d03\u0d3e\u0d3f\u0d40\u0d46\u0d47\u0d48\u0d4a\u0d4b\u0d4c\u0d57\u0d82\u0d83\u0dcf\u0dd0\u0dd1\u0dd8\u0dd9\u0dda\u0ddb\u0ddc\u0ddd\u0dde\u0ddf\u0df2\u0df3\u0f3e\u0f3f\u0f7f\u102b\u102c\u1031\u1038\u103b\u103c\u1056\u1057\u1062\u1063\u1064\u1067\u1068\u1069\u106a\u106b\u106c\u106d\u1083\u1084\u1087\u1088\u1089\u108a\u108b\u108c\u108f\u109a\u109b\u109c\u17b6\u17be\u17bf\u17c0\u17c1\u17c2\u17c3\u17c4\u17c5\u17c7\u17c8\u1923\u1924\u1925\u1926\u1929\u192a\u192b\u1930\u1931\u1933\u1934\u1935\u1936\u1937\u1938\u19b0\u19b1\u19b2\u19b3\u19b4\u19b5\u19b6\u19b7\u19b8\u19b9\u19ba\u19bb\u19bc\u19bd\u19be\u19bf\u19c0\u19c8\u19c9\u1a19\u1a1a\u1a1b\u1a55\u1a57\u1a61\u1a63\u1a64\u1a6d\u1a6e\u1a6f\u1a70\u1a71\u1a72\u1b04\u1b35\u1b3b\u1b3d\u1b3e\u1b3f\u1b40\u1b41\u1b43\u1b44\u1b82\u1ba1\u1ba6\u1ba7\u1baa\u1c24\u1c25\u1c26\u1c27\u1c28\u1c29\u1c2a\u1c2b\u1c34\u1c35\u1ce1\u1cf2\ua823\ua824\ua827\ua880\ua881\ua8b4\ua8b5\ua8b6\ua8b7\ua8b8\ua8b9\ua8ba\ua8bb\ua8bc\ua8bd\ua8be\ua8bf\ua8c0\ua8c1\ua8c2\ua8c3\ua952\ua953\ua983\ua9b4\ua9b5\ua9ba\ua9bb\ua9bd\ua9be\ua9bf\ua9c0\uaa2f\uaa30\uaa33\uaa34\uaa4d\uaa7b\uabe3\uabe4\uabe6\uabe7\uabe9\uabea\uabec'
+Mc = u'\u0903\u093b\u093e-\u0940\u0949-\u094c\u094e-\u094f\u0982-\u0983\u09be-\u09c0\u09c7-\u09c8\u09cb-\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb-\u0acc\u0b02-\u0b03\u0b3e\u0b40\u0b47-\u0b48\u0b4b-\u0b4c\u0b57\u0bbe-\u0bbf\u0bc1-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01-\u0c03\u0c41-\u0c44\u0c82-\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7-\u0cc8\u0cca-\u0ccb\u0cd5-\u0cd6\u0d02-\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82-\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2-\u0df3\u0f3e-\u0f3f\u0f7f\u102b-\u102c\u1031\u1038\u103b-\u103c\u1056-\u1057\u1062-\u1064\u1067-\u106d\u1083-\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7-\u17c8\u1923-\u1926\u1929-\u192b\u1930-\u1931\u1933-\u1938\u19b0-\u19c0\u19c8-\u19c9\u1a19-\u1a1a\u1a55\u1a57\u1a61\u1a63-\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43-\u1b44\u1b82\u1ba1\u1ba6-\u1ba7\u1baa\u1bac-\u1bad\u1be7\u1bea-\u1bec\u1bee\u1bf2-\u1bf3\u1c24-\u1c2b\u1c34-\u1c35\u1ce1\u1cf2-\u1cf3\u302e-\u302f\ua823-\ua824\ua827\ua880-\ua881\ua8b4-\ua8c3\ua952-\ua953\ua983\ua9b4-\ua9b5\ua9ba-\ua9bb\ua9bd-\ua9c0\uaa2f-\uaa30\uaa33-\uaa34\uaa4d\uaa7b\uaaeb\uaaee-\uaaef\uaaf5\uabe3-\uabe4\uabe6-\uabe7\uabe9-\uabea\uabec'
-Me = u'\u0488\u0489\u06de\u20dd\u20de\u20df\u20e0\u20e2\u20e3\u20e4\ua670\ua671\ua672'
+Me = u'\u0488-\u0489\u20dd-\u20e0\u20e2-\u20e4\ua670-\ua672'
-Mn = u'\u0300\u0301\u0302\u0303\u0304\u0305\u0306\u0307\u0308\u0309\u030a\u030b\u030c\u030d\u030e\u030f\u0310\u0311\u0312\u0313\u0314\u0315\u0316\u0317\u0318\u0319\u031a\u031b\u031c\u031d\u031e\u031f\u0320\u0321\u0322\u0323\u0324\u0325\u0326\u0327\u0328\u0329\u032a\u032b\u032c\u032d\u032e\u032f\u0330\u0331\u0332\u0333\u0334\u0335\u0336\u0337\u0338\u0339\u033a\u033b\u033c\u033d\u033e\u033f\u0340\u0341\u0342\u0343\u0344\u0345\u0346\u0347\u0348\u0349\u034a\u034b\u034c\u034d\u034e\u034f\u0350\u0351\u0352\u0353\u0354\u0355\u0356\u0357\u0358\u0359\u035a\u035b\u035c\u035d\u035e\u035f\u0360\u0361\u0362\u0363\u0364\u0365\u0366\u0367\u0368\u0369\u036a\u036b\u036c\u036d\u036e\u036f\u0483\u0484\u0485\u0486\u0487\u0591\u0592\u0593\u0594\u0595\u0596\u0597\u0598\u0599\u059a\u059b\u059c\u059d\u059e\u059f\u05a0\u05a1\u05a2\u05a3\u05a4\u05a5\u05a6\u05a7\u05a8\u05a9\u05aa\u05ab\u05ac\u05ad\u05ae\u05af\u05b0\u05b1\u05b2\u05b3\u05b4\u05b5\u05b6\u05b7\u05b8\u05b9\u05ba\u05bb\u05bc\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610\u0611\u0612\u0613\u0614\u0615\u0616\u0617\u0618\u0619\u061a\u064b\u064c\u064d\u064e\u064f\u0650\u0651\u0652\u0653\u0654\u0655\u0656\u0657\u0658\u0659\u065a\u065b\u065c\u065d\u065e\u0670\u06d6\u06d7\u06d8\u06d9\u06da\u06db\u06dc\u06df\u06e0\u06e1\u06e2\u06e3\u06e4\u06e7\u06e8\u06ea\u06eb\u06ec\u06ed\u0711\u0730\u0731\u0732\u0733\u0734\u0735\u0736\u0737\u0738\u0739\u073a\u073b\u073c\u073d\u073e\u073f\u0740\u0741\u0742\u0743\u0744\u0745\u0746\u0747\u0748\u0749\u074a\u07a6\u07a7\u07a8\u07a9\u07aa\u07ab\u07ac\u07ad\u07ae\u07af\u07b0\u07eb\u07ec\u07ed\u07ee\u07ef\u07f0\u07f1\u07f2\u07f3\u0816\u0817\u0818\u0819\u081b\u081c\u081d\u081e\u081f\u0820\u0821\u0822\u0823\u0825\u0826\u0827\u0829\u082a\u082b\u082c\u082d\u0900\u0901\u0902\u093c\u0941\u0942\u0943\u0944\u0945\u0946\u0947\u0948\u094d\u0951\u0952\u0953\u0954\u0955\u0962\u0963\u0981\u09bc\u09c1\u09c2\u09c3\u09c4\u09cd\u09e2\u09e3\u0a01\u0a02\u0a3c\u0a41\u0a42\u0a47\u0a48\u0a4b\u0a4c\u0a4d\u0a51\u0a70\u0a71\u0a75\u0a81\u0a82\u0abc\u0ac1\u0ac2\u0ac3\u0ac4\u0ac5\u0ac7\u0ac8\u0acd\u0ae2\u0ae3\u0b01\u0b3c\u0b3f\u0b41\u0b42\u0b43\u0b44\u0b4d\u0b56\u0b62\u0b63\u0b82\u0bc0\u0bcd\u0c3e\u0c3f\u0c40\u0c46\u0c47\u0c48\u0c4a\u0c4b\u0c4c\u0c4d\u0c55\u0c56\u0c62\u0c63\u0cbc\u0cbf\u0cc6\u0ccc\u0ccd\u0ce2\u0ce3\u0d41\u0d42\u0d43\u0d44\u0d4d\u0d62\u0d63\u0dca\u0dd2\u0dd3\u0dd4\u0dd6\u0e31\u0e34\u0e35\u0e36\u0e37\u0e38\u0e39\u0e3a\u0e47\u0e48\u0e49\u0e4a\u0e4b\u0e4c\u0e4d\u0e4e\u0eb1\u0eb4\u0eb5\u0eb6\u0eb7\u0eb8\u0eb9\u0ebb\u0ebc\u0ec8\u0ec9\u0eca\u0ecb\u0ecc\u0ecd\u0f18\u0f19\u0f35\u0f37\u0f39\u0f71\u0f72\u0f73\u0f74\u0f75\u0f76\u0f77\u0f78\u0f79\u0f7a\u0f7b\u0f7c\u0f7d\u0f7e\u0f80\u0f81\u0f82\u0f83\u0f84\u0f86\u0f87\u0f90\u0f91\u0f92\u0f93\u0f94\u0f95\u0f96\u0f97\u0f99\u0f9a\u0f9b\u0f9c\u0f9d\u0f9e\u0f9f\u0fa0\u0fa1\u0fa2\u0fa3\u0fa4\u0fa5\u0fa6\u0fa7\u0fa8\u0fa9\u0faa\u0fab\u0fac\u0fad\u0fae\u0faf\u0fb0\u0fb1\u0fb2\u0fb3\u0fb4\u0fb5\u0fb6\u0fb7\u0fb8\u0fb9\u0fba\u0fbb\u0fbc\u0fc6\u102d\u102e\u102f\u1030\u1032\u1033\u1034\u1035\u1036\u1037\u1039\u103a\u103d\u103e\u1058\u1059\u105e\u105f\u1060\u1071\u1072\u1073\u1074\u1082\u1085\u1086\u108d\u109d\u135f\u1712\u1713\u1714\u1732\u1733\u1734\u1752\u1753\u1772\u1773\u17b7\u17b8\u17b9\u17ba\u17bb\u17bc\u17bd\u17c6\u17c9\u17ca\u17cb\u17cc\u17cd\u17ce\u17cf\u17d0\u17d1\u17d2\u17d3\u17dd\u180b\u180c\u180d\u18a9\u1920\u1921\u1922\u1927\u1928\u1932\u1939\u193a\u193b\u1a17\u1a18\u1a56\u1a58\u1a59\u1a5a\u1a5b\u1a5c\u1a5d\u1a5e\u1a60\u1a62\u1a65\u1a66\u1a67\u1a68\u1a69\u1a6a\u1a6b\u1a6c\u1a73\u1a74\u1a75\u1a76\u1a77\u1a78\
u1a79\u1a7a\u1a7b\u1a7c\u1a7f\u1b00\u1b01\u1b02\u1b03\u1b34\u1b36\u1b37\u1b38\u1b39\u1b3a\u1b3c\u1b42\u1b6b\u1b6c\u1b6d\u1b6e\u1b6f\u1b70\u1b71\u1b72\u1b73\u1b80\u1b81\u1ba2\u1ba3\u1ba4\u1ba5\u1ba8\u1ba9\u1c2c\u1c2d\u1c2e\u1c2f\u1c30\u1c31\u1c32\u1c33\u1c36\u1c37\u1cd0\u1cd1\u1cd2\u1cd4\u1cd5\u1cd6\u1cd7\u1cd8\u1cd9\u1cda\u1cdb\u1cdc\u1cdd\u1cde\u1cdf\u1ce0\u1ce2\u1ce3\u1ce4\u1ce5\u1ce6\u1ce7\u1ce8\u1ced\u1dc0\u1dc1\u1dc2\u1dc3\u1dc4\u1dc5\u1dc6\u1dc7\u1dc8\u1dc9\u1dca\u1dcb\u1dcc\u1dcd\u1dce\u1dcf\u1dd0\u1dd1\u1dd2\u1dd3\u1dd4\u1dd5\u1dd6\u1dd7\u1dd8\u1dd9\u1dda\u1ddb\u1ddc\u1ddd\u1dde\u1ddf\u1de0\u1de1\u1de2\u1de3\u1de4\u1de5\u1de6\u1dfd\u1dfe\u1dff\u20d0\u20d1\u20d2\u20d3\u20d4\u20d5\u20d6\u20d7\u20d8\u20d9\u20da\u20db\u20dc\u20e1\u20e5\u20e6\u20e7\u20e8\u20e9\u20ea\u20eb\u20ec\u20ed\u20ee\u20ef\u20f0\u2cef\u2cf0\u2cf1\u2de0\u2de1\u2de2\u2de3\u2de4\u2de5\u2de6\u2de7\u2de8\u2de9\u2dea\u2deb\u2dec\u2ded\u2dee\u2def\u2df0\u2df1\u2df2\u2df3\u2df4\u2df5\u2df6\u2df7\u2df8\u2df9\u2dfa\u2dfb\u2dfc\u2dfd\u2dfe\u2dff\u302a\u302b\u302c\u302d\u302e\u302f\u3099\u309a\ua66f\ua67c\ua67d\ua6f0\ua6f1\ua802\ua806\ua80b\ua825\ua826\ua8c4\ua8e0\ua8e1\ua8e2\ua8e3\ua8e4\ua8e5\ua8e6\ua8e7\ua8e8\ua8e9\ua8ea\ua8eb\ua8ec\ua8ed\ua8ee\ua8ef\ua8f0\ua8f1\ua926\ua927\ua928\ua929\ua92a\ua92b\ua92c\ua92d\ua947\ua948\ua949\ua94a\ua94b\ua94c\ua94d\ua94e\ua94f\ua950\ua951\ua980\ua981\ua982\ua9b3\ua9b6\ua9b7\ua9b8\ua9b9\ua9bc\uaa29\uaa2a\uaa2b\uaa2c\uaa2d\uaa2e\uaa31\uaa32\uaa35\uaa36\uaa43\uaa4c\uaab0\uaab2\uaab3\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uabe5\uabe8\uabed\ufb1e\ufe00\ufe01\ufe02\ufe03\ufe04\ufe05\ufe06\ufe07\ufe08\ufe09\ufe0a\ufe0b\ufe0c\ufe0d\ufe0e\ufe0f\ufe20\ufe21\ufe22\ufe23\ufe24\ufe25\ufe26'
+Mn = u'\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u0610-\u061a\u064b-\u065f\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7-\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08e4-\u08fe\u0900-\u0902\u093a\u093c\u0941-\u0948\u094d\u0951-\u0957\u0962-\u0963\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2-\u09e3\u0a01-\u0a02\u0a3c\u0a41-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a70-\u0a71\u0a75\u0a81-\u0a82\u0abc\u0ac1-\u0ac5\u0ac7-\u0ac8\u0acd\u0ae2-\u0ae3\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62-\u0b63\u0b82\u0bc0\u0bcd\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c62-\u0c63\u0cbc\u0cbf\u0cc6\u0ccc-\u0ccd\u0ce2-\u0ce3\u0d41-\u0d44\u0d4d\u0d62-\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb-\u0ebc\u0ec8-\u0ecd\u0f18-\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86-\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039-\u103a\u103d-\u103e\u1058-\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085-\u1086\u108d\u109d\u135d-\u135f\u1712-\u1714\u1732-\u1734\u1752-\u1753\u1772-\u1773\u17b4-\u17b5\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u18a9\u1920-\u1922\u1927-\u1928\u1932\u1939-\u193b\u1a17-\u1a18\u1a1b\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80-\u1b81\u1ba2-\u1ba5\u1ba8-\u1ba9\u1bab\u1be6\u1be8-\u1be9\u1bed\u1bef-\u1bf1\u1c2c-\u1c33\u1c36-\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1cf4\u1dc0-\u1de6\u1dfc-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302d\u3099-\u309a\ua66f\ua674-\ua67d\ua69f\ua6f0-\ua6f1\ua802\ua806\ua80b\ua825-\ua826\ua8c4\ua8e0-\ua8f1\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\uaa29-\uaa2e\uaa31-\uaa32\uaa35-\uaa36\uaa43\uaa4c\uaab0\uaab2-\uaab4\uaab7-\uaab8\uaabe-\uaabf\uaac1\uaaec-\uaaed\uaaf6\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe26'
-Nd = u'0123456789\u0660\u0661\u0662\u0663\u0664\u0665\u0666\u0667\u0668\u0669\u06f0\u06f1\u06f2\u06f3\u06f4\u06f5\u06f6\u06f7\u06f8\u06f9\u07c0\u07c1\u07c2\u07c3\u07c4\u07c5\u07c6\u07c7\u07c8\u07c9\u0966\u0967\u0968\u0969\u096a\u096b\u096c\u096d\u096e\u096f\u09e6\u09e7\u09e8\u09e9\u09ea\u09eb\u09ec\u09ed\u09ee\u09ef\u0a66\u0a67\u0a68\u0a69\u0a6a\u0a6b\u0a6c\u0a6d\u0a6e\u0a6f\u0ae6\u0ae7\u0ae8\u0ae9\u0aea\u0aeb\u0aec\u0aed\u0aee\u0aef\u0b66\u0b67\u0b68\u0b69\u0b6a\u0b6b\u0b6c\u0b6d\u0b6e\u0b6f\u0be6\u0be7\u0be8\u0be9\u0bea\u0beb\u0bec\u0bed\u0bee\u0bef\u0c66\u0c67\u0c68\u0c69\u0c6a\u0c6b\u0c6c\u0c6d\u0c6e\u0c6f\u0ce6\u0ce7\u0ce8\u0ce9\u0cea\u0ceb\u0cec\u0ced\u0cee\u0cef\u0d66\u0d67\u0d68\u0d69\u0d6a\u0d6b\u0d6c\u0d6d\u0d6e\u0d6f\u0e50\u0e51\u0e52\u0e53\u0e54\u0e55\u0e56\u0e57\u0e58\u0e59\u0ed0\u0ed1\u0ed2\u0ed3\u0ed4\u0ed5\u0ed6\u0ed7\u0ed8\u0ed9\u0f20\u0f21\u0f22\u0f23\u0f24\u0f25\u0f26\u0f27\u0f28\u0f29\u1040\u1041\u1042\u1043\u1044\u1045\u1046\u1047\u1048\u1049\u1090\u1091\u1092\u1093\u1094\u1095\u1096\u1097\u1098\u1099\u17e0\u17e1\u17e2\u17e3\u17e4\u17e5\u17e6\u17e7\u17e8\u17e9\u1810\u1811\u1812\u1813\u1814\u1815\u1816\u1817\u1818\u1819\u1946\u1947\u1948\u1949\u194a\u194b\u194c\u194d\u194e\u194f\u19d0\u19d1\u19d2\u19d3\u19d4\u19d5\u19d6\u19d7\u19d8\u19d9\u19da\u1a80\u1a81\u1a82\u1a83\u1a84\u1a85\u1a86\u1a87\u1a88\u1a89\u1a90\u1a91\u1a92\u1a93\u1a94\u1a95\u1a96\u1a97\u1a98\u1a99\u1b50\u1b51\u1b52\u1b53\u1b54\u1b55\u1b56\u1b57\u1b58\u1b59\u1bb0\u1bb1\u1bb2\u1bb3\u1bb4\u1bb5\u1bb6\u1bb7\u1bb8\u1bb9\u1c40\u1c41\u1c42\u1c43\u1c44\u1c45\u1c46\u1c47\u1c48\u1c49\u1c50\u1c51\u1c52\u1c53\u1c54\u1c55\u1c56\u1c57\u1c58\u1c59\ua620\ua621\ua622\ua623\ua624\ua625\ua626\ua627\ua628\ua629\ua8d0\ua8d1\ua8d2\ua8d3\ua8d4\ua8d5\ua8d6\ua8d7\ua8d8\ua8d9\ua900\ua901\ua902\ua903\ua904\ua905\ua906\ua907\ua908\ua909\ua9d0\ua9d1\ua9d2\ua9d3\ua9d4\ua9d5\ua9d6\ua9d7\ua9d8\ua9d9\uaa50\uaa51\uaa52\uaa53\uaa54\uaa55\uaa56\uaa57\uaa58\uaa59\uabf0\uabf1\uabf2\uabf3\uabf4\uabf5\uabf6\uabf7\uabf8\uabf9\uff10\uff11\uff12\uff13\uff14\uff15\uff16\uff17\uff18\uff19'
+Nd = u'0-9\u0660-\u0669\u06f0-\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19d9\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19'
-Nl = u'\u16ee\u16ef\u16f0\u2160\u2161\u2162\u2163\u2164\u2165\u2166\u2167\u2168\u2169\u216a\u216b\u216c\u216d\u216e\u216f\u2170\u2171\u2172\u2173\u2174\u2175\u2176\u2177\u2178\u2179\u217a\u217b\u217c\u217d\u217e\u217f\u2180\u2181\u2182\u2185\u2186\u2187\u2188\u3007\u3021\u3022\u3023\u3024\u3025\u3026\u3027\u3028\u3029\u3038\u3039\u303a\ua6e6\ua6e7\ua6e8\ua6e9\ua6ea\ua6eb\ua6ec\ua6ed\ua6ee\ua6ef'
+Nl = u'\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef'
-No = u'\xb2\xb3\xb9\xbc\xbd\xbe\u09f4\u09f5\u09f6\u09f7\u09f8\u09f9\u0bf0\u0bf1\u0bf2\u0c78\u0c79\u0c7a\u0c7b\u0c7c\u0c7d\u0c7e\u0d70\u0d71\u0d72\u0d73\u0d74\u0d75\u0f2a\u0f2b\u0f2c\u0f2d\u0f2e\u0f2f\u0f30\u0f31\u0f32\u0f33\u1369\u136a\u136b\u136c\u136d\u136e\u136f\u1370\u1371\u1372\u1373\u1374\u1375\u1376\u1377\u1378\u1379\u137a\u137b\u137c\u17f0\u17f1\u17f2\u17f3\u17f4\u17f5\u17f6\u17f7\u17f8\u17f9\u2070\u2074\u2075\u2076\u2077\u2078\u2079\u2080\u2081\u2082\u2083\u2084\u2085\u2086\u2087\u2088\u2089\u2150\u2151\u2152\u2153\u2154\u2155\u2156\u2157\u2158\u2159\u215a\u215b\u215c\u215d\u215e\u215f\u2189\u2460\u2461\u2462\u2463\u2464\u2465\u2466\u2467\u2468\u2469\u246a\u246b\u246c\u246d\u246e\u246f\u2470\u2471\u2472\u2473\u2474\u2475\u2476\u2477\u2478\u2479\u247a\u247b\u247c\u247d\u247e\u247f\u2480\u2481\u2482\u2483\u2484\u2485\u2486\u2487\u2488\u2489\u248a\u248b\u248c\u248d\u248e\u248f\u2490\u2491\u2492\u2493\u2494\u2495\u2496\u2497\u2498\u2499\u249a\u249b\u24ea\u24eb\u24ec\u24ed\u24ee\u24ef\u24f0\u24f1\u24f2\u24f3\u24f4\u24f5\u24f6\u24f7\u24f8\u24f9\u24fa\u24fb\u24fc\u24fd\u24fe\u24ff\u2776\u2777\u2778\u2779\u277a\u277b\u277c\u277d\u277e\u277f\u2780\u2781\u2782\u2783\u2784\u2785\u2786\u2787\u2788\u2789\u278a\u278b\u278c\u278d\u278e\u278f\u2790\u2791\u2792\u2793\u2cfd\u3192\u3193\u3194\u3195\u3220\u3221\u3222\u3223\u3224\u3225\u3226\u3227\u3228\u3229\u3251\u3252\u3253\u3254\u3255\u3256\u3257\u3258\u3259\u325a\u325b\u325c\u325d\u325e\u325f\u3280\u3281\u3282\u3283\u3284\u3285\u3286\u3287\u3288\u3289\u32b1\u32b2\u32b3\u32b4\u32b5\u32b6\u32b7\u32b8\u32b9\u32ba\u32bb\u32bc\u32bd\u32be\u32bf\ua830\ua831\ua832\ua833\ua834\ua835'
+No = u'\xb2-\xb3\xb9\xbc-\xbe\u09f4-\u09f9\u0b72-\u0b77\u0bf0-\u0bf2\u0c78-\u0c7e\u0d70-\u0d75\u0f2a-\u0f33\u1369-\u137c\u17f0-\u17f9\u19da\u2070\u2074-\u2079\u2080-\u2089\u2150-\u215f\u2189\u2460-\u249b\u24ea-\u24ff\u2776-\u2793\u2cfd\u3192-\u3195\u3220-\u3229\u3248-\u324f\u3251-\u325f\u3280-\u3289\u32b1-\u32bf\ua830-\ua835'
-Pc = u'_\u203f\u2040\u2054\ufe33\ufe34\ufe4d\ufe4e\ufe4f\uff3f'
+Pc = u'_\u203f-\u2040\u2054\ufe33-\ufe34\ufe4d-\ufe4f\uff3f'
-Pd = u'\\-\u058a\u05be\u1400\u1806\u2010\u2011\u2012\u2013\u2014\u2015\u2e17\u2e1a\u301c\u3030\u30a0\ufe31\ufe32\ufe58\ufe63\uff0d'
+Pd = u'\\-\u058a\u05be\u1400\u1806\u2010-\u2015\u2e17\u2e1a\u2e3a-\u2e3b\u301c\u3030\u30a0\ufe31-\ufe32\ufe58\ufe63\uff0d'
-Pe = u')]}\u0f3b\u0f3d\u169c\u2046\u207e\u208e\u232a\u2769\u276b\u276d\u276f\u2771\u2773\u2775\u27c6\u27e7\u27e9\u27eb\u27ed\u27ef\u2984\u2986\u2988\u298a\u298c\u298e\u2990\u2992\u2994\u2996\u2998\u29d9\u29db\u29fd\u2e23\u2e25\u2e27\u2e29\u3009\u300b\u300d\u300f\u3011\u3015\u3017\u3019\u301b\u301e\u301f\ufd3f\ufe18\ufe36\ufe38\ufe3a\ufe3c\ufe3e\ufe40\ufe42\ufe44\ufe48\ufe5a\ufe5c\ufe5e\uff09\uff3d\uff5d\uff60\uff63'
+Pe = u')\\]}\u0f3b\u0f3d\u169c\u2046\u207e\u208e\u2309\u230b\u232a\u2769\u276b\u276d\u276f\u2771\u2773\u2775\u27c6\u27e7\u27e9\u27eb\u27ed\u27ef\u2984\u2986\u2988\u298a\u298c\u298e\u2990\u2992\u2994\u2996\u2998\u29d9\u29db\u29fd\u2e23\u2e25\u2e27\u2e29\u3009\u300b\u300d\u300f\u3011\u3015\u3017\u3019\u301b\u301e-\u301f\ufd3f\ufe18\ufe36\ufe38\ufe3a\ufe3c\ufe3e\ufe40\ufe42\ufe44\ufe48\ufe5a\ufe5c\ufe5e\uff09\uff3d\uff5d\uff60\uff63'
Pf = u'\xbb\u2019\u201d\u203a\u2e03\u2e05\u2e0a\u2e0d\u2e1d\u2e21'
-Pi = u'\xab\u2018\u201b\u201c\u201f\u2039\u2e02\u2e04\u2e09\u2e0c\u2e1c\u2e20'
+Pi = u'\xab\u2018\u201b-\u201c\u201f\u2039\u2e02\u2e04\u2e09\u2e0c\u2e1c\u2e20'
-Po = u'!"#%&\'*,./:;?@\\\\\xa1\xb7\xbf\u037e\u0387\u055a\u055b\u055c\u055d\u055e\u055f\u0589\u05c0\u05c3\u05c6\u05f3\u05f4\u0609\u060a\u060c\u060d\u061b\u061e\u061f\u066a\u066b\u066c\u066d\u06d4\u0700\u0701\u0702\u0703\u0704\u0705\u0706\u0707\u0708\u0709\u070a\u070b\u070c\u070d\u07f7\u07f8\u07f9\u0830\u0831\u0832\u0833\u0834\u0835\u0836\u0837\u0838\u0839\u083a\u083b\u083c\u083d\u083e\u0964\u0965\u0970\u0df4\u0e4f\u0e5a\u0e5b\u0f04\u0f05\u0f06\u0f07\u0f08\u0f09\u0f0a\u0f0b\u0f0c\u0f0d\u0f0e\u0f0f\u0f10\u0f11\u0f12\u0f85\u0fd0\u0fd1\u0fd2\u0fd3\u0fd4\u104a\u104b\u104c\u104d\u104e\u104f\u10fb\u1361\u1362\u1363\u1364\u1365\u1366\u1367\u1368\u166d\u166e\u16eb\u16ec\u16ed\u1735\u1736\u17d4\u17d5\u17d6\u17d8\u17d9\u17da\u1800\u1801\u1802\u1803\u1804\u1805\u1807\u1808\u1809\u180a\u1944\u1945\u19de\u19df\u1a1e\u1a1f\u1aa0\u1aa1\u1aa2\u1aa3\u1aa4\u1aa5\u1aa6\u1aa8\u1aa9\u1aaa\u1aab\u1aac\u1aad\u1b5a\u1b5b\u1b5c\u1b5d\u1b5e\u1b5f\u1b60\u1c3b\u1c3c\u1c3d\u1c3e\u1c3f\u1c7e\u1c7f\u1cd3\u2016\u2017\u2020\u2021\u2022\u2023\u2024\u2025\u2026\u2027\u2030\u2031\u2032\u2033\u2034\u2035\u2036\u2037\u2038\u203b\u203c\u203d\u203e\u2041\u2042\u2043\u2047\u2048\u2049\u204a\u204b\u204c\u204d\u204e\u204f\u2050\u2051\u2053\u2055\u2056\u2057\u2058\u2059\u205a\u205b\u205c\u205d\u205e\u2cf9\u2cfa\u2cfb\u2cfc\u2cfe\u2cff\u2e00\u2e01\u2e06\u2e07\u2e08\u2e0b\u2e0e\u2e0f\u2e10\u2e11\u2e12\u2e13\u2e14\u2e15\u2e16\u2e18\u2e19\u2e1b\u2e1e\u2e1f\u2e2a\u2e2b\u2e2c\u2e2d\u2e2e\u2e30\u2e31\u3001\u3002\u3003\u303d\u30fb\ua4fe\ua4ff\ua60d\ua60e\ua60f\ua673\ua67e\ua6f2\ua6f3\ua6f4\ua6f5\ua6f6\ua6f7\ua874\ua875\ua876\ua877\ua8ce\ua8cf\ua8f8\ua8f9\ua8fa\ua92e\ua92f\ua95f\ua9c1\ua9c2\ua9c3\ua9c4\ua9c5\ua9c6\ua9c7\ua9c8\ua9c9\ua9ca\ua9cb\ua9cc\ua9cd\ua9de\ua9df\uaa5c\uaa5d\uaa5e\uaa5f\uaade\uaadf\uabeb\ufe10\ufe11\ufe12\ufe13\ufe14\ufe15\ufe16\ufe19\ufe30\ufe45\ufe46\ufe49\ufe4a\ufe4b\ufe4c\ufe50\ufe51\ufe52\ufe54\ufe55\ufe56\ufe57\ufe5f\ufe60\ufe61\ufe68\ufe6a\ufe6b\uff01\uff02\uff03\uff05\uff06\uff07\uff0a\uff0c\uff0e\uff0f\uff1a\uff1b\uff1f\uff20\uff3c\uff61\uff64\uff65'
+Po = u"!-#%-'*,.-/:-;?-@\\\\\xa1\xa7\xb6-\xb7\xbf\u037e\u0387\u055a-\u055f\u0589\u05c0\u05c3\u05c6\u05f3-\u05f4\u0609-\u060a\u060c-\u060d\u061b\u061e-\u061f\u066a-\u066d\u06d4\u0700-\u070d\u07f7-\u07f9\u0830-\u083e\u085e\u0964-\u0965\u0970\u0af0\u0df4\u0e4f\u0e5a-\u0e5b\u0f04-\u0f12\u0f14\u0f85\u0fd0-\u0fd4\u0fd9-\u0fda\u104a-\u104f\u10fb\u1360-\u1368\u166d-\u166e\u16eb-\u16ed\u1735-\u1736\u17d4-\u17d6\u17d8-\u17da\u1800-\u1805\u1807-\u180a\u1944-\u1945\u1a1e-\u1a1f\u1aa0-\u1aa6\u1aa8-\u1aad\u1b5a-\u1b60\u1bfc-\u1bff\u1c3b-\u1c3f\u1c7e-\u1c7f\u1cc0-\u1cc7\u1cd3\u2016-\u2017\u2020-\u2027\u2030-\u2038\u203b-\u203e\u2041-\u2043\u2047-\u2051\u2053\u2055-\u205e\u2cf9-\u2cfc\u2cfe-\u2cff\u2d70\u2e00-\u2e01\u2e06-\u2e08\u2e0b\u2e0e-\u2e16\u2e18-\u2e19\u2e1b\u2e1e-\u2e1f\u2e2a-\u2e2e\u2e30-\u2e39\u3001-\u3003\u303d\u30fb\ua4fe-\ua4ff\ua60d-\ua60f\ua673\ua67e\ua6f2-\ua6f7\ua874-\ua877\ua8ce-\ua8cf\ua8f8-\ua8fa\ua92e-\ua92f\ua95f\ua9c1-\ua9cd\ua9de-\ua9df\uaa5c-\uaa5f\uaade-\uaadf\uaaf0-\uaaf1\uabeb\ufe10-\ufe16\ufe19\ufe30\ufe45-\ufe46\ufe49-\ufe4c\ufe50-\ufe52\ufe54-\ufe57\ufe5f-\ufe61\ufe68\ufe6a-\ufe6b\uff01-\uff03\uff05-\uff07\uff0a\uff0c\uff0e-\uff0f\uff1a-\uff1b\uff1f-\uff20\uff3c\uff61\uff64-\uff65"
-Ps = u'([{\u0f3a\u0f3c\u169b\u201a\u201e\u2045\u207d\u208d\u2329\u2768\u276a\u276c\u276e\u2770\u2772\u2774\u27c5\u27e6\u27e8\u27ea\u27ec\u27ee\u2983\u2985\u2987\u2989\u298b\u298d\u298f\u2991\u2993\u2995\u2997\u29d8\u29da\u29fc\u2e22\u2e24\u2e26\u2e28\u3008\u300a\u300c\u300e\u3010\u3014\u3016\u3018\u301a\u301d\ufd3e\ufe17\ufe35\ufe37\ufe39\ufe3b\ufe3d\ufe3f\ufe41\ufe43\ufe47\ufe59\ufe5b\ufe5d\uff08\uff3b\uff5b\uff5f\uff62'
+Ps = u'(\\[{\u0f3a\u0f3c\u169b\u201a\u201e\u2045\u207d\u208d\u2308\u230a\u2329\u2768\u276a\u276c\u276e\u2770\u2772\u2774\u27c5\u27e6\u27e8\u27ea\u27ec\u27ee\u2983\u2985\u2987\u2989\u298b\u298d\u298f\u2991\u2993\u2995\u2997\u29d8\u29da\u29fc\u2e22\u2e24\u2e26\u2e28\u3008\u300a\u300c\u300e\u3010\u3014\u3016\u3018\u301a\u301d\ufd3e\ufe17\ufe35\ufe37\ufe39\ufe3b\ufe3d\ufe3f\ufe41\ufe43\ufe47\ufe59\ufe5b\ufe5d\uff08\uff3b\uff5b\uff5f\uff62'
-Sc = u'$\xa2\xa3\xa4\xa5\u060b\u09f2\u09f3\u09fb\u0af1\u0bf9\u0e3f\u17db\u20a0\u20a1\u20a2\u20a3\u20a4\u20a5\u20a6\u20a7\u20a8\u20a9\u20aa\u20ab\u20ac\u20ad\u20ae\u20af\u20b0\u20b1\u20b2\u20b3\u20b4\u20b5\u20b6\u20b7\u20b8\ua838\ufdfc\ufe69\uff04\uffe0\uffe1\uffe5\uffe6'
+Sc = u'$\xa2-\xa5\u058f\u060b\u09f2-\u09f3\u09fb\u0af1\u0bf9\u0e3f\u17db\u20a0-\u20ba\ua838\ufdfc\ufe69\uff04\uffe0-\uffe1\uffe5-\uffe6'
-Sk = u'^`\xa8\xaf\xb4\xb8\u02c2\u02c3\u02c4\u02c5\u02d2\u02d3\u02d4\u02d5\u02d6\u02d7\u02d8\u02d9\u02da\u02db\u02dc\u02dd\u02de\u02df\u02e5\u02e6\u02e7\u02e8\u02e9\u02ea\u02eb\u02ed\u02ef\u02f0\u02f1\u02f2\u02f3\u02f4\u02f5\u02f6\u02f7\u02f8\u02f9\u02fa\u02fb\u02fc\u02fd\u02fe\u02ff\u0375\u0384\u0385\u1fbd\u1fbf\u1fc0\u1fc1\u1fcd\u1fce\u1fcf\u1fdd\u1fde\u1fdf\u1fed\u1fee\u1fef\u1ffd\u1ffe\u309b\u309c\ua700\ua701\ua702\ua703\ua704\ua705\ua706\ua707\ua708\ua709\ua70a\ua70b\ua70c\ua70d\ua70e\ua70f\ua710\ua711\ua712\ua713\ua714\ua715\ua716\ua720\ua721\ua789\ua78a\uff3e\uff40\uffe3'
+Sk = u'\\^`\xa8\xaf\xb4\xb8\u02c2-\u02c5\u02d2-\u02df\u02e5-\u02eb\u02ed\u02ef-\u02ff\u0375\u0384-\u0385\u1fbd\u1fbf-\u1fc1\u1fcd-\u1fcf\u1fdd-\u1fdf\u1fed-\u1fef\u1ffd-\u1ffe\u309b-\u309c\ua700-\ua716\ua720-\ua721\ua789-\ua78a\ufbb2-\ufbc1\uff3e\uff40\uffe3'
-Sm = u'+<=>|~\xac\xb1\xd7\xf7\u03f6\u0606\u0607\u0608\u2044\u2052\u207a\u207b\u207c\u208a\u208b\u208c\u2140\u2141\u2142\u2143\u2144\u214b\u2190\u2191\u2192\u2193\u2194\u219a\u219b\u21a0\u21a3\u21a6\u21ae\u21ce\u21cf\u21d2\u21d4\u21f4\u21f5\u21f6\u21f7\u21f8\u21f9\u21fa\u21fb\u21fc\u21fd\u21fe\u21ff\u2200\u2201\u2202\u2203\u2204\u2205\u2206\u2207\u2208\u2209\u220a\u220b\u220c\u220d\u220e\u220f\u2210\u2211\u2212\u2213\u2214\u2215\u2216\u2217\u2218\u2219\u221a\u221b\u221c\u221d\u221e\u221f\u2220\u2221\u2222\u2223\u2224\u2225\u2226\u2227\u2228\u2229\u222a\u222b\u222c\u222d\u222e\u222f\u2230\u2231\u2232\u2233\u2234\u2235\u2236\u2237\u2238\u2239\u223a\u223b\u223c\u223d\u223e\u223f\u2240\u2241\u2242\u2243\u2244\u2245\u2246\u2247\u2248\u2249\u224a\u224b\u224c\u224d\u224e\u224f\u2250\u2251\u2252\u2253\u2254\u2255\u2256\u2257\u2258\u2259\u225a\u225b\u225c\u225d\u225e\u225f\u2260\u2261\u2262\u2263\u2264\u2265\u2266\u2267\u2268\u2269\u226a\u226b\u226c\u226d\u226e\u226f\u2270\u2271\u2272\u2273\u2274\u2275\u2276\u2277\u2278\u2279\u227a\u227b\u227c\u227d\u227e\u227f\u2280\u2281\u2282\u2283\u2284\u2285\u2286\u2287\u2288\u2289\u228a\u228b\u228c\u228d\u228e\u228f\u2290\u2291\u2292\u2293\u2294\u2295\u2296\u2297\u2298\u2299\u229a\u229b\u229c\u229d\u229e\u229f\u22a0\u22a1\u22a2\u22a3\u22a4\u22a5\u22a6\u22a7\u22a8\u22a9\u22aa\u22ab\u22ac\u22ad\u22ae\u22af\u22b0\u22b1\u22b2\u22b3\u22b4\u22b5\u22b6\u22b7\u22b8\u22b9\u22ba\u22bb\u22bc\u22bd\u22be\u22bf\u22c0\u22c1\u22c2\u22c3\u22c4\u22c5\u22c6\u22c7\u22c8\u22c9\u22ca\u22cb\u22cc\u22cd\u22ce\u22cf\u22d0\u22d1\u22d2\u22d3\u22d4\u22d5\u22d6\u22d7\u22d8\u22d9\u22da\u22db\u22dc\u22dd\u22de\u22df\u22e0\u22e1\u22e2\u22e3\u22e4\u22e5\u22e6\u22e7\u22e8\u22e9\u22ea\u22eb\u22ec\u22ed\u22ee\u22ef\u22f0\u22f1\u22f2\u22f3\u22f4\u22f5\u22f6\u22f7\u22f8\u22f9\u22fa\u22fb\u22fc\u22fd\u22fe\u22ff\u2308\u2309\u230a\u230b\u2320\u2321\u237c\u239b\u239c\u239d\u239e\u239f\u23a0\u23a1\u23a2\u23a3\u23a4\u23a5\u23a6\u23a7\u23a8\u23a9\u23aa\u23ab\u23ac\u23ad\u23ae\u23af\u23b0\u23b1\u23b2\u23b3\u23dc\u23dd\u23de\u23df\u23e0\u23e1\u25b7\u25c1\u25f8\u25f9\u25fa\u25fb\u25fc\u25fd\u25fe\u25ff\u266f\u27c0\u27c1\u27c2\u27c3\u27c4\u27c7\u27c8\u27c9\u27ca\u27cc\u27d0\u27d1\u27d2\u27d3\u27d4\u27d5\u27d6\u27d7\u27d8\u27d9\u27da\u27db\u27dc\u27dd\u27de\u27df\u27e0\u27e1\u27e2\u27e3\u27e4\u27e5\u27f0\u27f1\u27f2\u27f3\u27f4\u27f5\u27f6\u27f7\u27f8\u27f9\u27fa\u27fb\u27fc\u27fd\u27fe\u27ff\u2900\u2901\u2902\u2903\u2904\u2905\u2906\u2907\u2908\u2909\u290a\u290b\u290c\u290d\u290e\u290f\u2910\u2911\u2912\u2913\u2914\u2915\u2916\u2917\u2918\u2919\u291a\u291b\u291c\u291d\u291e\u291f\u2920\u2921\u2922\u2923\u2924\u2925\u2926\u2927\u2928\u2929\u292a\u292b\u292c\u292d\u292e\u292f\u2930\u2931\u2932\u2933\u2934\u2935\u2936\u2937\u2938\u2939\u293a\u293b\u293c\u293d\u293e\u293f\u2940\u2941\u2942\u2943\u2944\u2945\u2946\u2947\u2948\u2949\u294a\u294b\u294c\u294d\u294e\u294f\u2950\u2951\u2952\u2953\u2954\u2955\u2956\u2957\u2958\u2959\u295a\u295b\u295c\u295d\u295e\u295f\u2960\u2961\u2962\u2963\u2964\u2965\u2966\u2967\u2968\u2969\u296a\u296b\u296c\u296d\u296e\u296f\u2970\u2971\u2972\u2973\u2974\u2975\u2976\u2977\u2978\u2979\u297a\u297b\u297c\u297d\u297e\u297f\u2980\u2981\u2982\u2999\u299a\u299b\u299c\u299d\u299e\u299f\u29a0\u29a1\u29a2\u29a3\u29a4\u29a5\u29a6\u29a7\u29a8\u29a9\u29aa\u29ab\u29ac\u29ad\u29ae\u29af\u29b0\u29b1\u29b2\u29b3\u29b4\u29b5\u29b6\u29b7\u29b8\u29b9\u29ba\u29bb\u29bc\u29bd\u29be\u29bf\u29c0\u29c1\u29c2\u29c3\u29c4\u29c5\u29c6\u29c7\u29c8\u29c9\u29ca\u29cb\u29cc\u29cd\u29ce\u29cf\u29d0\u29d1\u29d2\u2
9d3\u29d4\u29d5\u29d6\u29d7\u29dc\u29dd\u29de\u29df\u29e0\u29e1\u29e2\u29e3\u29e4\u29e5\u29e6\u29e7\u29e8\u29e9\u29ea\u29eb\u29ec\u29ed\u29ee\u29ef\u29f0\u29f1\u29f2\u29f3\u29f4\u29f5\u29f6\u29f7\u29f8\u29f9\u29fa\u29fb\u29fe\u29ff\u2a00\u2a01\u2a02\u2a03\u2a04\u2a05\u2a06\u2a07\u2a08\u2a09\u2a0a\u2a0b\u2a0c\u2a0d\u2a0e\u2a0f\u2a10\u2a11\u2a12\u2a13\u2a14\u2a15\u2a16\u2a17\u2a18\u2a19\u2a1a\u2a1b\u2a1c\u2a1d\u2a1e\u2a1f\u2a20\u2a21\u2a22\u2a23\u2a24\u2a25\u2a26\u2a27\u2a28\u2a29\u2a2a\u2a2b\u2a2c\u2a2d\u2a2e\u2a2f\u2a30\u2a31\u2a32\u2a33\u2a34\u2a35\u2a36\u2a37\u2a38\u2a39\u2a3a\u2a3b\u2a3c\u2a3d\u2a3e\u2a3f\u2a40\u2a41\u2a42\u2a43\u2a44\u2a45\u2a46\u2a47\u2a48\u2a49\u2a4a\u2a4b\u2a4c\u2a4d\u2a4e\u2a4f\u2a50\u2a51\u2a52\u2a53\u2a54\u2a55\u2a56\u2a57\u2a58\u2a59\u2a5a\u2a5b\u2a5c\u2a5d\u2a5e\u2a5f\u2a60\u2a61\u2a62\u2a63\u2a64\u2a65\u2a66\u2a67\u2a68\u2a69\u2a6a\u2a6b\u2a6c\u2a6d\u2a6e\u2a6f\u2a70\u2a71\u2a72\u2a73\u2a74\u2a75\u2a76\u2a77\u2a78\u2a79\u2a7a\u2a7b\u2a7c\u2a7d\u2a7e\u2a7f\u2a80\u2a81\u2a82\u2a83\u2a84\u2a85\u2a86\u2a87\u2a88\u2a89\u2a8a\u2a8b\u2a8c\u2a8d\u2a8e\u2a8f\u2a90\u2a91\u2a92\u2a93\u2a94\u2a95\u2a96\u2a97\u2a98\u2a99\u2a9a\u2a9b\u2a9c\u2a9d\u2a9e\u2a9f\u2aa0\u2aa1\u2aa2\u2aa3\u2aa4\u2aa5\u2aa6\u2aa7\u2aa8\u2aa9\u2aaa\u2aab\u2aac\u2aad\u2aae\u2aaf\u2ab0\u2ab1\u2ab2\u2ab3\u2ab4\u2ab5\u2ab6\u2ab7\u2ab8\u2ab9\u2aba\u2abb\u2abc\u2abd\u2abe\u2abf\u2ac0\u2ac1\u2ac2\u2ac3\u2ac4\u2ac5\u2ac6\u2ac7\u2ac8\u2ac9\u2aca\u2acb\u2acc\u2acd\u2ace\u2acf\u2ad0\u2ad1\u2ad2\u2ad3\u2ad4\u2ad5\u2ad6\u2ad7\u2ad8\u2ad9\u2ada\u2adb\u2adc\u2add\u2ade\u2adf\u2ae0\u2ae1\u2ae2\u2ae3\u2ae4\u2ae5\u2ae6\u2ae7\u2ae8\u2ae9\u2aea\u2aeb\u2aec\u2aed\u2aee\u2aef\u2af0\u2af1\u2af2\u2af3\u2af4\u2af5\u2af6\u2af7\u2af8\u2af9\u2afa\u2afb\u2afc\u2afd\u2afe\u2aff\u2b30\u2b31\u2b32\u2b33\u2b34\u2b35\u2b36\u2b37\u2b38\u2b39\u2b3a\u2b3b\u2b3c\u2b3d\u2b3e\u2b3f\u2b40\u2b41\u2b42\u2b43\u2b44\u2b47\u2b48\u2b49\u2b4a\u2b4b\u2b4c\ufb29\ufe62\ufe64\ufe65\ufe66\uff0b\uff1c\uff1d\uff1e\uff5c\uff5e\uffe2\uffe9\uffea\uffeb\uffec'
+Sm = u'+<->|~\xac\xb1\xd7\xf7\u03f6\u0606-\u0608\u2044\u2052\u207a-\u207c\u208a-\u208c\u2118\u2140-\u2144\u214b\u2190-\u2194\u219a-\u219b\u21a0\u21a3\u21a6\u21ae\u21ce-\u21cf\u21d2\u21d4\u21f4-\u22ff\u2320-\u2321\u237c\u239b-\u23b3\u23dc-\u23e1\u25b7\u25c1\u25f8-\u25ff\u266f\u27c0-\u27c4\u27c7-\u27e5\u27f0-\u27ff\u2900-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2aff\u2b30-\u2b44\u2b47-\u2b4c\ufb29\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe9-\uffec'
-So = u'\xa6\xa7\xa9\xae\xb0\xb6\u0482\u060e\u060f\u06e9\u06fd\u06fe\u07f6\u09fa\u0b70\u0bf3\u0bf4\u0bf5\u0bf6\u0bf7\u0bf8\u0bfa\u0c7f\u0cf1\u0cf2\u0d79\u0f01\u0f02\u0f03\u0f13\u0f14\u0f15\u0f16\u0f17\u0f1a\u0f1b\u0f1c\u0f1d\u0f1e\u0f1f\u0f34\u0f36\u0f38\u0fbe\u0fbf\u0fc0\u0fc1\u0fc2\u0fc3\u0fc4\u0fc5\u0fc7\u0fc8\u0fc9\u0fca\u0fcb\u0fcc\u0fce\u0fcf\u0fd5\u0fd6\u0fd7\u0fd8\u109e\u109f\u1360\u1390\u1391\u1392\u1393\u1394\u1395\u1396\u1397\u1398\u1399\u1940\u19e0\u19e1\u19e2\u19e3\u19e4\u19e5\u19e6\u19e7\u19e8\u19e9\u19ea\u19eb\u19ec\u19ed\u19ee\u19ef\u19f0\u19f1\u19f2\u19f3\u19f4\u19f5\u19f6\u19f7\u19f8\u19f9\u19fa\u19fb\u19fc\u19fd\u19fe\u19ff\u1b61\u1b62\u1b63\u1b64\u1b65\u1b66\u1b67\u1b68\u1b69\u1b6a\u1b74\u1b75\u1b76\u1b77\u1b78\u1b79\u1b7a\u1b7b\u1b7c\u2100\u2101\u2103\u2104\u2105\u2106\u2108\u2109\u2114\u2116\u2117\u2118\u211e\u211f\u2120\u2121\u2122\u2123\u2125\u2127\u2129\u212e\u213a\u213b\u214a\u214c\u214d\u214f\u2195\u2196\u2197\u2198\u2199\u219c\u219d\u219e\u219f\u21a1\u21a2\u21a4\u21a5\u21a7\u21a8\u21a9\u21aa\u21ab\u21ac\u21ad\u21af\u21b0\u21b1\u21b2\u21b3\u21b4\u21b5\u21b6\u21b7\u21b8\u21b9\u21ba\u21bb\u21bc\u21bd\u21be\u21bf\u21c0\u21c1\u21c2\u21c3\u21c4\u21c5\u21c6\u21c7\u21c8\u21c9\u21ca\u21cb\u21cc\u21cd\u21d0\u21d1\u21d3\u21d5\u21d6\u21d7\u21d8\u21d9\u21da\u21db\u21dc\u21dd\u21de\u21df\u21e0\u21e1\u21e2\u21e3\u21e4\u21e5\u21e6\u21e7\u21e8\u21e9\u21ea\u21eb\u21ec\u21ed\u21ee\u21ef\u21f0\u21f1\u21f2\u21f3\u2300\u2301\u2302\u2303\u2304\u2305\u2306\u2307\u230c\u230d\u230e\u230f\u2310\u2311\u2312\u2313\u2314\u2315\u2316\u2317\u2318\u2319\u231a\u231b\u231c\u231d\u231e\u231f\u2322\u2323\u2324\u2325\u2326\u2327\u2328\u232b\u232c\u232d\u232e\u232f\u2330\u2331\u2332\u2333\u2334\u2335\u2336\u2337\u2338\u2339\u233a\u233b\u233c\u233d\u233e\u233f\u2340\u2341\u2342\u2343\u2344\u2345\u2346\u2347\u2348\u2349\u234a\u234b\u234c\u234d\u234e\u234f\u2350\u2351\u2352\u2353\u2354\u2355\u2356\u2357\u2358\u2359\u235a\u235b\u235c\u235d\u235e\u235f\u2360\u2361\u2362\u2363\u2364\u2365\u2366\u2367\u2368\u2369\u236a\u236b\u236c\u236d\u236e\u236f\u2370\u2371\u2372\u2373\u2374\u2375\u2376\u2377\u2378\u2379\u237a\u237b\u237d\u237e\u237f\u2380\u2381\u2382\u2383\u2384\u2385\u2386\u2387\u2388\u2389\u238a\u238b\u238c\u238d\u238e\u238f\u2390\u2391\u2392\u2393\u2394\u2395\u2396\u2397\u2398\u2399\u239a\u23b4\u23b5\u23b6\u23b7\u23b8\u23b9\u23ba\u23bb\u23bc\u23bd\u23be\u23bf\u23c0\u23c1\u23c2\u23c3\u23c4\u23c5\u23c6\u23c7\u23c8\u23c9\u23ca\u23cb\u23cc\u23cd\u23ce\u23cf\u23d0\u23d1\u23d2\u23d3\u23d4\u23d5\u23d6\u23d7\u23d8\u23d9\u23da\u23db\u23e2\u23e3\u23e4\u23e5\u23e6\u23e7\u23e8\u2400\u2401\u2402\u2403\u2404\u2405\u2406\u2407\u2408\u2409\u240a\u240b\u240c\u240d\u240e\u240f\u2410\u2411\u2412\u2413\u2414\u2415\u2416\u2417\u2418\u2419\u241a\u241b\u241c\u241d\u241e\u241f\u2420\u2421\u2422\u2423\u2424\u2425\u2426\u2440\u2441\u2442\u2443\u2444\u2445\u2446\u2447\u2448\u2449\u244a\u249c\u249d\u249e\u249f\u24a0\u24a1\u24a2\u24a3\u24a4\u24a5\u24a6\u24a7\u24a8\u24a9\u24aa\u24ab\u24ac\u24ad\u24ae\u24af\u24b0\u24b1\u24b2\u24b3\u24b4\u24b5\u24b6\u24b7\u24b8\u24b9\u24ba\u24bb\u24bc\u24bd\u24be\u24bf\u24c0\u24c1\u24c2\u24c3\u24c4\u24c5\u24c6\u24c7\u24c8\u24c9\u24ca\u24cb\u24cc\u24cd\u24ce\u24cf\u24d0\u24d1\u24d2\u24d3\u24d4\u24d5\u24d6\u24d7\u24d8\u24d9\u24da\u24db\u24dc\u24dd\u24de\u24df\u24e0\u24e1\u24e2\u24e3\u24e4\u24e5\u24e6\u24e7\u24e8\u24e9\u2500\u2501\u2502\u2503\u2504\u2505\u2506\u2507\u2508\u2509\u250a\u250b\u250c\u250d\u250e\u250f\u2510\u2511\u2512\u2513\u2514\u2515\u2516\u2517\u2518\u2519\u251a\u251b\u251c\u251d\u251e\
u251f\u2520\u2521\u2522\u2523\u2524\u2525\u2526\u2527\u2528\u2529\u252a\u252b\u252c\u252d\u252e\u252f\u2530\u2531\u2532\u2533\u2534\u2535\u2536\u2537\u2538\u2539\u253a\u253b\u253c\u253d\u253e\u253f\u2540\u2541\u2542\u2543\u2544\u2545\u2546\u2547\u2548\u2549\u254a\u254b\u254c\u254d\u254e\u254f\u2550\u2551\u2552\u2553\u2554\u2555\u2556\u2557\u2558\u2559\u255a\u255b\u255c\u255d\u255e\u255f\u2560\u2561\u2562\u2563\u2564\u2565\u2566\u2567\u2568\u2569\u256a\u256b\u256c\u256d\u256e\u256f\u2570\u2571\u2572\u2573\u2574\u2575\u2576\u2577\u2578\u2579\u257a\u257b\u257c\u257d\u257e\u257f\u2580\u2581\u2582\u2583\u2584\u2585\u2586\u2587\u2588\u2589\u258a\u258b\u258c\u258d\u258e\u258f\u2590\u2591\u2592\u2593\u2594\u2595\u2596\u2597\u2598\u2599\u259a\u259b\u259c\u259d\u259e\u259f\u25a0\u25a1\u25a2\u25a3\u25a4\u25a5\u25a6\u25a7\u25a8\u25a9\u25aa\u25ab\u25ac\u25ad\u25ae\u25af\u25b0\u25b1\u25b2\u25b3\u25b4\u25b5\u25b6\u25b8\u25b9\u25ba\u25bb\u25bc\u25bd\u25be\u25bf\u25c0\u25c2\u25c3\u25c4\u25c5\u25c6\u25c7\u25c8\u25c9\u25ca\u25cb\u25cc\u25cd\u25ce\u25cf\u25d0\u25d1\u25d2\u25d3\u25d4\u25d5\u25d6\u25d7\u25d8\u25d9\u25da\u25db\u25dc\u25dd\u25de\u25df\u25e0\u25e1\u25e2\u25e3\u25e4\u25e5\u25e6\u25e7\u25e8\u25e9\u25ea\u25eb\u25ec\u25ed\u25ee\u25ef\u25f0\u25f1\u25f2\u25f3\u25f4\u25f5\u25f6\u25f7\u2600\u2601\u2602\u2603\u2604\u2605\u2606\u2607\u2608\u2609\u260a\u260b\u260c\u260d\u260e\u260f\u2610\u2611\u2612\u2613\u2614\u2615\u2616\u2617\u2618\u2619\u261a\u261b\u261c\u261d\u261e\u261f\u2620\u2621\u2622\u2623\u2624\u2625\u2626\u2627\u2628\u2629\u262a\u262b\u262c\u262d\u262e\u262f\u2630\u2631\u2632\u2633\u2634\u2635\u2636\u2637\u2638\u2639\u263a\u263b\u263c\u263d\u263e\u263f\u2640\u2641\u2642\u2643\u2644\u2645\u2646\u2647\u2648\u2649\u264a\u264b\u264c\u264d\u264e\u264f\u2650\u2651\u2652\u2653\u2654\u2655\u2656\u2657\u2658\u2659\u265a\u265b\u265c\u265d\u265e\u265f\u2660\u2661\u2662\u2663\u2664\u2665\u2666\u2667\u2668\u2669\u266a\u266b\u266c\u266d\u266e\u2670\u2671\u2672\u2673\u2674\u2675\u2676\u2677\u2678\u2679\u267a\u267b\u267c\u267d\u267e\u267f\u2680\u2681\u2682\u2683\u2684\u2685\u2686\u2687\u2688\u2689\u268a\u268b\u268c\u268d\u268e\u268f\u2690\u2691\u2692\u2693\u2694\u2695\u2696\u2697\u2698\u2699\u269a\u269b\u269c\u269d\u269e\u269f\u26a0\u26a1\u26a2\u26a3\u26a4\u26a5\u26a6\u26a7\u26a8\u26a9\u26aa\u26ab\u26ac\u26ad\u26ae\u26af\u26b0\u26b1\u26b2\u26b3\u26b4\u26b5\u26b6\u26b7\u26b8\u26b9\u26ba\u26bb\u26bc\u26bd\u26be\u26bf\u26c0\u26c1\u26c2\u26c3\u26c4\u26c5\u26c6\u26c7\u26c8\u26c9\u26ca\u26cb\u26cc\u26cd\u26cf\u26d0\u26d1\u26d2\u26d3\u26d4\u26d5\u26d6\u26d7\u26d8\u26d9\u26da\u26db\u26dc\u26dd\u26de\u26df\u26e0\u26e1\u26e3\u26e8\u26e9\u26ea\u26eb\u26ec\u26ed\u26ee\u26ef\u26f0\u26f1\u26f2\u26f3\u26f4\u26f5\u26f6\u26f7\u26f8\u26f9\u26fa\u26fb\u26fc\u26fd\u26fe\u26ff\u2701\u2702\u2703\u2704\u2706\u2707\u2708\u2709\u270c\u270d\u270e\u270f\u2710\u2711\u2712\u2713\u2714\u2715\u2716\u2717\u2718\u2719\u271a\u271b\u271c\u271d\u271e\u271f\u2720\u2721\u2722\u2723\u2724\u2725\u2726\u2727\u2729\u272a\u272b\u272c\u272d\u272e\u272f\u2730\u2731\u2732\u2733\u2734\u2735\u2736\u2737\u2738\u2739\u273a\u273b\u273c\u273d\u273e\u273f\u2740\u2741\u2742\u2743\u2744\u2745\u2746\u2747\u2748\u2749\u274a\u274b\u274d\u274f\u2750\u2751\u2752\u2756\u2757\u2758\u2759\u275a\u275b\u275c\u275d\u275e\u2761\u2762\u2763\u2764\u2765\u2766\u2767\u2794\u2798\u2799\u279a\u279b\u279c\u279d\u279e\u279f\u27a0\u27a1\u27a2\u27a3\u27a4\u27a5\u27a6\u27a7\u27a8\u27a9\u27aa\u27ab\u27ac\u27ad\u27ae\u27af\u27b1\u27b2\u27b3\u27b4\u27b5\u27b6\u27b7\u27b8\u27b9\u27ba\u27bb\u27
bc\u27bd\u27be\u2800\u2801\u2802\u2803\u2804\u2805\u2806\u2807\u2808\u2809\u280a\u280b\u280c\u280d\u280e\u280f\u2810\u2811\u2812\u2813\u2814\u2815\u2816\u2817\u2818\u2819\u281a\u281b\u281c\u281d\u281e\u281f\u2820\u2821\u2822\u2823\u2824\u2825\u2826\u2827\u2828\u2829\u282a\u282b\u282c\u282d\u282e\u282f\u2830\u2831\u2832\u2833\u2834\u2835\u2836\u2837\u2838\u2839\u283a\u283b\u283c\u283d\u283e\u283f\u2840\u2841\u2842\u2843\u2844\u2845\u2846\u2847\u2848\u2849\u284a\u284b\u284c\u284d\u284e\u284f\u2850\u2851\u2852\u2853\u2854\u2855\u2856\u2857\u2858\u2859\u285a\u285b\u285c\u285d\u285e\u285f\u2860\u2861\u2862\u2863\u2864\u2865\u2866\u2867\u2868\u2869\u286a\u286b\u286c\u286d\u286e\u286f\u2870\u2871\u2872\u2873\u2874\u2875\u2876\u2877\u2878\u2879\u287a\u287b\u287c\u287d\u287e\u287f\u2880\u2881\u2882\u2883\u2884\u2885\u2886\u2887\u2888\u2889\u288a\u288b\u288c\u288d\u288e\u288f\u2890\u2891\u2892\u2893\u2894\u2895\u2896\u2897\u2898\u2899\u289a\u289b\u289c\u289d\u289e\u289f\u28a0\u28a1\u28a2\u28a3\u28a4\u28a5\u28a6\u28a7\u28a8\u28a9\u28aa\u28ab\u28ac\u28ad\u28ae\u28af\u28b0\u28b1\u28b2\u28b3\u28b4\u28b5\u28b6\u28b7\u28b8\u28b9\u28ba\u28bb\u28bc\u28bd\u28be\u28bf\u28c0\u28c1\u28c2\u28c3\u28c4\u28c5\u28c6\u28c7\u28c8\u28c9\u28ca\u28cb\u28cc\u28cd\u28ce\u28cf\u28d0\u28d1\u28d2\u28d3\u28d4\u28d5\u28d6\u28d7\u28d8\u28d9\u28da\u28db\u28dc\u28dd\u28de\u28df\u28e0\u28e1\u28e2\u28e3\u28e4\u28e5\u28e6\u28e7\u28e8\u28e9\u28ea\u28eb\u28ec\u28ed\u28ee\u28ef\u28f0\u28f1\u28f2\u28f3\u28f4\u28f5\u28f6\u28f7\u28f8\u28f9\u28fa\u28fb\u28fc\u28fd\u28fe\u28ff\u2b00\u2b01\u2b02\u2b03\u2b04\u2b05\u2b06\u2b07\u2b08\u2b09\u2b0a\u2b0b\u2b0c\u2b0d\u2b0e\u2b0f\u2b10\u2b11\u2b12\u2b13\u2b14\u2b15\u2b16\u2b17\u2b18\u2b19\u2b1a\u2b1b\u2b1c\u2b1d\u2b1e\u2b1f\u2b20\u2b21\u2b22\u2b23\u2b24\u2b25\u2b26\u2b27\u2b28\u2b29\u2b2a\u2b2b\u2b2c\u2b2d\u2b2e\u2b2f\u2b45\u2b46\u2b50\u2b51\u2b52\u2b53\u2b54\u2b55\u2b56\u2b57\u2b58\u2b59\u2ce5\u2ce6\u2ce7\u2ce8\u2ce9\u2cea\u2e80\u2e81\u2e82\u2e83\u2e84\u2e85\u2e86\u2e87\u2e88\u2e89\u2e8a\u2e8b\u2e8c\u2e8d\u2e8e\u2e8f\u2e90\u2e91\u2e92\u2e93\u2e94\u2e95\u2e96\u2e97\u2e98\u2e99\u2e9b\u2e9c\u2e9d\u2e9e\u2e9f\u2ea0\u2ea1\u2ea2\u2ea3\u2ea4\u2ea5\u2ea6\u2ea7\u2ea8\u2ea9\u2eaa\u2eab\u2eac\u2ead\u2eae\u2eaf\u2eb0\u2eb1\u2eb2\u2eb3\u2eb4\u2eb5\u2eb6\u2eb7\u2eb8\u2eb9\u2eba\u2ebb\u2ebc\u2ebd\u2ebe\u2ebf\u2ec0\u2ec1\u2ec2\u2ec3\u2ec4\u2ec5\u2ec6\u2ec7\u2ec8\u2ec9\u2eca\u2ecb\u2ecc\u2ecd\u2ece\u2ecf\u2ed0\u2ed1\u2ed2\u2ed3\u2ed4\u2ed5\u2ed6\u2ed7\u2ed8\u2ed9\u2eda\u2edb\u2edc\u2edd\u2ede\u2edf\u2ee0\u2ee1\u2ee2\u2ee3\u2ee4\u2ee5\u2ee6\u2ee7\u2ee8\u2ee9\u2eea\u2eeb\u2eec\u2eed\u2eee\u2eef\u2ef0\u2ef1\u2ef2\u2ef3\u2f00\u2f01\u2f02\u2f03\u2f04\u2f05\u2f06\u2f07\u2f08\u2f09\u2f0a\u2f0b\u2f0c\u2f0d\u2f0e\u2f0f\u2f10\u2f11\u2f12\u2f13\u2f14\u2f15\u2f16\u2f17\u2f18\u2f19\u2f1a\u2f1b\u2f1c\u2f1d\u2f1e\u2f1f\u2f20\u2f21\u2f22\u2f23\u2f24\u2f25\u2f26\u2f27\u2f28\u2f29\u2f2a\u2f2b\u2f2c\u2f2d\u2f2e\u2f2f\u2f30\u2f31\u2f32\u2f33\u2f34\u2f35\u2f36\u2f37\u2f38\u2f39\u2f3a\u2f3b\u2f3c\u2f3d\u2f3e\u2f3f\u2f40\u2f41\u2f42\u2f43\u2f44\u2f45\u2f46\u2f47\u2f48\u2f49\u2f4a\u2f4b\u2f4c\u2f4d\u2f4e\u2f4f\u2f50\u2f51\u2f52\u2f53\u2f54\u2f55\u2f56\u2f57\u2f58\u2f59\u2f5a\u2f5b\u2f5c\u2f5d\u2f5e\u2f5f\u2f60\u2f61\u2f62\u2f63\u2f64\u2f65\u2f66\u2f67\u2f68\u2f69\u2f6a\u2f6b\u2f6c\u2f6d\u2f6e\u2f6f\u2f70\u2f71\u2f72\u2f73\u2f74\u2f75\u2f76\u2f77\u2f78\u2f79\u2f7a\u2f7b\u2f7c\u2f7d\u2f7e\u2f7f\u2f80\u2f81\u2f82\u2f83\u2f84\u2f85\u2f86\u2f87\u2f88\u2f89\u2f8a\u2f8b\u2f8c\u2f8d\u2f8e\u2f8f\u2f90\u2f91\u2f92\u2f93\u2f94\u2f95\u2f96\u2f97\u2f98\
u2f99\u2f9a\u2f9b\u2f9c\u2f9d\u2f9e\u2f9f\u2fa0\u2fa1\u2fa2\u2fa3\u2fa4\u2fa5\u2fa6\u2fa7\u2fa8\u2fa9\u2faa\u2fab\u2fac\u2fad\u2fae\u2faf\u2fb0\u2fb1\u2fb2\u2fb3\u2fb4\u2fb5\u2fb6\u2fb7\u2fb8\u2fb9\u2fba\u2fbb\u2fbc\u2fbd\u2fbe\u2fbf\u2fc0\u2fc1\u2fc2\u2fc3\u2fc4\u2fc5\u2fc6\u2fc7\u2fc8\u2fc9\u2fca\u2fcb\u2fcc\u2fcd\u2fce\u2fcf\u2fd0\u2fd1\u2fd2\u2fd3\u2fd4\u2fd5\u2ff0\u2ff1\u2ff2\u2ff3\u2ff4\u2ff5\u2ff6\u2ff7\u2ff8\u2ff9\u2ffa\u2ffb\u3004\u3012\u3013\u3020\u3036\u3037\u303e\u303f\u3190\u3191\u3196\u3197\u3198\u3199\u319a\u319b\u319c\u319d\u319e\u319f\u31c0\u31c1\u31c2\u31c3\u31c4\u31c5\u31c6\u31c7\u31c8\u31c9\u31ca\u31cb\u31cc\u31cd\u31ce\u31cf\u31d0\u31d1\u31d2\u31d3\u31d4\u31d5\u31d6\u31d7\u31d8\u31d9\u31da\u31db\u31dc\u31dd\u31de\u31df\u31e0\u31e1\u31e2\u31e3\u3200\u3201\u3202\u3203\u3204\u3205\u3206\u3207\u3208\u3209\u320a\u320b\u320c\u320d\u320e\u320f\u3210\u3211\u3212\u3213\u3214\u3215\u3216\u3217\u3218\u3219\u321a\u321b\u321c\u321d\u321e\u322a\u322b\u322c\u322d\u322e\u322f\u3230\u3231\u3232\u3233\u3234\u3235\u3236\u3237\u3238\u3239\u323a\u323b\u323c\u323d\u323e\u323f\u3240\u3241\u3242\u3243\u3244\u3245\u3246\u3247\u3248\u3249\u324a\u324b\u324c\u324d\u324e\u324f\u3250\u3260\u3261\u3262\u3263\u3264\u3265\u3266\u3267\u3268\u3269\u326a\u326b\u326c\u326d\u326e\u326f\u3270\u3271\u3272\u3273\u3274\u3275\u3276\u3277\u3278\u3279\u327a\u327b\u327c\u327d\u327e\u327f\u328a\u328b\u328c\u328d\u328e\u328f\u3290\u3291\u3292\u3293\u3294\u3295\u3296\u3297\u3298\u3299\u329a\u329b\u329c\u329d\u329e\u329f\u32a0\u32a1\u32a2\u32a3\u32a4\u32a5\u32a6\u32a7\u32a8\u32a9\u32aa\u32ab\u32ac\u32ad\u32ae\u32af\u32b0\u32c0\u32c1\u32c2\u32c3\u32c4\u32c5\u32c6\u32c7\u32c8\u32c9\u32ca\u32cb\u32cc\u32cd\u32ce\u32cf\u32d0\u32d1\u32d2\u32d3\u32d4\u32d5\u32d6\u32d7\u32d8\u32d9\u32da\u32db\u32dc\u32dd\u32de\u32df\u32e0\u32e1\u32e2\u32e3\u32e4\u32e5\u32e6\u32e7\u32e8\u32e9\u32ea\u32eb\u32ec\u32ed\u32ee\u32ef\u32f0\u32f1\u32f2\u32f3\u32f4\u32f5\u32f6\u32f7\u32f8\u32f9\u32fa\u32fb\u32fc\u32fd\u32fe\u3300\u3301\u3302\u3303\u3304\u3305\u3306\u3307\u3308\u3309\u330a\u330b\u330c\u330d\u330e\u330f\u3310\u3311\u3312\u3313\u3314\u3315\u3316\u3317\u3318\u3319\u331a\u331b\u331c\u331d\u331e\u331f\u3320\u3321\u3322\u3323\u3324\u3325\u3326\u3327\u3328\u3329\u332a\u332b\u332c\u332d\u332e\u332f\u3330\u3331\u3332\u3333\u3334\u3335\u3336\u3337\u3338\u3339\u333a\u333b\u333c\u333d\u333e\u333f\u3340\u3341\u3342\u3343\u3344\u3345\u3346\u3347\u3348\u3349\u334a\u334b\u334c\u334d\u334e\u334f\u3350\u3351\u3352\u3353\u3354\u3355\u3356\u3357\u3358\u3359\u335a\u335b\u335c\u335d\u335e\u335f\u3360\u3361\u3362\u3363\u3364\u3365\u3366\u3367\u3368\u3369\u336a\u336b\u336c\u336d\u336e\u336f\u3370\u3371\u3372\u3373\u3374\u3375\u3376\u3377\u3378\u3379\u337a\u337b\u337c\u337d\u337e\u337f\u3380\u3381\u3382\u3383\u3384\u3385\u3386\u3387\u3388\u3389\u338a\u338b\u338c\u338d\u338e\u338f\u3390\u3391\u3392\u3393\u3394\u3395\u3396\u3397\u3398\u3399\u339a\u339b\u339c\u339d\u339e\u339f\u33a0\u33a1\u33a2\u33a3\u33a4\u33a5\u33a6\u33a7\u33a8\u33a9\u33aa\u33ab\u33ac\u33ad\u33ae\u33af\u33b0\u33b1\u33b2\u33b3\u33b4\u33b5\u33b6\u33b7\u33b8\u33b9\u33ba\u33bb\u33bc\u33bd\u33be\u33bf\u33c0\u33c1\u33c2\u33c3\u33c4\u33c5\u33c6\u33c7\u33c8\u33c9\u33ca\u33cb\u33cc\u33cd\u33ce\u33cf\u33d0\u33d1\u33d2\u33d3\u33d4\u33d5\u33d6\u33d7\u33d8\u33d9\u33da\u33db\u33dc\u33dd\u33de\u33df\u33e0\u33e1\u33e2\u33e3\u33e4\u33e5\u33e6\u33e7\u33e8\u33e9\u33ea\u33eb\u33ec\u33ed\u33ee\u33ef\u33f0\u33f1\u33f2\u33f3\u33f4\u33f5\u33f6\u33f7\u33f8\u33f9\u33fa\u33fb\u33fc\u33fd\u33fe\u33ff\u4dc0\u4dc1\u4dc2\u4d
c3\u4dc4\u4dc5\u4dc6\u4dc7\u4dc8\u4dc9\u4dca\u4dcb\u4dcc\u4dcd\u4dce\u4dcf\u4dd0\u4dd1\u4dd2\u4dd3\u4dd4\u4dd5\u4dd6\u4dd7\u4dd8\u4dd9\u4dda\u4ddb\u4ddc\u4ddd\u4dde\u4ddf\u4de0\u4de1\u4de2\u4de3\u4de4\u4de5\u4de6\u4de7\u4de8\u4de9\u4dea\u4deb\u4dec\u4ded\u4dee\u4def\u4df0\u4df1\u4df2\u4df3\u4df4\u4df5\u4df6\u4df7\u4df8\u4df9\u4dfa\u4dfb\u4dfc\u4dfd\u4dfe\u4dff\ua490\ua491\ua492\ua493\ua494\ua495\ua496\ua497\ua498\ua499\ua49a\ua49b\ua49c\ua49d\ua49e\ua49f\ua4a0\ua4a1\ua4a2\ua4a3\ua4a4\ua4a5\ua4a6\ua4a7\ua4a8\ua4a9\ua4aa\ua4ab\ua4ac\ua4ad\ua4ae\ua4af\ua4b0\ua4b1\ua4b2\ua4b3\ua4b4\ua4b5\ua4b6\ua4b7\ua4b8\ua4b9\ua4ba\ua4bb\ua4bc\ua4bd\ua4be\ua4bf\ua4c0\ua4c1\ua4c2\ua4c3\ua4c4\ua4c5\ua4c6\ua828\ua829\ua82a\ua82b\ua836\ua837\ua839\uaa77\uaa78\uaa79\ufdfd\uffe4\uffe8\uffed\uffee\ufffc\ufffd'
+So = u'\xa6\xa9\xae\xb0\u0482\u060e-\u060f\u06de\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0d79\u0f01-\u0f03\u0f13\u0f15-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcc\u0fce-\u0fcf\u0fd5-\u0fd8\u109e-\u109f\u1390-\u1399\u1940\u19de-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2117\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u214a\u214c-\u214d\u214f\u2195-\u2199\u219c-\u219f\u21a1-\u21a2\u21a4-\u21a5\u21a7-\u21ad\u21af-\u21cd\u21d0-\u21d1\u21d3\u21d5-\u21f3\u2300-\u2307\u230c-\u231f\u2322-\u2328\u232b-\u237b\u237d-\u239a\u23b4-\u23db\u23e2-\u23f3\u2400-\u2426\u2440-\u244a\u249c-\u24e9\u2500-\u25b6\u25b8-\u25c0\u25c2-\u25f7\u2600-\u266e\u2670-\u26ff\u2701-\u2767\u2794-\u27bf\u2800-\u28ff\u2b00-\u2b2f\u2b45-\u2b46\u2b50-\u2b59\u2ce5-\u2cea\u2e80-\u2e99\u2e9b-\u2ef3\u2f00-\u2fd5\u2ff0-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3247\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u32fe\u3300-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ua836-\ua837\ua839\uaa77-\uaa79\ufdfd\uffe4\uffe8\uffed-\uffee\ufffc-\ufffd'
Zl = u'\u2028'
Zp = u'\u2029'
-Zs = u' \xa0\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000'
+Zs = u' \xa0\u1680\u2000-\u200a\u202f\u205f\u3000'
+
+xid_continue = u'0-9A-Z_a-z\xaa\xb5\xb7\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0300-\u0374\u0376-\u0377\u037b-\u037d\u0386-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u0483-\u0487\u048a-\u0527\u0531-\u0556\u0559\u0561-\u0587\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u05d0-\u05ea\u05f0-\u05f2\u0610-\u061a\u0620-\u0669\u066e-\u06d3\u06d5-\u06dc\u06df-\u06e8\u06ea-\u06fc\u06ff\u0710-\u074a\u074d-\u07b1\u07c0-\u07f5\u07fa\u0800-\u082d\u0840-\u085b\u08a0\u08a2-\u08ac\u08e4-\u08fe\u0900-\u0963\u0966-\u096f\u0971-\u0977\u0979-\u097f\u0981-\u0983\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bc-\u09c4\u09c7-\u09c8\u09cb-\u09ce\u09d7\u09dc-\u09dd\u09df-\u09e3\u09e6-\u09f1\u0a01-\u0a03\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a3c\u0a3e-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a59-\u0a5c\u0a5e\u0a66-\u0a75\u0a81-\u0a83\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abc-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ad0\u0ae0-\u0ae3\u0ae6-\u0aef\u0b01-\u0b03\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3c-\u0b44\u0b47-\u0b48\u0b4b-\u0b4d\u0b56-\u0b57\u0b5c-\u0b5d\u0b5f-\u0b63\u0b66-\u0b6f\u0b71\u0b82-\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd0\u0bd7\u0be6-\u0bef\u0c01-\u0c03\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c33\u0c35-\u0c39\u0c3d-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c58-\u0c59\u0c60-\u0c63\u0c66-\u0c6f\u0c82-\u0c83\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbc-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5-\u0cd6\u0cde\u0ce0-\u0ce3\u0ce6-\u0cef\u0cf1-\u0cf2\u0d02-\u0d03\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d-\u0d44\u0d46-\u0d48\u0d4a-\u0d4e\u0d57\u0d60-\u0d63\u0d66-\u0d6f\u0d7a-\u0d7f\u0d82-\u0d83\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0df2-\u0df3\u0e01-\u0e3a\u0e40-\u0e4e\u0e50-\u0e59\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb9\u0ebb-\u0ebd\u0ec0-\u0ec4\u0ec6\u0ec8-\u0ecd\u0ed0-\u0ed9\u0edc-\u0edf\u0f00\u0f18-\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e-\u0f47\u0f49-\u0f6c\u0f71-\u0f84\u0f86-\u0f97\u0f99-\u0fbc\u0fc6\u1000-\u1049\u1050-\u109d\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u135d-\u135f\u1369-\u1371\u1380-\u138f\u13a0-\u13f4\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f0\u1700-\u170c\u170e-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176c\u176e-\u1770\u1772-\u1773\u1780-\u17d3\u17d7\u17dc-\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u1820-\u1877\u1880-\u18aa\u18b0-\u18f5\u1900-\u191c\u1920-\u192b\u1930-\u193b\u1946-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u19d0-\u19da\u1a00-\u1a1b\u1a20-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1aa7\u1b00-\u1b4b\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1bf3\u1c00-\u1c37\u1c40-\u1c49\u1c4d-\u1c7d\u1cd0-\u1cd2\u1cd4-\u1cf6\u1d00-\u1de6\u1dfc-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u203f-\u2040\u2054\u2071\u
207f\u2090-\u209c\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d7f-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2de0-\u2dff\u3005-\u3007\u3021-\u302f\u3031-\u3035\u3038-\u303c\u3041-\u3096\u3099-\u309a\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312d\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fcc\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua62b\ua640-\ua66f\ua674-\ua67d\ua67f-\ua697\ua69f-\ua6f1\ua717-\ua71f\ua722-\ua788\ua78b-\ua78e\ua790-\ua793\ua7a0-\ua7aa\ua7f8-\ua827\ua840-\ua873\ua880-\ua8c4\ua8d0-\ua8d9\ua8e0-\ua8f7\ua8fb\ua900-\ua92d\ua930-\ua953\ua960-\ua97c\ua980-\ua9c0\ua9cf-\ua9d9\uaa00-\uaa36\uaa40-\uaa4d\uaa50-\uaa59\uaa60-\uaa76\uaa7a-\uaa7b\uaa80-\uaac2\uaadb-\uaadd\uaae0-\uaaef\uaaf2-\uaaf6\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabea\uabec-\uabed\uabf0-\uabf9\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe00-\ufe0f\ufe20-\ufe26\ufe33-\ufe34\ufe4d-\ufe4f\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff10-\uff19\uff21-\uff3a\uff3f\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc'
+
+xid_start = u'A-Z_a-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376-\u0377\u037b-\u037d\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u0527\u0531-\u0556\u0559\u0561-\u0587\u05d0-\u05ea\u05f0-\u05f2\u0620-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06e5-\u06e6\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4-\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u08a0\u08a2-\u08ac\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0977\u0979-\u097f\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c33\u0c35-\u0c39\u0c3d\u0c58-\u0c59\u0c60-\u0c61\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d60-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e40-\u0e46\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f4\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f0\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1877\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191c\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19c1-\u19c7\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312d\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fcc\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a-\ua62b\ua640-\ua66e\ua67f-\ua697\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua78e\ua790-\ua793\ua7a0-\ua7aa\ua7f8-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua8
82-\ua8b3\ua8f2-\ua8f7\ua8fb\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa80-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc'
+
+if sys.maxunicode > 0xFFFF:
+ # non-BMP characters, use only on wide Unicode builds
+ Cf += u'\U000110bd\U0001d173-\U0001d17a\U000e0001\U000e0020-\U000e007f'
+
+ Cn += u'\U0001000c\U00010027\U0001003b\U0001003e\U0001004e-\U0001004f\U0001005e-\U0001007f\U000100fb-\U000100ff\U00010103-\U00010106\U00010134-\U00010136\U0001018b-\U0001018f\U0001019c-\U000101cf\U000101fe-\U0001027f\U0001029d-\U0001029f\U000102d1-\U000102ff\U0001031f\U00010324-\U0001032f\U0001034b-\U0001037f\U0001039e\U000103c4-\U000103c7\U000103d6-\U000103ff\U0001049e-\U0001049f\U000104aa-\U000107ff\U00010806-\U00010807\U00010809\U00010836\U00010839-\U0001083b\U0001083d-\U0001083e\U00010856\U00010860-\U000108ff\U0001091c-\U0001091e\U0001093a-\U0001093e\U00010940-\U0001097f\U000109b8-\U000109bd\U000109c0-\U000109ff\U00010a04\U00010a07-\U00010a0b\U00010a14\U00010a18\U00010a34-\U00010a37\U00010a3b-\U00010a3e\U00010a48-\U00010a4f\U00010a59-\U00010a5f\U00010a80-\U00010aff\U00010b36-\U00010b38\U00010b56-\U00010b57\U00010b73-\U00010b77\U00010b80-\U00010bff\U00010c49-\U00010e5f\U00010e7f-\U00010fff\U0001104e-\U00011051\U00011070-\U0001107f\U000110c2-\U000110cf\U000110e9-\U000110ef\U000110fa-\U000110ff\U00011135\U00011144-\U0001117f\U000111c9-\U000111cf\U000111da-\U0001167f\U000116b8-\U000116bf\U000116ca-\U00011fff\U0001236f-\U000123ff\U00012463-\U0001246f\U00012474-\U00012fff\U0001342f-\U000167ff\U00016a39-\U00016eff\U00016f45-\U00016f4f\U00016f7f-\U00016f8e\U00016fa0-\U0001afff\U0001b002-\U0001cfff\U0001d0f6-\U0001d0ff\U0001d127-\U0001d128\U0001d1de-\U0001d1ff\U0001d246-\U0001d2ff\U0001d357-\U0001d35f\U0001d372-\U0001d3ff\U0001d455\U0001d49d\U0001d4a0-\U0001d4a1\U0001d4a3-\U0001d4a4\U0001d4a7-\U0001d4a8\U0001d4ad\U0001d4ba\U0001d4bc\U0001d4c4\U0001d506\U0001d50b-\U0001d50c\U0001d515\U0001d51d\U0001d53a\U0001d53f\U0001d545\U0001d547-\U0001d549\U0001d551\U0001d6a6-\U0001d6a7\U0001d7cc-\U0001d7cd\U0001d800-\U0001edff\U0001ee04\U0001ee20\U0001ee23\U0001ee25-\U0001ee26\U0001ee28\U0001ee33\U0001ee38\U0001ee3a\U0001ee3c-\U0001ee41\U0001ee43-\U0001ee46\U0001ee48\U0001ee4a\U0001ee4c\U0001ee50\U0001ee53\U0001ee55-\U0001ee56\U0001ee58\U0001ee5a\U0001ee5c\U0001ee5e\U0001ee60\U0001ee63\U0001ee65-\U0001ee66\U0001ee6b\U0001ee73\U0001ee78\U0001ee7d\U0001ee7f\U0001ee8a\U0001ee9c-\U0001eea0\U0001eea4\U0001eeaa\U0001eebc-\U0001eeef\U0001eef2-\U0001efff\U0001f02c-\U0001f02f\U0001f094-\U0001f09f\U0001f0af-\U0001f0b0\U0001f0bf-\U0001f0c0\U0001f0d0\U0001f0e0-\U0001f0ff\U0001f10b-\U0001f10f\U0001f12f\U0001f16c-\U0001f16f\U0001f19b-\U0001f1e5\U0001f203-\U0001f20f\U0001f23b-\U0001f23f\U0001f249-\U0001f24f\U0001f252-\U0001f2ff\U0001f321-\U0001f32f\U0001f336\U0001f37d-\U0001f37f\U0001f394-\U0001f39f\U0001f3c5\U0001f3cb-\U0001f3df\U0001f3f1-\U0001f3ff\U0001f43f\U0001f441\U0001f4f8\U0001f4fd-\U0001f4ff\U0001f53e-\U0001f53f\U0001f544-\U0001f54f\U0001f568-\U0001f5fa\U0001f641-\U0001f644\U0001f650-\U0001f67f\U0001f6c6-\U0001f6ff\U0001f774-\U0001ffff\U0002a6d7-\U0002a6ff\U0002b735-\U0002b73f\U0002b81e-\U0002f7ff\U0002fa1e-\U000e0000\U000e0002-\U000e001f\U000e0080-\U000e00ff\U000e01f0-\U000effff\U000ffffe-\U000fffff\U0010fffe-\U0010ffff'
+
+ Co += u'\U000f0000-\U000ffffd\U00100000-\U0010fffd'
+
+ Ll += u'\U00010428-\U0001044f\U0001d41a-\U0001d433\U0001d44e-\U0001d454\U0001d456-\U0001d467\U0001d482-\U0001d49b\U0001d4b6-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d4cf\U0001d4ea-\U0001d503\U0001d51e-\U0001d537\U0001d552-\U0001d56b\U0001d586-\U0001d59f\U0001d5ba-\U0001d5d3\U0001d5ee-\U0001d607\U0001d622-\U0001d63b\U0001d656-\U0001d66f\U0001d68a-\U0001d6a5\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6e1\U0001d6fc-\U0001d714\U0001d716-\U0001d71b\U0001d736-\U0001d74e\U0001d750-\U0001d755\U0001d770-\U0001d788\U0001d78a-\U0001d78f\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7c9\U0001d7cb'
+
+ Lm += u'\U00016f93-\U00016f9f'
+
+ Lo += u'\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031e\U00010330-\U00010340\U00010342-\U00010349\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U00010450-\U0001049d\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a33\U00010a60-\U00010a7c\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010c00-\U00010c48\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011183-\U000111b2\U000111c1-\U000111c4\U00011680-\U000116aa\U00012000-\U0001236e\U00013000-\U0001342e\U00016800-\U00016a38\U00016f00-\U00016f44\U00016f50\U0001b000-\U0001b001\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002f800-\U0002fa1d'
+
+ Lu += u'\U00010400-\U00010427\U0001d400-\U0001d419\U0001d434-\U0001d44d\U0001d468-\U0001d481\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b5\U0001d4d0-\U0001d4e9\U0001d504-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d538-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d56c-\U0001d585\U0001d5a0-\U0001d5b9\U0001d5d4-\U0001d5ed\U0001d608-\U0001d621\U0001d63c-\U0001d655\U0001d670-\U0001d689\U0001d6a8-\U0001d6c0\U0001d6e2-\U0001d6fa\U0001d71c-\U0001d734\U0001d756-\U0001d76e\U0001d790-\U0001d7a8\U0001d7ca'
+
+ Mc += u'\U00011000\U00011002\U00011082\U000110b0-\U000110b2\U000110b7-\U000110b8\U0001112c\U00011182\U000111b3-\U000111b5\U000111bf-\U000111c0\U000116ac\U000116ae-\U000116af\U000116b6\U00016f51-\U00016f7e\U0001d165-\U0001d166\U0001d16d-\U0001d172'
+
+ Mn += u'\U000101fd\U00010a01-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a0f\U00010a38-\U00010a3a\U00010a3f\U00011001\U00011038-\U00011046\U00011080-\U00011081\U000110b3-\U000110b6\U000110b9-\U000110ba\U00011100-\U00011102\U00011127-\U0001112b\U0001112d-\U00011134\U00011180-\U00011181\U000111b6-\U000111be\U000116ab\U000116ad\U000116b0-\U000116b5\U000116b7\U00016f8f-\U00016f92\U0001d167-\U0001d169\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U000e0100-\U000e01ef'
+
+ Nd += u'\U000104a0-\U000104a9\U00011066-\U0001106f\U000110f0-\U000110f9\U00011136-\U0001113f\U000111d0-\U000111d9\U000116c0-\U000116c9\U0001d7ce-\U0001d7ff'
+
+ Nl += u'\U00010140-\U00010174\U00010341\U0001034a\U000103d1-\U000103d5\U00012400-\U00012462'
+
+ No += u'\U00010107-\U00010133\U00010175-\U00010178\U0001018a\U00010320-\U00010323\U00010858-\U0001085f\U00010916-\U0001091b\U00010a40-\U00010a47\U00010a7d-\U00010a7e\U00010b58-\U00010b5f\U00010b78-\U00010b7f\U00010e60-\U00010e7e\U00011052-\U00011065\U0001d360-\U0001d371\U0001f100-\U0001f10a'
+
+ Po += u'\U00010100-\U00010102\U0001039f\U000103d0\U00010857\U0001091f\U0001093f\U00010a50-\U00010a58\U00010a7f\U00010b39-\U00010b3f\U00011047-\U0001104d\U000110bb-\U000110bc\U000110be-\U000110c1\U00011140-\U00011143\U000111c5-\U000111c8\U00012470-\U00012473'
+
+ Sm += u'\U0001d6c1\U0001d6db\U0001d6fb\U0001d715\U0001d735\U0001d74f\U0001d76f\U0001d789\U0001d7a9\U0001d7c3\U0001eef0-\U0001eef1'
+
+ So += u'\U00010137-\U0001013f\U00010179-\U00010189\U00010190-\U0001019b\U000101d0-\U000101fc\U0001d000-\U0001d0f5\U0001d100-\U0001d126\U0001d129-\U0001d164\U0001d16a-\U0001d16c\U0001d183-\U0001d184\U0001d18c-\U0001d1a9\U0001d1ae-\U0001d1dd\U0001d200-\U0001d241\U0001d245\U0001d300-\U0001d356\U0001f000-\U0001f02b\U0001f030-\U0001f093\U0001f0a0-\U0001f0ae\U0001f0b1-\U0001f0be\U0001f0c1-\U0001f0cf\U0001f0d1-\U0001f0df\U0001f110-\U0001f12e\U0001f130-\U0001f16b\U0001f170-\U0001f19a\U0001f1e6-\U0001f202\U0001f210-\U0001f23a\U0001f240-\U0001f248\U0001f250-\U0001f251\U0001f300-\U0001f320\U0001f330-\U0001f335\U0001f337-\U0001f37c\U0001f380-\U0001f393\U0001f3a0-\U0001f3c4\U0001f3c6-\U0001f3ca\U0001f3e0-\U0001f3f0\U0001f400-\U0001f43e\U0001f440\U0001f442-\U0001f4f7\U0001f4f9-\U0001f4fc\U0001f500-\U0001f53d\U0001f540-\U0001f543\U0001f550-\U0001f567\U0001f5fb-\U0001f640\U0001f645-\U0001f64f\U0001f680-\U0001f6c5\U0001f700-\U0001f773'
+
+ xid_continue += u'\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U000101fd\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031e\U00010330-\U0001034a\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104a0-\U000104a9\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a33\U00010a38-\U00010a3a\U00010a3f\U00010a60-\U00010a7c\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010c00-\U00010c48\U00011000-\U00011046\U00011066-\U0001106f\U00011080-\U000110ba\U000110d0-\U000110e8\U000110f0-\U000110f9\U00011100-\U00011134\U00011136-\U0001113f\U00011180-\U000111c4\U000111d0-\U000111d9\U00011680-\U000116b7\U000116c0-\U000116c9\U00012000-\U0001236e\U00012400-\U00012462\U00013000-\U0001342e\U00016800-\U00016a38\U00016f00-\U00016f44\U00016f50-\U00016f7e\U00016f8f-\U00016f9f\U0001b000-\U0001b001\U0001d165-\U0001d169\U0001d16d-\U0001d172\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001d7ce-\U0001d7ff\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002f800-\U0002fa1d\U000e0100-\U000e01ef'
+
+ xid_start += u'\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031e\U00010330-\U0001034a\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a33\U00010a60-\U00010a7c\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010c00-\U00010c48\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011183-\U000111b2\U000111c1-\U000111c4\U00011680-\U000116aa\U00012000-\U0001236e\U00012400-\U00012462\U00013000-\U0001342e\U00016800-\U00016a38\U00016f00-\U00016f44\U00016f50\U00016f93-\U00016f9f\U0001b000-\U0001b001\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002f800-\U0002fa1d'
cats = ['Cc', 'Cf', 'Cn', 'Co', 'Cs', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu', 'Mc', 'Me', 'Mn', 'Nd', 'Nl', 'No', 'Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps', 'Sc', 'Sk', 'Sm', 'So', 'Zl', 'Zp', 'Zs']
-# Generated from unidata 5.2.0
+# Generated from unidata 6.3.0
def combine(*args):
- return u''.join([globals()[cat] for cat in args])
+ return u''.join(globals()[cat] for cat in args)
-xid_start = u'\u0041-\u005A\u005F\u0061-\u007A\u00AA\u00B5\u00BA\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u01BA\u01BB\u01BC-\u01BF\u01C0-\u01C3\u01C4-\u0241\u0250-\u02AF\u02B0-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EE\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03CE\u03D0-\u03F5\u03F7-\u0481\u048A-\u04CE\u04D0-\u04F9\u0500-\u050F\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA\u05F0-\u05F2\u0621-\u063A\u0640\u0641-\u064A\u066E-\u066F\u0671-\u06D3\u06D5\u06E5-\u06E6\u06EE-\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u076D\u0780-\u07A5\u07B1\u0904-\u0939\u093D\u0950\u0958-\u0961\u097D\u0985-\u098C\u098F-\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BD\u09CE\u09DC-\u09DD\u09DF-\u09E1\u09F0-\u09F1\u0A05-\u0A0A\u0A0F-\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32-\u0A33\u0A35-\u0A36\u0A38-\u0A39\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2-\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0-\u0AE1\u0B05-\u0B0C\u0B0F-\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32-\u0B33\u0B35-\u0B39\u0B3D\u0B5C-\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99-\u0B9A\u0B9C\u0B9E-\u0B9F\u0BA3-\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C60-\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0-\u0CE1\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D28\u0D2A-\u0D39\u0D60-\u0D61\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E40-\u0E45\u0E46\u0E81-\u0E82\u0E84\u0E87-\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA-\u0EAB\u0EAD-\u0EB0\u0EB2\u0EBD\u0EC0-\u0EC4\u0EC6\u0EDC-\u0EDD\u0F00\u0F40-\u0F47\u0F49-\u0F6A\u0F88-\u0F8B\u1000-\u1021\u1023-\u1027\u1029-\u102A\u1050-\u1055\u10A0-\u10C5\u10D0-\u10FA\u10FC\u1100-\u1159\u115F-\u11A2\u11A8-\u11F9\u1200-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F4\u1401-\u166C\u166F-\u1676\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F0\u1700-\u170C\u170E-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7\u17DC\u1820-\u1842\u1843\u1844-\u1877\u1880-\u18A8\u1900-\u191C\u1950-\u196D\u1970-\u1974\u1980-\u19A9\u19C1-\u19C7\u1A00-\u1A16\u1D00-\u1D2B\u1D2C-\u1D61\u1D62-\u1D77\u1D78\u1D79-\u1D9A\u1D9B-\u1DBF\u1E00-\u1E9B\u1EA0-\u1EF9\u1F00-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F\u2090-\u2094\u2102\u2107\u210A-\u2113\u2115\u2118\u2119-\u211D\u2124\u2126\u2128\u212A-\u212D\u212E\u212F-\u2131\u2133-\u2134\u2135-\u2138\u2139\u213C-\u213F\u2145-\u2149\u2160-\u2183\u2C00-\u2C2E\u2C30-\u2C5E\u2C80-\u2CE4\u2D00-\u2D25\u2D30-\u2D65\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u3005\u3006\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303A\u303B\u303C\u3041-\u3096\u309D-\u309E\u309F\u30A1-\u30FA\u30FC-\u30FE\u30FF\u3105-\u312C\u3131-\u318E\u31A0-\u31B7\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FBB\uA000-\uA014\uA015\uA016-\uA48C\uA800-\uA801\uA803-\uA805\uA807-\uA80A\uA80C-\uA822\uAC00-\uD7A3\uF900-\uFA2D\uFA30-\uFA6A\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40-\uFB41\uFB43-\uFB44\uFB46-\uFBB1\uFBD3-\uFC5D\uFC64-\uFD3D\uFD50-\uFD8F\uFD92-\uFDC7\u
FDF0-\uFDF9\uFE71\uFE73\uFE77\uFE79\uFE7B\uFE7D\uFE7F-\uFEFC\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFF6F\uFF70\uFF71-\uFF9D\uFFA0-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7\uFFDA-\uFFDC'
-
-xid_continue = u'\u0030-\u0039\u0041-\u005A\u005F\u0061-\u007A\u00AA\u00B5\u00B7\u00BA\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u01BA\u01BB\u01BC-\u01BF\u01C0-\u01C3\u01C4-\u0241\u0250-\u02AF\u02B0-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EE\u0300-\u036F\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03CE\u03D0-\u03F5\u03F7-\u0481\u0483-\u0486\u048A-\u04CE\u04D0-\u04F9\u0500-\u050F\u0531-\u0556\u0559\u0561-\u0587\u0591-\u05B9\u05BB-\u05BD\u05BF\u05C1-\u05C2\u05C4-\u05C5\u05C7\u05D0-\u05EA\u05F0-\u05F2\u0610-\u0615\u0621-\u063A\u0640\u0641-\u064A\u064B-\u065E\u0660-\u0669\u066E-\u066F\u0670\u0671-\u06D3\u06D5\u06D6-\u06DC\u06DF-\u06E4\u06E5-\u06E6\u06E7-\u06E8\u06EA-\u06ED\u06EE-\u06EF\u06F0-\u06F9\u06FA-\u06FC\u06FF\u0710\u0711\u0712-\u072F\u0730-\u074A\u074D-\u076D\u0780-\u07A5\u07A6-\u07B0\u07B1\u0901-\u0902\u0903\u0904-\u0939\u093C\u093D\u093E-\u0940\u0941-\u0948\u0949-\u094C\u094D\u0950\u0951-\u0954\u0958-\u0961\u0962-\u0963\u0966-\u096F\u097D\u0981\u0982-\u0983\u0985-\u098C\u098F-\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BC\u09BD\u09BE-\u09C0\u09C1-\u09C4\u09C7-\u09C8\u09CB-\u09CC\u09CD\u09CE\u09D7\u09DC-\u09DD\u09DF-\u09E1\u09E2-\u09E3\u09E6-\u09EF\u09F0-\u09F1\u0A01-\u0A02\u0A03\u0A05-\u0A0A\u0A0F-\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32-\u0A33\u0A35-\u0A36\u0A38-\u0A39\u0A3C\u0A3E-\u0A40\u0A41-\u0A42\u0A47-\u0A48\u0A4B-\u0A4D\u0A59-\u0A5C\u0A5E\u0A66-\u0A6F\u0A70-\u0A71\u0A72-\u0A74\u0A81-\u0A82\u0A83\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2-\u0AB3\u0AB5-\u0AB9\u0ABC\u0ABD\u0ABE-\u0AC0\u0AC1-\u0AC5\u0AC7-\u0AC8\u0AC9\u0ACB-\u0ACC\u0ACD\u0AD0\u0AE0-\u0AE1\u0AE2-\u0AE3\u0AE6-\u0AEF\u0B01\u0B02-\u0B03\u0B05-\u0B0C\u0B0F-\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32-\u0B33\u0B35-\u0B39\u0B3C\u0B3D\u0B3E\u0B3F\u0B40\u0B41-\u0B43\u0B47-\u0B48\u0B4B-\u0B4C\u0B4D\u0B56\u0B57\u0B5C-\u0B5D\u0B5F-\u0B61\u0B66-\u0B6F\u0B71\u0B82\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99-\u0B9A\u0B9C\u0B9E-\u0B9F\u0BA3-\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BBE-\u0BBF\u0BC0\u0BC1-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCC\u0BCD\u0BD7\u0BE6-\u0BEF\u0C01-\u0C03\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C3E-\u0C40\u0C41-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55-\u0C56\u0C60-\u0C61\u0C66-\u0C6F\u0C82-\u0C83\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBC\u0CBD\u0CBE\u0CBF\u0CC0-\u0CC4\u0CC6\u0CC7-\u0CC8\u0CCA-\u0CCB\u0CCC-\u0CCD\u0CD5-\u0CD6\u0CDE\u0CE0-\u0CE1\u0CE6-\u0CEF\u0D02-\u0D03\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D28\u0D2A-\u0D39\u0D3E-\u0D40\u0D41-\u0D43\u0D46-\u0D48\u0D4A-\u0D4C\u0D4D\u0D57\u0D60-\u0D61\u0D66-\u0D6F\u0D82-\u0D83\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0DCA\u0DCF-\u0DD1\u0DD2-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2-\u0DF3\u0E01-\u0E30\u0E31\u0E32-\u0E33\u0E34-\u0E3A\u0E40-\u0E45\u0E46\u0E47-\u0E4E\u0E50-\u0E59\u0E81-\u0E82\u0E84\u0E87-\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA-\u0EAB\u0EAD-\u0EB0\u0EB1\u0EB2-\u0EB3\u0EB4-\u0EB9\u0EBB-\u0EBC\u0EBD\u0EC0-\u0EC4\u0EC6\u0EC8-\u0ECD\u0ED0-\u0ED9\u0EDC-\u0EDD\u0F00\u0F18-\u0F19\u0F20-\u0F29\u0F35\u0F37\u0F39\u0F3E-\u0F3F\u0F40-\u0F47\u0F49-\u0F6A\u0F71-\u0F7E\u0F7F\u0F80-\u0F84\u0F86-\u0F87\u0F88-\u0F8B\u0F90-\u0F97\u0F99-\u0FBC\u0FC6\u1000-\u1021\u1023-\u1027\u1029-\u102A\u102C\u102D-\u1030\u1031\u1032\u1036-\u1037\u1038\u1039\u1040-\u1049\u1050-\u1055\u1056-\u1057\u1058-\u1059\u10A0-\u10C5\u10D0-\u10FA\u10FC\u1100-\u1159\u115F-\u11A2\u11A8-\u11F9\u1200-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\
u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u135F\u1369-\u1371\u1380-\u138F\u13A0-\u13F4\u1401-\u166C\u166F-\u1676\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F0\u1700-\u170C\u170E-\u1711\u1712-\u1714\u1720-\u1731\u1732-\u1734\u1740-\u1751\u1752-\u1753\u1760-\u176C\u176E-\u1770\u1772-\u1773\u1780-\u17B3\u17B6\u17B7-\u17BD\u17BE-\u17C5\u17C6\u17C7-\u17C8\u17C9-\u17D3\u17D7\u17DC\u17DD\u17E0-\u17E9\u180B-\u180D\u1810-\u1819\u1820-\u1842\u1843\u1844-\u1877\u1880-\u18A8\u18A9\u1900-\u191C\u1920-\u1922\u1923-\u1926\u1927-\u1928\u1929-\u192B\u1930-\u1931\u1932\u1933-\u1938\u1939-\u193B\u1946-\u194F\u1950-\u196D\u1970-\u1974\u1980-\u19A9\u19B0-\u19C0\u19C1-\u19C7\u19C8-\u19C9\u19D0-\u19D9\u1A00-\u1A16\u1A17-\u1A18\u1A19-\u1A1B\u1D00-\u1D2B\u1D2C-\u1D61\u1D62-\u1D77\u1D78\u1D79-\u1D9A\u1D9B-\u1DBF\u1DC0-\u1DC3\u1E00-\u1E9B\u1EA0-\u1EF9\u1F00-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u203F-\u2040\u2054\u2071\u207F\u2090-\u2094\u20D0-\u20DC\u20E1\u20E5-\u20EB\u2102\u2107\u210A-\u2113\u2115\u2118\u2119-\u211D\u2124\u2126\u2128\u212A-\u212D\u212E\u212F-\u2131\u2133-\u2134\u2135-\u2138\u2139\u213C-\u213F\u2145-\u2149\u2160-\u2183\u2C00-\u2C2E\u2C30-\u2C5E\u2C80-\u2CE4\u2D00-\u2D25\u2D30-\u2D65\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u3005\u3006\u3007\u3021-\u3029\u302A-\u302F\u3031-\u3035\u3038-\u303A\u303B\u303C\u3041-\u3096\u3099-\u309A\u309D-\u309E\u309F\u30A1-\u30FA\u30FC-\u30FE\u30FF\u3105-\u312C\u3131-\u318E\u31A0-\u31B7\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FBB\uA000-\uA014\uA015\uA016-\uA48C\uA800-\uA801\uA802\uA803-\uA805\uA806\uA807-\uA80A\uA80B\uA80C-\uA822\uA823-\uA824\uA825-\uA826\uA827\uAC00-\uD7A3\uF900-\uFA2D\uFA30-\uFA6A\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1E\uFB1F-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40-\uFB41\uFB43-\uFB44\uFB46-\uFBB1\uFBD3-\uFC5D\uFC64-\uFD3D\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDF9\uFE00-\uFE0F\uFE20-\uFE23\uFE33-\uFE34\uFE4D-\uFE4F\uFE71\uFE73\uFE77\uFE79\uFE7B\uFE7D\uFE7F-\uFEFC\uFF10-\uFF19\uFF21-\uFF3A\uFF3F\uFF41-\uFF5A\uFF66-\uFF6F\uFF70\uFF71-\uFF9D\uFF9E-\uFF9F\uFFA0-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7\uFFDA-\uFFDC'
def allexcept(*args):
newcats = cats[:]
for arg in args:
newcats.remove(arg)
- return u''.join([globals()[cat] for cat in newcats])
+ return u''.join(globals()[cat] for cat in newcats)
+
-if __name__ == '__main__':
+def _handle_runs(char_list): # pragma: no cover
+ buf = []
+ for c in char_list:
+ if len(c) == 1:
+ if buf and buf[-1][1] == chr(ord(c)-1):
+ buf[-1] = (buf[-1][0], c)
+ else:
+ buf.append((c, c))
+ else:
+ buf.append((c, c))
+ for a, b in buf:
+ if a == b:
+ yield a
+ else:
+ yield u'%s-%s' % (a, b)
+
+
+if __name__ == '__main__': # pragma: no cover
import unicodedata
- categories = {}
+ # we need Py3 for the determination of the XID_* properties
+ if sys.version_info[:2] < (3, 3):
+ raise RuntimeError('this file must be regenerated with Python 3.3+')
- f = open(__file__)
- try:
- content = f.read()
- finally:
- f.close()
+ categories_bmp = {'xid_start': [], 'xid_continue': []}
+ categories_nonbmp = {'xid_start': [], 'xid_continue': []}
+
+ with open(__file__) as fp:
+ content = fp.read()
header = content[:content.find('Cc =')]
footer = content[content.find("def combine("):]
- for code in range(65535):
- c = unichr(code)
+ for code in range(0x110000):
+ c = chr(code)
cat = unicodedata.category(c)
if ord(c) == 0xdc00:
            # Hack to avoid this character combining with the preceding high
# surrogate, 0xdbff, when doing a repr.
c = u'\\' + c
- elif ord(c) in (0x2d, 0x5b, 0x5c, 0x5d):
+ elif ord(c) in (0x2d, 0x5b, 0x5c, 0x5d, 0x5e):
# Escape regex metachars.
c = u'\\' + c
- categories.setdefault(cat, []).append(c)
-
- f = open(__file__, 'w')
- f.write(header)
-
- for cat in sorted(categories):
- val = u''.join(categories[cat])
- if cat == 'Cs':
- # Jython can't handle isolated surrogates
- f.write("""\
+ cat_dic = categories_bmp if code < 0x10000 else categories_nonbmp
+ cat_dic.setdefault(cat, []).append(c)
+ # XID_START and XID_CONTINUE are special categories used for matching
+ # identifiers in Python 3.
+ if c.isidentifier():
+ cat_dic['xid_start'].append(c)
+ if ('a' + c).isidentifier():
+ cat_dic['xid_continue'].append(c)
+
+ with open(__file__, 'w') as fp:
+ fp.write(header)
+
+ for cat in sorted(categories_bmp):
+ val = u''.join(_handle_runs(categories_bmp[cat]))
+ if cat == 'Cs':
+ # Jython can't handle isolated surrogates
+ fp.write("""\
try:
- Cs = eval(u_prefix + r"%s")
+ Cs = eval(r"u%s")
except UnicodeDecodeError:
- Cs = '' # Jython can't handle isolated surrogates\n\n""" % repr(val).lstrip('u'))
- else:
- f.write('%s = %r\n\n' % (cat, val))
- f.write('cats = %r\n\n' % sorted(categories.keys()))
- f.write('# Generated from unidata %s\n\n' % (unicodedata.unidata_version,))
-
- f.write(footer)
- f.close()
+ Cs = '' # Jython can't handle isolated surrogates\n\n""" % ascii(val))
+ else:
+ fp.write('%s = u%a\n\n' % (cat, val))
+
+ fp.write('if sys.maxunicode > 0xFFFF:\n')
+ fp.write(' # non-BMP characters, use only on wide Unicode builds\n')
+ for cat in sorted(categories_nonbmp):
+ # no special case for Cs needed, since there are no surrogates
+ # in the higher planes
+ val = u''.join(_handle_runs(categories_nonbmp[cat]))
+ fp.write(' %s += u%a\n\n' % (cat, val))
+
+ cats = sorted(categories_bmp)
+ cats.remove('xid_start')
+ cats.remove('xid_continue')
+ fp.write('cats = %r\n\n' % cats)
+
+ fp.write('# Generated from unidata %s\n\n' % (unicodedata.unidata_version,))
+
+ fp.write(footer)
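A minimal sketch of how the regenerated category strings are consumed (an illustration, not part of the commit): each module-level constant such as Lu or Ll is a ready-made regex character-class body, and combine() simply concatenates them.

import re
from pygments import unistring as uni

# Lu and Ll are the generated uppercase/lowercase letter sets from above;
# combine() joins them so they drop straight into a character class.
cased_word = re.compile(u'[%s]+' % uni.combine('Lu', 'Ll'))
assert cased_word.match(u'Pygments')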
diff --git a/pygments/util.py b/pygments/util.py
index caac1144..c464e17c 100644
--- a/pygments/util.py
+++ b/pygments/util.py
@@ -5,31 +5,31 @@
Utility functions.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import sys
-import codecs
split_path_re = re.compile(r'[/\\ ]')
doctype_lookup_re = re.compile(r'''(?smx)
(<\?.*?\?>)?\s*
<!DOCTYPE\s+(
+ [a-zA-Z_][a-zA-Z0-9]*
+ (?: \s+ # optional in HTML5
[a-zA-Z_][a-zA-Z0-9]*\s+
- [a-zA-Z_][a-zA-Z0-9]*\s+
- "[^"]*")
+ "[^"]*")?
+ )
[^>]*>
''')
tag_re = re.compile(r'<(.+?)(\s.*?)?>.*?</.+?>(?uism)')
+xml_decl_re = re.compile(r'\s*<\?xml[^>]*\?>', re.I)
class ClassNotFound(ValueError):
- """
- If one of the get_*_by_* functions didn't find a matching class.
- """
+ """Raised if one of the lookup functions didn't find a matching class."""
class OptionError(Exception):
@@ -52,10 +52,10 @@ def get_bool_opt(options, optname, default=None):
return string
elif isinstance(string, int):
return bool(string)
- elif not isinstance(string, basestring):
+ elif not isinstance(string, string_types):
raise OptionError('Invalid type %r for option %s; use '
'1/0, yes/no, true/false, on/off' % (
- string, optname))
+ string, optname))
elif string.lower() in ('1', 'yes', 'true', 'on'):
return True
elif string.lower() in ('0', 'no', 'false', 'off'):
@@ -63,7 +63,7 @@ def get_bool_opt(options, optname, default=None):
else:
raise OptionError('Invalid value %r for option %s; use '
'1/0, yes/no, true/false, on/off' % (
- string, optname))
+ string, optname))
def get_int_opt(options, optname, default=None):
@@ -73,23 +73,23 @@ def get_int_opt(options, optname, default=None):
except TypeError:
raise OptionError('Invalid type %r for option %s; you '
'must give an integer value' % (
- string, optname))
+ string, optname))
except ValueError:
raise OptionError('Invalid value %r for option %s; you '
'must give an integer value' % (
- string, optname))
+ string, optname))
def get_list_opt(options, optname, default=None):
val = options.get(optname, default)
- if isinstance(val, basestring):
+ if isinstance(val, string_types):
return val.split()
elif isinstance(val, (list, tuple)):
return list(val)
else:
raise OptionError('Invalid type %r for option %s; you '
'must give a list value' % (
- val, optname))
+ val, optname))
def docstring_headline(obj):
@@ -105,10 +105,7 @@ def docstring_headline(obj):
def make_analysator(f):
- """
- Return a static text analysation function that
- returns float values.
- """
+ """Return a static text analyser function that returns float values."""
def text_analyse(text):
try:
rv = f(text)
@@ -125,8 +122,7 @@ def make_analysator(f):
def shebang_matches(text, regex):
- """
- Check if the given regular expression matches the last part of the
+ """Check if the given regular expression matches the last part of the
shebang if one exists.
>>> from pygments.util import shebang_matches
@@ -171,8 +167,8 @@ def shebang_matches(text, regex):
def doctype_matches(text, regex):
- """
- Check if the doctype matches a regular expression (if present).
+ """Check if the doctype matches a regular expression (if present).
+
Note that this method only checks the first part of a DOCTYPE.
eg: 'html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"'
"""
@@ -180,21 +176,21 @@ def doctype_matches(text, regex):
if m is None:
return False
doctype = m.group(2)
- return re.compile(regex).match(doctype.strip()) is not None
+ return re.compile(regex, re.I).match(doctype.strip()) is not None
def html_doctype_matches(text):
- """
- Check if the file looks like it has a html doctype.
- """
- return doctype_matches(text, r'html\s+PUBLIC\s+"-//W3C//DTD X?HTML.*')
+ """Check if the file looks like it has a html doctype."""
+ return doctype_matches(text, r'html')
_looks_like_xml_cache = {}
+
+
def looks_like_xml(text):
- """
- Check if a doctype exists or if we have some tags.
- """
+ """Check if a doctype exists or if we have some tags."""
+ if xml_decl_re.match(text):
+ return True
key = hash(text)
try:
return _looks_like_xml_cache[key]
@@ -206,15 +202,20 @@ def looks_like_xml(text):
_looks_like_xml_cache[key] = rv
return rv
+
# Python narrow build compatibility
def _surrogatepair(c):
+    # Given a Unicode code point above the 16-bit range (i.e. > 0xFFFF),
+    # return the pair of 16-bit surrogate code units that encode it.
+ # From example D28 of:
+ # http://www.unicode.org/book/ch03.pdf
return (0xd7c0 + (c >> 10), (0xdc00 + (c & 0x3ff)))
+
def unirange(a, b):
- """
- Returns a regular expression string to match the given non-BMP range.
- """
+ """Returns a regular expression string to match the given non-BMP range."""
if b < a:
raise ValueError("Bad character range")
if a < 0x10000 or b < 0x10000:
@@ -251,27 +252,136 @@ def unirange(a, b):
return u'(?:' + u'|'.join(buf) + u')'
+
+def format_lines(var_name, seq, raw=False, indent_level=0):
+ """Formats a sequence of strings for output."""
+ lines = []
+ base_indent = ' ' * indent_level * 4
+ inner_indent = ' ' * (indent_level + 1) * 4
+ lines.append(base_indent + var_name + ' = (')
+ if raw:
+ # These should be preformatted reprs of, say, tuples.
+ for i in seq:
+ lines.append(inner_indent + i + ',')
+ else:
+ for i in seq:
+ # Force use of single quotes
+ r = repr(i + '"')
+ lines.append(inner_indent + r[:-2] + r[-1] + ',')
+ lines.append(base_indent + ')')
+ return '\n'.join(lines)
+
+
+def duplicates_removed(it, already_seen=()):
+ """
+ Returns a list with duplicates removed from the iterable `it`.
+
+ Order is preserved.
+ """
+ lst = []
+ seen = set()
+ for i in it:
+ if i in seen or i in already_seen:
+ continue
+ lst.append(i)
+ seen.add(i)
+ return lst
+
+
+class Future(object):
+ """Generic class to defer some work.
+
+ Handled specially in RegexLexerMeta, to support regex string construction at
+ first use.
+ """
+ def get(self):
+ raise NotImplementedError
+
+
+def guess_decode(text):
+ """Decode *text* with guessed encoding.
+
+ First try UTF-8; this should fail for non-UTF-8 encodings.
+ Then try the preferred locale encoding.
+ Fall back to latin-1, which always works.
+ """
+ try:
+ text = text.decode('utf-8')
+ return text, 'utf-8'
+ except UnicodeDecodeError:
+ try:
+ import locale
+ prefencoding = locale.getpreferredencoding()
+ text = text.decode()
+ return text, prefencoding
+ except (UnicodeDecodeError, LookupError):
+ text = text.decode('latin1')
+ return text, 'latin1'
+
+
+def guess_decode_from_terminal(text, term):
+ """Decode *text* coming from terminal *term*.
+
+ First try the terminal encoding, if given.
+ Then try UTF-8. Then try the preferred locale encoding.
+ Fall back to latin-1, which always works.
+ """
+ if getattr(term, 'encoding', None):
+ try:
+ text = text.decode(term.encoding)
+ except UnicodeDecodeError:
+ pass
+ else:
+ return text, term.encoding
+ return guess_decode(text)
+
+
+def terminal_encoding(term):
+ """Return our best guess of encoding for the given *term*."""
+ if getattr(term, 'encoding', None):
+ return term.encoding
+ import locale
+ return locale.getpreferredencoding()
+
+
# Python 2/3 compatibility
-if sys.version_info < (3,0):
- b = bytes = str
+if sys.version_info < (3, 0):
+ unichr = unichr
+ xrange = xrange
+ string_types = (str, unicode)
+ text_type = unicode
u_prefix = 'u'
- import StringIO, cStringIO
- BytesIO = cStringIO.StringIO
+ iteritems = dict.iteritems
+ itervalues = dict.itervalues
+ import StringIO
+ import cStringIO
+ # unfortunately, io.StringIO in Python 2 doesn't accept str at all
StringIO = StringIO.StringIO
- uni_open = codecs.open
+ BytesIO = cStringIO.StringIO
else:
- import builtins
- bytes = builtins.bytes
+ unichr = chr
+ xrange = range
+ string_types = (str,)
+ text_type = str
u_prefix = ''
- def b(s):
- if isinstance(s, str):
- return bytes(map(ord, s))
- elif isinstance(s, bytes):
- return s
- else:
- raise TypeError("Invalid argument %r for b()" % (s,))
- import io
- BytesIO = io.BytesIO
- StringIO = io.StringIO
- uni_open = builtins.open
+ iteritems = dict.items
+ itervalues = dict.values
+ from io import StringIO, BytesIO, TextIOWrapper
+
+ class UnclosingTextIOWrapper(TextIOWrapper):
+ # Don't close underlying buffer on destruction.
+ def close(self):
+ pass
+
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
+ for slots_var in orig_vars.get('__slots__', ()):
+ orig_vars.pop(slots_var)
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+ return wrapper
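A rough usage sketch for the helpers added in this hunk (not part of the commit); the byte string and keyword list are arbitrary examples.

from pygments.util import guess_decode, duplicates_removed, format_lines

# guess_decode tries UTF-8 first, so valid UTF-8 input is reported as such.
text, enc = guess_decode(b'caf\xc3\xa9')
assert (text, enc) == (u'caf\xe9', 'utf-8')

# duplicates_removed keeps the first occurrence and preserves order.
assert duplicates_removed([3, 1, 3, 2, 1]) == [3, 1, 2]

# format_lines renders a sequence as Python source, e.g. for generated keyword lists.
print(format_lines('keywords', ['if', 'else']))
# keywords = (
#     'if',
#     'else',
# )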
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 00000000..4754a9d2
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,5 @@
+coverage
+nose
+pyflakes
+pylint
+tox
diff --git a/scripts/check_sources.py b/scripts/check_sources.py
index d9e5c2ae..4f5926f6 100755
--- a/scripts/check_sources.py
+++ b/scripts/check_sources.py
@@ -7,20 +7,26 @@
Make sure each Python file has a correct file header
including copyright and license information.
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import sys, os, re
+from __future__ import print_function
+
+import io
+import os
+import re
+import sys
import getopt
-import cStringIO
from os.path import join, splitext, abspath
checkers = {}
+
def checker(*suffixes, **kwds):
only_pkg = kwds.pop('only_pkg', False)
+
def deco(func):
for suffix in suffixes:
checkers.setdefault(suffix, []).append(func)
@@ -30,55 +36,42 @@ def checker(*suffixes, **kwds):
name_mail_re = r'[\w ]+(<.*?>)?'
-copyright_re = re.compile(r'^ :copyright: Copyright 2006-2013 by '
+copyright_re = re.compile(r'^ :copyright: Copyright 2006-2015 by '
r'the Pygments team, see AUTHORS\.$', re.UNICODE)
copyright_2_re = re.compile(r'^ %s(, %s)*[,.]$' %
(name_mail_re, name_mail_re), re.UNICODE)
-coding_re = re.compile(r'coding[:=]\s*([-\w.]+)')
-not_ix_re = re.compile(r'\bnot\s+\S+?\s+i[sn]\s\S+')
is_const_re = re.compile(r'if.*?==\s+(None|False|True)\b')
misspellings = ["developement", "adress", "verificate", # ALLOW-MISSPELLING
- "informations"] # ALLOW-MISSPELLING
+ "informations", "unlexer"] # ALLOW-MISSPELLING
@checker('.py')
def check_syntax(fn, lines):
+ if '#!/' in lines[0]:
+ lines = lines[1:]
+ if 'coding:' in lines[0]:
+ lines = lines[1:]
try:
- compile(''.join(lines), fn, "exec")
- except SyntaxError, err:
+ compile('\n'.join(lines), fn, "exec")
+ except SyntaxError as err:
yield 0, "not compilable: %s" % err
@checker('.py')
def check_style_and_encoding(fn, lines):
- encoding = 'ascii'
for lno, line in enumerate(lines):
- if len(line) > 90:
+ if len(line) > 110:
yield lno+1, "line too long"
- m = not_ix_re.search(line)
- if m:
- yield lno+1, '"' + m.group() + '"'
if is_const_re.search(line):
yield lno+1, 'using == None/True/False'
- if lno < 2:
- co = coding_re.search(line)
- if co:
- encoding = co.group(1)
- try:
- line.decode(encoding)
- except UnicodeDecodeError, err:
- yield lno+1, "not decodable: %s\n Line: %r" % (err, line)
- except LookupError, err:
- yield 0, "unknown encoding: %s" % encoding
- encoding = 'latin1'
@checker('.py', only_pkg=True)
def check_fileheader(fn, lines):
# line number correction
c = 1
- if lines[0:1] == ['#!/usr/bin/env python\n']:
+ if lines[0:1] == ['#!/usr/bin/env python']:
lines = lines[1:]
c = 2
@@ -87,31 +80,28 @@ def check_fileheader(fn, lines):
for lno, l in enumerate(lines):
llist.append(l)
if lno == 0:
- if l == '# -*- coding: rot13 -*-\n':
- # special-case pony package
- return
- elif l != '# -*- coding: utf-8 -*-\n':
+ if l != '# -*- coding: utf-8 -*-':
yield 1, "missing coding declaration"
elif lno == 1:
- if l != '"""\n' and l != 'r"""\n':
+ if l != '"""' and l != 'r"""':
yield 2, 'missing docstring begin (""")'
else:
docopen = True
elif docopen:
- if l == '"""\n':
+ if l == '"""':
# end of docstring
if lno <= 4:
yield lno+c, "missing module name in docstring"
break
- if l != "\n" and l[:4] != ' ' and docopen:
+ if l != "" and l[:4] != ' ' and docopen:
yield lno+c, "missing correct docstring indentation"
if lno == 2:
# if not in package, don't check the module name
modname = fn[:-3].replace('/', '.').replace('.__init__', '')
while modname:
- if l.lower()[4:-1] == modname:
+ if l.lower()[4:] == modname:
break
modname = '.'.join(modname.split('.')[1:])
else:
@@ -126,11 +116,11 @@ def check_fileheader(fn, lines):
# check for copyright and license fields
license = llist[-2:-1]
- if license != [" :license: BSD, see LICENSE for details.\n"]:
+ if license != [" :license: BSD, see LICENSE for details."]:
yield 0, "no correct license info"
ci = -3
- copyright = [s.decode('utf-8') for s in llist[ci:ci+1]]
+ copyright = llist[ci:ci+1]
while copyright and copyright_2_re.match(copyright[0]):
ci -= 1
copyright = llist[ci:ci+1]
@@ -138,34 +128,11 @@ def check_fileheader(fn, lines):
yield 0, "no correct copyright info"
-@checker('.py', '.html', '.js')
-def check_whitespace_and_spelling(fn, lines):
- for lno, line in enumerate(lines):
- if "\t" in line:
- yield lno+1, "OMG TABS!!!1 "
- if line[:-1].rstrip(' \t') != line[:-1]:
- yield lno+1, "trailing whitespace"
- for word in misspellings:
- if word in line and 'ALLOW-MISSPELLING' not in line:
- yield lno+1, '"%s" used' % word
-
-
-bad_tags = ('<b>', '<i>', '<u>', '<s>', '<strike>'
- '<center>', '<big>', '<small>', '<font')
-
-@checker('.html')
-def check_xhtml(fn, lines):
- for lno, line in enumerate(lines):
- for bad_tag in bad_tags:
- if bad_tag in line:
- yield lno+1, "used " + bad_tag
-
-
def main(argv):
try:
gopts, args = getopt.getopt(argv[1:], "vi:")
except getopt.GetoptError:
- print "Usage: %s [-v] [-i ignorepath]* [path]" % argv[0]
+ print("Usage: %s [-v] [-i ignorepath]* [path]" % argv[0])
return 2
opts = {}
for opt, val in gopts:
@@ -178,30 +145,33 @@ def main(argv):
elif len(args) == 1:
path = args[0]
else:
- print "Usage: %s [-v] [-i ignorepath]* [path]" % argv[0]
+ print("Usage: %s [-v] [-i ignorepath]* [path]" % argv[0])
return 2
verbose = '-v' in opts
num = 0
- out = cStringIO.StringIO()
+ out = io.StringIO()
# TODO: replace os.walk run with iteration over output of
# `svn list -R`.
for root, dirs, files in os.walk(path):
- if '.svn' in dirs:
- dirs.remove('.svn')
+ if '.hg' in dirs:
+ dirs.remove('.hg')
+ if 'examplefiles' in dirs:
+ dirs.remove('examplefiles')
if '-i' in opts and abspath(root) in opts['-i']:
del dirs[:]
continue
# XXX: awkward: for the Makefile call: don't check non-package
# files for file headers
- in_pocoo_pkg = root.startswith('./pygments')
+ in_pygments_pkg = root.startswith('./pygments')
for fn in files:
fn = join(root, fn)
- if fn[:2] == './': fn = fn[2:]
+ if fn[:2] == './':
+ fn = fn[2:]
if '-i' in opts and abspath(fn) in opts['-i']:
continue
@@ -212,29 +182,28 @@ def main(argv):
continue
if verbose:
- print "Checking %s..." % fn
+ print("Checking %s..." % fn)
try:
- f = open(fn, 'r')
- lines = list(f)
- except (IOError, OSError), err:
- print "%s: cannot open: %s" % (fn, err)
+ lines = open(fn, 'rb').read().decode('utf-8').splitlines()
+ except (IOError, OSError) as err:
+ print("%s: cannot open: %s" % (fn, err))
num += 1
continue
for checker in checkerlist:
- if not in_pocoo_pkg and checker.only_pkg:
+ if not in_pygments_pkg and checker.only_pkg:
continue
for lno, msg in checker(fn, lines):
- print >>out, "%s:%d: %s" % (fn, lno, msg)
+ print(u"%s:%d: %s" % (fn, lno, msg), file=out)
num += 1
if verbose:
- print
+ print()
if num == 0:
- print "No errors found."
+ print("No errors found.")
else:
- print out.getvalue().rstrip('\n')
- print "%d error%s found." % (num, num > 1 and "s" or "")
+ print(out.getvalue().rstrip('\n'))
+ print("%d error%s found." % (num, num > 1 and "s" or ""))
return int(num > 0)
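To illustrate the registration mechanism above (a sketch, not part of the commit): a per-suffix check is a generator decorated with checker() that yields (lineno, message) pairs. The rule below is hypothetical and assumes it lives inside scripts/check_sources.py, where the decorator is defined.

@checker('.py')
def check_print_statements(fn, lines):
    # hypothetical rule, only to show the expected shape of a checker
    for lno, line in enumerate(lines):
        if line.lstrip().startswith('print '):
            yield lno + 1, "Python 2 print statement"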
diff --git a/scripts/debug_lexer.py b/scripts/debug_lexer.py
new file mode 100755
index 00000000..4dac42ca
--- /dev/null
+++ b/scripts/debug_lexer.py
@@ -0,0 +1,244 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+"""
+ Lexing error finder
+ ~~~~~~~~~~~~~~~~~~~
+
+ For the source files given on the command line, display
+ the text where Error tokens are being generated, along
+ with some context.
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+
+# always prefer Pygments from source if exists
+srcpath = os.path.join(os.path.dirname(__file__), '..')
+if os.path.isdir(os.path.join(srcpath, 'pygments')):
+ sys.path.insert(0, srcpath)
+
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, LexerContext, \
+ ProfilingRegexLexer, ProfilingRegexLexerMeta
+from pygments.lexers import get_lexer_by_name, find_lexer_class, \
+ find_lexer_class_for_filename
+from pygments.token import Error, Text, _TokenType
+from pygments.cmdline import _parse_options
+
+
+class DebuggingRegexLexer(ExtendedRegexLexer):
+ """Make the state stack, position and current match instance attributes."""
+
+ def get_tokens_unprocessed(self, text, stack=('root',)):
+ """
+ Split ``text`` into (tokentype, text) pairs.
+
+        ``stack`` is the initial stack (default: ``['root']``)
+ """
+ tokendefs = self._tokens
+ self.ctx = ctx = LexerContext(text, 0)
+ ctx.stack = list(stack)
+ statetokens = tokendefs[ctx.stack[-1]]
+ while 1:
+ for rexmatch, action, new_state in statetokens:
+ self.m = m = rexmatch(text, ctx.pos, ctx.end)
+ if m:
+ if action is not None:
+ if type(action) is _TokenType:
+ yield ctx.pos, action, m.group()
+ ctx.pos = m.end()
+ else:
+ if not isinstance(self, ExtendedRegexLexer):
+ for item in action(self, m):
+ yield item
+ ctx.pos = m.end()
+ else:
+ for item in action(self, m, ctx):
+ yield item
+ if not new_state:
+ # altered the state stack?
+ statetokens = tokendefs[ctx.stack[-1]]
+ if new_state is not None:
+ # state transition
+ if isinstance(new_state, tuple):
+ for state in new_state:
+ if state == '#pop':
+ ctx.stack.pop()
+ elif state == '#push':
+ ctx.stack.append(ctx.stack[-1])
+ else:
+ ctx.stack.append(state)
+ elif isinstance(new_state, int):
+ # pop
+ del ctx.stack[new_state:]
+ elif new_state == '#push':
+ ctx.stack.append(ctx.stack[-1])
+ else:
+ assert False, 'wrong state def: %r' % new_state
+ statetokens = tokendefs[ctx.stack[-1]]
+ break
+ else:
+ try:
+ if ctx.pos >= ctx.end:
+ break
+ if text[ctx.pos] == '\n':
+ # at EOL, reset state to 'root'
+ ctx.stack = ['root']
+ statetokens = tokendefs['root']
+ yield ctx.pos, Text, u'\n'
+ ctx.pos += 1
+ continue
+ yield ctx.pos, Error, text[ctx.pos]
+ ctx.pos += 1
+ except IndexError:
+ break
+
+
+def main(fn, lexer=None, options={}):
+ if lexer is not None:
+ lxcls = get_lexer_by_name(lexer).__class__
+ else:
+ lxcls = find_lexer_class_for_filename(os.path.basename(fn))
+ if lxcls is None:
+ name, rest = fn.split('_', 1)
+ lxcls = find_lexer_class(name)
+ if lxcls is None:
+ raise AssertionError('no lexer found for file %r' % fn)
+ debug_lexer = False
+ if profile:
+ # does not work for e.g. ExtendedRegexLexers
+ if lxcls.__bases__ == (RegexLexer,):
+ # yes we can! (change the metaclass)
+ lxcls.__class__ = ProfilingRegexLexerMeta
+ lxcls.__bases__ = (ProfilingRegexLexer,)
+ lxcls._prof_sort_index = profsort
+ else:
+ if lxcls.__bases__ == (RegexLexer,):
+ lxcls.__bases__ = (DebuggingRegexLexer,)
+ debug_lexer = True
+ elif lxcls.__bases__ == (DebuggingRegexLexer,):
+ # already debugged before
+ debug_lexer = True
+ else:
+ # HACK: ExtendedRegexLexer subclasses will only partially work here.
+ lxcls.__bases__ = (DebuggingRegexLexer,)
+ debug_lexer = True
+
+ lx = lxcls(**options)
+ lno = 1
+ if fn == '-':
+ text = sys.stdin.read()
+ else:
+ with open(fn, 'rb') as fp:
+ text = fp.read().decode('utf-8')
+ text = text.strip('\n') + '\n'
+ tokens = []
+ states = []
+
+ def show_token(tok, state):
+ reprs = list(map(repr, tok))
+ print(' ' + reprs[1] + ' ' + ' ' * (29-len(reprs[1])) + reprs[0], end=' ')
+ if debug_lexer:
+ print(' ' + ' ' * (29-len(reprs[0])) + ' : '.join(state) if state else '', end=' ')
+ print()
+
+ for type, val in lx.get_tokens(text):
+ lno += val.count('\n')
+ if type == Error:
+ print('Error parsing', fn, 'on line', lno)
+ print('Previous tokens' + (debug_lexer and ' and states' or '') + ':')
+ if showall:
+                for tok, state in zip(tokens, states or [None] * len(tokens)):
+ show_token(tok, state)
+ else:
+ for i in range(max(len(tokens) - num, 0), len(tokens)):
+ if debug_lexer:
+ show_token(tokens[i], states[i])
+ else:
+ show_token(tokens[i], None)
+ print('Error token:')
+ l = len(repr(val))
+ print(' ' + repr(val), end=' ')
+ if debug_lexer and hasattr(lx, 'ctx'):
+ print(' ' * (60-l) + ' : '.join(lx.ctx.stack), end=' ')
+ print()
+ print()
+ return 1
+ tokens.append((type, val))
+ if debug_lexer:
+ if hasattr(lx, 'ctx'):
+ states.append(lx.ctx.stack[:])
+ else:
+ states.append(None)
+ if showall:
+ for tok, state in zip(tokens, states):
+ show_token(tok, state)
+ return 0
+
+
+def print_help():
+ print('''\
+Pygments development helper to quickly debug lexers.
+
+ scripts/debug_lexer.py [options] file ...
+
+Give one or more filenames to lex them and display possible error tokens
+and/or profiling info. Files are assumed to be encoded in UTF-8.
+
+Selecting lexer and options:
+
+ -l NAME use lexer named NAME (default is to guess from
+ the given filenames)
+ -O OPTIONSTR use lexer options parsed from OPTIONSTR
+
+Debugging lexing errors:
+
+ -n N show the last N tokens on error
+ -a always show all lexed tokens (default is only
+ to show them when an error occurs)
+
+Profiling:
+
+ -p use the ProfilingRegexLexer to profile regexes
+ instead of the debugging lexer
+ -s N sort profiling output by column N (default is
+ column 4, the time per call)
+''')
+
+num = 10
+showall = False
+lexer = None
+options = {}
+profile = False
+profsort = 4
+
+if __name__ == '__main__':
+ import getopt
+ opts, args = getopt.getopt(sys.argv[1:], 'n:l:apO:s:h')
+ for opt, val in opts:
+ if opt == '-n':
+ num = int(val)
+ elif opt == '-a':
+ showall = True
+ elif opt == '-l':
+ lexer = val
+ elif opt == '-p':
+ profile = True
+ elif opt == '-s':
+ profsort = int(val)
+ elif opt == '-O':
+ options = _parse_options([val])
+ elif opt == '-h':
+ print_help()
+ sys.exit(0)
+ ret = 0
+ if not args:
+ print_help()
+ for f in args:
+ ret += main(f, lexer, options)
+ sys.exit(bool(ret))
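The base-class swap performed by debug_lexer.py can be hard to follow from the diff alone; the sketch below (not part of the commit, assuming it is placed at the end of this script so RegexLexer and DebuggingRegexLexer are in scope, with PythonLexer chosen arbitrarily) shows what it amounts to for one lexer.

from pygments.lexers import PythonLexer

if PythonLexer.__bases__ == (RegexLexer,):
    # reparent the lexer so get_tokens_unprocessed records the state stack
    PythonLexer.__bases__ = (DebuggingRegexLexer,)

lx = PythonLexer()
for pos, tok, val in lx.get_tokens_unprocessed(u'def f(:\n'):
    # lx.ctx.stack is the lexer's state stack when each token was emitted
    print(pos, tok, repr(val), lx.ctx.stack)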
diff --git a/scripts/detect_missing_analyse_text.py b/scripts/detect_missing_analyse_text.py
index 1312648f..ab58558e 100644
--- a/scripts/detect_missing_analyse_text.py
+++ b/scripts/detect_missing_analyse_text.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
import sys
from pygments.lexers import get_all_lexers, find_lexer_class
@@ -9,22 +10,22 @@ def main():
for name, aliases, filenames, mimetypes in get_all_lexers():
cls = find_lexer_class(name)
if not cls.aliases:
- print cls, "has no aliases"
+ print(cls, "has no aliases")
for f in filenames:
if f not in uses:
uses[f] = []
uses[f].append(cls)
ret = 0
- for k, v in uses.iteritems():
+ for k, v in uses.items():
if len(v) > 1:
#print "Multiple for", k, v
for i in v:
if i.analyse_text is None:
- print i, "has a None analyse_text"
+ print(i, "has a None analyse_text")
ret |= 1
elif Lexer.analyse_text.__doc__ == i.analyse_text.__doc__:
- print i, "needs analyse_text, multiple lexers for", k
+ print(i, "needs analyse_text, multiple lexers for", k)
ret |= 2
return ret
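For context (a sketch, not part of the commit): the script above flags lexers that share a filename pattern without overriding analyse_text. A lexer resolves such ties by scoring the content, roughly as below; the class, pattern and scores are made up.

from pygments.lexer import RegexLexer
from pygments.token import Text

class ExampleIncLexer(RegexLexer):
    """Hypothetical lexer claiming the ambiguous '*.inc' filename pattern."""
    name = 'ExampleInc'
    aliases = ['exampleinc']
    filenames = ['*.inc']
    tokens = {'root': [(r'[\s\S]+', Text)]}

    def analyse_text(text):
        # no 'self' argument: LexerMeta wraps this with make_analysator()
        return 0.8 if text.lstrip().startswith('%example') else 0.0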
diff --git a/scripts/find_codetags.py b/scripts/find_codetags.py
deleted file mode 100755
index 2fb18333..00000000
--- a/scripts/find_codetags.py
+++ /dev/null
@@ -1,205 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
- Codetags finder
- ~~~~~~~~~~~~~~~
-
- Find code tags in specified files and/or directories
- and create a report in HTML format.
-
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import sys, os, re
-import getopt
-from os.path import join, abspath, isdir, isfile
-
-
-TAGS = set(('XXX', 'TODO', 'FIXME', 'HACK'))
-
-tag_re = re.compile(
- r'(?P<tag>\b' + r'\b|\b'.join(TAGS) + r'\b)\s*'
- r'(?: \( (?P<who> .*? ) \) )?'
- r'\s*:?\s* (?P<what> .*? ) \s* $',
- re.X)
-
-binary_re = re.compile('[\x00-\x06\x0E-\x1F]')
-
-
-def escape_html(text):
- return text.replace('&', '&amp;'). \
- replace('<', '&lt;'). \
- replace('>', '&gt;'). \
- replace('"', '&quot;')
-
-def process_file(store, filename):
- try:
- f = open(filename, 'r')
- except (IOError, OSError):
- return False
- llmatch = 0
- try:
- for lno, line in enumerate(f):
- # just some random heuristics to filter out binary files
- if lno < 100 and binary_re.search(line):
- return False
- m = tag_re.search(line)
- if m:
- store.setdefault(filename, []).append({
- 'lno': lno+1,
- 'tag': m.group('tag'),
- 'who': m.group('who') or '',
- 'what': escape_html(m.group('what')),
- })
- # 'what' cannot start at column 0
- llmatch = m.start('what')
- elif llmatch:
- # continuation lines
- # XXX: this is Python centric, doesn't work for
- # JavaScript, for example.
- if line[:llmatch].replace('#', '').isspace():
- cont = line[llmatch:].strip()
- if cont:
- store[filename][-1]['what'] += ' ' + escape_html(cont)
- continue
- llmatch = 0
- return True
- finally:
- f.close()
-
-
-def main():
- try:
- gopts, args = getopt.getopt(sys.argv[1:], "vo:i:")
- except getopt.GetoptError:
- print ("Usage: %s [-v] [-i ignoredir]* [-o reportfile.html] "
- "path ..." % sys.argv[0])
- return 2
- opts = {}
- for opt, val in gopts:
- if opt == '-i':
- val = abspath(val)
- opts.setdefault(opt, []).append(val)
-
- if not args:
- args = ['.']
-
- if '-o' in opts:
- output = abspath(opts['-o'][-1])
- else:
- output = abspath('tags.html')
-
- verbose = '-v' in opts
-
- store = {}
- gnum = 0
- num = 0
-
- for path in args:
- print "Searching for code tags in %s, please wait." % path
-
- if isfile(path):
- gnum += 1
- if process_file(store, path):
- if verbose:
- print path + ": found %d tags" % \
- (path in store and len(store[path]) or 0)
- num += 1
- else:
- if verbose:
- print path + ": binary or not readable"
- continue
- elif not isdir(path):
- continue
-
- for root, dirs, files in os.walk(path):
- if '-i' in opts and abspath(root) in opts['-i']:
- del dirs[:]
- continue
- if '.svn' in dirs:
- dirs.remove('.svn')
- for fn in files:
- gnum += 1
- if gnum % 50 == 0 and not verbose:
- sys.stdout.write('.')
- sys.stdout.flush()
-
- fn = join(root, fn)
-
- if fn.endswith('.pyc') or fn.endswith('.pyo'):
- continue
- elif '-i' in opts and abspath(fn) in opts['-i']:
- continue
- elif abspath(fn) == output:
- continue
-
- if fn[:2] == './': fn = fn[2:]
- if process_file(store, fn):
- if verbose:
- print fn + ": found %d tags" % \
- (fn in store and len(store[fn]) or 0)
- num += 1
- else:
- if verbose:
- print fn + ": binary or not readable"
- print
-
- print "Processed %d of %d files. Found %d tags in %d files." % (
- num, gnum, sum(len(fitem) for fitem in store.itervalues()), len(store))
-
- if not store:
- return 0
-
- HTML = '''\
-<html>
-<head>
-<title>Code tags report</title>
-<style type="text/css">
-body { font-family: Trebuchet MS,Verdana,sans-serif;
- width: 80%%; margin-left: auto; margin-right: auto; }
-table { width: 100%%; border-spacing: 0;
- border: 1px solid #CCC; }
-th { font-weight: bold; background-color: #DDD }
-td { padding: 2px 5px 2px 5px;
- vertical-align: top; }
-.tr0 { background-color: #EEEEEE; }
-.tr1 { background-color: #F6F6F6; }
-.tag { text-align: center; font-weight: bold; }
-.tr0 .tag { background-color: #FFEEEE; }
-.tr1 .tag { background-color: #FFDDDD; }
-.head { padding-top: 10px; font-size: 100%%; font-weight: bold }
-.XXX { color: #500; }
-.FIXME { color: red; }
-.TODO { color: #880; }
-</style>
-</head>
-<body>
-<h1>Code tags report for %s</h1>
-<table>
-<tr><th>Line</th><th>Tag</th><th>Who</th><th>Description</th></tr>
-%s
-</table>
-</body>
-</html>
-'''
-
- TABLE = '\n<tr><td class="head" colspan="4">File: %s</td>\n'
-
- TR = ('<tr class="tr%d"><td class="lno">%%(lno)d</td>'
- '<td class="tag %%(tag)s">%%(tag)s</td>'
- '<td class="who">%%(who)s</td><td class="what">%%(what)s</td></tr>')
-
- f = file(output, 'w')
- table = '\n'.join(TABLE % fname +
- '\n'.join(TR % (no % 2,) % entry
- for no, entry in enumerate(store[fname]))
- for fname in sorted(store))
- f.write(HTML % (', '.join(map(abspath, args)), table))
- f.close()
-
- print "Report written to %s." % output
- return 0
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/scripts/find_error.py b/scripts/find_error.py
index 00923569..ba0b76f1 100755..120000
--- a/scripts/find_error.py
+++ b/scripts/find_error.py
@@ -1,170 +1 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-"""
- Lexing error finder
- ~~~~~~~~~~~~~~~~~~~
-
- For the source files given on the command line, display
- the text where Error tokens are being generated, along
- with some context.
-
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import sys, os
-
-# always prefer Pygments from source if exists
-srcpath = os.path.join(os.path.dirname(__file__), '..')
-if os.path.isdir(os.path.join(srcpath, 'pygments')):
- sys.path.insert(0, srcpath)
-
-
-from pygments.lexer import RegexLexer
-from pygments.lexers import get_lexer_for_filename, get_lexer_by_name
-from pygments.token import Error, Text, _TokenType
-from pygments.cmdline import _parse_options
-
-
-class DebuggingRegexLexer(RegexLexer):
- """Make the state stack, position and current match instance attributes."""
-
- def get_tokens_unprocessed(self, text, stack=('root',)):
- """
- Split ``text`` into (tokentype, text) pairs.
-
- ``stack`` is the inital stack (default: ``['root']``)
- """
- self.pos = 0
- tokendefs = self._tokens
- self.statestack = list(stack)
- statetokens = tokendefs[self.statestack[-1]]
- while 1:
- for rexmatch, action, new_state in statetokens:
- self.m = m = rexmatch(text, self.pos)
- if m:
- if type(action) is _TokenType:
- yield self.pos, action, m.group()
- else:
- for item in action(self, m):
- yield item
- self.pos = m.end()
- if new_state is not None:
- # state transition
- if isinstance(new_state, tuple):
- for state in new_state:
- if state == '#pop':
- self.statestack.pop()
- elif state == '#push':
- self.statestack.append(self.statestack[-1])
- else:
- self.statestack.append(state)
- elif isinstance(new_state, int):
- # pop
- del self.statestack[new_state:]
- elif new_state == '#push':
- self.statestack.append(self.statestack[-1])
- else:
- assert False, 'wrong state def: %r' % new_state
- statetokens = tokendefs[self.statestack[-1]]
- break
- else:
- try:
- if text[self.pos] == '\n':
- # at EOL, reset state to 'root'
- self.pos += 1
- self.statestack = ['root']
- statetokens = tokendefs['root']
- yield self.pos, Text, u'\n'
- continue
- yield self.pos, Error, text[self.pos]
- self.pos += 1
- except IndexError:
- break
-
-
-def main(fn, lexer=None, options={}):
- if lexer is not None:
- lx = get_lexer_by_name(lexer)
- else:
- try:
- lx = get_lexer_for_filename(os.path.basename(fn), **options)
- except ValueError:
- try:
- name, rest = fn.split('_', 1)
- lx = get_lexer_by_name(name, **options)
- except ValueError:
- raise AssertionError('no lexer found for file %r' % fn)
- debug_lexer = False
- # does not work for e.g. ExtendedRegexLexers
- if lx.__class__.__bases__ == (RegexLexer,):
- lx.__class__.__bases__ = (DebuggingRegexLexer,)
- debug_lexer = True
- elif lx.__class__.__bases__ == (DebuggingRegexLexer,):
- # already debugged before
- debug_lexer = True
- lno = 1
- text = file(fn, 'U').read()
- text = text.strip('\n') + '\n'
- tokens = []
- states = []
-
- def show_token(tok, state):
- reprs = map(repr, tok)
- print ' ' + reprs[1] + ' ' + ' ' * (29-len(reprs[1])) + reprs[0],
- if debug_lexer:
- print ' ' + ' ' * (29-len(reprs[0])) + repr(state),
- print
-
- for type, val in lx.get_tokens(text):
- lno += val.count('\n')
- if type == Error:
- print 'Error parsing', fn, 'on line', lno
- print 'Previous tokens' + (debug_lexer and ' and states' or '') + ':'
- if showall:
- for tok, state in map(None, tokens, states):
- show_token(tok, state)
- else:
- for i in range(max(len(tokens) - num, 0), len(tokens)):
- show_token(tokens[i], states[i])
- print 'Error token:'
- l = len(repr(val))
- print ' ' + repr(val),
- if debug_lexer and hasattr(lx, 'statestack'):
- print ' ' * (60-l) + repr(lx.statestack),
- print
- print
- return 1
- tokens.append((type, val))
- if debug_lexer:
- if hasattr(lx, 'statestack'):
- states.append(lx.statestack[:])
- else:
- states.append(None)
- if showall:
- for tok, state in map(None, tokens, states):
- show_token(tok, state)
- return 0
-
-
-num = 10
-showall = False
-lexer = None
-options = {}
-
-if __name__ == '__main__':
- import getopt
- opts, args = getopt.getopt(sys.argv[1:], 'n:l:aO:')
- for opt, val in opts:
- if opt == '-n':
- num = int(val)
- elif opt == '-a':
- showall = True
- elif opt == '-l':
- lexer = val
- elif opt == '-O':
- options = _parse_options([val])
- ret = 0
- for f in args:
- ret += main(f, lexer, options)
- sys.exit(bool(ret))
+debug_lexer.py \ No newline at end of file
diff --git a/scripts/get_vimkw.py b/scripts/get_vimkw.py
index 153c88c3..45652740 100644
--- a/scripts/get_vimkw.py
+++ b/scripts/get_vimkw.py
@@ -1,13 +1,42 @@
+from __future__ import print_function
+
import re
-from pprint import pprint
+
+from pygments.util import format_lines
r_line = re.compile(r"^(syn keyword vimCommand contained|syn keyword vimOption "
r"contained|syn keyword vimAutoEvent contained)\s+(.*)")
r_item = re.compile(r"(\w+)(?:\[(\w+)\])?")
+HEADER = '''\
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._vim_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file is autogenerated by scripts/get_vimkw.py
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+# Split up in multiple functions so it's importable by jython, which has a
+# per-method size limit.
+'''
+
+METHOD = '''\
+def _get%(key)s():
+%(body)s
+ return var
+%(key)s = _get%(key)s()
+'''
+
def getkw(input, output):
out = file(output, 'w')
+ # Copy template from an existing file.
+ print(HEADER, file=out)
+
output_info = {'command': [], 'option': [], 'auto': []}
for line in file(input):
m = r_line.match(line)
@@ -29,15 +58,17 @@ def getkw(input, output):
output_info['option'].append("('inoremap','inoremap')")
output_info['option'].append("('vnoremap','vnoremap')")
- for a, b in output_info.items():
- b.sort()
- print >>out, '%s=[%s]' % (a, ','.join(b))
+ for key, keywordlist in output_info.items():
+ keywordlist.sort()
+ body = format_lines('var', keywordlist, raw=True, indent_level=1)
+ print(METHOD % locals(), file=out)
def is_keyword(w, keywords):
for i in range(len(w), 0, -1):
if w[:i] in keywords:
- return signals[w[:i]][:len(w)] == w
+ return keywords[w[:i]][:len(w)] == w
return False
if __name__ == "__main__":
- getkw("/usr/share/vim/vim73/syntax/vim.vim", "temp.py")
+ getkw("/usr/share/vim/vim74/syntax/vim.vim",
+ "pygments/lexers/_vim_builtins.py")
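To show what the METHOD template expands to (a sketch, not actual generated output from the commit), one regenerated block in pygments/lexers/_vim_builtins.py looks roughly like this; the two tuples are illustrative entries only.

def _getcommand():
    var = (
        ('abo','aboveleft'),
        ('al','all'),
    )
    return var
command = _getcommand()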
diff --git a/scripts/reindent.py b/scripts/reindent.py
deleted file mode 100755
index e6ee8287..00000000
--- a/scripts/reindent.py
+++ /dev/null
@@ -1,291 +0,0 @@
-#! /usr/bin/env python
-
-# Released to the public domain, by Tim Peters, 03 October 2000.
-# -B option added by Georg Brandl, 2006.
-
-"""reindent [-d][-r][-v] [ path ... ]
-
--d (--dryrun) Dry run. Analyze, but don't make any changes to files.
--r (--recurse) Recurse. Search for all .py files in subdirectories too.
--B (--no-backup) Don't write .bak backup files.
--v (--verbose) Verbose. Print informative msgs; else only names of changed files.
--h (--help) Help. Print this usage information and exit.
-
-Change Python (.py) files to use 4-space indents and no hard tab characters.
-Also trim excess spaces and tabs from ends of lines, and remove empty lines
-at the end of files. Also ensure the last line ends with a newline.
-
-If no paths are given on the command line, reindent operates as a filter,
-reading a single source file from standard input and writing the transformed
-source to standard output. In this case, the -d, -r and -v flags are
-ignored.
-
-You can pass one or more file and/or directory paths. When a directory
-path, all .py files within the directory will be examined, and, if the -r
-option is given, likewise recursively for subdirectories.
-
-If output is not to standard output, reindent overwrites files in place,
-renaming the originals with a .bak extension. If it finds nothing to
-change, the file is left alone. If reindent does change a file, the changed
-file is a fixed-point for future runs (i.e., running reindent on the
-resulting .py file won't change it again).
-
-The hard part of reindenting is figuring out what to do with comment
-lines. So long as the input files get a clean bill of health from
-tabnanny.py, reindent should do a good job.
-"""
-
-__version__ = "1"
-
-import tokenize
-import os
-import sys
-
-verbose = 0
-recurse = 0
-dryrun = 0
-no_backup = 0
-
-def usage(msg=None):
- if msg is not None:
- print >> sys.stderr, msg
- print >> sys.stderr, __doc__
-
-def errprint(*args):
- sep = ""
- for arg in args:
- sys.stderr.write(sep + str(arg))
- sep = " "
- sys.stderr.write("\n")
-
-def main():
- import getopt
- global verbose, recurse, dryrun, no_backup
-
- try:
- opts, args = getopt.getopt(sys.argv[1:], "drvhB",
- ["dryrun", "recurse", "verbose", "help",
- "no-backup"])
- except getopt.error, msg:
- usage(msg)
- return
- for o, a in opts:
- if o in ('-d', '--dryrun'):
- dryrun += 1
- elif o in ('-r', '--recurse'):
- recurse += 1
- elif o in ('-v', '--verbose'):
- verbose += 1
- elif o in ('-B', '--no-backup'):
- no_backup += 1
- elif o in ('-h', '--help'):
- usage()
- return
- if not args:
- r = Reindenter(sys.stdin)
- r.run()
- r.write(sys.stdout)
- return
- for arg in args:
- check(arg)
-
-def check(file):
- if os.path.isdir(file) and not os.path.islink(file):
- if verbose:
- print "listing directory", file
- names = os.listdir(file)
- for name in names:
- fullname = os.path.join(file, name)
- if ((recurse and os.path.isdir(fullname) and
- not os.path.islink(fullname))
- or name.lower().endswith(".py")):
- check(fullname)
- return
-
- if verbose:
- print "checking", file, "...",
- try:
- f = open(file)
- except IOError, msg:
- errprint("%s: I/O Error: %s" % (file, str(msg)))
- return
-
- r = Reindenter(f)
- f.close()
- if r.run():
- if verbose:
- print "changed."
- if dryrun:
- print "But this is a dry run, so leaving it alone."
- else:
- print "reindented", file, (dryrun and "(dry run => not really)" or "")
- if not dryrun:
- if not no_backup:
- bak = file + ".bak"
- if os.path.exists(bak):
- os.remove(bak)
- os.rename(file, bak)
- if verbose:
- print "renamed", file, "to", bak
- f = open(file, "w")
- r.write(f)
- f.close()
- if verbose:
- print "wrote new", file
- else:
- if verbose:
- print "unchanged."
-
-
-class Reindenter:
-
- def __init__(self, f):
- self.find_stmt = 1 # next token begins a fresh stmt?
- self.level = 0 # current indent level
-
- # Raw file lines.
- self.raw = f.readlines()
-
- # File lines, rstripped & tab-expanded. Dummy at start is so
- # that we can use tokenize's 1-based line numbering easily.
- # Note that a line is all-blank iff it's "\n".
- self.lines = [line.rstrip('\n \t').expandtabs() + "\n"
- for line in self.raw]
- self.lines.insert(0, None)
- self.index = 1 # index into self.lines of next line
-
- # List of (lineno, indentlevel) pairs, one for each stmt and
- # comment line. indentlevel is -1 for comment lines, as a
- # signal that tokenize doesn't know what to do about them;
- # indeed, they're our headache!
- self.stats = []
-
- def run(self):
- tokenize.tokenize(self.getline, self.tokeneater)
- # Remove trailing empty lines.
- lines = self.lines
- while lines and lines[-1] == "\n":
- lines.pop()
- # Sentinel.
- stats = self.stats
- stats.append((len(lines), 0))
- # Map count of leading spaces to # we want.
- have2want = {}
- # Program after transformation.
- after = self.after = []
- # Copy over initial empty lines -- there's nothing to do until
- # we see a line with *something* on it.
- i = stats[0][0]
- after.extend(lines[1:i])
- for i in range(len(stats)-1):
- thisstmt, thislevel = stats[i]
- nextstmt = stats[i+1][0]
- have = getlspace(lines[thisstmt])
- want = thislevel * 4
- if want < 0:
- # A comment line.
- if have:
- # An indented comment line. If we saw the same
- # indentation before, reuse what it most recently
- # mapped to.
- want = have2want.get(have, -1)
- if want < 0:
- # Then it probably belongs to the next real stmt.
- for j in xrange(i+1, len(stats)-1):
- jline, jlevel = stats[j]
- if jlevel >= 0:
- if have == getlspace(lines[jline]):
- want = jlevel * 4
- break
- if want < 0: # Maybe it's a hanging
- # comment like this one,
- # in which case we should shift it like its base
- # line got shifted.
- for j in xrange(i-1, -1, -1):
- jline, jlevel = stats[j]
- if jlevel >= 0:
- want = have + getlspace(after[jline-1]) - \
- getlspace(lines[jline])
- break
- if want < 0:
- # Still no luck -- leave it alone.
- want = have
- else:
- want = 0
- assert want >= 0
- have2want[have] = want
- diff = want - have
- if diff == 0 or have == 0:
- after.extend(lines[thisstmt:nextstmt])
- else:
- for line in lines[thisstmt:nextstmt]:
- if diff > 0:
- if line == "\n":
- after.append(line)
- else:
- after.append(" " * diff + line)
- else:
- remove = min(getlspace(line), -diff)
- after.append(line[remove:])
- return self.raw != self.after
-
- def write(self, f):
- f.writelines(self.after)
-
- # Line-getter for tokenize.
- def getline(self):
- if self.index >= len(self.lines):
- line = ""
- else:
- line = self.lines[self.index]
- self.index += 1
- return line
-
- # Line-eater for tokenize.
- def tokeneater(self, type, token, (sline, scol), end, line,
- INDENT=tokenize.INDENT,
- DEDENT=tokenize.DEDENT,
- NEWLINE=tokenize.NEWLINE,
- COMMENT=tokenize.COMMENT,
- NL=tokenize.NL):
-
- if type == NEWLINE:
- # A program statement, or ENDMARKER, will eventually follow,
- # after some (possibly empty) run of tokens of the form
- # (NL | COMMENT)* (INDENT | DEDENT+)?
- self.find_stmt = 1
-
- elif type == INDENT:
- self.find_stmt = 1
- self.level += 1
-
- elif type == DEDENT:
- self.find_stmt = 1
- self.level -= 1
-
- elif type == COMMENT:
- if self.find_stmt:
- self.stats.append((sline, -1))
- # but we're still looking for a new stmt, so leave
- # find_stmt alone
-
- elif type == NL:
- pass
-
- elif self.find_stmt:
- # This is the first "real token" following a NEWLINE, so it
- # must be the first token of the next program statement, or an
- # ENDMARKER.
- self.find_stmt = 0
- if line: # not endmarker
- self.stats.append((sline, self.level))
-
-# Count number of leading blanks.
-def getlspace(line):
- i, n = 0, len(line)
- while i < n and line[i] == " ":
- i += 1
- return i
-
-if __name__ == '__main__':
- main()
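
The reindent script removed above is driven by Python's tokenize module: a getline/tokeneater callback pair records, for every statement and comment line, the block nesting level implied by the INDENT/DEDENT tokens, and the file is then rewritten with four spaces per level. A minimal sketch of that bookkeeping on top of tokenize.generate_tokens (the function name and the toy input are illustrative, not part of the patch):

```
import io
import tokenize

def statement_indent_levels(source):
    # One (lineno, block_level) pair per statement line, mirroring what the
    # removed Reindenter.tokeneater collected in self.stats.  Comment lines,
    # which the original tagged with level -1, are skipped for brevity.
    level, find_stmt, stats = 0, True, []
    tokens = tokenize.generate_tokens(io.StringIO(source).readline)
    for toktype, _text, (srow, _scol), _end, line in tokens:
        if toktype == tokenize.NEWLINE:
            find_stmt = True                  # a fresh statement follows
        elif toktype == tokenize.INDENT:
            find_stmt, level = True, level + 1
        elif toktype == tokenize.DEDENT:
            find_stmt, level = True, level - 1
        elif toktype in (tokenize.COMMENT, tokenize.NL):
            pass                              # blank and comment lines are neutral
        elif find_stmt:
            find_stmt = False
            if line:                          # skip the ENDMARKER pseudo-line
                stats.append((srow, level))
    return stats

print(statement_indent_levels("if True:\n      x = 1\n"))   # [(1, 0), (2, 1)]
```
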
diff --git a/scripts/vim2pygments.py b/scripts/vim2pygments.py
index 80f0ada2..42af0bbe 100644..100755
--- a/scripts/vim2pygments.py
+++ b/scripts/vim2pygments.py
@@ -11,10 +11,12 @@
:license: BSD, see LICENSE for details.
"""
+from __future__ import print_function
+
import sys
import re
from os import path
-from cStringIO import StringIO
+from io import StringIO
split_re = re.compile(r'(?<!\\)\s+')
@@ -765,7 +767,7 @@ TOKENS = {
}
TOKEN_TYPES = set()
-for token in TOKENS.itervalues():
+for token in TOKENS.values():
if not isinstance(token, tuple):
token = (token,)
for token in token:
@@ -836,7 +838,7 @@ def find_colors(code):
colors['Normal']['bgcolor'] = bg_color
color_map = {}
- for token, styles in colors.iteritems():
+ for token, styles in colors.items():
if token in TOKENS:
tmp = []
if styles.get('noinherit'):
@@ -879,7 +881,7 @@ class StyleWriter(object):
def write(self, out):
self.write_header(out)
default_token, tokens = find_colors(self.code)
- tokens = tokens.items()
+ tokens = list(tokens.items())
tokens.sort(lambda a, b: cmp(len(a[0]), len(a[1])))
bg_color = [x[3:] for x in default_token.split() if x.startswith('bg:')]
if bg_color:
@@ -916,14 +918,14 @@ def convert(filename, stream=None):
def main():
if len(sys.argv) != 2 or sys.argv[1] in ('-h', '--help'):
- print 'Usage: %s <filename.vim>' % sys.argv[0]
+ print('Usage: %s <filename.vim>' % sys.argv[0])
return 2
if sys.argv[1] in ('-v', '--version'):
- print '%s %s' % (SCRIPT_NAME, SCRIPT_VERSION)
+ print('%s %s' % (SCRIPT_NAME, SCRIPT_VERSION))
return
filename = sys.argv[1]
if not (path.exists(filename) and path.isfile(filename)):
- print 'Error: %s not found' % filename
+ print('Error: %s not found' % filename)
return 1
convert(filename, sys.stdout)
sys.stdout.write('\n')
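
The vim2pygments changes above are a routine Python 2-to-3 port: print becomes a function, cStringIO gives way to io.StringIO, and itervalues()/iteritems() become values()/items(), wrapped in list() where an in-place sort follows. A small self-contained sketch of those idioms (the TOKENS data is made up, and the key= sort shown is the usual Python 3 replacement for cmp-style sorting):

```
from __future__ import print_function   # no-op on Python 3, enables print() on 2

from io import StringIO                 # replaces cStringIO.StringIO

TOKENS = {'statement': 'Keyword', 'conditional': ('Keyword', 'Operator')}

# values()/items() replace itervalues()/iteritems(); on Python 3 they return
# views, so build a list before sorting in place.
token_types = set()
for value in TOKENS.values():
    token_types.update(value if isinstance(value, tuple) else (value,))

pairs = list(TOKENS.items())
pairs.sort(key=lambda item: len(item[0]))   # key= instead of a cmp() comparator

buf = StringIO()
print('token types:', sorted(token_types), file=buf)
print(buf.getvalue().strip())
```
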
diff --git a/setup.py b/setup.py
index 17bbf814..951404e5 100755
--- a/setup.py
+++ b/setup.py
@@ -1,29 +1,22 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-"""
- Pygments
+"""Pygments
~~~~~~~~
Pygments is a syntax highlighting package written in Python.
- It is a generic syntax highlighter for general use in all kinds of software
- such as forum systems, wikis or other applications that need to prettify
- source code. Highlights are:
+ It is a generic syntax highlighter suitable for use in code hosting, forums,
+ wikis or other applications that need to prettify source code. Highlights
+ are:
- * a wide range of common languages and markup formats is supported
+ * a wide range of over 300 languages and other text formats is supported
* special attention is paid to details, increasing quality by a fair amount
* support for new languages and formats are added easily
* a number of output formats, presently HTML, LaTeX, RTF, SVG, all image \
formats that PIL supports and ANSI sequences
* it is usable as a command-line tool and as a library
- * ... and it highlights even Brainfuck!
-
- The `Pygments tip`_ is installable with ``easy_install Pygments==dev``.
-
- .. _Pygments tip:
- http://bitbucket.org/birkenfeld/pygments-main/get/default.zip#egg=Pygments-dev
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -31,21 +24,22 @@ try:
from setuptools import setup, find_packages
have_setuptools = True
except ImportError:
- from distutils.core import setup
- def find_packages():
- return [
- 'pygments',
- 'pygments.lexers',
- 'pygments.formatters',
- 'pygments.styles',
- 'pygments.filters',
- ]
- have_setuptools = False
-
-try:
- from distutils.command.build_py import build_py_2to3 as build_py
-except ImportError:
- from distutils.command.build_py import build_py
+ try:
+ import ez_setup
+ ez_setup.use_setuptools()
+ from setuptools import setup, find_packages
+ have_setuptools = True
+ except ImportError:
+ from distutils.core import setup
+ def find_packages(*args, **kwargs):
+ return [
+ 'pygments',
+ 'pygments.lexers',
+ 'pygments.formatters',
+ 'pygments.styles',
+ 'pygments.filters',
+ ]
+ have_setuptools = False
if have_setuptools:
add_keywords = dict(
@@ -60,7 +54,7 @@ else:
setup(
name = 'Pygments',
- version = '1.6',
+ version = '2.1a0',
url = 'http://pygments.org/',
license = 'BSD License',
author = 'Georg Brandl',
@@ -68,7 +62,7 @@ setup(
description = 'Pygments is a syntax highlighting package written in Python.',
long_description = __doc__,
keywords = 'syntax highlighting',
- packages = find_packages(),
+ packages = find_packages(exclude=['ez_setup']),
platforms = 'any',
zip_safe = False,
include_package_data = True,
@@ -85,6 +79,5 @@ setup(
'Topic :: Text Processing :: Filters',
'Topic :: Utilities',
],
- cmdclass = {'build_py': build_py},
**add_keywords
)
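
The core of the setup.py change is a three-stage import fallback: prefer an installed setuptools, otherwise bootstrap it through the bundled ez_setup, and only then drop back to distutils with a hand-written find_packages substitute. The same pattern in isolation, with placeholder package names, looks roughly like this:

```
try:
    from setuptools import setup, find_packages
    have_setuptools = True
except ImportError:
    try:
        import ez_setup                      # bundled bootstrap, if shipped
        ez_setup.use_setuptools()
        from setuptools import setup, find_packages
        have_setuptools = True
    except ImportError:
        from distutils.core import setup
        have_setuptools = False

        def find_packages(*args, **kwargs):
            # hand-maintained stand-in for setuptools.find_packages()
            return ['mypackage', 'mypackage.sub']

# have_setuptools is later used to pick between setuptools-only and plain
# distutils keyword arguments (the add_keywords dict in the diff above).
```
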
diff --git a/tests/examplefiles/99_bottles_of_beer.chpl b/tests/examplefiles/99_bottles_of_beer.chpl
new file mode 100644
index 00000000..3629028d
--- /dev/null
+++ b/tests/examplefiles/99_bottles_of_beer.chpl
@@ -0,0 +1,174 @@
+/***********************************************************************
+ * Chapel implementation of "99 bottles of beer"
+ *
+ * by Brad Chamberlain and Steve Deitz
+ * 07/13/2006 in Knoxville airport while waiting for flight home from
+ * HPLS workshop
+ * compiles and runs with chpl compiler version 1.7.0
+ * for more information, contact: chapel_info@cray.com
+ *
+ *
+ * Notes:
+ * o as in all good parallel computations, boundary conditions
+ * constitute the vast bulk of complexity in this code (invite Brad to
+ * tell you about his zany boundary condition simplification scheme)
+ * o uses type inference for variables, arguments
+ * o relies on integer->string coercions
+ * o uses named argument passing (for documentation purposes only)
+ ***********************************************************************/
+
+// allow executable command-line specification of number of bottles
+// (e.g., ./a.out -snumBottles=999999)
+config const numBottles = 99;
+const numVerses = numBottles+1;
+
+// a domain to describe the space of lyrics
+var LyricsSpace: domain(1) = {1..numVerses};
+
+// array of lyrics
+var Lyrics: [LyricsSpace] string;
+
+// parallel computation of lyrics array
+[verse in LyricsSpace] Lyrics(verse) = computeLyric(verse);
+
+// as in any good parallel language, I/O to stdout is serialized.
+// (Note that I/O to a file could be parallelized using a parallel
+// prefix computation on the verse strings' lengths with file seeking)
+writeln(Lyrics);
+
+
+// HELPER FUNCTIONS:
+
+proc computeLyric(verseNum) {
+ var bottleNum = numBottles - (verseNum - 1);
+ var nextBottle = (bottleNum + numVerses - 1)%numVerses;
+ return "\n" // disguise space used to separate elements in array I/O
+ + describeBottles(bottleNum, startOfVerse=true) + " on the wall, "
+ + describeBottles(bottleNum) + ".\n"
+ + computeAction(bottleNum)
+ + describeBottles(nextBottle) + " on the wall.\n";
+}
+
+
+proc describeBottles(bottleNum, startOfVerse:bool = false) {
+ // NOTE: bool should not be necessary here (^^^^); working around bug
+ var bottleDescription = if (bottleNum) then bottleNum:string
+ else (if startOfVerse then "N"
+ else "n")
+ + "o more";
+ return bottleDescription
+ + " bottle" + (if (bottleNum == 1) then "" else "s")
+ + " of beer";
+}
+
+
+proc computeAction(bottleNum) {
+ return if (bottleNum == 0) then "Go to the store and buy some more, "
+ else "Take one down and pass it around, ";
+}
+
+
+// Modules...
+module M1 {
+ var x = 10;
+}
+
+module M2 {
+ use M1;
+ proc main() {
+ writeln("M2 -> M1 -> x " + x);
+ }
+}
+
+
+// Classes, records, unions...
+const PI: real = 3.14159;
+
+record Point {
+ var x, y: real;
+}
+var p: Point;
+writeln("Distance from origin: " + sqrt(p.x ** 2 + p.y ** 2));
+p = new Point(1.0, 2.0);
+writeln("Distance from origin: " + sqrt(p.x ** 2 + p.y ** 2));
+
+class Circle {
+ var p: Point;
+ var r: real;
+}
+var c = new Circle(r=2.0);
+proc Circle.area()
+ return PI * r ** 2;
+writeln("Area of circle: " + c.area());
+
+class Oval: Circle {
+ var r2: real;
+}
+proc Oval.area()
+ return PI * r * r2;
+
+delete c;
+c = nil;
+c = new Oval(r=1.0, r2=2.0);
+writeln("Area of oval: " + c.area());
+
+// This is a valid decimal integer:
+var x = 0000000000012;
+
+union U {
+ var i: int;
+ var r: real;
+}
+
+// chapel ranges are awesome.
+var r1 = 1..10, // 1 2 3 4 5 6 7 8 9 10
+ r2 = 10..1, // no values in this range
+ r3 = 1..10 by -1, // 10 9 8 7 6 5 4 3 2 1
+ r4 = 1..10 by 2, // 1 3 5 7 9
+ r5 = 1..10 by 2 align 0, // 2 4 6 8 10
+ r6 = 1..10 by 2 align 2, // 2 4 6 8 10
+ r7 = 1..10 # 3, // 1 2 3
+ r8 = 1..10 # -2, // 9 10
+ r9 = 1..100 # 10 by 2, // 1 3 5 7 9
+ ra = 1..100 by 2 # 10, // 1 3 5 7 9 11 13 15 17 19
+ rb = 1.. # 100 by 10; // 1 11 21 31 41 51 61 71 81 91
+
+// create a variable without default initialization
+var myVarWithoutInit: real = noinit;
+myVarWithoutInit = 1.0;
+
+// Chapel has <~> operator for read and write I/O operations.
+class IntPair {
+ var x: int;
+ var y: int;
+ proc readWriteThis(f) {
+ f <~> x <~> new ioLiteral(",") <~> y <~> new ioNewline();
+ }
+}
+var ip = new IntPair(17,2);
+write(ip);
+
+var targetDom: {1..10},
+ target: [targetDom] int;
+coforall i in targetDom with (ref target) {
+ targetDom[i] = i ** 3;
+}
+
+var wideOpen = 0o777,
+ mememe = 0o600,
+ clique_y = 0O660,
+ zeroOct = 0o0,
+ minPosOct = 0O1;
+
+private module M3 {
+ private proc foo() {
+
+ }
+
+ private iter bar() {
+
+ }
+
+ private var x: int;
+
+} \ No newline at end of file
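
99_bottles_of_beer.chpl goes into tests/examplefiles, the directory of real-world sources that the test suite runs through their lexers. A sketch of how such a fixture can be pushed through the public API, assuming the *.chpl pattern is registered by the accompanying Chapel lexer (TerminalFormatter is just one convenient output target):

```
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_for_filename

path = 'tests/examplefiles/99_bottles_of_beer.chpl'
with open(path) as f:
    code = f.read()

lexer = get_lexer_for_filename(path, code)    # resolved via the *.chpl filename pattern
print(highlight(code, lexer, TerminalFormatter()))
```
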
diff --git a/tests/examplefiles/Blink.ino b/tests/examplefiles/Blink.ino
new file mode 100644
index 00000000..993bd743
--- /dev/null
+++ b/tests/examplefiles/Blink.ino
@@ -0,0 +1,24 @@
+/*
+ Blink
+ Turns on an LED on for one second, then off for one second, repeatedly.
+
+ This example code is in the public domain.
+ */
+
+// Pin 13 has an LED connected on most Arduino boards.
+// give it a name:
+int led = 13;
+
+// the setup routine runs once when you press reset:
+void setup() {
+ // initialize the digital pin as an output.
+ pinMode(led, OUTPUT);
+}
+
+// the loop routine runs over and over again forever:
+void loop() {
+ digitalWrite(led, HIGH); // turn the LED on (HIGH is the voltage level)
+ delay(1000); // wait for a second
+ digitalWrite(led, LOW); // turn the LED off by making the voltage LOW
+ delay(1000); // wait for a second
+}
diff --git a/tests/examplefiles/Error.pmod b/tests/examplefiles/Error.pmod
new file mode 100644
index 00000000..808ecb0e
--- /dev/null
+++ b/tests/examplefiles/Error.pmod
@@ -0,0 +1,38 @@
+#pike __REAL_VERSION__
+
+constant Generic = __builtin.GenericError;
+
+constant Index = __builtin.IndexError;
+
+constant BadArgument = __builtin.BadArgumentError;
+
+constant Math = __builtin.MathError;
+
+constant Resource = __builtin.ResourceError;
+
+constant Permission = __builtin.PermissionError;
+
+constant Decode = __builtin.DecodeError;
+
+constant Cpp = __builtin.CppError;
+
+constant Compilation = __builtin.CompilationError;
+
+constant MasterLoad = __builtin.MasterLoadError;
+
+constant ModuleLoad = __builtin.ModuleLoadError;
+
+//! Returns an Error object for any argument it receives. If the
+//! argument already is an Error object or is empty, it does nothing.
+object mkerror(mixed error)
+{
+ if (error == UNDEFINED)
+ return error;
+ if (objectp(error) && error->is_generic_error)
+ return error;
+ if (arrayp(error))
+ return Error.Generic(@error);
+ if (stringp(error))
+ return Error.Generic(error);
+ return Error.Generic(sprintf("%O", error));
+} \ No newline at end of file
diff --git a/tests/examplefiles/Errors.scala b/tests/examplefiles/Errors.scala
index 67198c05..7af70280 100644
--- a/tests/examplefiles/Errors.scala
+++ b/tests/examplefiles/Errors.scala
@@ -11,6 +11,11 @@ String
val foo_+ = "foo plus"
val foo_⌬⌬ = "double benzene"
+ // Test some interpolated strings
+ val mu = s"${if (true) "a:b" else "c" {with "braces"}}"
+ val mu2 = f"${if (true) "a:b" else "c" {with "braces"}}"
+ val raw = raw"a raw\nstring\"with escaped quotes"
+
def main(argv: Array[String]) {
println(⌘.interface + " " + foo_+ + " " + foo_⌬⌬ )
}
diff --git a/tests/examplefiles/FakeFile.pike b/tests/examplefiles/FakeFile.pike
new file mode 100644
index 00000000..48f3ea64
--- /dev/null
+++ b/tests/examplefiles/FakeFile.pike
@@ -0,0 +1,360 @@
+#pike __REAL_VERSION__
+
+//! A string wrapper that pretends to be a @[Stdio.File] object
+//! in addition to some features of a @[Stdio.FILE] object.
+
+
+//! This constant can be used to distinguish a FakeFile object
+//! from a real @[Stdio.File] object.
+constant is_fake_file = 1;
+
+protected string data;
+protected int ptr;
+protected int(0..1) r;
+protected int(0..1) w;
+protected int mtime;
+
+protected function read_cb;
+protected function read_oob_cb;
+protected function write_cb;
+protected function write_oob_cb;
+protected function close_cb;
+
+//! @seealso
+//! @[Stdio.File()->close()]
+int close(void|string direction) {
+ direction = lower_case(direction||"rw");
+ int cr = has_value(direction, "r");
+ int cw = has_value(direction, "w");
+
+ if(cr) {
+ r = 0;
+ }
+
+ if(cw) {
+ w = 0;
+ }
+
+ // FIXME: Close callback
+ return 1;
+}
+
+//! @decl void create(string data, void|string type, void|int pointer)
+//! @seealso
+//! @[Stdio.File()->create()]
+void create(string _data, void|string type, int|void _ptr) {
+ if(!_data) error("No data string given to FakeFile.\n");
+ data = _data;
+ ptr = _ptr;
+ mtime = time();
+ if(type) {
+ type = lower_case(type);
+ if(has_value(type, "r"))
+ r = 1;
+ if(has_value(type, "w"))
+ w = 1;
+ }
+ else
+ r = w = 1;
+}
+
+protected string make_type_str() {
+ string type = "";
+ if(r) type += "r";
+ if(w) type += "w";
+ return type;
+}
+
+//! @seealso
+//! @[Stdio.File()->dup()]
+this_program dup() {
+ return this_program(data, make_type_str(), ptr);
+}
+
+//! Always returns 0.
+//! @seealso
+//! @[Stdio.File()->errno()]
+int errno() { return 0; }
+
+//! Returns size and the creation time of the string.
+Stdio.Stat stat() {
+ Stdio.Stat st = Stdio.Stat();
+ st->size = sizeof(data);
+ st->mtime=st->ctime=mtime;
+ st->atime=time();
+ return st;
+}
+
+//! @seealso
+//! @[Stdio.File()->line_iterator()]
+String.SplitIterator line_iterator(int|void trim) {
+ if(trim)
+ return String.SplitIterator( data-"\r", '\n' );
+ return String.SplitIterator( data, '\n' );
+}
+
+protected mixed id;
+
+//! @seealso
+//! @[Stdio.File()->query_id()]
+mixed query_id() { return id; }
+
+//! @seealso
+//! @[Stdio.File()->set_id()]
+void set_id(mixed _id) { id = _id; }
+
+//! @seealso
+//! @[Stdio.File()->read_function()]
+function(:string) read_function(int nbytes) {
+ return lambda() { return read(nbytes); };
+}
+
+//! @seealso
+//! @[Stdio.File()->peek()]
+int(-1..1) peek(int|float|void timeout) {
+ if(!r) return -1;
+ if(ptr >= sizeof(data)) return 0;
+ return 1;
+}
+
+//! Always returns 0.
+//! @seealso
+//! @[Stdio.File()->query_address()]
+string query_address(void|int(0..1) is_local) { return 0; }
+
+//! @seealso
+//! @[Stdio.File()->read()]
+string read(void|int(0..) len, void|int(0..1) not_all) {
+ if(!r) return 0;
+ if (len < 0) error("Cannot read negative number of characters.\n");
+ int start=ptr;
+ ptr += len;
+ if(zero_type(len) || ptr>sizeof(data))
+ ptr = sizeof(data);
+
+ // FIXME: read callback
+ return data[start..ptr-1];
+}
+
+//! @seealso
+//! @[Stdio.FILE()->gets()]
+string gets() {
+ if(!r) return 0;
+ string ret;
+ sscanf(data,"%*"+(string)ptr+"s%[^\n]",ret);
+ if(ret)
+ {
+ ptr+=sizeof(ret)+1;
+ if(ptr>sizeof(data))
+ {
+ ptr=sizeof(data);
+ if(!sizeof(ret))
+ ret = 0;
+ }
+ }
+
+ // FIXME: read callback
+ return ret;
+}
+
+//! @seealso
+//! @[Stdio.FILE()->getchar()]
+int getchar() {
+ if(!r) return 0;
+ int c;
+ if(catch(c=data[ptr]))
+ c=-1;
+ else
+ ptr++;
+
+ // FIXME: read callback
+ return c;
+}
+
+//! @seealso
+//! @[Stdio.FILE()->unread()]
+void unread(string s) {
+ if(!r) return;
+ if(data[ptr-sizeof(s)..ptr-1]==s)
+ ptr-=sizeof(s);
+ else
+ {
+ data=s+data[ptr..];
+ ptr=0;
+ }
+}
+
+//! @seealso
+//! @[Stdio.File()->seek()]
+int seek(int pos, void|int mult, void|int add) {
+ if(mult)
+ pos = pos*mult+add;
+ if(pos<0)
+ {
+ pos = sizeof(data)+pos;
+ if( pos < 0 )
+ pos = 0;
+ }
+ ptr = pos;
+ if( ptr > strlen( data ) )
+ ptr = strlen(data);
+ return ptr;
+}
+
+//! Always returns 1.
+//! @seealso
+//! @[Stdio.File()->sync()]
+int(1..1) sync() { return 1; }
+
+//! @seealso
+//! @[Stdio.File()->tell()]
+int tell() { return ptr; }
+
+//! @seealso
+//! @[Stdio.File()->truncate()]
+int(0..1) truncate(int length) {
+ data = data[..length-1];
+ return sizeof(data)==length;
+}
+
+//! @seealso
+//! @[Stdio.File()->write()]
+int(-1..) write(string|array(string) str, mixed ... extra) {
+ if(!w) return -1;
+ if(arrayp(str)) str=str*"";
+ if(sizeof(extra)) str=sprintf(str, @extra);
+
+ if(ptr==sizeof(data)) {
+ data += str;
+ ptr = sizeof(data);
+ }
+ else if(sizeof(str)==1)
+ data[ptr++] = str[0];
+ else {
+ data = data[..ptr-1] + str + data[ptr+sizeof(str)..];
+ ptr += sizeof(str);
+ }
+
+ // FIXME: write callback
+ return sizeof(str);
+}
+
+//! @seealso
+//! @[Stdio.File()->set_blocking]
+void set_blocking() {
+ close_cb = 0;
+ read_cb = 0;
+ read_oob_cb = 0;
+ write_cb = 0;
+ write_oob_cb = 0;
+}
+
+//! @seealso
+//! @[Stdio.File()->set_blocking_keep_callbacks]
+void set_blocking_keep_callbacks() { }
+
+//! @seealso
+//! @[Stdio.File()->set_blocking]
+void set_nonblocking(function rcb, function wcb, function ccb,
+ function rocb, function wocb) {
+ read_cb = rcb;
+ write_cb = wcb;
+ close_cb = ccb;
+ read_oob_cb = rocb;
+ write_oob_cb = wocb;
+}
+
+//! @seealso
+//! @[Stdio.File()->set_blocking_keep_callbacks]
+void set_nonblocking_keep_callbacks() { }
+
+
+//! @seealso
+//! @[Stdio.File()->set_close_callback]
+void set_close_callback(function cb) { close_cb = cb; }
+
+//! @seealso
+//! @[Stdio.File()->set_read_callback]
+void set_read_callback(function cb) { read_cb = cb; }
+
+//! @seealso
+//! @[Stdio.File()->set_read_oob_callback]
+void set_read_oob_callback(function cb) { read_oob_cb = cb; }
+
+//! @seealso
+//! @[Stdio.File()->set_write_callback]
+void set_write_callback(function cb) { write_cb = cb; }
+
+//! @seealso
+//! @[Stdio.File()->set_write_oob_callback]
+void set_write_oob_callback(function cb) { write_oob_cb = cb; }
+
+
+//! @seealso
+//! @[Stdio.File()->query_close_callback]
+function query_close_callback() { return close_cb; }
+
+//! @seealso
+//! @[Stdio.File()->query_read_callback]
+function query_read_callback() { return read_cb; }
+
+//! @seealso
+//! @[Stdio.File()->query_read_oob_callback]
+function query_read_oob_callback() { return read_oob_cb; }
+
+//! @seealso
+//! @[Stdio.File()->query_write_callback]
+function query_write_callback() { return write_cb; }
+
+//! @seealso
+//! @[Stdio.File()->query_write_oob_callback]
+function query_write_oob_callback() { return write_oob_cb; }
+
+string _sprintf(int t) {
+ return t=='O' && sprintf("%O(%d,%O)", this_program, sizeof(data),
+ make_type_str());
+}
+
+
+// FakeFile specials.
+
+//! A FakeFile can be cast to a string.
+mixed cast(string to) {
+ switch(to) {
+ case "string": return data;
+ case "object": return this;
+ }
+ error("Can not cast object to %O.\n", to);
+}
+
+//! Sizeof on a FakeFile returns the size of its contents.
+int(0..) _sizeof() {
+ return sizeof(data);
+}
+
+//! @ignore
+
+#define NOPE(X) mixed X (mixed ... args) { error("This is a FakeFile. %s is not available.\n", #X); }
+NOPE(assign);
+NOPE(async_connect);
+NOPE(connect);
+NOPE(connect_unix);
+NOPE(open);
+NOPE(open_socket);
+NOPE(pipe);
+NOPE(tcgetattr);
+NOPE(tcsetattr);
+
+// Stdio.Fd
+NOPE(dup2);
+NOPE(lock); // We could implement this
+NOPE(mode); // We could implement this
+NOPE(proxy); // We could implement this
+NOPE(query_fd);
+NOPE(read_oob);
+NOPE(set_close_on_exec);
+NOPE(set_keepalive);
+NOPE(trylock); // We could implement this
+NOPE(write_oob);
+
+//! @endignore \ No newline at end of file
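
FakeFile wraps a Pike string in the Stdio.File interface: read, gets, seek, tell and write all operate on an in-memory buffer. The closest Python analogue is io.StringIO, the same class the vim2pygments port above switched to; a tiny sketch of the matching operations:

```
from io import StringIO

f = StringIO("fake file contents\n")
print(f.read(4))                 # 'fake'               -- like FakeFile->read(4)
print(f.tell())                  # 4                    -- like FakeFile->tell()
f.seek(0)                        # rewind               -- like FakeFile->seek(0)
print(f.readline().rstrip())     # 'fake file contents' -- like FakeFile->gets()
f.write("more")                  # write at the current position (the end here)
print(f.getvalue())
```
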
diff --git a/tests/examplefiles/all.nit b/tests/examplefiles/all.nit
new file mode 100644
index 00000000..d4e1ddfa
--- /dev/null
+++ b/tests/examplefiles/all.nit
@@ -0,0 +1,1986 @@
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Alexis Laferrière <alexis.laf@xymus.net>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import gtk
+
+class CalculatorContext
+ var result : nullable Float = null
+
+ var last_op : nullable Char = null
+
+ var current : nullable Float = null
+ var after_point : nullable Int = null
+
+ fun push_op( op : Char )
+ do
+ apply_last_op_if_any
+ if op == 'C' then
+ self.result = 0.0
+ last_op = null
+ else
+ last_op = op # store for next push_op
+ end
+
+ # prepare next current
+ after_point = null
+ current = null
+ end
+
+ fun push_digit( digit : Int )
+ do
+ var current = current
+ if current == null then current = 0.0
+
+ var after_point = after_point
+ if after_point == null then
+ current = current * 10.0 + digit.to_f
+ else
+ current = current + digit.to_f * 10.0.pow(after_point.to_f)
+ self.after_point -= 1
+ end
+
+ self.current = current
+ end
+
+ fun switch_to_decimals
+ do
+ if self.current == null then current = 0.0
+ if after_point != null then return
+
+ after_point = -1
+ end
+
+ fun apply_last_op_if_any
+ do
+ var op = last_op
+
+ var result = result
+ if result == null then result = 0.0
+
+ var current = current
+ if current == null then current = 0.0
+
+ if op == null then
+ result = current
+ else if op == '+' then
+ result = result + current
+ else if op == '-' then
+ result = result - current
+ else if op == '/' then
+ result = result / current
+ else if op == '*' then
+ result = result * current
+ end
+ self.result = result
+ self.current = null
+ end
+end
+
+class CalculatorGui
+ super GtkCallable
+
+ var win : GtkWindow
+ var container : GtkGrid
+
+ var lbl_disp : GtkLabel
+ var but_eq : GtkButton
+ var but_dot : GtkButton
+
+ var context = new CalculatorContext
+
+ redef fun signal( sender, user_data )
+ do
+ var after_point = context.after_point
+ if after_point == null then
+ after_point = 0
+ else
+ after_point = (after_point.abs)
+ end
+
+ if user_data isa Char then # is an operation
+ var c = user_data
+ if c == '.' then
+ but_dot.sensitive= false
+ context.switch_to_decimals
+ lbl_disp.text = "{context.current.to_i}."
+ else
+ but_dot.sensitive= true
+ context.push_op( c )
+
+ var s = context.result.to_precision_native(6)
+ var index : nullable Int = null
+ for i in s.length.times do
+ var chiffre = s.chars[i]
+ if chiffre == '0' and index == null then
+ index = i
+ else if chiffre != '0' then
+ index = null
+ end
+ end
+ if index != null then
+ s = s.substring(0, index)
+ if s.chars[s.length-1] == ',' then s = s.substring(0, s.length-1)
+ end
+ lbl_disp.text = s
+ end
+ else if user_data isa Int then # is a number
+ var n = user_data
+ context.push_digit( n )
+ lbl_disp.text = context.current.to_precision_native(after_point)
+ end
+ end
+
+ init
+ do
+ init_gtk
+
+ win = new GtkWindow( 0 )
+
+ container = new GtkGrid(5,5,true)
+ win.add( container )
+
+ lbl_disp = new GtkLabel( "_" )
+ container.attach( lbl_disp, 0, 0, 5, 1 )
+
+ # digits
+ for n in [0..9] do
+ var but = new GtkButton.with_label( n.to_s )
+ but.request_size( 64, 64 )
+ but.signal_connect( "clicked", self, n )
+ if n == 0 then
+ container.attach( but, 0, 4, 1, 1 )
+ else container.attach( but, (n-1)%3, 3-(n-1)/3, 1, 1 )
+ end
+
+ # operators
+ var r = 1
+ for op in ['+', '-', '*', '/' ] do
+ var but = new GtkButton.with_label( op.to_s )
+ but.request_size( 64, 64 )
+ but.signal_connect( "clicked", self, op )
+ container.attach( but, 3, r, 1, 1 )
+ r+=1
+ end
+
+ # =
+ but_eq = new GtkButton.with_label( "=" )
+ but_eq.request_size( 64, 64 )
+ but_eq.signal_connect( "clicked", self, '=' )
+ container.attach( but_eq, 4, 3, 1, 2 )
+
+ # .
+ but_dot = new GtkButton.with_label( "." )
+ but_dot.request_size( 64, 64 )
+ but_dot.signal_connect( "clicked", self, '.' )
+ container.attach( but_dot, 1, 4, 1, 1 )
+
+ #C
+ var but_c = new GtkButton.with_label( "C" )
+ but_c.request_size( 64, 64 )
+ but_c.signal_connect("clicked", self, 'C')
+ container.attach( but_c, 2, 4, 1, 1 )
+
+ win.show_all
+ end
+end
+
+# context tests
+var context = new CalculatorContext
+context.push_digit( 1 )
+context.push_digit( 2 )
+context.push_op( '+' )
+context.push_digit( 3 )
+context.push_op( '*' )
+context.push_digit( 2 )
+context.push_op( '=' )
+var r = context.result.to_precision( 2 )
+assert r == "30.00" else print r
+
+context = new CalculatorContext
+context.push_digit( 1 )
+context.push_digit( 4 )
+context.switch_to_decimals
+context.push_digit( 1 )
+context.push_op( '*' )
+context.push_digit( 3 )
+context.push_op( '=' )
+r = context.result.to_precision( 2 )
+assert r == "42.30" else print r
+
+context.push_op( '+' )
+context.push_digit( 1 )
+context.push_digit( 1 )
+context.push_op( '=' )
+r = context.result.to_precision( 2 )
+assert r == "53.30" else print r
+
+context = new CalculatorContext
+context.push_digit( 4 )
+context.push_digit( 2 )
+context.switch_to_decimals
+context.push_digit( 3 )
+context.push_op( '/' )
+context.push_digit( 3 )
+context.push_op( '=' )
+r = context.result.to_precision( 2 )
+assert r == "14.10" else print r
+
+#test multiple decimals
+context = new CalculatorContext
+context.push_digit( 5 )
+context.push_digit( 0 )
+context.switch_to_decimals
+context.push_digit( 1 )
+context.push_digit( 2 )
+context.push_digit( 3 )
+context.push_op( '+' )
+context.push_digit( 1 )
+context.push_op( '=' )
+r = context.result.to_precision( 3 )
+assert r == "51.123" else print r
+
+#test 'C' button
+context = new CalculatorContext
+context.push_digit( 1 )
+context.push_digit( 0 )
+context.push_op( '+' )
+context.push_digit( 1 )
+context.push_digit( 0 )
+context.push_op( '=' )
+context.push_op( 'C' )
+r = context.result.to_precision( 1 )
+assert r == "0.0" else print r
+
+# graphical application
+
+if "NIT_TESTING".environ != "true" then
+ var app = new CalculatorGui
+ run_gtk
+end
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Matthieu Lucas <lucasmatthieu@gmail.com>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This sample has been implemented to show you how simple it is to play
+# with native callbacks (C) from a high-level Nit program.
+
+module callback_chimpanze
+import callback_monkey
+
+class Chimpanze
+ super MonkeyActionCallable
+
+ fun create
+ do
+ var monkey = new Monkey
+ print "Hum, I'm sleeping ..."
+ # Invoke a method which will take some time to compute and
+ # will call back the wokeUp method with information.
+ # - Callback method defined in the MonkeyActionCallable interface
+ monkey.wokeUpAction(self, "Hey, I'm awake.")
+ end
+
+ # Inherited callback method, defined by the MonkeyActionCallable interface
+ # - Called back as the result of wokeUpAction
+ redef fun wokeUp( sender:Monkey, message:Object )
+ do
+ print message
+ end
+end
+
+var m = new Chimpanze
+m.create
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Matthieu Lucas <lucasmatthieu@gmail.com>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This sample has been implemented to show you how simple it is to play
+# with native callbacks (C) from a high-level Nit program.
+
+module callback_monkey
+
+in "C header" `{
+ #include <stdio.h>
+ #include <stdlib.h>
+
+ typedef struct {
+ int id;
+ int age;
+ } CMonkey;
+
+ typedef struct {
+ MonkeyActionCallable toCall;
+ Object message;
+ } MonkeyAction;
+`}
+
+in "C body" `{
+ // Method which reproduces a callback answer
+ // Please note that a function pointer is only used to reproduce the callback
+ void cbMonkey(CMonkey *mkey, void callbackFunc(CMonkey*, MonkeyAction*), MonkeyAction *data)
+ {
+ sleep(2);
+ callbackFunc( mkey, data );
+ }
+
+ // Called at the end of the background treatment; redirects to the Nit callback method
+ void nit_monkey_callback_func( CMonkey *mkey, MonkeyAction *data )
+ {
+ // To call your method, the signature must be written like this:
+ // <Interface Name>_<Method>...
+ MonkeyActionCallable_wokeUp( data->toCall, mkey, data->message );
+ }
+`}
+
+# Implementable interface to get callback in defined methods
+interface MonkeyActionCallable
+ fun wokeUp( sender:Monkey, message: Object) is abstract
+end
+
+# Defining my object type Monkey, which is, at a low level, a pointer to a C struct (CMonkey)
+extern class Monkey `{ CMonkey * `}
+
+ new `{
+ CMonkey *monkey = malloc( sizeof(CMonkey) );
+ monkey->age = 10;
+ monkey->id = 1;
+ return monkey;
+ `}
+
+ # Object method which will get a callback in the wokeUp method, defined in the MonkeyActionCallable interface
+ # Must be defined as a Nit/C method because of the C call inside
+ fun wokeUpAction( toCall: MonkeyActionCallable, message: Object ) is extern import MonkeyActionCallable.wokeUp `{
+
+ // Allocating memory to keep reference of received parameters :
+ // - Object receiver
+ // - Message
+ MonkeyAction *data = malloc( sizeof(MonkeyAction) );
+
+ // Incrementing reference counters to prevent them from being released
+ MonkeyActionCallable_incr_ref( toCall );
+ Object_incr_ref( message );
+
+ data->toCall = toCall;
+ data->message = message;
+
+ // Calling the method which reproduces a callback by passing:
+ // - Receiver
+ // - Function pointer back to the object's callback method
+ // - Data
+ cbMonkey( recv, &nit_monkey_callback_func, data );
+ `}
+end
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Implementation of circular lists
+# This example shows the usage of generics and, to some extent, the specialisation of collections.
+module circular_list
+
+# Sequences of elements implemented with a double-linked circular list
+class CircularList[E]
+ # Like standard Array or LinkedList, CircularList is a Sequence.
+ super Sequence[E]
+
+ # The first node of the list if any
+ # The special case of an empty list is handled by a null node
+ private var node: nullable CLNode[E] = null
+
+ redef fun iterator do return new CircularListIterator[E](self)
+
+ redef fun first do return self.node.item
+
+ redef fun push(e)
+ do
+ var new_node = new CLNode[E](e)
+ var n = self.node
+ if n == null then
+ # the first node
+ self.node = new_node
+ else
+ # not the first one, so attach nodes correctly.
+ var old_last_node = n.prev
+ new_node.next = n
+ new_node.prev = old_last_node
+ old_last_node.next = new_node
+ n.prev = new_node
+ end
+ end
+
+ redef fun pop
+ do
+ var n = self.node
+ assert n != null
+ var prev = n.prev
+ if prev == n then
+ # the only node
+ self.node = null
+ return n.item
+ end
+ # not the only one, so detach nodes correctly.
+ var prev_prev = prev.prev
+ n.prev = prev_prev
+ prev_prev.next = n
+ return prev.item
+ end
+
+ redef fun unshift(e)
+ do
+ # Circularity has benefits.
+ push(e)
+ self.node = self.node.prev
+ end
+
+ redef fun shift
+ do
+ # Circularity has benefits.
+ self.node = self.node.next
+ return self.pop
+ end
+
+ # Move the first element to the last position, the second to the first, etc.
+ fun rotate
+ do
+ var n = self.node
+ if n == null then return
+ self.node = n.next
+ end
+
+ # Reorder the list following the Josephus elimination process.
+ fun josephus(step: Int)
+ do
+ var res = new CircularList[E]
+ while not self.is_empty do
+ # count 'step'
+ for i in [1..step[ do self.rotate
+ # kill
+ var x = self.shift
+ res.add(x)
+ end
+ self.node = res.node
+ end
+end
+
+# Nodes of a CircularList
+private class CLNode[E]
+ # The current item
+ var item: E
+
+ # The next item in the circular list.
+ # Because of circularity, there is always a next;
+ # so by default let it be self
+ var next: CLNode[E] = self
+
+ # The previous item in the circular list.
+ # Coherence between next and previous nodes has to be maintained by the
+ # circular list.
+ var prev: CLNode[E] = self
+end
+
+# An iterator of a CircularList.
+private class CircularListIterator[E]
+ super IndexedIterator[E]
+
+ redef var index: Int
+
+ # The current node pointed.
+ # Is null if the list is empty.
+ var node: nullable CLNode[E]
+
+ # The list iterated.
+ var list: CircularList[E]
+
+ redef fun is_ok
+ do
+ # Empty lists are not OK.
+ # Pointing at the first node again is not OK.
+ return self.node != null and (self.index == 0 or self.node != self.list.node)
+ end
+
+ redef fun next
+ do
+ self.node = self.node.next
+ self.index += 1
+ end
+
+ redef fun item do return self.node.item
+
+ init(list: CircularList[E])
+ do
+ self.node = list.node
+ self.list = list
+ self.index = 0
+ end
+end
+
+var i = new CircularList[Int]
+i.add_all([1, 2, 3, 4, 5, 6, 7])
+print i.first
+print i.join(":")
+
+i.push(8)
+print i.shift
+print i.pop
+i.unshift(0)
+print i.join(":")
+
+i.josephus(3)
+print i.join(":")
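
The josephus(step) method above rotates step-1 times, then shifts off the front element, and repeats until the list is empty, collecting the removals as the new order. To make the elimination order concrete, here is a short Python sketch of the same process on a deque (deque.rotate stands in for CircularList.rotate; the function is illustrative, not part of the library):

```
from collections import deque

def josephus(items, step):
    # (step - 1) rotations bring the step-th element to the front; popleft
    # "kills" it -- the same loop as CircularList.josephus above.
    ring, order = deque(items), []
    while ring:
        ring.rotate(-(step - 1))
        order.append(ring.popleft())
    return order

print(josephus([1, 2, 3, 4, 5, 6, 7], 3))   # [3, 6, 2, 7, 5, 1, 4]
```
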
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This module beefs up the clock module by making clocks comparable.
+# It shows the usage of class refinement.
+module clock_more
+
+import clock
+
+redef class Clock
+ # Clock are now comparable
+ super Comparable
+
+ # Comparison of a clock only makes sense with another clock
+ redef type OTHER: Clock
+
+ redef fun <(o)
+ do
+ # Note: < is the only abstract method of Comparable.
+ # All other operators and methods rely on < and ==.
+ return self.total_minutes < o.total_minutes
+ end
+end
+
+var c1 = new Clock(8, 12)
+var c2 = new Clock(8, 13)
+var c3 = new Clock(9, 13)
+
+print "{c1}<{c2}? {c1<c2}"
+print "{c1}<={c2}? {c1<=c2}"
+print "{c1}>{c2}? {c1>c2}"
+print "{c1}>={c2}? {c1>=c2}"
+print "{c1}<=>{c2}? {c1<=>c2}"
+print "{c1},{c2}? max={c1.max(c2)} min={c1.min(c2)}"
+print "{c1}.is_between({c2}, {c3})? {c1.is_between(c2, c3)}"
+print "{c2}.is_between({c1}, {c3})? {c2.is_between(c1, c3)}"
+
+print "-"
+
+c1.minutes += 1
+
+print "{c1}<{c2}? {c1<c2}"
+print "{c1}<={c2}? {c1<=c2}"
+print "{c1}>{c2}? {c1>c2}"
+print "{c1}>={c2}? {c1>=c2}"
+print "{c1}<=>{c2}? {c1<=>c2}"
+print "{c1},{c2}? max={c1.max(c2)} min={c1.min(c2)}"
+print "{c1}.is_between({c2}, {c3})? {c1.is_between(c2, c3)}"
+print "{c2}.is_between({c1}, {c3})? {c2.is_between(c1, c3)}"
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This module provides a simple wall clock.
+# It is an example of getters and setters.
+# A beefed-up module is available in clock_more
+module clock
+
+# A simple wall clock with 60 minutes and 12 hours.
+class Clock
+ # total number of minutes from 0 to 719
+ var total_minutes: Int
+ # Note: only the read access is public, the write access is private.
+
+ # number of minutes in the current hour (from 0 to 59)
+ fun minutes: Int do return self.total_minutes % 60
+
+ # set the number of minutes in the current hour.
+ # if m < 0 or m >= 60, the hour will be changed accordingly
+ fun minutes=(m: Int) do self.total_minutes = self.hours * 60 + m
+
+ # number of hours (from 0 to 11)
+ fun hours: Int do return self.total_minutes / 60
+
+ # set the number of hours
+ # the minutes will not be updated
+ fun hours=(h: Int) do self.total_minutes = h * 60 + minutes
+
+ # the position of the hour arrow in the [0..60[ interval
+ fun hour_pos: Int do return total_minutes / 12
+
+ # reposition the hour arrow (from 0 to 59).
+ # the hours and the minutes will be updated.
+ fun hour_pos=(h: Int) do self.total_minutes = h * 12
+
+ redef fun to_s do return "{hours}:{minutes}"
+
+ fun reset(hours, minutes: Int) do self.total_minutes = hours*60 + minutes
+
+ init(hours, minutes: Int) do self.reset(hours, minutes)
+
+ redef fun ==(o)
+ do
+ # Note: o is a nullable Object, a type test is required
+ # Thanks to adaptive typing, there is no downcast
+ # i.e. the code is safe!
+ return o isa Clock and self.total_minutes == o.total_minutes
+ end
+end
+
+var c = new Clock(10,50)
+print "It's {c} o'clock."
+
+c.minutes += 22
+print "Now it's {c} o'clock."
+
+print "The short arrow in on the {c.hour_pos/5} and the long arrow in on the {c.minutes/5}."
+
+c.hours -= 2
+print "Now it's {c} o'clock."
+
+var c2 = new Clock(9, 11)
+print "It's {c2} on the second clock."
+print "The two clocks are synchronized: {c == c2}."
+c2.minutes += 1
+print "It's now {c2} on the second clock."
+print "The two clocks are synchronized: {c == c2}."
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Matthieu Lucas <lucasmatthieu@gmail.com>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Sample of the Curl module.
+module curl_http
+
+import curl
+
+# Small class to represent an Http Fetcher
+class MyHttpFetcher
+ super CurlCallbacks
+
+ var curl: Curl
+ var our_body: String = ""
+
+ init(curl: Curl) do self.curl = curl
+
+ # Release curl object
+ fun destroy do self.curl.destroy
+
+ # Header callback
+ redef fun header_callback(line: String) do
+ # We keep this callback silent for testing purposes
+ #if not line.has_prefix("Date:") then print "Header_callback : {line}"
+ end
+
+ # Body callback
+ redef fun body_callback(line: String) do self.our_body = "{self.our_body}{line}"
+
+ # Stream callback - note: no stream callback is registered here
+ redef fun stream_callback(buffer: String, size: Int, count: Int) do print "Stream_callback : {buffer} - {size} - {count}"
+end
+
+
+# Program
+if args.length < 2 then
+ print "Usage: curl_http <method wished [POST, GET, GET_FILE]> <target url>"
+else
+ var curl = new Curl
+ var url = args[1]
+ var request = new CurlHTTPRequest(url, curl)
+
+ # HTTP Get Request
+ if args[0] == "GET" then
+ request.verbose = false
+ var getResponse = request.execute
+
+ if getResponse isa CurlResponseSuccess then
+ print "Status code : {getResponse.status_code}"
+ print "Body : {getResponse.body_str}"
+ else if getResponse isa CurlResponseFailed then
+ print "Error code : {getResponse.error_code}"
+ print "Error msg : {getResponse.error_msg}"
+ end
+
+ # HTTP Post Request
+ else if args[0] == "POST" then
+ var myHttpFetcher = new MyHttpFetcher(curl)
+ request.delegate = myHttpFetcher
+
+ var postDatas = new HeaderMap
+ postDatas["Bugs Bunny"] = "Daffy Duck"
+ postDatas["Batman"] = "Robin likes special characters @#ùà!è§'(\"é&://,;<>∞~*"
+ postDatas["Batman"] = "Yes you can set multiple identical keys, but APACHE will consider only once, the last one"
+ request.datas = postDatas
+ request.verbose = false
+ var postResponse = request.execute
+
+ print "Our body from the callback : {myHttpFetcher.our_body}"
+
+ if postResponse isa CurlResponseSuccess then
+ print "*** Answer ***"
+ print "Status code : {postResponse.status_code}"
+ print "Body should be empty, because we decided to manage callbacks : {postResponse.body_str.length}"
+ else if postResponse isa CurlResponseFailed then
+ print "Error code : {postResponse.error_code}"
+ print "Error msg : {postResponse.error_msg}"
+ end
+
+ # HTTP Get to file Request
+ else if args[0] == "GET_FILE" then
+ var headers = new HeaderMap
+ headers["Accept"] = "Moo"
+ request.headers = headers
+ request.verbose = false
+ var downloadResponse = request.download_to_file(null)
+
+ if downloadResponse isa CurlFileResponseSuccess then
+ print "*** Answer ***"
+ print "Status code : {downloadResponse.status_code}"
+ print "Size downloaded : {downloadResponse.size_download}"
+ else if downloadResponse isa CurlResponseFailed then
+ print "Error code : {downloadResponse.error_code}"
+ print "Error msg : {downloadResponse.error_msg}"
+ end
+ # Program logic
+ else
+ print "Usage : Method[POST, GET, GET_FILE]"
+ end
+end
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Matthieu Lucas <lucasmatthieu@gmail.com>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Mail sender sample using the Curl module
+module curl_mail
+
+import curl
+
+var curl = new Curl
+var mail_request = new CurlMailRequest(curl)
+
+# Networks
+var response = mail_request.set_outgoing_server("smtps://smtp.example.org:465", "user@example.org", "mypassword")
+if response isa CurlResponseFailed then
+ print "Error code : {response.error_code}"
+ print "Error msg : {response.error_msg}"
+end
+
+# Headers
+mail_request.from = "Billy Bob"
+mail_request.to = ["user@example.org"]
+mail_request.cc = ["bob@example.org"]
+mail_request.bcc = null
+
+var headers_body = new HeaderMap
+headers_body["Content-Type:"] = "text/html; charset=\"UTF-8\""
+headers_body["Content-Transfer-Encoding:"] = "quoted-printable"
+mail_request.headers_body = headers_body
+
+# Content
+mail_request.body = "<h1>Here you can write HTML stuff.</h1>"
+mail_request.subject = "Hello From My Nit Program"
+
+# Others
+mail_request.verbose = false
+
+# Send mail
+response = mail_request.execute
+if response isa CurlResponseFailed then
+ print "Error code : {response.error_code}"
+ print "Error msg : {response.error_msg}"
+else if response isa CurlMailResponseSuccess then
+ print "Mail Sent"
+else
+ print "Unknown Curl Response type"
+end
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2012-2013 Alexis Laferrière <alexis.laf@xymus.net>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Draws an arithmetic operation to the terminal
+module draw_operation
+
+redef enum Int
+ fun n_chars: Int `{
+ int c;
+ if ( abs(recv) >= 10 )
+ c = 1+(int)log10f( (float)abs(recv) );
+ else
+ c = 1;
+ if ( recv < 0 ) c ++;
+ return c;
+ `}
+end
+
+redef enum Char
+ fun as_operator(a, b: Int): Int
+ do
+ if self == '+' then return a + b
+ if self == '-' then return a - b
+ if self == '*' then return a * b
+ if self == '/' then return a / b
+ if self == '%' then return a % b
+ abort
+ end
+
+ fun override_dispc: Bool
+ do
+ return self == '+' or self == '-' or self == '*' or self == '/' or self == '%'
+ end
+
+ fun lines(s: Int): Array[Line]
+ do
+ if self == '+' then
+ return [new Line(new P(0,s/2),1,0,s), new Line(new P(s/2,1),0,1,s-2)]
+ else if self == '-' then
+ return [new Line(new P(0,s/2),1,0,s)]
+ else if self == '*' then
+ var lines = new Array[Line]
+ for y in [1..s-1[ do
+ lines.add( new Line(new P(1,y), 1,0,s-2) )
+ end
+ return lines
+ else if self == '/' then
+ return [new Line(new P(s-1,0), -1,1, s )]
+ else if self == '%' then
+ var q4 = s/4
+ var lines = [new Line(new P(s-1,0),-1,1,s)]
+ for l in [0..q4[ do
+ lines.append([ new Line( new P(0,l), 1,0,q4), new Line( new P(s-1,s-1-l), -1,0,q4) ])
+ end
+ return lines
+ else if self == '1' then
+ return [new Line(new P(s/2,0), 0,1,s),new Line(new P(0,s-1),1,0,s),
+ new Line( new P(s/2,0),-1,1,s/2)]
+ else if self == '2' then
+ return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s/2),
+ new Line( new P(0,s-1),1,0,s), new Line( new P(0,s/2), 0,1,s/2),
+ new Line( new P(0,s/2), 1,0,s)]
+ else if self == '3' then
+ return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s),
+ new Line( new P(0,s-1),1,0,s), new Line( new P(0,s/2), 1,0,s)]
+ else if self == '4' then
+ return [new Line(new P(s-1,0),0,1,s), new Line( new P(0,0), 0,1,s/2),
+ new Line( new P(0,s/2), 1,0,s)]
+ else if self == '5' then
+ return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,s/2),0,1,s/2),
+ new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s/2),
+ new Line( new P(0,s/2), 1,0,s)]
+ else if self == '6' then
+ return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,s/2),0,1,s/2),
+ new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s),
+ new Line( new P(0,s/2), 1,0,s)]
+ else if self == '7' then
+ var tl = new P(0,0)
+ var tr = new P(s-1,0)
+ return [new Line(tl, 1,0,s), new Line(tr,-1,1,s)]
+ else if self == '8' then
+ return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s),
+ new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s),
+ new Line( new P(0,s/2), 1,0,s)]
+ else if self == '9' then
+ return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s),
+ new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s/2),
+ new Line( new P(0,s/2), 1,0,s)]
+ else if self == '0' then
+ return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s),
+ new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s)]
+ end
+ return new Array[Line]
+ end
+end
+
+class P
+ var x : Int
+ var y : Int
+end
+
+redef class String
+ # the hack flag works around a bug in the evaluation software
+ fun draw(dispc: Char, size, gap: Int, hack: Bool)
+ do
+ var w = size * length +(length-1)*gap
+ var h = size
+ var map = new Array[Array[Char]]
+ for x in [0..w[ do
+ map[x] = new Array[Char].filled_with( ' ', h )
+ end
+
+ var ci = 0
+ for c in self.chars do
+ var local_dispc
+ if c.override_dispc then
+ local_dispc = c
+ else
+ local_dispc = dispc
+ end
+
+ var lines = c.lines( size )
+ for line in lines do
+ var x = line.o.x+ci*size
+ x += ci*gap
+ var y = line.o.y
+ for s in [0..line.len[ do
+ assert map.length > x and map[x].length > y else print "setting {x},{y} as {local_dispc}"
+ map[x][y] = local_dispc
+ x += line.step_x
+ y += line.step_y
+ end
+ end
+
+ ci += 1
+ end
+
+ if hack then
+ for c in [0..size[ do
+ map[c][0] = map[map.length-size+c][0]
+ map[map.length-size+c][0] = ' '
+ end
+ end
+
+ for y in [0..h[ do
+ for x in [0..w[ do
+ printn map[x][y]
+ end
+ print ""
+ end
+ end
+end
+
+class Line
+ var o : P
+ var step_x : Int
+ var step_y : Int
+ var len : Int
+end
+
+var a
+var b
+var op_char
+var disp_char
+var disp_size
+var disp_gap
+
+if "NIT_TESTING".environ == "true" then
+ a = 567
+ b = 13
+ op_char = '*'
+ disp_char = 'O'
+ disp_size = 8
+ disp_gap = 1
+else
+ printn "Left operand: "
+ a = gets.to_i
+
+ printn "Right operand: "
+ b = gets.to_i
+
+ printn "Operator (+, -, *, /, %): "
+ op_char = gets.chars[0]
+
+ printn "Char to display: "
+ disp_char = gets.chars[0]
+
+ printn "Size of text: "
+ disp_size = gets.to_i
+
+ printn "Space between digits: "
+ disp_gap = gets.to_i
+end
+
+var result = op_char.as_operator( a, b )
+
+var len_a = a.n_chars
+var len_b = b.n_chars
+var len_res = result.n_chars
+var max_len = len_a.max( len_b.max( len_res ) ) + 1
+
+# draw first line
+var d = max_len - len_a
+var line_a = ""
+for i in [0..d[ do line_a += " "
+line_a += a.to_s
+line_a.draw( disp_char, disp_size, disp_gap, false )
+
+print ""
+# draw second line
+d = max_len - len_b-1
+var line_b = op_char.to_s
+for i in [0..d[ do line_b += " "
+line_b += b.to_s
+line_b.draw( disp_char, disp_size, disp_gap, false )
+
+# draw -----
+print ""
+for i in [0..disp_size*max_len+(max_len-1)*disp_gap] do
+ printn "_"
+end
+print ""
+print ""
+
+# draw result
+d = max_len - len_res
+var line_res = ""
+for i in [0..d[ do line_res += " "
+line_res += result.to_s
+line_res.draw( disp_char, disp_size, disp_gap, false )
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Alexis Laferrière <alexis.laf@xymus.net>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Example using the privileges module to drop privileges from root
+module drop_privileges
+
+import privileges
+
+# basic command line options
+var opts = new OptionContext
+var opt_ug = new OptionUserAndGroup.for_dropping_privileges
+opt_ug.mandatory = true
+opts.add_option(opt_ug)
+
+# parse and check command line options
+opts.parse(args)
+if not opts.errors.is_empty then
+ print opts.errors
+ print "Usage: drop_privileges [options]"
+ opts.usage
+ exit 1
+end
+
+# original user
+print "before {sys.uid}:{sys.gid}"
+
+# make the switch
+var user_group = opt_ug.value
+assert user_group != null
+user_group.drop_privileges
+
+# final user
+print "after {sys.uid}:{sys.egid}"
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2012-2013 Alexis Laferrière <alexis.laf@xymus.net>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This module illustrates some uses of the FFI, specifically
+# how to use extern methods, that is, how to implement a Nit method in C.
+module extern_methods
+
+redef enum Int
+ # Returns the self'th Fibonacci number
+ # implemented here in C for optimization purposes
+ fun fib : Int import fib `{
+ if ( recv < 2 )
+ return recv;
+ else
+ return Int_fib( recv-1 ) + Int_fib( recv-2 );
+ `}
+
+ # System call to sleep for "self" seconds
+ fun sleep `{
+ sleep( recv );
+ `}
+
+ # Return atan2l( self, x ) from libmath
+ fun atan_with( x : Int ) : Float `{
+ return atan2( recv, x );
+ `}
+
+ # This method calls back to Nit methods from C code
+ # From the C code it will use:
+ # * the local fib method
+ # * the + operator, a method of Int
+ # * to_s, a method of all objects
+ # * String.to_cstring, a method of String to return an equivalent char*
+ fun foo import fib, +, to_s, String.to_cstring `{
+ long recv_fib = Int_fib( recv );
+ long recv_plus_fib = Int__plus( recv, recv_fib );
+
+ String nit_string = Int_to_s( recv_plus_fib );
+ char *c_string = String_to_cstring( nit_string );
+
+ printf( "from C: self + fib(self) = %s\n", c_string );
+ `}
+
+ # Equivalent to foo but written in pure Nit
+ fun bar do print "from Nit: self + fib(self) = {self+self.fib}"
+end
+
+print 12.fib
+
+print "sleeping 1 second..."
+1.sleep
+
+print 100.atan_with( 200 )
+8.foo
+8.bar
+
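
extern_methods also goes the other way: atan_with is just a thin wrapper over the C library's atan2. For comparison, the ctypes route to the same libm call from Python, assuming a Unix-like system where ctypes.util.find_library can locate the math library:

```
import ctypes
import ctypes.util

# Load the C math library and declare atan2's signature before calling it.
libm = ctypes.CDLL(ctypes.util.find_library("m"))
libm.atan2.restype = ctypes.c_double
libm.atan2.argtypes = [ctypes.c_double, ctypes.c_double]

print(libm.atan2(100.0, 200.0))   # the same call the Nit atan_with wrapper makes
```
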
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2004-2008 Jean Privat <jean@pryen.org>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A simple example of refinement where a method is added to the integer class.
+module fibonacci
+
+redef class Int
+ # Calculate the self-th element of the fibonacci sequence.
+ fun fibonacci: Int
+ do
+ if self < 2 then
+ return 1
+ else
+ return (self-2).fibonacci + (self-1).fibonacci
+ end
+ end
+end
+
+# Print usage and exit.
+fun usage
+do
+	print "Usage: fibonacci <integer>"
+ exit 0
+end
+
+# Main part
+if args.length != 1 then
+ usage
+end
+print args.first.to_i.fibonacci
+print "hello world"
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import html
+
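+# Builds the Nit homepage as an HTML page using the HTMLPage class from the html module.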
+class NitHomepage
+ super HTMLPage
+
+ redef fun head do
+ add("meta").attr("charset", "utf-8")
+ add("title").text("Nit")
+ add("link").attr("rel", "icon").attr("href", "http://nitlanguage.org/favicon.ico").attr("type", "image/x-icon")
+ add("link").attr("rel", "stylesheet").attr("href", "http://nitlanguage.org/style.css").attr("type", "text/css")
+ add("link").attr("rel", "stylesheet").attr("href", "http://nitlanguage.org/local.css").attr("type", "text/css")
+ end
+
+ redef fun body do
+ open("article").add_class("page")
+ open("section").add_class("pageheader")
+ add_html("<a id='toptitle_first' class='toptitle'>the</a><a id='toptitle_second' class='toptitle' href=''>Nit</a><a id='toptitle_third' class='toptitle' href=''>Programming Language</a>")
+ open("header").add_class("header")
+ open("div").add_class("topsubtitle")
+ add("p").text("A Fun Language for Serious Programming")
+ close("div")
+ close("header")
+ close("section")
+
+ open("div").attr("id", "pagebody")
+ open("section").attr("id", "content")
+ add("h1").text("# What is Nit?")
+ add("p").text("Nit is an object-oriented programming language. The goal of Nit is to propose a robust statically typed programming language where structure is not a pain.")
+ add("p").text("So, what does the famous hello world program look like, in Nit?")
+ add_html("<pre><tt><span class='normal'>print </span><span class='string'>'Hello, World!'</span></tt></pre>")
+
+ add("h1").text("# Feature Highlights")
+ add("h2").text("Usability")
+ add("p").text("Nit's goal is to be usable by real programmers for real projects")
+
+ open("ul")
+ open("li")
+ add("a").attr("href", "http://en.wikipedia.org/wiki/KISS_principle").text("KISS principle")
+ close("li")
+ add("li").text("Script-like language without verbosity nor cryptic statements")
+ add("li").text("Painless static types: static typing should help programmers")
+ add("li").text("Efficient development, efficient execution, efficient evolution.")
+ close("ul")
+
+ add("h2").text("Robustness")
+ add("p").text("Nit will help you to write bug-free programs")
+
+ open("ul")
+ add("li").text("Strong static typing")
+ add("li").text("No more NullPointerException")
+ close("ul")
+
+ add("h2").text("Object-Oriented")
+ add("p").text("Nit's guideline is to follow the most powerful OO principles")
+
+ open("ul")
+ open("li")
+ add("a").attr("href", "./everything_is_an_object/").text("Everything is an object")
+ close("li")
+ open("li")
+ add("a").attr("href", "./multiple_inheritance/").text("Multiple inheritance")
+ close("li")
+ open("li")
+ add("a").attr("href", "./refinement/").text("Open classes")
+ close("li")
+ open("li")
+ add("a").attr("href", "./virtual_types/").text("Virtual types")
+ close("li")
+ close("ul")
+
+
+ add("h1").text("# Getting Started")
+ add("p").text("Get Nit from its Git repository:")
+
+ add_html("<pre><code>$ git clone http://nitlanguage.org/nit.git</code></pre>")
+ add("p").text("Build the compiler (may be long):")
+ add_html("<pre><code>$ cd nit\n")
+ add_html("$ make</code></pre>")
+ add("p").text("Compile a program:")
+ add_html("<pre><code>$ bin/nitc examples/hello_world.nit</code></pre>")
+ add("p").text("Execute the program:")
+ add_html("<pre><code>$ ./hello_world</code></pre>")
+ close("section")
+ close("div")
+ close("article")
+ end
+end
+
+var page = new NitHomepage
+page.write_to stdout
+page.write_to_file("nit.html")
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# An example that defines and uses stacks of integers.
+# The implementation is done with a simple linked list.
+# It features: free constructors, nullable types and some adaptive typing.
+module int_stack
+
+# A stack of integers implemented as a simple linked list.
+# Note that this is only a toy class: a real linked list would benefit from using
+# generics and from implementing interfaces, like Collection, from the standard library.
+class IntStack
+ # The head node of the list.
+ # Null means that the stack is empty.
+ private var head: nullable ISNode = null
+
+ # Add a new integer in the stack.
+ fun push(val: Int)
+ do
+ self.head = new ISNode(val, self.head)
+ end
+
+ # Remove and return the last pushed integer.
+ # Return null if the stack is empty.
+ fun pop: nullable Int
+ do
+ var head = self.head
+ if head == null then return null
+		# Note: the following is statically safe because of the
+		# previous 'if'.
+ var val = head.val
+ self.head = head.next
+ return val
+ end
+
+ # Return the sum of all integers of the stack.
+ # Return 0 if the stack is empty.
+ fun sumall: Int
+ do
+ var sum = 0
+ var cur = self.head
+ while cur != null do
+			# Note: the following is statically safe because of
+			# the condition of the 'while'.
+ sum += cur.val
+ cur = cur.next
+ end
+ return sum
+ end
+
+ # Note: Because all attributes have a default value, a free constructor
+ # "init()" is implicitly defined.
+end
+
+# A node of an IntStack
+private class ISNode
+ # The integer value stored in the node.
+ var val: Int
+
+ # The next node, if any.
+ var next: nullable ISNode
+
+ # Note: A free constructor "init(val: Int, next: nullable ISNode)" is
+ # implicitly defined.
+end
+
+var l = new IntStack
+l.push(1)
+l.push(2)
+l.push(3)
+
+print l.sumall
+
+# Note: the 'for' control structure cannot be used on IntStack in its current state.
+# Supporting it would require more advanced features.
+# However, why not use the 'loop' control structure?
+loop
+ var i = l.pop
+ if i == null then break
+ # The following is statically safe because of the previous 'if'.
+ print i * 10
+end
+
+# Note: 'or else' is used to provide an alternative to a null expression.
+l.push(5)
+print l.pop or else 0 # l.pop gives 5, so print 5
+print l.pop or else 0 # l.pop gives null, so print the alternative: 0
+
+
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2014 Alexis Laferrière <alexis.laf@xymus.net>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Basic example of OpenGL ES 2.0 usage from the book OpenGL ES 2.0 Programming Guide.
+#
+# Code reference:
+# https://code.google.com/p/opengles-book-samples/source/browse/trunk/LinuxX11/Chapter_2/Hello_Triangle/Hello_Triangle.c
+module opengles2_hello_triangle
+
+import glesv2
+import egl
+import mnit_linux # for sdl
+import x11
+
+if "NIT_TESTING".environ == "true" then exit(0)
+
+var window_width = 800
+var window_height = 600
+
+#
+## SDL
+#
+var sdl_display = new SDLDisplay(window_width, window_height)
+var sdl_wm_info = new SDLSystemWindowManagerInfo
+var x11_window_handle = sdl_wm_info.x11_window_handle
+
+#
+## X11
+#
+var x_display = x_open_default_display
+assert x_display != 0 else print "x11 fail"
+
+#
+## EGL
+#
+var egl_display = new EGLDisplay(x_display)
+assert egl_display.is_valid else print "EGL display is not valid"
+egl_display.initialize
+
+print "EGL version: {egl_display.version}"
+print "EGL vendor: {egl_display.vendor}"
+print "EGL extensions: {egl_display.extensions.join(", ")}"
+print "EGL client APIs: {egl_display.client_apis.join(", ")}"
+
+assert egl_display.is_valid else print egl_display.error
+
+var config_chooser = new EGLConfigChooser
+#config_chooser.surface_type_egl
+config_chooser.blue_size = 8
+config_chooser.green_size = 8
+config_chooser.red_size = 8
+#config_chooser.alpha_size = 8
+#config_chooser.depth_size = 8
+#config_chooser.stencil_size = 8
+#config_chooser.sample_buffers = 1
+config_chooser.close
+
+var configs = config_chooser.choose(egl_display)
+assert configs != null else print "choosing config failed: {egl_display.error}"
+assert not configs.is_empty else print "no EGL config"
+
+print "{configs.length} EGL configs available"
+for config in configs do
+ var attribs = config.attribs(egl_display)
+ print "* caveats: {attribs.caveat}"
+ print " conformant to: {attribs.conformant}"
+ print " size of RGBA: {attribs.red_size} {attribs.green_size} {attribs.blue_size} {attribs.alpha_size}"
+ print " buffer, depth, stencil: {attribs.buffer_size} {attribs.depth_size} {attribs.stencil_size}"
+end
+
+var config = configs.first
+
+var format = config.attribs(egl_display).native_visual_id
+
+# TODO android part
+# Opengles1Display_midway_init(recv, format);
+
+var surface = egl_display.create_window_surface(config, x11_window_handle, [0])
+assert surface.is_ok else print egl_display.error
+
+var context = egl_display.create_context(config)
+assert context.is_ok else print egl_display.error
+
+var make_current_res = egl_display.make_current(surface, surface, context)
+assert make_current_res
+
+var width = surface.attribs(egl_display).width
+var height = surface.attribs(egl_display).height
+print "Width: {width}"
+print "Height: {height}"
+
+assert egl_bind_opengl_es_api else print "eglBindAPI failed: {egl_display.error}"
+
+#
+## GLESv2
+#
+
+print "Can compile shaders? {gl_shader_compiler}"
+assert_no_gl_error
+
+assert gl_shader_compiler else print "Cannot compile shaders"
+
+# gl program
+print gl_error.to_s
+var program = new GLProgram
+if not program.is_ok then
+ print "Program is not ok: {gl_error.to_s}\nLog:"
+ print program.info_log
+ abort
+end
+assert_no_gl_error
+
+# vertex shader
+var vertex_shader = new GLVertexShader
+assert vertex_shader.is_ok else print "Vertex shader is not ok: {gl_error}"
+vertex_shader.source = """
+attribute vec4 vPosition;
+void main()
+{
+ gl_Position = vPosition;
+} """
+vertex_shader.compile
+assert vertex_shader.is_compiled else print "Vertex shader compilation failed with: {vertex_shader.info_log} {program.info_log}"
+assert_no_gl_error
+
+# fragment shader
+var fragment_shader = new GLFragmentShader
+assert fragment_shader.is_ok else print "Fragment shader is not ok: {gl_error}"
+fragment_shader.source = """
+precision mediump float;
+void main()
+{
+ gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
+}
+"""
+fragment_shader.compile
+assert fragment_shader.is_compiled else print "Fragment shader compilation failed with: {fragment_shader.info_log}"
+assert_no_gl_error
+
+program.attach_shader vertex_shader
+program.attach_shader fragment_shader
+program.bind_attrib_location(0, "vPosition")
+program.link
+assert program.is_linked else print "Linking failed: {program.info_log}"
+assert_no_gl_error
+
+# draw!
+var vertices = [0.0, 0.5, 0.0, -0.5, -0.5, 0.0, 0.5, -0.5, 0.0]
+var vertex_array = new VertexArray(0, 3, vertices)
+vertex_array.attrib_pointer
+gl_clear_color(0.5, 0.0, 0.5, 1.0)
+for i in [0..10000[ do
+ printn "."
+ assert_no_gl_error
+ gl_viewport(0, 0, width, height)
+ gl_clear_color_buffer
+ program.use
+ vertex_array.enable
+ vertex_array.draw_arrays_triangles
+ egl_display.swap_buffers(surface)
+end
+
+# delete
+program.delete
+vertex_shader.delete
+fragment_shader.delete
+
+#
+## EGL
+#
+# close
+egl_display.make_current(new EGLSurface.none, new EGLSurface.none, new EGLContext.none)
+egl_display.destroy_context(context)
+egl_display.destroy_surface(surface)
+
+#
+## SDL
+#
+# close
+sdl_display.destroy
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2004-2008 Jean Privat <jean@pryen.org>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# How to print arguments of the command line.
+module print_arguments
+
+for a in args do
+ print a
+end
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2004-2008 Jean Privat <jean@pryen.org>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A procedural program (without explicit class definition).
+# This program manipulates arrays of integers.
+module procedural_array
+
+# The sum of the elements of `a'.
+# Uses a 'for' control structure.
+fun array_sum(a: Array[Int]): Int
+do
+ var sum = 0
+ for i in a do
+ sum = sum + i
+ end
+ return sum
+end
+
+# The sum of the elements of `a' (alternative version).
+# Uses a 'while' control structure.
+fun array_sum_alt(a: Array[Int]): Int
+do
+ var sum = 0
+ var i = 0
+ while i < a.length do
+ sum = sum + a[i]
+ i = i + 1
+ end
+ return sum
+end
+
+# The main part of the program.
+var a = [10, 5, 8, 9]
+print(array_sum(a))
+print(array_sum_alt(a))
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Matthieu Lucas <lucasmatthieu@gmail.com>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Client sample using the Socket module; it connects to the server sample.
+module socket_client
+
+import socket
+
+if args.length < 2 then
+ print "Usage : socket_client <host> <port>"
+ return
+end
+
+var s = new Socket.client(args[0], args[1].to_i)
+print "[HOST ADDRESS] : {s.address}"
+print "[HOST] : {s.host}"
+print "[PORT] : {s.port}"
+print "Connecting ... {s.connected}"
+if s.connected then
+ print "Writing ... Hello server !"
+ s.write("Hello server !")
+ print "[Response from server] : {s.read(100)}"
+ print "Closing ..."
+ s.close
+end
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Matthieu Lucas <lucasmatthieu@gmail.com>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Server sample using the Socket module; it allows clients to connect.
+module socket_server
+
+import socket
+
+if args.is_empty then
+ print "Usage : socket_server <port>"
+ return
+end
+
+var socket = new Socket.server(args[0].to_i, 1)
+print "[PORT] : {socket.port.to_s}"
+
+var clients = new Array[Socket]
+var max = socket
+loop
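+	# Each iteration, build a fresh observer and register the server socket and every client for reading.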
+ var fs = new SocketObserver(true, true, true)
+ fs.readset.set(socket)
+
+ for c in clients do fs.readset.set(c)
+
+ if fs.select(max, 4, 0) == 0 then
+		print "Error occurred in select {sys.errno.strerror}"
+ break
+ end
+
+ if fs.readset.is_set(socket) then
+ var ns = socket.accept
+ print "Accepting {ns.address} ... "
+ print "[Message from {ns.address}] : {ns.read(100)}"
+ ns.write("Goodbye client.")
+ print "Closing {ns.address} ..."
+ ns.close
+ end
+end
+
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import template
+
+### Here, the definitions of the specific templates
+
+# The root template for composers
+class TmplComposers
+ super Template
+
+ # Short list of composers
+ var composers = new Array[TmplComposer]
+
+	# Detailed list of composers
+ var composer_details = new Array[TmplComposerDetail]
+
+ # Add a composer in both lists
+ fun add_composer(firstname, lastname: String, birth, death: Int)
+ do
+ composers.add(new TmplComposer(lastname))
+ composer_details.add(new TmplComposerDetail(firstname, lastname, birth, death))
+ end
+
+ redef fun rendering do
+ add """
+COMPOSERS
+=========
+"""
+ add_all composers
+ add """
+
+DETAILS
+=======
+"""
+ add_all composer_details
+ end
+end
+
+# A composer in the short list of composers
+class TmplComposer
+ super Template
+
+ # Short name
+ var name: String
+
+ init(name: String) do self.name = name
+
+ redef fun rendering do add "- {name}\n"
+end
+
+# A composer in the detailed list of composers
+class TmplComposerDetail
+ super Template
+
+ var firstname: String
+ var lastname: String
+ var birth: Int
+ var death: Int
+
+ init(firstname, lastname: String, birth, death: Int) do
+ self.firstname = firstname
+ self.lastname = lastname
+ self.birth = birth
+ self.death = death
+ end
+
+ redef fun rendering do add """
+
+COMPOSER: {{{firstname}}} {{{lastname}}}
+BIRTH...: {{{birth}}}
+DEATH...: {{{death}}}
+"""
+
+end
+
+### Here, a simple usage of the templates
+
+var f = new TmplComposers
+f.add_composer("Johann Sebastian", "Bach", 1685, 1750)
+f.add_composer("George Frideric", "Handel", 1685, 1759)
+f.add_composer("Wolfgang Amadeus", "Mozart", 1756, 1791)
+f.write_to(stdout)
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2014 Lucas Bajolet <r4pass@hotmail.com>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Sample module for a minimal chat server using Websockets on port 8088
+module websocket_server
+
+import websocket
+
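+# Open the chat server socket on port 8088; the second argument is assumed to be the accept backlog.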
+var sock = new WebSocket(8088, 1)
+
+var msg: String
+
+if sock.listener.eof then
+ print sys.errno.strerror
+end
+
+sock.accept
+
+while not sock.listener.eof do
+ if not sock.connected then sock.accept
+ if sys.stdin.poll_in then
+ msg = gets
+ printn "Received message : {msg}"
+ if msg == "exit" then sock.close
+ if msg == "disconnect" then sock.disconnect_client
+ sock.write(msg)
+ end
+ if sock.can_read(10) then
+ msg = sock.read_line
+ if msg != "" then print msg
+ end
+end
+
diff --git a/tests/examplefiles/ANTLRv3.g b/tests/examplefiles/antlr_ANTLRv3.g
index fbe6d654..fbe6d654 100644
--- a/tests/examplefiles/ANTLRv3.g
+++ b/tests/examplefiles/antlr_ANTLRv3.g
diff --git a/tests/examplefiles/autoit_submit.au3 b/tests/examplefiles/autoit_submit.au3
index e5054dea..84fb7150 100644
--- a/tests/examplefiles/autoit_submit.au3
+++ b/tests/examplefiles/autoit_submit.au3
@@ -16,8 +16,10 @@ _IEFormElementOptionSelect ($oSelect, "S2", 1, "byText")
;options raido
_IEFormElementRadioSelect($oForm, "2nd", "type", 1, "byValue")
+#cs
ConsoleWrite(@Error)
Sleep(10000)
+#ce
_IEFormSubmit($oForm, 0)
_IELoadWait($oIE)
Sleep(60000)
diff --git a/tests/examplefiles/automake.mk b/tests/examplefiles/automake.mk
new file mode 100644
index 00000000..47a50a38
--- /dev/null
+++ b/tests/examplefiles/automake.mk
@@ -0,0 +1,7 @@
+if DEBUG
+DBG = debug
+else
+DBG =
+endif
+noinst_PROGRAMS = $(DBG)
+
diff --git a/tests/examplefiles/clojure-weird-keywords.clj b/tests/examplefiles/clojure-weird-keywords.clj
new file mode 100644
index 00000000..2d914c59
--- /dev/null
+++ b/tests/examplefiles/clojure-weird-keywords.clj
@@ -0,0 +1,5 @@
+; Note: the clojure lexer is here, and is a good deal more liberal than the language spec:
+; https://github.com/clojure/clojure/blob/master/src/jvm/clojure/lang/LispReader.java#L62
+
+(defn valid [#^java.lang.reflect.Method meth]
+ [:keyword :#initial-hash :h#sh-in-middle :hash-at-end# #js {:keyword "value"}])
diff --git a/tests/examplefiles/core.cljs b/tests/examplefiles/core.cljs
new file mode 100644
index 00000000..f135b832
--- /dev/null
+++ b/tests/examplefiles/core.cljs
@@ -0,0 +1,52 @@
+
+(ns bounder.core
+ (:require [bounder.html :as html]
+ [domina :refer [value set-value! single-node]]
+ [domina.css :refer [sel]]
+ [lowline.functions :refer [debounce]]
+ [enfocus.core :refer [at]]
+ [cljs.reader :as reader]
+ [clojure.string :as s])
+ (:require-macros [enfocus.macros :as em]))
+
+(def filter-input
+ (single-node
+ (sel ".search input")))
+
+(defn project-matches [query project]
+ (let [words (cons (:name project)
+ (map name (:categories project)))
+ to-match (->> words
+ (s/join "")
+ (s/lower-case))]
+ (<= 0 (.indexOf to-match (s/lower-case query)))))
+
+(defn apply-filter-for [projects]
+ (let [query (value filter-input)]
+ (html/render-projects
+ (filter (partial project-matches query)
+ projects))))
+
+(defn filter-category [projects evt]
+ (let [target (.-currentTarget evt)]
+ (set-value! filter-input
+ (.-innerHTML target))
+ (apply-filter-for projects)))
+
+(defn init-listeners [projects]
+ (at js/document
+ ["input"] (em/listen
+ :keyup
+ (debounce
+ (partial apply-filter-for projects)
+ 500))
+ [".category-links li"] (em/listen
+ :click
+ (partial filter-category projects))))
+
+(defn init [projects-edn]
+ (let [projects (reader/read-string projects-edn)]
+ (init-listeners projects)
+ (html/render-projects projects)
+ (html/loaded)))
+
diff --git a/tests/examplefiles/demo.cfm b/tests/examplefiles/demo.cfm
index d94a06a0..78098c05 100644
--- a/tests/examplefiles/demo.cfm
+++ b/tests/examplefiles/demo.cfm
@@ -1,4 +1,11 @@
<!--- cfcomment --->
+<!--- nested <!--- cfcomment ---> --->
+<!--- multi-line
+nested
+<!---
+cfcomment
+--->
+--->
<!-- html comment -->
<html>
<head>
@@ -17,6 +24,9 @@
#IsDate("foo")#<br />
#DaysInMonth(RightNow)#
</cfoutput>
+<cfset x="x">
+<cfset y="y">
+<cfset z="z">
<cfoutput group="x">
#x#
<cfoutput>#y#</cfoutput>
@@ -29,10 +39,12 @@
<cfset greeting = "Hello #person#">
<cfset greeting = "Hello" & " world!">
+<cfset a = 5>
+<cfset b = 10>
<cfset c = a^b>
<cfset c = a MOD b>
<cfset c = a / b>
<cfset c = a * b>
<cfset c = a + b>
<cfset c = a - b>
-
+<!--- <!-- another <!--- nested --> ---> comment --->
diff --git a/tests/examplefiles/demo.css.in b/tests/examplefiles/demo.css.in
new file mode 100644
index 00000000..36330a9d
--- /dev/null
+++ b/tests/examplefiles/demo.css.in
@@ -0,0 +1,6 @@
+%if defined(__foo__)
+.cls {
+ color: #fff;
+}
+%endif
+%literal %foo
diff --git a/tests/examplefiles/demo.hbs b/tests/examplefiles/demo.hbs
new file mode 100644
index 00000000..1b9ed5a7
--- /dev/null
+++ b/tests/examplefiles/demo.hbs
@@ -0,0 +1,12 @@
+<!-- post.handlebars -->
+
+<div class='intro'>
+ {{intro}}
+</div>
+
+{{#if isExpanded}}
+ <div class='body'>{{body}}</div>
+ <button {{action contract}}>Contract</button>
+{{else}}
+ <button {{action expand}}>Show More...</button>
+{{/if}}
diff --git a/tests/examplefiles/demo.js.in b/tests/examplefiles/demo.js.in
new file mode 100644
index 00000000..f44fc53d
--- /dev/null
+++ b/tests/examplefiles/demo.js.in
@@ -0,0 +1,6 @@
+window.foo = {
+#if defined(__foo__)
+ 'key': 'value'
+#endif
+}
+#literal #foo
diff --git a/tests/examplefiles/demo.xul.in b/tests/examplefiles/demo.xul.in
new file mode 100644
index 00000000..9e1f4938
--- /dev/null
+++ b/tests/examplefiles/demo.xul.in
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="iso-8859-1"?>
+<window
+ xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul">
+#if defined(__foo__)
+<description value="Text" />
+#endif
+</window>
diff --git a/tests/examplefiles/docker.docker b/tests/examplefiles/docker.docker
new file mode 100644
index 00000000..d65385b6
--- /dev/null
+++ b/tests/examplefiles/docker.docker
@@ -0,0 +1,5 @@
+maintainer First O'Last
+
+run echo \
+ 123 $bar
+# comment
diff --git a/tests/examplefiles/ember.handlebars b/tests/examplefiles/ember.handlebars
new file mode 100644
index 00000000..515dffbd
--- /dev/null
+++ b/tests/examplefiles/ember.handlebars
@@ -0,0 +1,33 @@
+{{#view EmberFirebaseChat.ChatView class="chat-container"}}
+ <div class="chat-messages-container">
+ <ul class="chat-messages">
+ {{#each message in content}}
+ <li>
+ [{{formatTimestamp "message.timestamp" fmtString="h:mm:ss A"}}]
+ <strong>{{message.sender}}</strong>: {{message.content}}
+ </li>
+ {{/each}}
+ </ul>
+ </div>
+
+ {{! Comment }}
+ {{{unescaped value}}}
+
+ {{#view EmberFirebaseChat.InputView class="chat-input-container"}}
+ <form class="form-inline">
+ {{#if "auth.authed"}}
+ {{#if "auth.hasName"}}
+ <input type="text" id="message" placeholder="Message">
+ <button {{action "postMessage" target="view"}} class="btn">Send</button>
+ {{else}}
+ <input type="text" id="username" placeholder="Enter your username...">
+ <button {{action "pickName" target="view"}} class="btn">Send</button>
+ {{/if}}
+ {{else}}
+ <input type="text" placeholder="Log in with Persona to chat!" disabled="disabled">
+ <button {{action "login"}} class="btn">Login</button>
+ {{/if}}
+ </form>
+ {{/view}}
+{{/view}}
+
diff --git a/tests/examplefiles/eval.rs b/tests/examplefiles/eval.rs
new file mode 100644
index 00000000..17e585a0
--- /dev/null
+++ b/tests/examplefiles/eval.rs
@@ -0,0 +1,606 @@
+// -------------------------------------------------------------------------------------------------
+// Rick, a Rust intercal compiler. Save your souls!
+//
+// Copyright (c) 2015 Georg Brandl
+//
+// This program is free software; you can redistribute it and/or modify it under the terms of the
+// GNU General Public License as published by the Free Software Foundation; either version 2 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
+// even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+// General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License along with this program;
+// if not, write to the Free Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+// -------------------------------------------------------------------------------------------------
+
+/// Interprets INTERCAL source.
+///
+/// The evaluator is used when rick is called with `-i`, or when the compiler generates
+/// the output while compiling (in the constant-output case).
+
+use std::fmt::{ Debug, Display };
+use std::io::Write;
+use std::u16;
+
+use err::{ Res, IE123, IE129, IE252, IE275, IE555, IE633, IE774, IE994 };
+use ast::{ self, Program, Stmt, StmtBody, ComeFrom, Expr, Var, VType };
+use stdops::{ Bind, Array, write_number, read_number, check_chance, check_ovf, pop_jumps,
+ get_random_seed, mingle, select, and_16, and_32, or_16, or_32, xor_16, xor_32 };
+
+
+/// Represents a value (either 16-bit or 32-bit) at runtime.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub enum Val {
+ I16(u16),
+ I32(u32),
+}
+
+impl Val {
+ /// Cast as a 16-bit value; returns an error if 32-bit and too big.
+ pub fn as_u16(&self) -> Res<u16> {
+ match *self {
+ Val::I16(v) => Ok(v),
+ Val::I32(v) => {
+ if v > (u16::MAX as u32) {
+ return IE275.err();
+ }
+ Ok(v as u16)
+ }
+ }
+ }
+
+ /// Cast as a 32-bit value; always succeeds.
+ pub fn as_u32(&self) -> u32 {
+ match *self {
+ Val::I16(v) => v as u32,
+ Val::I32(v) => v
+ }
+ }
+
+    /// Cast as a usize value; always succeeds.
+ pub fn as_usize(&self) -> usize {
+ self.as_u32() as usize
+ }
+
+ /// Create from a 32-bit value; will select the smallest possible type.
+ pub fn from_u32(v: u32) -> Val {
+ if v & 0xFFFF == v {
+ Val::I16(v as u16)
+ } else {
+ Val::I32(v)
+ }
+ }
+}
+
+/// The state of the interpreter's evaluator.
+pub struct Eval<'a> {
+ /// Program to execute.
+ program: &'a Program,
+ /// Stream to use for printing output.
+ stdout: &'a mut Write,
+ /// Whether to print debugging output during execution.
+ debug: bool,
+ /// Variable bindings for the four types of variables.
+ spot: Vec<Bind<u16>>,
+ twospot: Vec<Bind<u32>>,
+ tail: Vec<Bind<Array<u16>>>,
+ hybrid: Vec<Bind<Array<u32>>>,
+ /// The infamous NEXT stack, capable of holding 80 elements.
+ jumps: Vec<ast::LogLine>,
+ /// Abstain counter for each statement.
+ abstain: Vec<u32>,
+ /// Binary I/O "tape" state.
+ last_in: u8,
+ last_out: u8,
+ /// Random number generator state.
+ rand_st: u32,
+ /// Counts the number of executed statements.
+ stmt_ctr: usize,
+}
+
+/// Represents the control flow effect of an executed statement.
+enum StmtRes {
+ /// normal execution, next statement
+ Next,
+ /// jump around, from DO ... NEXT
+ Jump(usize),
+ /// jump back, from RESUME
+ Back(usize),
+ /// start from the first statement, from TRY AGAIN
+ FromTop,
+ /// end the program, from GIVE UP
+ End,
+}
+
+impl<'a> Eval<'a> {
+ /// Construct a new evaluator.
+ pub fn new(program: &'a Program, stdout: &'a mut Write, debug: bool,
+ random: bool) -> Eval<'a> {
+ let abs = program.stmts.iter().map(|stmt| stmt.props.disabled as u32).collect();
+ let nvars = (program.var_info.0.len(),
+ program.var_info.1.len(),
+ program.var_info.2.len(),
+ program.var_info.3.len());
+ Eval {
+ program: program,
+ stdout: stdout,
+ debug: debug,
+ spot: vec![Bind::new(0); nvars.0],
+ twospot: vec![Bind::new(0); nvars.1],
+ tail: vec![Bind::new(Array::empty()); nvars.2],
+ hybrid: vec![Bind::new(Array::empty()); nvars.3],
+ jumps: Vec::with_capacity(80),
+ rand_st: if random { get_random_seed() } else { 0 },
+ abstain: abs,
+ last_in: 0,
+ last_out: 0,
+ stmt_ctr: 0,
+ }
+ }
+
+ /// Interpret the program. Returns either the number of executed statements,
+ /// or an error (RtError).
+ pub fn eval(&mut self) -> Res<usize> {
+ let mut pctr = 0; // index of current statement
+ let program = self.program.clone();
+ let nstmts = program.stmts.len();
+ loop {
+ // check for falling off the end
+ if pctr >= nstmts {
+ // if the last statement was a TRY AGAIN, falling off the end is fine
+ if let StmtBody::TryAgain = program.stmts[program.stmts.len() - 1].body {
+ break;
+ }
+ return IE633.err();
+ }
+ self.stmt_ctr += 1;
+ let stmt = &program.stmts[pctr];
+ // execute statement if not abstained
+ if self.abstain[pctr] == 0 {
+ // check execution chance
+ let (passed, rand_st) = check_chance(stmt.props.chance, self.rand_st);
+ self.rand_st = rand_st;
+ if passed {
+ // try to eval this statement
+ let res = match self.eval_stmt(stmt) {
+ // on error, set the correct line number and bubble up
+ Err(mut err) => {
+ err.set_line(stmt.props.onthewayto);
+ // special treatment for NEXT
+ if let StmtBody::DoNext(n) = stmt.body {
+ if let Some(i) = program.labels.get(&n) {
+ err.set_line(program.stmts[*i as usize].props.srcline);
+ }
+ }
+ return Err(err);
+ }
+ Ok(res) => res
+ };
+ // handle control flow effects
+ match res {
+ StmtRes::Next => { }
+ StmtRes::Jump(n) => {
+ self.jumps.push(pctr as u16); // push the line with the NEXT
+ pctr = n;
+ continue; // do not increment or check for COME FROMs
+ }
+ StmtRes::Back(n) => {
+ pctr = n; // will be incremented below after COME FROM check
+ }
+ StmtRes::FromTop => {
+ pctr = 0; // start from the beginning, do not push any stack
+ continue;
+ }
+ StmtRes::End => break,
+ }
+ }
+ }
+ // if we are on the line with the compiler bug, error out
+ if pctr == self.program.bugline as usize {
+ return IE774.err_with(None, stmt.props.onthewayto);
+ }
+ // try to determine if we have to go to a COME FROM statement
+ // (note: in general, program.stmts[pctr] != stmt)
+ //
+ // the static COME FROM is always a possibility
+ let mut maybe_next = program.stmts[pctr].comefrom;
+ // the complicated case: evaluate all computed-come-from expressions
+ let my_label = program.stmts[pctr].props.label;
+ if program.uses_complex_comefrom && my_label > 0 {
+ for (i, stmt) in program.stmts.iter().enumerate() {
+ if let StmtBody::ComeFrom(ComeFrom::Expr(ref e)) = stmt.body {
+ let v = try!(try!(self.eval_expr(e)).as_u16());
+ if v == my_label {
+ // as soon as we have multiple candidates, we can bail out
+ if maybe_next.is_some() {
+ return IE555.err();
+ }
+ maybe_next = Some(i as u16);
+ }
+ }
+ }
+ }
+ // check for COME FROMs from this line
+ if let Some(next) = maybe_next {
+ let next = next as usize;
+ // check for abstained COME FROM
+ if self.abstain[next] == 0 {
+ // the COME FROM can also have a % chance
+ let (passed, rand_st) = check_chance(program.stmts[next].props.chance,
+ self.rand_st);
+ self.rand_st = rand_st;
+ if passed {
+ pctr = next;
+ continue;
+ }
+ }
+ }
+ // no COME FROM, normal execution
+ pctr += 1;
+ }
+ Ok(self.stmt_ctr)
+ }
+
+ /// Interpret a single statement.
+ fn eval_stmt(&mut self, stmt: &Stmt) -> Res<StmtRes> {
+ if self.debug {
+ println!("\nExecuting Stmt #{} (state before following)", self.stmt_ctr);
+ self.dump_state();
+ println!("{}", stmt);
+ }
+ match stmt.body {
+ StmtBody::Calc(ref var, ref expr) => {
+ let val = try!(self.eval_expr(expr));
+ try!(self.assign(var, val));
+ Ok(StmtRes::Next)
+ }
+ StmtBody::Dim(ref var, ref exprs) => {
+ try!(self.array_dim(var, exprs));
+ Ok(StmtRes::Next)
+ }
+ StmtBody::DoNext(n) => {
+ match self.program.labels.get(&n) {
+ // too many jumps on stack already?
+ Some(_) if self.jumps.len() >= 80 => IE123.err(),
+ Some(i) => Ok(StmtRes::Jump(*i as usize)),
+ None => IE129.err(),
+ }
+ }
+ StmtBody::ComeFrom(_) => {
+ // nothing to do here at runtime
+ Ok(StmtRes::Next)
+ }
+ StmtBody::Resume(ref expr) => {
+ let n = try!(self.eval_expr(expr)).as_u32();
+ // this expect() is safe: if the third arg is true, there will
+ // be no Ok(None) returns
+ let next = try!(pop_jumps(&mut self.jumps, n, true, 0))
+ .expect("https://xkcd.com/378/ ?!");
+ Ok(StmtRes::Back(next as usize))
+ }
+ StmtBody::Forget(ref expr) => {
+ let n = try!(self.eval_expr(expr)).as_u32();
+ try!(pop_jumps(&mut self.jumps, n, false, 0));
+ Ok(StmtRes::Next)
+ }
+ StmtBody::Ignore(ref vars) => {
+ for var in vars {
+ self.set_rw(var, false);
+ }
+ Ok(StmtRes::Next)
+ }
+ StmtBody::Remember(ref vars) => {
+ for var in vars {
+ self.set_rw(var, true);
+ }
+ Ok(StmtRes::Next)
+ }
+ StmtBody::Stash(ref vars) => {
+ for var in vars {
+ self.stash(var);
+ }
+ Ok(StmtRes::Next)
+ }
+ StmtBody::Retrieve(ref vars) => {
+ for var in vars {
+ try!(self.retrieve(var));
+ }
+ Ok(StmtRes::Next)
+ }
+ StmtBody::Abstain(ref expr, ref whats) => {
+ let f: Box<Fn(u32) -> u32> = if let Some(ref e) = *expr {
+ let n = try!(self.eval_expr(e)).as_u32();
+ box move |v: u32| v.saturating_add(n)
+ } else {
+ box |_| 1
+ };
+ for what in whats {
+ self.abstain(what, &*f);
+ }
+ Ok(StmtRes::Next)
+ }
+ StmtBody::Reinstate(ref whats) => {
+ for what in whats {
+ self.abstain(what, &|v: u32| v.saturating_sub(1));
+ }
+ Ok(StmtRes::Next)
+ }
+ StmtBody::ReadOut(ref vars) => {
+ for var in vars {
+ match *var {
+ // read out whole array
+ Expr::Var(ref var) if var.is_dim() => {
+ try!(self.array_readout(var));
+ }
+ // read out single var or array element
+ Expr::Var(ref var) => {
+ let varval = try!(self.lookup(var));
+ try!(write_number(self.stdout, varval.as_u32(), 0));
+ }
+ // read out constant
+ Expr::Num(_, v) => try!(write_number(self.stdout, v, 0)),
+ // others will not be generated
+ _ => return IE994.err(),
+ };
+ }
+ Ok(StmtRes::Next)
+ }
+ StmtBody::WriteIn(ref vars) => {
+ for var in vars {
+ if var.is_dim() {
+ // write in whole array
+ try!(self.array_writein(var));
+ } else {
+ // write in single var or array element
+ let n = try!(read_number(0));
+ try!(self.assign(var, Val::from_u32(n)));
+ }
+ }
+ Ok(StmtRes::Next)
+ }
+ // this one is only generated by the constant-program optimizer
+ StmtBody::Print(ref s) => {
+ if let Err(_) = self.stdout.write(&s) {
+ return IE252.err();
+ }
+ Ok(StmtRes::Next)
+ }
+ StmtBody::TryAgain => Ok(StmtRes::FromTop),
+ StmtBody::GiveUp => Ok(StmtRes::End),
+ StmtBody::Error(ref e) => Err((*e).clone()),
+ }
+ }
+
+ /// Evaluate an expression to a value.
+ fn eval_expr(&self, expr: &Expr) -> Res<Val> {
+ match *expr {
+ Expr::Num(vtype, v) => match vtype {
+ VType::I16 => Ok(Val::I16(v as u16)),
+ VType::I32 => Ok(Val::I32(v)),
+ },
+ Expr::Var(ref var) => self.lookup(var),
+ Expr::Mingle(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx)).as_u32();
+ let w = try!(self.eval_expr(wx)).as_u32();
+ let v = try!(check_ovf(v, 0));
+ let w = try!(check_ovf(w, 0));
+ Ok(Val::I32(mingle(v, w)))
+ }
+ Expr::Select(vtype, ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ if vtype == VType::I16 {
+ Ok(Val::I16(select(v.as_u32(), try!(w.as_u16()) as u32) as u16))
+ } else {
+ Ok(Val::I32(select(v.as_u32(), w.as_u32())))
+ }
+ }
+ Expr::And(vtype, ref vx) => {
+ let v = try!(self.eval_expr(vx));
+ match vtype {
+ VType::I16 => Ok(Val::I16(and_16(try!(v.as_u16()) as u32) as u16)),
+ VType::I32 => Ok(Val::I32(and_32(v.as_u32()))),
+ }
+ }
+ Expr::Or(vtype, ref vx) => {
+ let v = try!(self.eval_expr(vx));
+ match vtype {
+ VType::I16 => Ok(Val::I16(or_16(try!(v.as_u16()) as u32) as u16)),
+ VType::I32 => Ok(Val::I32(or_32(v.as_u32()))),
+ }
+ }
+ Expr::Xor(vtype, ref vx) => {
+ let v = try!(self.eval_expr(vx));
+ match vtype {
+ VType::I16 => Ok(Val::I16(xor_16(try!(v.as_u16()) as u32) as u16)),
+ VType::I32 => Ok(Val::I32(xor_32(v.as_u32()))),
+ }
+ }
+ Expr::RsNot(ref vx) => {
+ let v = try!(self.eval_expr(vx));
+ Ok(Val::I32(!v.as_u32()))
+ }
+ Expr::RsAnd(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ Ok(Val::I32(v.as_u32() & w.as_u32()))
+ }
+ Expr::RsOr(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ Ok(Val::I32(v.as_u32() | w.as_u32()))
+ }
+ Expr::RsXor(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ Ok(Val::I32(v.as_u32() ^ w.as_u32()))
+ }
+ Expr::RsRshift(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ Ok(Val::I32(v.as_u32() >> w.as_u32()))
+ }
+ Expr::RsLshift(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ Ok(Val::I32(v.as_u32() << w.as_u32()))
+ }
+ // Expr::RsEqual(ref vx, ref wx) => {
+ // let v = try!(self.eval_expr(vx));
+ // let w = try!(self.eval_expr(wx));
+ // Ok(Val::I32((v.as_u32() == w.as_u32()) as u32))
+ // }
+ Expr::RsNotEqual(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ Ok(Val::I32((v.as_u32() != w.as_u32()) as u32))
+ }
+ Expr::RsPlus(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ Ok(Val::I32(v.as_u32() + w.as_u32()))
+ }
+ Expr::RsMinus(ref vx, ref wx) => {
+ let v = try!(self.eval_expr(vx));
+ let w = try!(self.eval_expr(wx));
+ Ok(Val::I32(v.as_u32() - w.as_u32()))
+ }
+ }
+ }
+
+ #[inline]
+ fn eval_subs(&self, subs: &Vec<Expr>) -> Res<Vec<usize>> {
+ subs.iter().map(|v| self.eval_expr(v).map(|w| w.as_usize())).collect()
+ }
+
+ /// Dimension an array.
+ fn array_dim(&mut self, var: &Var, dims: &Vec<Expr>) -> Res<()> {
+ let dims = try!(self.eval_subs(dims));
+ match *var {
+ Var::A16(n, _) => self.tail[n].dimension(dims, 0),
+ Var::A32(n, _) => self.hybrid[n].dimension(dims, 0),
+ _ => return IE994.err(),
+ }
+ }
+
+ /// Assign to a variable.
+ fn assign(&mut self, var: &Var, val: Val) -> Res<()> {
+ match *var {
+ Var::I16(n) => Ok(self.spot[n].assign(try!(val.as_u16()))),
+ Var::I32(n) => Ok(self.twospot[n].assign(val.as_u32())),
+ Var::A16(n, ref subs) => {
+ let subs = try!(self.eval_subs(subs));
+ self.tail[n].set_md(subs, try!(val.as_u16()), 0)
+ }
+ Var::A32(n, ref subs) => {
+ let subs = try!(self.eval_subs(subs));
+ self.hybrid[n].set_md(subs, val.as_u32(), 0)
+ }
+ }
+ }
+
+ /// Look up the value of a variable.
+ fn lookup(&self, var: &Var) -> Res<Val> {
+ match *var {
+ Var::I16(n) => Ok(Val::I16(self.spot[n].val)),
+ Var::I32(n) => Ok(Val::I32(self.twospot[n].val)),
+ Var::A16(n, ref subs) => {
+ let subs = try!(self.eval_subs(subs));
+ self.tail[n].get_md(subs, 0).map(Val::I16)
+ }
+ Var::A32(n, ref subs) => {
+ let subs = try!(self.eval_subs(subs));
+ self.hybrid[n].get_md(subs, 0).map(Val::I32)
+ }
+ }
+ }
+
+ /// Process a STASH statement.
+ fn stash(&mut self, var: &Var) {
+ match *var {
+ Var::I16(n) => self.spot[n].stash(),
+ Var::I32(n) => self.twospot[n].stash(),
+ Var::A16(n, _) => self.tail[n].stash(),
+ Var::A32(n, _) => self.hybrid[n].stash(),
+ }
+ }
+
+ /// Process a RETRIEVE statement.
+ fn retrieve(&mut self, var: &Var) -> Res<()> {
+ match *var {
+ Var::I16(n) => self.spot[n].retrieve(0),
+ Var::I32(n) => self.twospot[n].retrieve(0),
+ Var::A16(n, _) => self.tail[n].retrieve(0),
+ Var::A32(n, _) => self.hybrid[n].retrieve(0),
+ }
+ }
+
+ /// Process an IGNORE or REMEMBER statement. Cannot fail.
+ fn set_rw(&mut self, var: &Var, rw: bool) {
+ match *var {
+ Var::I16(n) => self.spot[n].rw = rw,
+ Var::I32(n) => self.twospot[n].rw = rw,
+ Var::A16(n, _) => self.tail[n].rw = rw,
+ Var::A32(n, _) => self.hybrid[n].rw = rw,
+ }
+ }
+
+    /// Process an ABSTAIN or REINSTATE statement. Cannot fail.
+ fn abstain(&mut self, what: &ast::Abstain, f: &Fn(u32) -> u32) {
+ if let &ast::Abstain::Label(lbl) = what {
+ let idx = self.program.labels[&lbl] as usize;
+ if self.program.stmts[idx].body != StmtBody::GiveUp {
+ self.abstain[idx] = f(self.abstain[idx]);
+ }
+ } else {
+ for (i, stype) in self.program.stmt_types.iter().enumerate() {
+ if stype == what {
+ self.abstain[i] = f(self.abstain[i]);
+ }
+ }
+ }
+ }
+
+ /// Array readout helper.
+ fn array_readout(&mut self, var: &Var) -> Res<()> {
+ let state = &mut self.last_out;
+ match *var {
+ Var::A16(n, _) => self.tail[n].readout(self.stdout, state, 0),
+ Var::A32(n, _) => self.hybrid[n].readout(self.stdout, state, 0),
+ _ => return IE994.err(),
+ }
+ }
+
+ /// Array writein helper.
+ fn array_writein(&mut self, var: &Var) -> Res<()> {
+ let state = &mut self.last_in;
+ match *var {
+ Var::A16(n, _) => self.tail[n].writein(state, 0),
+ Var::A32(n, _) => self.hybrid[n].writein(state, 0),
+ _ => return IE994.err(),
+ }
+ }
+
+ /// Debug helpers.
+ fn dump_state(&self) {
+ self.dump_state_one(&self.spot, ".");
+ self.dump_state_one(&self.twospot, ":");
+ self.dump_state_one(&self.tail, ",");
+ self.dump_state_one(&self.hybrid, ";");
+ if self.jumps.len() > 0 {
+ println!("Next stack: {:?}", self.jumps);
+ }
+ //println!("Abstained: {:?}", self.abstain);
+ }
+
+ fn dump_state_one<T: Debug + Display>(&self, vec: &Vec<Bind<T>>, sigil: &str) {
+ if vec.len() > 0 {
+ for (i, v) in vec.iter().enumerate() {
+ print!("{}{} = {}, ", sigil, i, v);
+ }
+ println!("");
+ }
+ }
+}
diff --git a/tests/examplefiles/example.als b/tests/examplefiles/example.als
new file mode 100644
index 00000000..3a5ab82b
--- /dev/null
+++ b/tests/examplefiles/example.als
@@ -0,0 +1,217 @@
+module examples/systems/views
+
+/*
+ * Model of views in object-oriented programming.
+ *
+ * Two object references, called the view and the backing,
+ * are related by a view mechanism when changes to the
+ * backing are automatically propagated to the view. Note
+ * that the state of a view need not be a projection of the
+ * state of the backing; the keySet method of Map, for
+ * example, produces two view relationships, and for the
+ * one in which the map is modified by changes to the key
+ * set, the value of the new map cannot be determined from
+ * the key set. Note that in the iterator view mechanism,
+ * the iterator is by this definition the backing object,
+ * since changes are propagated from iterator to collection
+ * and not vice versa. Oddly, a reference may be a view of
+ * more than one backing: there can be two iterators on the
+ * same collection, eg. A reference cannot be a view under
+ * more than one view type.
+ *
+ * A reference is made dirty when it is a backing for a view
+ * with which it is no longer related by the view invariant.
+ * This usually happens when a view is modified, either
+ * directly or via another backing. For example, changing a
+ * collection directly when it has an iterator invalidates
+ * it, as does changing the collection through one iterator
+ * when there are others.
+ *
+ * More work is needed if we want to model more closely the
+ * failure of an iterator when its collection is invalidated.
+ *
+ * As a terminological convention, when there are two
+ * complementary view relationships, we will give them types
+ * t and t'. For example, KeySetView propagates from map to
+ * set, and KeySetView' propagates from set to map.
+ *
+ * author: Daniel Jackson
+ */
+
+open util/ordering[State] as so
+open util/relation as rel
+
+sig Ref {}
+sig Object {}
+
+-- t->b->v in views when v is view of type t of backing b
+-- dirty contains refs that have been invalidated
+sig State {
+ refs: set Ref,
+ obj: refs -> one Object,
+ views: ViewType -> refs -> refs,
+ dirty: set refs
+-- , anyviews: Ref -> Ref -- for visualization
+ }
+-- {anyviews = ViewType.views}
+
+sig Map extends Object {
+ keys: set Ref,
+ map: keys -> one Ref
+ }{all s: State | keys + Ref.map in s.refs}
+sig MapRef extends Ref {}
+fact {State.obj[MapRef] in Map}
+
+sig Iterator extends Object {
+ left, done: set Ref,
+ lastRef: lone done
+ }{all s: State | done + left + lastRef in s.refs}
+sig IteratorRef extends Ref {}
+fact {State.obj[IteratorRef] in Iterator}
+
+sig Set extends Object {
+ elts: set Ref
+ }{all s: State | elts in s.refs}
+sig SetRef extends Ref {}
+fact {State.obj[SetRef] in Set}
+
+abstract sig ViewType {}
+one sig KeySetView, KeySetView', IteratorView extends ViewType {}
+fact ViewTypes {
+ State.views[KeySetView] in MapRef -> SetRef
+ State.views[KeySetView'] in SetRef -> MapRef
+ State.views[IteratorView] in IteratorRef -> SetRef
+ all s: State | s.views[KeySetView] = ~(s.views[KeySetView'])
+ }
+
+/**
+ * mods is refs modified directly or by view mechanism
+ * doesn't handle possibility of modifying an object and its view at once?
+ * should we limit frame conds to non-dirty refs?
+ */
+pred modifies [pre, post: State, rs: set Ref] {
+ let vr = pre.views[ViewType], mods = rs.*vr {
+ all r: pre.refs - mods | pre.obj[r] = post.obj[r]
+ all b: mods, v: pre.refs, t: ViewType |
+ b->v in pre.views[t] => viewFrame [t, pre.obj[v], post.obj[v], post.obj[b]]
+ post.dirty = pre.dirty +
+ {b: pre.refs | some v: Ref, t: ViewType |
+ b->v in pre.views[t] && !viewFrame [t, pre.obj[v], post.obj[v], post.obj[b]]
+ }
+ }
+ }
+
+pred allocates [pre, post: State, rs: set Ref] {
+ no rs & pre.refs
+ post.refs = pre.refs + rs
+ }
+
+/**
+ * models frame condition that limits change to view object from v to v' when backing object changes to b'
+ */
+pred viewFrame [t: ViewType, v, v', b': Object] {
+ t in KeySetView => v'.elts = dom [b'.map]
+ t in KeySetView' => b'.elts = dom [v'.map]
+ t in KeySetView' => (b'.elts) <: (v.map) = (b'.elts) <: (v'.map)
+ t in IteratorView => v'.elts = b'.left + b'.done
+ }
+
+pred MapRef.keySet [pre, post: State, setRefs: SetRef] {
+ post.obj[setRefs].elts = dom [pre.obj[this].map]
+ modifies [pre, post, none]
+ allocates [pre, post, setRefs]
+ post.views = pre.views + KeySetView->this->setRefs + KeySetView'->setRefs->this
+ }
+
+pred MapRef.put [pre, post: State, k, v: Ref] {
+ post.obj[this].map = pre.obj[this].map ++ k->v
+ modifies [pre, post, this]
+ allocates [pre, post, none]
+ post.views = pre.views
+ }
+
+pred SetRef.iterator [pre, post: State, iterRef: IteratorRef] {
+ let i = post.obj[iterRef] {
+ i.left = pre.obj[this].elts
+ no i.done + i.lastRef
+ }
+ modifies [pre,post,none]
+ allocates [pre, post, iterRef]
+ post.views = pre.views + IteratorView->iterRef->this
+ }
+
+pred IteratorRef.remove [pre, post: State] {
+ let i = pre.obj[this], i' = post.obj[this] {
+ i'.left = i.left
+ i'.done = i.done - i.lastRef
+ no i'.lastRef
+ }
+ modifies [pre,post,this]
+ allocates [pre, post, none]
+ pre.views = post.views
+ }
+
+pred IteratorRef.next [pre, post: State, ref: Ref] {
+ let i = pre.obj[this], i' = post.obj[this] {
+ ref in i.left
+ i'.left = i.left - ref
+ i'.done = i.done + ref
+ i'.lastRef = ref
+ }
+ modifies [pre, post, this]
+ allocates [pre, post, none]
+ pre.views = post.views
+ }
+
+pred IteratorRef.hasNext [s: State] {
+ some s.obj[this].left
+ }
+
+assert zippishOK {
+ all
+ ks, vs: SetRef,
+ m: MapRef,
+ ki, vi: IteratorRef,
+ k, v: Ref |
+ let s0=so/first,
+ s1=so/next[s0],
+ s2=so/next[s1],
+ s3=so/next[s2],
+ s4=so/next[s3],
+ s5=so/next[s4],
+ s6=so/next[s5],
+ s7=so/next[s6] |
+ ({
+ precondition [s0, ks, vs, m]
+ no s0.dirty
+ ks.iterator [s0, s1, ki]
+ vs.iterator [s1, s2, vi]
+ ki.hasNext [s2]
+ vi.hasNext [s2]
+ ki.this/next [s2, s3, k]
+ vi.this/next [s3, s4, v]
+ m.put [s4, s5, k, v]
+ ki.remove [s5, s6]
+ vi.remove [s6, s7]
+ } => no State.dirty)
+ }
+
+pred precondition [pre: State, ks, vs, m: Ref] {
+ // all these conditions and other errors discovered in scope of 6 but 8,3
+ // in initial state, must have view invariants hold
+ (all t: ViewType, b, v: pre.refs |
+ b->v in pre.views[t] => viewFrame [t, pre.obj[v], pre.obj[v], pre.obj[b]])
+ // sets are not aliases
+-- ks != vs
+ // sets are not views of map
+-- no (ks+vs)->m & ViewType.pre.views
+ // no iterator currently on either set
+-- no Ref->(ks+vs) & ViewType.pre.views
+ }
+
+check zippishOK for 6 but 8 State, 3 ViewType expect 1
+
+/**
+ * experiment with controlling heap size
+ */
+fact {all s: State | #s.obj < 5}
diff --git a/tests/examplefiles/example.c b/tests/examplefiles/example.c
index a7f546d1..7bf70149 100644
--- a/tests/examplefiles/example.c
+++ b/tests/examplefiles/example.c
@@ -195,7 +195,7 @@ char convertType(int type) {
case TYPE_INT: return 'I';
case TYPE_FLOAT: return 'F';
case TYPE_BOOLEAN: return 'Z';
- default: yyerror("compiler-intern error in convertType().\n");
+ default : yyerror("compiler-intern error in convertType().\n");
}
return 0; /* to avoid compiler-warning */
}
diff --git a/tests/examplefiles/example.chai b/tests/examplefiles/example.chai
new file mode 100644
index 00000000..85f53c38
--- /dev/null
+++ b/tests/examplefiles/example.chai
@@ -0,0 +1,6 @@
+var f = fun(x) { x + 2; }
+// comment
+puts(someFunc(2 + 2 - 1 * 5 / 4));
+var x = "str";
+def dosomething(lhs, rhs) { print("lhs: ${lhs}, rhs: ${rhs}"); }
+callfunc(`+`, 1, 4);
diff --git a/tests/examplefiles/example.cob b/tests/examplefiles/example.cob
index 3f65e498..92d2e300 100644
--- a/tests/examplefiles/example.cob
+++ b/tests/examplefiles/example.cob
@@ -2617,940 +2617,4 @@ GC0710 88 Token-Is-Reserved-Word VALUE " ".
*****************************************************************
** Perform all program-wide initialization operations **
*****************************************************************
- 101-Establish-Working-Env.
- MOVE TRIM(Src-Filename,Leading) TO Src-Filename
- ACCEPT Env-TEMP
- FROM ENVIRONMENT "TEMP"
- END-ACCEPT
- ACCEPT Lines-Per-Page-ENV
- FROM ENVIRONMENT "OCXREF_LINES"
- END-ACCEPT
- INSPECT Src-Filename REPLACING ALL "\" BY "/"
- INSPECT Env-TEMP REPLACING ALL "\" BY "/"
- MOVE Src-Filename TO Program-Path
- MOVE Program-Path TO Heading-2
- CALL "C$JUSTIFY"
- USING Heading-2, "Right"
- END-CALL
- MOVE LENGTH(TRIM(Src-Filename,Trailing)) TO I
- MOVE 0 TO J
- PERFORM UNTIL Src-Filename(I:1) = '/'
- OR I = 0
- SUBTRACT 1 FROM I
- ADD 1 TO J
- END-PERFORM
- UNSTRING Src-Filename((I + 1):J) DELIMITED BY "."
- INTO Filename, Dummy
- END-UNSTRING
- STRING TRIM(Env-TEMP,Trailing)
- "/"
- TRIM(Filename,Trailing)
- ".i"
- DELIMITED SIZE
- INTO Expanded-Src-Filename
- END-STRING
- STRING Program-Path(1:I)
- TRIM(Filename,Trailing)
- ".lst"
- DELIMITED SIZE
- INTO Report-Filename
- END-STRING
- IF Lines-Per-Page-ENV NOT = SPACES
- MOVE NUMVAL(Lines-Per-Page-ENV) TO Lines-Per-Page
- ELSE
- MOVE 60 TO Lines-Per-Page
- END-IF
- ACCEPT Todays-Date
- FROM DATE YYYYMMDD
- END-ACCEPT
- MOVE Todays-Date TO H1X-Date
- H1S-Date
- MOVE "????????????..." TO SPI-Current-Program-ID
- MOVE SPACES TO SPI-Current-Verb
- Held-Reference
- MOVE "Y" TO F-First-Record
- .
- /
- 200-Execute-cobc SECTION.
- 201-Build-Cmd.
- STRING "cobc -E "
- TRIM(Program-Path, Trailing)
- " > "
- TRIM(Expanded-Src-Filename,Trailing)
- DELIMITED SIZE
- INTO Cmd
- END-STRING
- CALL "SYSTEM"
- USING Cmd
- END-CALL
- IF RETURN-CODE NOT = 0
- DISPLAY
- "Cross-reference terminated by previous errors"
- UPON SYSERR
- END-DISPLAY
- GOBACK
- END-IF
- .
-
- 209-Exit.
- EXIT
- .
- /
- 300-Tokenize-Source SECTION.
- 301-Driver.
- OPEN INPUT Expand-Code
- MOVE SPACES TO Expand-Code-Rec
- MOVE 256 TO Src-Ptr
- MOVE 0 TO Num-UserNames
- SPI-Current-Line-No
- MOVE "?" TO SPI-Current-Division
-GC0710 MOVE "N" TO F-Verb-Has-Been-Found.
- PERFORM FOREVER
- PERFORM 310-Get-Token
- IF Token-Is-EOF
- EXIT PERFORM
- END-IF
- MOVE UPPER-CASE(SPI-Current-Token)
- TO SPI-Current-Token-UC
- IF Token-Is-Verb
- MOVE SPI-Current-Token-UC TO SPI-Current-Verb
- SPI-Prior-Token
- IF Held-Reference NOT = SPACES
- MOVE Held-Reference TO Sort-Rec
- MOVE SPACES TO Held-Reference
- RELEASE Sort-Rec
- END-IF
- END-IF
- EVALUATE TRUE
- WHEN In-IDENTIFICATION-DIVISION
- PERFORM 320-IDENTIFICATION-DIVISION
- WHEN In-ENVIRONMENT-DIVISION
- PERFORM 330-ENVIRONMENT-DIVISION
- WHEN In-DATA-DIVISION
- PERFORM 340-DATA-DIVISION
- WHEN In-PROCEDURE-DIVISION
- PERFORM 350-PROCEDURE-DIVISION
- END-EVALUATE
- IF Token-Is-Key-Word
- MOVE SPI-Current-Token-UC TO SPI-Prior-Token
- END-IF
- IF F-Token-Ended-Sentence = "Y"
- AND SPI-Current-Division NOT = "I"
- MOVE SPACES TO SPI-Prior-Token
- SPI-Current-Verb
- END-IF
-
- END-PERFORM
- CLOSE Expand-Code
- EXIT SECTION
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 310-Get-Token.
- *>-- Position to 1st non-blank character
- MOVE F-Token-Ended-Sentence TO F-Last-Token-Ended-Sent
- MOVE "N" TO F-Token-Ended-Sentence
- PERFORM UNTIL Expand-Code-Rec(Src-Ptr : 1) NOT = SPACE
- IF Src-Ptr > 255
- READ Expand-Code AT END
- IF Held-Reference NOT = SPACES
- MOVE Held-Reference TO Sort-Rec
- MOVE SPACES TO Held-Reference
- RELEASE Sort-Rec
- END-IF
- SET Token-Is-EOF TO TRUE
- MOVE 0 TO SPI-Current-Line-No
- EXIT PARAGRAPH
- END-READ
- IF ECR-1 = "#"
- PERFORM 311-Control-Record
- ELSE
- PERFORM 312-Expand-Code-Record
- END-IF
- ELSE
- ADD 1 TO Src-Ptr
- END-IF
- END-PERFORM
- *>-- Extract token string
- MOVE Expand-Code-Rec(Src-Ptr : 1) TO SPI-Current-Char
- MOVE Expand-Code-Rec(Src-Ptr + 1: 1) TO SPI-Next-Char
- IF SPI-Current-Char = "."
- ADD 1 TO Src-Ptr
- MOVE SPI-Current-Char TO SPI-Current-Token
- MOVE SPACE TO SPI-Token-Type
- MOVE "Y" TO F-Token-Ended-Sentence
- EXIT PARAGRAPH
- END-IF
- IF Current-Char-Is-Punct
- AND SPI-Current-Char = "="
- AND SPI-Current-Division = "P"
- ADD 1 TO Src-Ptr
- MOVE "EQUALS" TO SPI-Current-Token
- MOVE "K" TO SPI-Token-Type
- EXIT PARAGRAPH
- END-IF
- IF Current-Char-Is-Punct *> So subscripts don't get flagged w/ "*"
- AND SPI-Current-Char = "("
- AND SPI-Current-Division = "P"
- MOVE SPACES TO SPI-Prior-Token
- END-IF
- IF Current-Char-Is-Punct
- ADD 1 TO Src-Ptr
- MOVE SPI-Current-Char TO SPI-Current-Token
- MOVE SPACE TO SPI-Token-Type
- EXIT PARAGRAPH
- END-IF
- IF Current-Char-Is-Quote
- ADD 1 TO Src-Ptr
- UNSTRING Expand-Code-Rec
- DELIMITED BY SPI-Current-Char
- INTO SPI-Current-Token
- WITH POINTER Src-Ptr
- END-UNSTRING
- IF Expand-Code-Rec(Src-Ptr : 1) = "."
- MOVE "Y" TO F-Token-Ended-Sentence
- ADD 1 TO Src-Ptr
- END-IF
- SET Token-Is-Literal-Alpha TO TRUE
- EXIT PARAGRAPH
- END-IF
- IF Current-Char-Is-X AND Next-Char-Is-Quote
- ADD 2 TO Src-Ptr
- UNSTRING Expand-Code-Rec
- DELIMITED BY SPI-Next-Char
- INTO SPI-Current-Token
- WITH POINTER Src-Ptr
- END-UNSTRING
- IF Expand-Code-Rec(Src-Ptr : 1) = "."
- MOVE "Y" TO F-Token-Ended-Sentence
- ADD 1 TO Src-Ptr
- END-IF
- SET Token-Is-Literal-Number TO TRUE
- EXIT PARAGRAPH
- END-IF
- IF Current-Char-Is-Z AND Next-Char-Is-Quote
- ADD 2 TO Src-Ptr
- UNSTRING Expand-Code-Rec
- DELIMITED BY SPI-Next-Char
- INTO SPI-Current-Token
- WITH POINTER Src-Ptr
- END-UNSTRING
- IF Expand-Code-Rec(Src-Ptr : 1) = "."
- MOVE "Y" TO F-Token-Ended-Sentence
- ADD 1 TO Src-Ptr
- END-IF
- SET Token-Is-Literal-Alpha TO TRUE
- EXIT PARAGRAPH
- END-IF
- IF F-Processing-PICTURE = "Y"
- UNSTRING Expand-Code-Rec
- DELIMITED BY ". " OR " "
- INTO SPI-Current-Token
- DELIMITER IN Delim
- WITH POINTER Src-Ptr
- END-UNSTRING
- IF Delim = ". "
- MOVE "Y" TO F-Token-Ended-Sentence
- ADD 1 TO Src-Ptr
- END-IF
- IF UPPER-CASE(SPI-Current-Token) = "IS"
- MOVE SPACE TO SPI-Token-Type
- EXIT PARAGRAPH
- ELSE
- MOVE "N" TO F-Processing-PICTURE
- MOVE SPACE TO SPI-Token-Type
- EXIT PARAGRAPH
- END-IF
- END-IF
- UNSTRING Expand-Code-Rec
- DELIMITED BY ". " OR " " OR "=" OR "(" OR ")" OR "*"
- OR "/" OR "&" OR ";" OR "," OR "<"
- OR ">" OR ":"
- INTO SPI-Current-Token
- DELIMITER IN Delim
- WITH POINTER Src-Ptr
- END-UNSTRING
- IF Delim = ". "
- MOVE "Y" TO F-Token-Ended-Sentence
- END-IF
- IF Delim NOT = ". " AND " "
- SUBTRACT 1 FROM Src-Ptr
- END-IF
- *>-- Classify Token
- MOVE UPPER-CASE(SPI-Current-Token) TO Search-Token
- IF Search-Token = "EQUAL" OR "EQUALS"
- MOVE "EQUALS" TO SPI-Current-Token
- MOVE "K" TO SPI-Token-Type
- EXIT PARAGRAPH
- END-IF
- SEARCH ALL Reserved-Word
- WHEN RW-Word (RW-Idx) = Search-Token
- MOVE RW-Type (RW-Idx) TO SPI-Token-Type
-GC0710 IF Token-Is-Verb
-GC0710 MOVE "Y" TO F-Verb-Has-Been-Found
-GC0710 END-IF
- EXIT PARAGRAPH
- END-SEARCH
- *>-- Not a reserved word, must be a user name
- SET Token-Is-Identifier TO TRUE *> NEEDS EXPANSION!!!!
- PERFORM 313-Check-For-Numeric-Token
- IF Token-Is-Literal-Number
- IF (F-Last-Token-Ended-Sent = "Y")
- AND (SPI-Current-Division = "D")
- MOVE "LEVEL #" TO SPI-Current-Token
- MOVE "K" TO SPI-Token-Type
- EXIT PARAGRAPH
- ELSE
- EXIT PARAGRAPH
- END-IF
- END-IF
- EXIT PARAGRAPH
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 311-Control-Record.
- UNSTRING ECR-2-256
- DELIMITED BY '"'
- INTO PIC-X10, PIC-X256, Dummy
- END-UNSTRING
- INSPECT PIC-X10 REPLACING ALL '"' BY SPACE
- COMPUTE I = NUMVAL(PIC-X10) - 1
- IF TRIM(PIC-X256,Trailing) = TRIM(Program-Path,Trailing)
- MOVE I TO SPI-Current-Line-No
- SET In-Main-Module TO TRUE
- IF Saved-Section NOT = SPACES
- MOVE Saved-Section TO SPI-Current-Section
- END-IF
- ELSE
- SET In-Copybook TO TRUE
- IF Saved-Section = SPACES
- MOVE SPI-Current-Section TO Saved-Section
- END-IF
- MOVE LENGTH(TRIM(PIC-X256,Trailing)) TO I
- MOVE 0 TO J
- PERFORM UNTIL PIC-X256(I:1) = '/'
- OR I = 0
- SUBTRACT 1 FROM I
- ADD 1 TO J
- END-PERFORM
- UNSTRING PIC-X256((I + 1):J) DELIMITED BY "."
- INTO Filename, Dummy
- END-UNSTRING
- MOVE "[" TO SPI-CS-1
- MOVE Filename TO SPI-CS-2-14
- IF SPI-CS-11-14 NOT = SPACES
- MOVE "..." TO SPI-CS-11-14
- END-IF
- MOVE "]" TO SPI-CS-15
- END-IF
- MOVE SPACES TO Expand-Code-Rec *> Force another READ
- MOVE 256 TO Src-Ptr
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 312-Expand-Code-Record.
- MOVE 1 TO Src-Ptr
- IF In-Main-Module
- ADD 1 To SPI-Current-Line-No
- END-IF
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 313-Check-For-Numeric-Token.
- MOVE SPI-Current-Token TO PIC-X32
- INSPECT PIC-X32
- REPLACING TRAILING SPACES BY "0"
- IF PIC-X32 IS NUMERIC *> Simple Unsigned Integer
- SET Token-Is-Literal-Number TO TRUE
- EXIT PARAGRAPH
- END-IF
- IF PIC-X32(1:1) = "+" OR "-"
- MOVE "0" TO PIC-X32(1:1)
- END-IF
- MOVE 0 TO Tally
- INSPECT PIC-X32
- TALLYING Tally FOR ALL "."
- IF Tally = 1
- INSPECT PIC-X32 REPLACING ALL "." BY "0"
- END-IF
- IF PIC-X32 IS NUMERIC
- SET Token-Is-Literal-Number TO TRUE
- EXIT PARAGRAPH
- END-IF
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 320-IDENTIFICATION-DIVISION.
-GC0710 MOVE "N" TO F-Verb-Has-Been-Found
- IF Token-Is-Key-Word AND SPI-Current-Token = "DIVISION"
- MOVE SPI-Prior-Token TO SPI-Current-Division
- EXIT PARAGRAPH
- END-IF
- IF SPI-Prior-Token = "PROGRAM-ID"
- MOVE SPACES TO SPI-Prior-Token
- MOVE SPI-Current-Token TO SPI-Current-Program-ID
- IF SPI-CP-13-15 NOT = SPACES
- MOVE "..." TO SPI-CP-13-15
- END-IF
- EXIT PARAGRAPH
- END-IF
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 330-ENVIRONMENT-DIVISION.
- IF Token-Is-Key-Word AND SPI-Current-Token = "DIVISION"
- MOVE SPI-Prior-Token TO SPI-Current-Division
- EXIT PARAGRAPH
- END-IF
- IF Token-Is-Key-Word AND SPI-Current-Token = "SECTION"
- MOVE SPI-Prior-Token TO SPI-Current-Section
- EXIT PARAGRAPH
- END-IF
- IF Token-Is-Identifier
- PERFORM 361-Release-Ref
- END-IF
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 340-DATA-DIVISION.
- IF Token-Is-Key-Word AND SPI-Current-Token = "DIVISION"
- MOVE SPI-Prior-Token TO SPI-Current-Division
- EXIT PARAGRAPH
- END-IF
- IF Token-Is-Key-Word AND SPI-Current-Token = "SECTION"
- MOVE SPI-Prior-Token TO SPI-Current-Section
- EXIT PARAGRAPH
- END-IF
- IF (SPI-Current-Token = "PIC" OR "PICTURE")
- AND (Token-Is-Key-Word)
- MOVE "Y" TO F-Processing-PICTURE
- EXIT PARAGRAPH
- END-IF
-GC0710 IF Token-Is-Reserved-Word
-GC0710 AND SPI-Prior-Token = "LEVEL #"
-GC0710 MOVE SPACES TO SPI-Prior-Token
-GC0710 EXIT PARAGRAPH
-GC0710 END-IF
- IF Token-Is-Identifier
- EVALUATE SPI-Prior-Token
- WHEN "FD"
- PERFORM 360-Release-Def
- MOVE SPACES TO SPI-Prior-Token
- WHEN "SD"
- PERFORM 360-Release-Def
- MOVE SPACES TO SPI-Prior-Token
- WHEN "LEVEL #"
- PERFORM 360-Release-Def
- MOVE SPACES TO SPI-Prior-Token
- WHEN "INDEXED"
- PERFORM 360-Release-Def
- MOVE SPACES TO SPI-Prior-Token
- WHEN "USING"
- PERFORM 362-Release-Upd
- MOVE SPACES TO SPI-Prior-Token
- WHEN "INTO"
- PERFORM 362-Release-Upd
- MOVE SPACES TO SPI-Prior-Token
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- EXIT PARAGRAPH
- END-IF
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 350-PROCEDURE-DIVISION.
- IF SPI-Current-Section NOT = "PROCEDURE"
- MOVE "PROCEDURE" TO SPI-Current-Section
- END-IF
-GC0710 IF SPI-Current-Token-UC = "PROGRAM"
-GC0710 AND SPI-Prior-Token = "END"
-GC0710 MOVE "?" TO SPI-Current-Division
-GC0710 EXIT PARAGRAPH
-GC0710 END-IF
- IF Token-Is-Key-Word AND SPI-Current-Token = "DIVISION"
- MOVE SPI-Prior-Token TO SPI-Current-Division
- EXIT PARAGRAPH
- END-IF
- IF SPI-Current-Verb = SPACES
-GC0710 AND F-Verb-Has-Been-Found = "Y"
- IF Token-Is-Identifier
- PERFORM 360-Release-Def
- MOVE SPACES TO SPI-Prior-Token
- END-IF
- EXIT PARAGRAPH
- END-IF
- IF NOT Token-Is-Identifier
- EXIT PARAGRAPH
- END-IF
- EVALUATE SPI-Current-Verb
- WHEN "ACCEPT"
- PERFORM 351-ACCEPT
- WHEN "ADD"
- PERFORM 351-ADD
- WHEN "ALLOCATE"
- PERFORM 351-ALLOCATE
- WHEN "CALL"
- PERFORM 351-CALL
- WHEN "COMPUTE"
- PERFORM 351-COMPUTE
- WHEN "DIVIDE"
- PERFORM 351-DIVIDE
- WHEN "FREE"
- PERFORM 351-FREE
- WHEN "INITIALIZE"
- PERFORM 351-INITIALIZE
- WHEN "INSPECT"
- PERFORM 351-INSPECT
- WHEN "MOVE"
- PERFORM 351-MOVE
- WHEN "MULTIPLY"
- PERFORM 351-MULTIPLY
- WHEN "PERFORM"
- PERFORM 351-PERFORM
- WHEN "SET"
- PERFORM 351-SET
- WHEN "STRING"
- PERFORM 351-STRING
- WHEN "SUBTRACT"
- PERFORM 351-SUBTRACT
- WHEN "TRANSFORM"
- PERFORM 351-TRANSFORM
- WHEN "UNSTRING"
- PERFORM 351-UNSTRING
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-ACCEPT.
- EVALUATE SPI-Prior-Token
- WHEN "ACCEPT"
- PERFORM 362-Release-Upd
- MOVE SPACES TO SPI-Prior-Token
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-ADD.
- EVALUATE SPI-Prior-Token
- WHEN "GIVING"
- PERFORM 362-Release-Upd
- WHEN "TO"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-ALLOCATE.
- EVALUATE SPI-Prior-Token
- WHEN "ALLOCATE"
- PERFORM 362-Release-Upd
- MOVE SPACES TO SPI-Prior-Token
- WHEN "RETURNING"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-CALL.
- EVALUATE SPI-Prior-Token
- WHEN "RETURNING"
- PERFORM 362-Release-Upd
- WHEN "GIVING"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-COMPUTE.
- EVALUATE SPI-Prior-Token
- WHEN "COMPUTE"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-DIVIDE.
- EVALUATE SPI-Prior-Token
- WHEN "INTO"
- PERFORM 363-Set-Upd
- MOVE Sort-Rec TO Held-Reference
- WHEN "GIVING"
- IF Held-Reference NOT = SPACES
- MOVE Held-Reference To Sort-Rec
- MOVE SPACES To Held-Reference
- SR-Ref-Flag
- RELEASE Sort-Rec
- END-IF
- PERFORM 362-Release-Upd
- WHEN "REMAINDER"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-FREE.
- PERFORM 362-Release-Upd
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-INITIALIZE.
- EVALUATE SPI-Prior-Token
- WHEN "INITIALIZE"
- PERFORM 362-Release-Upd
- WHEN "REPLACING"
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-INSPECT.
- EVALUATE SPI-Prior-Token
- WHEN "INSPECT"
- PERFORM 364-Set-Ref
- MOVE SPACES TO Held-Reference
- MOVE SPACES TO SPI-Prior-Token
- WHEN "TALLYING"
- PERFORM 362-Release-Upd
- MOVE SPACES TO SPI-Prior-Token
- WHEN "REPLACING"
- IF Held-Reference NOT = SPACES
- MOVE Held-Reference TO Sort-Rec
- MOVE SPACES TO Held-Reference
- MOVE "*" TO SR-Ref-Flag
- RELEASE Sort-Rec
- END-IF
- MOVE SPACES TO SPI-Prior-Token
- WHEN "CONVERTING"
- IF Held-Reference NOT = SPACES
- MOVE Held-Reference TO Sort-Rec
- MOVE SPACES TO Held-Reference
- MOVE "*" TO SR-Ref-Flag
- RELEASE Sort-Rec
- END-IF
- MOVE SPACES TO SPI-Prior-Token
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-MOVE.
- EVALUATE SPI-Prior-Token
- WHEN "TO"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-MULTIPLY.
- EVALUATE SPI-Prior-Token
- WHEN "BY"
- PERFORM 363-Set-Upd
- MOVE Sort-Rec TO Held-Reference
- WHEN "GIVING"
- MOVE Held-Reference TO Sort-Rec
- MOVE SPACES TO Held-Reference
- SR-Ref-Flag
- RELEASE Sort-Rec
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-PERFORM.
- EVALUATE SPI-Prior-Token
- WHEN "VARYING"
- PERFORM 362-Release-Upd
- MOVE SPACES TO SPI-Prior-Token
- WHEN "AFTER"
- PERFORM 362-Release-Upd
- MOVE SPACES TO SPI-Prior-Token
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-SET.
- EVALUATE SPI-Prior-Token
- WHEN "SET"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-STRING.
- EVALUATE SPI-Prior-Token
- WHEN "INTO"
- PERFORM 362-Release-Upd
- WHEN "POINTER"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-SUBTRACT.
- EVALUATE SPI-Prior-Token
- WHEN "GIVING"
- PERFORM 362-Release-Upd
- WHEN "FROM"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-TRANSFORM.
- EVALUATE SPI-Prior-Token
- WHEN "TRANSFORM"
- PERFORM 362-Release-Upd
- MOVE SPACES TO SPI-Prior-Token
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-UNSTRING.
- EVALUATE SPI-Prior-Token
- WHEN "INTO"
- PERFORM 362-Release-Upd
- WHEN "DELIMITER"
- PERFORM 362-Release-Upd
- WHEN "COUNT"
- PERFORM 362-Release-Upd
- WHEN "POINTER"
- PERFORM 362-Release-Upd
- WHEN "TALLYING"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 360-Release-Def.
- MOVE SPACES TO Sort-Rec
- MOVE SPI-Current-Program-ID TO SR-Prog-ID
- MOVE SPI-Current-Token-UC TO SR-Token-UC
- MOVE SPI-Current-Token TO SR-Token
- MOVE SPI-Current-Section TO SR-Section
- MOVE SPI-Current-Line-No TO SR-Line-No-Def
- MOVE 0 TO SR-Line-No-Ref
- RELEASE Sort-Rec
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 361-Release-Ref.
- PERFORM 364-Set-Ref
- RELEASE Sort-Rec
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 362-Release-Upd.
- PERFORM 363-Set-Upd
- RELEASE Sort-Rec
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 363-Set-Upd.
- MOVE SPACES TO Sort-Rec
- MOVE SPI-Current-Program-ID TO SR-Prog-ID
- MOVE SPI-Current-Token-UC TO SR-Token-UC
- MOVE SPI-Current-Token TO SR-Token
- MOVE SPI-Current-Section TO SR-Section
- MOVE SPI-Current-Line-No TO SR-Line-No-Ref
- MOVE "*" TO SR-Ref-Flag
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 364-Set-Ref.
- MOVE SPACES TO Sort-Rec
- MOVE SPI-Current-Program-ID TO SR-Prog-ID
- MOVE SPI-Current-Token-UC TO SR-Token-UC
- MOVE SPI-Current-Token TO SR-Token
- MOVE SPI-Current-Section TO SR-Section
- MOVE SPI-Current-Line-No TO SR-Line-No-Ref
- .
- /
- 400-Produce-Xref-Listing SECTION.
- 401-Init.
- MOVE SPACES TO Detail-Line-X
- Group-Indicators
- MOVE 0 TO I
- Lines-Left
-GC0710 MOVE 'N' TO F-Duplicate
- .
-
- 402-Process-Sorted-Recs.
- PERFORM FOREVER
- RETURN Sort-File AT END
- EXIT PERFORM
- END-RETURN
- IF SR-Prog-ID NOT = GI-Prog-ID
- OR SR-Token-UC NOT = GI-Token
-GC0710 MOVE 'N' TO F-Duplicate
- IF Detail-Line-X NOT = SPACES
- PERFORM 410-Generate-Report-Line
- END-IF
- IF SR-Prog-ID NOT = GI-Prog-ID
- MOVE 0 TO Lines-Left
- END-IF
- MOVE SR-Prog-ID TO GI-Prog-ID
- MOVE SR-Token-UC TO GI-Token
- END-IF
-GC0710 IF SR-Token-UC = GI-Token
-GC0710 AND SR-Line-No-Def NOT = SPACES
-GC0710 AND Detail-Line-X NOT = SPACES
-GC0710 MOVE 'Y' TO F-Duplicate
-GC0710 PERFORM 410-Generate-Report-Line
-GC0710 MOVE 0 TO I
-GC0710 MOVE SR-Prog-ID TO DLX-Prog-ID
-GC0710 MOVE ' (Duplicate Definition)' TO DLX-Token
-GC0710 MOVE SR-Section TO DLX-Section
-GC0710 MOVE SR-Line-No-Def TO DLX-Line-No-Def
-GC0710 EXIT PERFORM CYCLE
-GC0710 END-IF
-GC0710 IF SR-Token-UC = GI-Token
-GC0710 AND SR-Line-No-Def = SPACES
-GC0710 AND F-Duplicate = 'Y'
-GC0710 MOVE 'N' TO F-Duplicate
-GC0710 PERFORM 410-Generate-Report-Line
-GC0710 MOVE 0 TO I
-GC0710 MOVE SR-Prog-ID TO DLX-Prog-ID
-GC0710 MOVE ' (Duplicate References)' TO DLX-Token
-GC0710 END-IF
- IF Detail-Line-X = SPACES
- MOVE SR-Prog-ID TO DLX-Prog-ID
- MOVE SR-Token TO DLX-Token
- MOVE SR-Section TO DLX-Section
- IF SR-Line-No-Def NOT = SPACES
- MOVE SR-Line-No-Def TO DLX-Line-No-Def
- END-IF
- END-IF
- IF SR-Reference > '000000'
- ADD 1 TO I
- IF I > Line-Nos-Per-Rec
- PERFORM 410-Generate-Report-Line
- MOVE 1 TO I
- END-IF
- MOVE SR-Line-No-Ref TO DLX-Line-No-Ref (I)
- MOVE SR-Ref-Flag TO DLX-Ref-Flag (I)
- END-IF
- END-PERFORM
- IF Detail-Line-X NOT = SPACES
- PERFORM 410-Generate-Report-Line
- END-IF
- EXIT SECTION
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 410-Generate-Report-Line.
- IF Lines-Left < 1
- IF F-First-Record = "Y"
- MOVE "N" TO F-First-Record
- WRITE Report-Rec FROM Heading-1X BEFORE 1
- ELSE
- MOVE SPACES TO Report-Rec
- WRITE Report-Rec BEFORE PAGE
- MOVE SPACES TO Report-Rec
- WRITE Report-Rec BEFORE 1
- WRITE Report-Rec FROM Heading-1X BEFORE 1
- END-IF
- WRITE Report-Rec FROM Heading-2 BEFORE 1
- WRITE Report-Rec FROM Heading-4X BEFORE 1
- WRITE Report-Rec FROM Heading-5X BEFORE 1
- COMPUTE
- Lines-Left = Lines-Per-Page - 4
- END-COMPUTE
- END-IF
- WRITE Report-Rec FROM Detail-Line-X BEFORE 1
- MOVE SPACES TO Detail-Line-X
- MOVE 0 TO I
- SUBTRACT 1 FROM Lines-Left
- .
- /
- 500-Produce-Source-Listing SECTION.
- 501-Generate-Source-Listing.
- OPEN INPUT Source-Code
- Expand-Code
- MOVE 0 TO Source-Line-No
- PERFORM FOREVER
- READ Expand-Code AT END
- EXIT PERFORM
- END-READ
- IF ECR-1 = "#"
- PERFORM 510-Control-Record
- ELSE
- PERFORM 520-Expand-Code-Record
- END-IF
- END-PERFORM
- CLOSE Source-Code
- Expand-Code
- EXIT SECTION
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 510-Control-Record.
- UNSTRING ECR-2-256
- DELIMITED BY '"'
- INTO PIC-X10, PIC-X256, Dummy
- END-UNSTRING
- IF TRIM(PIC-X256,Trailing) = TRIM(Program-Path,Trailing) *> Main Pgm
- SET In-Main-Module TO TRUE
- IF Source-Line-No > 0
- READ Expand-Code END-READ
- END-IF
- ELSE *> COPY
- SET In-Copybook TO TRUE
- END-IF
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 520-Expand-Code-Record.
- IF In-Main-Module
- ADD 1 To SPI-Current-Line-No
- READ Source-Code AT END NEXT SENTENCE END-READ
- ADD 1 TO Source-Line-No
- MOVE SPACES TO Detail-Line-S
- MOVE Source-Line-No TO DLS-Line-No
- MOVE SCR-1-128 TO DLS-Statement
-GC0410 IF SCR-7 = "/"
-GC0410 MOVE 0 TO Lines-Left
-GC0410 END-IF
- PERFORM 530-Generate-Source-Line
- IF SCR-129-256 NOT = SPACES
- MOVE SPACES TO Detail-Line-S
- MOVE SCR-129-256 TO DLS-Statement
- PERFORM 530-Generate-Source-Line
- END-IF
- ELSE
- IF Expand-Code-Rec NOT = SPACES
- MOVE SPACES TO Detail-Line-S
- MOVE ECR-1-128 TO DLS-Statement
- PERFORM 530-Generate-Source-Line
- IF ECR-129-256 NOT = SPACES
- MOVE SPACES TO Detail-Line-S
- MOVE ECR-129-256 TO DLS-Statement
- PERFORM 530-Generate-Source-Line
- END-IF
- END-IF
- END-IF
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 530-Generate-Source-Line.
- IF Lines-Left < 1
- IF F-First-Record = "Y"
- MOVE "N" TO F-First-Record
- WRITE Report-Rec FROM Heading-1S BEFORE 1
- ELSE
- MOVE SPACES TO Report-Rec
- WRITE Report-Rec BEFORE PAGE
- MOVE SPACES TO Report-Rec
- WRITE Report-Rec BEFORE 1
- WRITE Report-Rec FROM Heading-1S BEFORE 1
- END-IF
- WRITE Report-Rec FROM Heading-2 BEFORE 1
- WRITE Report-Rec FROM Heading-4S BEFORE 1
- WRITE Report-Rec FROM Heading-5S BEFORE 1
- COMPUTE
- Lines-Left = Lines-Per-Page - 4
- END-COMPUTE
- END-IF
- WRITE Report-Rec FROM Detail-Line-S BEFORE 1
- MOVE SPACES TO Detail-Line-S
- SUBTRACT 1 FROM Lines-Left
- .
-
END PROGRAM LISTING.
diff --git a/tests/examplefiles/example.coffee b/tests/examplefiles/example.coffee
new file mode 100644
index 00000000..2cbd1df3
--- /dev/null
+++ b/tests/examplefiles/example.coffee
@@ -0,0 +1,27 @@
+# function arrows
+
+methodA:-> 'A'
+methodB:=> 'B'
+methodC:()=> 'C'
+methodD:()-> 'D'
+methodE:(a,b)-> 'E'
+methodF:(c,d)-> 'F'
+-> 'G'
+=> 'H'
+
+(-> 'I')
+(=> 'J')
+
+# strings
+
+"#{wow}"
+"w#{wow}w"
+"#wow"
+"wow#"
+"w#ow"
+
+'#{wow}'
+'w#{wow}w'
+'#wow'
+'wow#'
+'w#ow'
diff --git a/tests/examplefiles/example.e b/tests/examplefiles/example.e
new file mode 100644
index 00000000..2e43954b
--- /dev/null
+++ b/tests/examplefiles/example.e
@@ -0,0 +1,124 @@
+note
+ description : "[
+		This is used to have almost every language element."
+
+ That way, I can correctly test the lexer. %]"
+
+		Don't try to understand what it does. It's not even compiling.
+ ]"
+ date : "August 6, 2013"
+ revision : "0.1"
+
+class
+ SAMPLE
+
+inherit
+ ARGUMENTS
+ rename
+ Command_line as Caller_command,
+ command_name as Application_name
+ undefine
+ out
+ end
+ ANY
+ export
+ {ANY} out
+ redefine
+ out
+ end
+
+
+
+create
+ make
+
+convert
+ as_boolean: {BOOLEAN}
+
+feature {NONE} -- Initialization
+
+ make
+ -- Run application.
+ local
+ i1_:expanded INTEGER
+ f_1:REAL_64
+ l_char:CHARACTER_8
+ do
+ l_char:='!'
+ l_char:='%''
+ l_char:='%%'
+ i1_:=80 - 0x2F0C // 0C70 \\ 0b10110 * 1;
+ f_1:=0.1 / .567
+ f_1:=34.
+ f_1:=12345.67890
+ inspect i1_
+ when 1 then
+ io.output.put_integer (i1_) -- Comment
+ else
+ io.output.put_real (f_1.truncated_to_real)
+ end
+ io.output.put_string (CuRrEnt.out) -- Comment
+ (agent funct_1).call([1,2,"Coucou"])
+ end
+
+feature -- Access
+
+ funct_1(x,y:separate INTEGER;a_text:READABLE_STRING_GENERAL):detachable BOOLEAN
+ obsolete "This function is obsolete"
+ require
+ Is_Attached: AttAched a_text
+ local
+ l_list:LIST[like x]
+ do
+ if (NOT a_text.is_empty=TrUe or elSe ((x<0 aNd x>10) oR (y>0 and then y<10))) xor True thEn
+ ResuLT := FalSe
+ elseif (acROss l_list as la_list SoMe la_list.item<0 end) implies a_text.is_boolean then
+ ResuLT := FalSe
+ else
+ Result := TruE
+ eND
+ from
+ l_list.start
+ until
+ l_list.exhausted
+ loop
+ l_list.forth
+ variant
+ l_list.count - l_list.index
+ end
+ check Current /= Void end
+ debug print("%"Here%"%N") end
+ ensure
+ Is_Cool_Not_Change: is_cool = old is_cool
+ end
+
+ is_cool:BOOLEAN
+ attribute
+ Result:=False
+ end
+
+ froZen c_malloc: POINTER is
+ exTErnal
+ "C inline use <stdlib.h>"
+ alIAs
+ "malloc (1)"
+ end
+
+ as_boolean:BOOLEAN
+ do
+ Result:=True
+ rescue
+ retry
+ end
+
+feature {ANY} -- The redefine feature
+
+ out:STRING_8
+ once
+ reSUlt:=PrecursOr {ANY}
+ Result := "Hello Worl"+('d').out
+ end
+
+invariant
+ Always_Cool: is_cool
+end
diff --git a/tests/examplefiles/example.f90 b/tests/examplefiles/example.f90
new file mode 100644
index 00000000..40462189
--- /dev/null
+++ b/tests/examplefiles/example.f90
@@ -0,0 +1,8 @@
+program main
+ integer, parameter :: mykind = selected_real_kind()
+ print *, 1
+ print *, 1_mykind
+ print *, 1.
+ print *, 1._mykind
+ print *, (1., 1._mykind)
+end program main
diff --git a/tests/examplefiles/example.feature b/tests/examplefiles/example.feature
new file mode 100644
index 00000000..a26268da
--- /dev/null
+++ b/tests/examplefiles/example.feature
@@ -0,0 +1,16 @@
+# First comment
+Feature: My amazing feature
+ Feature description line 1
+ Feature description line 2
+
+#comment
+Scenario Outline: My detailed scenario #string
+ Given That <x> is set
+ When When I <subtract>
+ Then I should get the <remain#der>
+
+ # indented comment
+ Examples:
+ | x | subtract | remain#der |
+ | 12 | 5\|3 | #73 |
+ | #the | 10 | 15 |
diff --git a/tests/examplefiles/example.gd b/tests/examplefiles/example.gd
new file mode 100644
index 00000000..c285ea32
--- /dev/null
+++ b/tests/examplefiles/example.gd
@@ -0,0 +1,23 @@
+#############################################################################
+##
+#W example.gd
+##
+## This file contains a sample of a GAP declaration file.
+##
+DeclareProperty( "SomeProperty", IsLeftModule );
+DeclareGlobalFunction( "SomeGlobalFunction" );
+
+
+#############################################################################
+##
+#C IsQuuxFrobnicator(<R>)
+##
+## <ManSection>
+## <Filt Name="IsQuuxFrobnicator" Arg='R' Type='Category'/>
+##
+## <Description>
+## Tests whether R is a quux frobnicator.
+## </Description>
+## </ManSection>
+##
+DeclareSynonym( "IsQuuxFrobnicator", IsField and IsGroup );
diff --git a/tests/examplefiles/example.gi b/tests/examplefiles/example.gi
new file mode 100644
index 00000000..c9c5e55d
--- /dev/null
+++ b/tests/examplefiles/example.gi
@@ -0,0 +1,64 @@
+#############################################################################
+##
+#W  example.gi
+##
+## This file contains a sample of a GAP implementation file.
+##
+
+
+#############################################################################
+##
+#M SomeOperation( <val> )
+##
+## performs some operation on <val>
+##
+InstallMethod( SomeProperty,
+ "for left modules",
+ [ IsLeftModule ], 0,
+ function( M )
+ if IsFreeLeftModule( M ) and not IsTrivial( M ) then
+ return true;
+ fi;
+ TryNextMethod();
+ end );
+
+
+
+#############################################################################
+##
+#F SomeGlobalFunction( )
+##
+##  A global variadic function.
+##
+InstallGlobalFunction( SomeGlobalFunction, function( arg )
+ if Length( arg ) = 3 then
+ return arg[1] + arg[2] * arg[3];
+ elif Length( arg ) = 2 then
+        return arg[1] - arg[2];
+ else
+ Error( "usage: SomeGlobalFunction( <x>, <y>[, <z>] )" );
+ fi;
+ end );
+
+
+#
+# A plain function.
+#
+SomeFunc := function(x, y)
+ local z, func, tmp, j;
+ z := x * 1.0;
+ y := 17^17 - y;
+ func := a -> a mod 5;
+ tmp := List( [1..50], func );
+ while y > 0 do
+ for j in tmp do
+ Print(j, "\n");
+ od;
+ repeat
+ y := y - 1;
+ until 0 < 1;
+ y := y -1;
+ od;
+ return z;
+end;
+ 
\ No newline at end of file
diff --git a/tests/examplefiles/example.golo b/tests/examplefiles/example.golo
new file mode 100644
index 00000000..92ff78b5
--- /dev/null
+++ b/tests/examplefiles/example.golo
@@ -0,0 +1,113 @@
+#
+# Comments
+#
+
+module pygments.Example
+
+import some.Module
+
+local function foo = |a, b| -> a + b
+
+----
+golodoc string
+----
+augment java.util.Collection {
+
+ ----
+ sub doc
+ ----
+ function plop = |this, v| {
+ return this: length() + v
+ }
+}
+
+function bar = |a, b| {
+ let msg = "a string"
+ var tmp = ""
+ tmp = tmp + a: toString()
+ println(tmp + b)
+}
+
+function baz = {
+ foreach i in range(0, 5) {
+ if i % 2 == 0 and true or false {
+ print("e")
+ } else {
+ print("o")
+ }
+ }
+}
+
+function userMatch = |v| ->
+ match {
+ when v % 2 == 0 then "e"
+ otherwise "o"
+ }
+}
+
+function add = |x| -> |y| -> x + y
+
+let aChar = 'a'
+
+let multiline =
+"""
+foo
+bar
+baz
+"""
+
+local function myObj = -> DynamicObject():
+ name("foo"):
+ age(25):
+ define("meth", |this| -> this: name() + this: age()
+
+----
+Golo doc string
+----
+function nullTest = {
+ let m = map[
+ ["a", 1],
+ ["b", 2]
+ ]
+
+  println(m: get("a") orIfNull 0)
+  println(m: get("b")?: toString() orIfNull "0")
+
+}
+
+struct Point = { x, y }
+
+function deco1 = |fun| {
+ return |args...| {
+ return "deco1 + " + fun: invokeWithArguments(args)
+ }
+}
+
+@deco1
+function decofoo = |a| {
+ return "foo: " + a
+}
+
+@deco1
+function decobar = |a| -> "bar: " + a
+
+function deco2 = |fun| {
+ return |args...| {
+ return "deco2 + " + fun: invokeWithArguments(args)
+ }
+}
+
+@deco2
+@deco1
+function decobaz = |a| -> "baz: " + a
+
+let deco3 = ^deco1: andThen(^deco2)
+
+@deco3
+function decospam = |a| -> "spam: " + a
+
+@another.Module.deco
+function ping = -> "pong"
+
+@deco("with", params)
+function gnop = -> "gnip"
diff --git a/tests/examplefiles/example.groovy b/tests/examplefiles/example.groovy
new file mode 100644
index 00000000..25ef2eab
--- /dev/null
+++ b/tests/examplefiles/example.groovy
@@ -0,0 +1,2 @@
+#!/usr/bin/env groovy
+println "Hello World"
diff --git a/tests/examplefiles/example.hs b/tests/examplefiles/example.hs
new file mode 100644
index 00000000..f5e2b555
--- /dev/null
+++ b/tests/examplefiles/example.hs
@@ -0,0 +1,31 @@
+module ĈrazyThings where
+
+import "base" Data.Char
+import "base" Data.Char (isControl, isSpace)
+import "base" Data.Char (isControl, --isSpace)
+ isSpace)
+import "base" Data.Char (isControl, -- isSpace)
+ isSpace)
+
+(-->) :: Num a => a -- signature
+(-->) = 2 -- >implementation
+
+--test comment
+-- test comment
+
+main :: IO ()
+main = putStrLn "hello world"
+
+gádd x y = x + y
+ádd x y = x + y
+
+
+data ĈrazyThings =
+ Ĉar |
+ House |
+ Peár
+ deriving (Show, Eq)
+
+-- some char literals:
+
+charl = ['"', 'a', '\ESC', '\'', ' ']
diff --git a/tests/examplefiles/example.hx b/tests/examplefiles/example.hx
index fd93bb49..7584fc81 100644
--- a/tests/examplefiles/example.hx
+++ b/tests/examplefiles/example.hx
@@ -139,4 +139,54 @@ typedef Pt2 = {
y:Float,
?z:Float, //optional z
add : Point -> Void,
-}
\ No newline at end of file
+}
+
+
+//top-level class members
+public function test();
+private var attr(get, set) = 1;
+
+
+//pre-proc number
+public static inline function indexOf<T>(arr:Array<T>, v:T) : Int
+{
+ #if (haxe_ver >= 3.1)
+ return arr.indexOf(v);
+ #else
+ #if (flash || js)
+ return untyped arr.indexOf(v);
+ #else
+ return std.Lambda.indexOf(arr, v);
+ #end
+ #end
+}
+
+//macro reification
+var e = macro var $myVar = 0;
+var e = macro ${v}.toLowerCase();
+var e = macro o.$myField;
+var e = macro { $myField : 0 };
+var e = macro $i{varName}++;
+var e = macro $v{myStr};
+var args = [macro "sub", macro 3];
+var e = macro "Hello".toLowerCase($a{args});
+(macro $i{tmp}.addAtom($v{name}, $atom)).finalize(op.pos);
+
+var c = macro class MyClass {
+ public function new() { }
+ public function $funcName() {
+ trace($v{funcName} + " was called");
+ }
+}
+
+var c = macro interface IClass {};
+
+//macro class could have no name...
+var def = macro class {
+ private inline function new(loader) this = loader;
+ private var loader(get,never) : $loaderType;
+ inline private function get_loader() : $loaderType return this;
+};
+
+//ECheckType
+var f = (123:Float);
\ No newline at end of file
diff --git a/tests/examplefiles/example.i6t b/tests/examplefiles/example.i6t
new file mode 100644
index 00000000..0f41b425
--- /dev/null
+++ b/tests/examplefiles/example.i6t
@@ -0,0 +1,32 @@
+B/examt: Example Template.
+
+@Purpose: To show the syntax of I6T, specifically the parts relating to the
+inclusion of I7 and at signs in the first column.
+
+@-------------------------------------------------------------------------------
+
+@p Lines.
+
+@c
+{-lines:type}
+! This is a comment.
+{-endlines}
+
+@-This line begins with @-, so it is ignored.
+
+@p Paragraph.
+This is a paragraph.
+@p Another paragraph.
+So
+
+is
+
+this.
+
+@Purpose: This purpose line is ignored.
+
+@c At signs and (+ +).
+[ Foo i;
+print (+score [an I7 value]+), "^";
+@add sp 1 -> i; ! Assembly works even in the first column.
+];
diff --git a/tests/examplefiles/example.i7x b/tests/examplefiles/example.i7x
new file mode 100644
index 00000000..ab94ac69
--- /dev/null
+++ b/tests/examplefiles/example.i7x
@@ -0,0 +1,45 @@
+example by David Corbett begins here.
+
+"Implements testable examples."
+
+An example is a kind of thing. An example can be tested. An example is seldom tested.
+
+example ends here.
+
+----
+[The] documentation [starts here.]
+----
+
+This extension adds examples, which may be tested.
+
+Chapter: Usage
+
+To add an example to the story, we write:
+
+ The foobar is an example.
+
+To interact with it in Inform 6, we write something like:
+
+ To say (E - example): (-
+ print (object) {E};
+ -).
+ [The IDE's documentation viewer does not display the closing -). I don't know how to fix that.]
+
+Section: Testing
+
+We can make an example be tested using:
+
+ now the foobar is tested;
+
+Example: * Exempli Gratia - A simple example.
+
+ *: "Exempli Gratia"
+
+ Include example by David Corbett.
+
+ The Kitchen is a room. The egg is an example, here.
+
+ Before dropping the egg:
+ now the egg is tested.
+
+ Test me with "get egg / drop egg".
diff --git a/tests/examplefiles/example.inf b/tests/examplefiles/example.inf
new file mode 100644
index 00000000..73cdd087
--- /dev/null
+++ b/tests/examplefiles/example.inf
@@ -0,0 +1,374 @@
+!% $SMALL ! This is ICL, not a comment.
+!% -w
+
+!% A comprehensive test of Inform6Lexer.
+
+Switches d2SDq;
+
+Constant Story "Informal Testing";
+Constant Headline "^Not a game.^";!% This is a comment, not ICL.
+
+Release 2;
+Serial "140308";
+Version 5;
+
+Ifndef TARGET_ZCODE;
+Ifndef TARGET_GLULX;
+Ifndef WORDSIZE;
+Default WORDSIZE 2;
+Constant TARGET_ZCODE;
+Endif;
+Endif;
+Endif;
+
+Ifv3; Message "Compiling to version 3"; Endif;
+Ifv5; Message "Not compiling to version 3"; endif;
+ifdef TARGET_ZCODE;
+#IFTRUE (#version_number == 5);
+Message "Compiling to version 5";
+#ENDIF;
+endif ;
+
+Replace CreatureTest;
+
+Include "Parser";
+Include "VerbLib";
+
+# ! A hash is optional at the top level.
+Object kitchen "Kitchen"
+ with description "You are in a kitchen.",
+ arr 1 2 3 4,
+ has light;
+
+#[ Initialise;
+ location = kitchen;
+ print "v"; inversion; "^";
+];
+
+Ifdef VN_1633;
+Replace IsSeeThrough IsSeeThroughOrig;
+[ IsSeeThrough * o;
+ return o hasnt opaque || IsSeeThroughOrig(o);
+];
+Endif;
+
+Abbreviate "test";
+
+Array table buffer 260;
+
+Attribute reversed;
+Attribute opaque alias locked;
+Constant to reversed;
+
+Property long additive additive long alias;
+Property long long long wingspan alias alias;
+
+Class Flier with wingspan 5;
+Class Bird(10) has animate class Flier with wingspan 2;
+
+Constant Constant1;
+Constant Constant2 Constant1;
+Constant Constant3 = Constant2;
+Ifdef VN_1633; Undef Constant; Endif;
+
+Ifdef VN_1633;
+Dictionary 'word' 1 2;
+Ifnot;
+Dictionary dict_word "word";
+Endif;
+
+Fake_action NotReal;
+
+Global global1;
+Global global2 = 69105;
+
+Lowstring low_string "low string";
+
+Iftrue false;
+Message error "Uh-oh!^~false~ shouldn't be ~true~.";
+Endif;
+Iffalse true;
+Message fatalerror "Uh-oh!^~true~ shouldn't be ~false~.";
+Endif;
+
+Nearby person "person"
+ with name 'person',
+ description "This person is barely implemented.",
+ life [ * x y z;
+ Ask: print_ret (The) self, " says nothing.";
+ Answer: print (The) self, " didn't say anything.^"; rfalse;
+ ]
+ has has animate transparent;
+
+Object -> -> test_tube "test tube"
+ with name 'test' "tube" 'testtube',
+ has ~openable ~opaque container;
+
+Bird -> pigeon
+ with name 'pigeon',
+ description [;
+ "The pigeon has a wingspan of ", self.&wingspan-->0, " wing units.";
+ ];
+
+Object -> "thimble" with name 'thimble';
+
+Object -> pebble "pebble" with name 'pebble';
+
+Ifdef TARGET_ZCODE; Trace objects; Endif;
+
+Statusline score;
+
+Stub StubR 3;
+
+Ifdef TARGET_ZCODE;
+Zcharacter "abcdefghijklmnopqrstuvwxyz"
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ "123456789.,!?_#'0/@{005C}-:()";
+Zcharacter table '@!!' '@<<' '@'A';
+Zcharacter table + '@AE' '@{dc}' '@et' '@:y';
+Ifnot;
+Ifdef TARGET_GLULX;
+Message "Glulx doesn't use ~Zcharacter~.^Oh well."; ! '~' and '^' work here.
+Ifnot;
+Message warning "Uh-oh! ^~^"; ! They don't work in other Messages.
+Endif;
+Endif;
+
+Include "Grammar";
+
+Verb"acquire"'collect'='take';
+
+[ NounFilter; return noun ofclass Bird; ];
+
+[ ScopeFilter obj;
+ switch (scope_stage) {
+ 1: rtrue;
+ 2: objectloop (obj in compass) PlaceInScope(obj);
+ 3: "Nothing is in scope.";
+ }
+];
+
+Verb meta "t" 'test'
+ * 'held' held -> TestHeld
+ * number -> TestNumber
+ * reversed -> TestAttribute
+ * 'creature' creature -> TestCreature
+ * 'multiheld' multiheld -> TestMultiheld
+ * 'm' multiexcept 'into'/"in" noun -> TestMultiexcept
+ * 'm' multiinside 'from' noun -> TestMultiinside
+ * multi -> TestMulti
+ * 'filter'/'f' noun=NounFilter -> TestNounFilter
+ * 'filter'/'f' scope=ScopeFilter -> TestScopeFilter
+ * 'special' special -> TestSpecial
+ * topic -> TestTopic;
+
+Verb 'reverse' 'swap' 'exchange'
+ * held 'for' noun -> reverse
+ * noun 'with' noun -> reverse reverse;
+
+Extend "t" last * noun -> TestNoun;
+
+Extend 't' first * -> Test;
+
+Extend 'wave' replace * -> NewWave;
+
+Extend only 'feel' 'touch' replace * noun -> Feel;
+
+[ TestSub a b o;
+ string 25 low_string;
+ print "Test what?> ";
+ table->0 = 260;
+ parse->0 = 61;
+ #Ifdef TARGET_ZCODE;
+ read buffer parse;
+ #Ifnot; ! TARGET_GLULX
+ KeyboardPrimitive(buffer, parse);
+ #Endif; ! TARGET_
+ switch (parse-->1) {
+ 'save':
+ #Ifdef TARGET_ZCODE;
+ #Ifv3;
+ @save ?saved;
+ #Ifnot;
+ save saved;
+ #Endif;
+ #Endif;
+ print "Saving failed.^";
+ 'restore':
+ #Ifdef TARGET_ZCODE;
+ restore saved;
+ #Endif;
+ print "Restoring failed.^";
+ 'restart':
+ @restart;
+ 'quit', 'q//':
+ quit;
+ return 2; rtrue; rfalse; return;
+ 'print', 'p//':
+ print "Print:^",
+ " (string): ", (string) "xyzzy^",
+ " (number): ", (number) 123, "^",
+ " (char): ", (char) 'x', "^",
+ " (address): ", (address) 'plugh//p', "^",
+ " (The): ", (The) person, "^",
+ " (the): ", (the) person, "^",
+ " (A): ", (A) person, "^",
+ " (a): ", (a) person, "^",
+ " (an): ", (an) person, "^",
+ " (name): ", (name) person, "^",
+ " (object): ", (object) person, "^",
+ " (property): ", (property) alias, "^",
+ " (<routine>): ", (LanguageNumber) 123, "^",
+ " <expression>: ", a * 2 - 1, "^",
+ " (<expression>): ", (a + person), "^";
+ print "Escapes:^",
+ " by mnemonic: @!! @<< @'A @AE @et @:y^",
+ " by decimal value: @@64 @@126^",
+ " by Unicode value: @{DC}@{002b}^",
+ " by string variable: @25^";
+ 'font', 'style':
+ font off; print "font off^";
+ font on; print "font on^";
+ style reverse; print "style reverse^"; style roman;
+ style bold; print "style bold^";
+ style underline; print "style underline^";
+ style fixed; print "style fixed^";
+ style roman; print "style roman^";
+ 'statements':
+ spaces 8;
+ objectloop (o) {
+ print "objectloop (o): ", (the) o, "^";
+ }
+ objectloop (o in compass) { ! 'in' is a keyword
+ print "objectloop (o in compass): ", (the) o, "^";
+ }
+ objectloop (o in compass && true) { ! 'in' is an operator
+ print "objectloop (o in compass && true): ", (the) o, "^";
+ }
+ objectloop (o from se_obj) {
+ print "objectloop (o from se_obj): ", (the) o, "^";
+ }
+ objectloop (o near person) {
+ print "objectloop (o near person): ", (the) o, "^";
+ }
+ #Ifdef TARGET_ZCODE;
+ #Trace assembly on;
+@ ! This is assembly.
+ add -4 ($$1+$3)*2 -> b;
+ @get_sibling test_tube -> b ?saved;
+ @inc [b];
+ @je sp (1+3*0) ? equal;
+ @je 1 ((sp)) ?~ different;
+ .! This is a label:
+ equal;
+ print "sp == 1^";
+ jump label;
+ .different;
+ print "sp @@126= 1^";
+ .label;
+ #Trace off; #Endif; ! TARGET_ZCODE
+ a = random(10);
+ switch (a) {
+ 1, 9:
+ box "Testing oneself is best when done alone."
+ " -- Jimmy Carter";
+ 2, 6, to, 3 to 5, to to to:
+ <Take pigeon>;
+ #Ifdef VN_1633;
+ <Jump, person>;
+ #Endif;
+ a = ##Drop;
+ < ! The angle brackets may be separated by whitespace.
+ < (a) pigeon > >;
+ default:
+ do {
+ give person general ~general;
+ } until (person provides life && ~~false);
+ if (a == 7) a = 4;
+ else a = 5;
+ }
+ 'expressions':
+ a = 1+1-1*1/1%1&1|1&&1||1==(1~=(1>(1<(1>=(1<=1)))));
+ a++; ++a; a--; --a;
+ a = person.life;
+ a = kitchen.&arr;
+ a = kitchen.#arr;
+ a = Bird::wingspan;
+ a = kitchen has general;
+ a = kitchen hasnt general;
+ a = kitchen provides arr;
+ a = person in kitchen;
+ a = person notin kitchen;
+ a = person ofclass Bird;
+ a = a == 0 or 1;
+ a = StubR();
+ a = StubR(a);
+ a = StubR(, a);
+ a = "string";
+ a = 'word';
+ a = '''; ! character
+ a = $09afAF;
+ a = $$01;
+ a = ##Eat; a = #a$Eat;
+ a = #g$self;
+ a = #n$!word;
+ a = #r$StubR;
+ a = #dict_par1;
+ default:
+ for (a = 2, b = a; (a < buffer->1 + 2) && (Bird::wingspan): ++a, b--) {
+ print (char) buffer->a;
+ }
+ new_line;
+ for (::) break;
+ }
+ .saved;;
+];
+
+[ TestNumberSub;
+ print_ret parsed_number, " is ", (number) parsed_number, ".";
+];
+
+[ TestAttributeSub; print_ret (The) noun, " has been reversed."; ];
+
+[ CreatureTest obj; return obj has animate; ];
+
+[ TestCreatureSub; print_ret (The) noun, " is a creature."; ];
+
+[ TestMultiheldSub; print_ret "You are holding ", (the) noun, "."; ];
+
+[ TestMultiexceptSub; "You test ", (the) noun, " with ", (the) second, "."; ];
+
+[ TestMultiinsideSub; "You test ", (the) noun, " from ", (the) second, "."; ];
+
+[ TestMultiSub; print_ret (The) noun, " is a thing."; ];
+
+[ TestNounFilterSub; print_ret (The) noun, " is a bird."; ];
+
+[ TestScopeFilterSub; print_ret (The) noun, " is a direction."; ];
+
+[ TestSpecialSub; "Your lucky number is ", parsed_number, "."; ];
+
+[ TestTopicSub; "You discuss a topic."; ];
+
+[ TestNounSub; "That is ", (a) noun, "."; ];
+
+[ TestHeldSub; "You are holding ", (a) noun, "."; ];
+
+[ NewWaveSub; "That would be foolish."; ];
+
+[ FeelSub; print_ret (The) noun, " feels normal."; ];
+
+[ ReverseSub from;
+ from = parent(noun);
+ move noun to parent(second);
+ if (from == to)
+ move second to to;
+ else
+ move second to from;
+ give noun to;
+ from = to;
+ give second from;
+ "You swap ", (the) noun, " and ", (the) second, ".";
+];
+
+End: The End directive ends the source code.
diff --git a/tests/examplefiles/example.j b/tests/examplefiles/example.j
new file mode 100644
index 00000000..16cdde86
--- /dev/null
+++ b/tests/examplefiles/example.j
@@ -0,0 +1,564 @@
+; Example JVM assembly
+; Tested with JasminXT 2.4
+
+.bytecode 49.0
+.source HelloWorld.java
+.class public final enum HelloWorld
+.super java/lang/Object
+.implements java/io/Serializable
+.signature "Ljava/lang/Object;Ljava/io/Serializable;"
+.enclosing method hw/jasmin.HelloWorldRunner.run()V
+.deprecated
+.annotation visible HelloWorld
+ I I = 0
+.end annotation
+.debug "Happy debugging!"
+
+.inner interface public InnerInterface inner 'HelloWorld$InnerInterface' outer HelloWorld
+.inner class public InnerClass inner HelloWorld$InnerClass outer 'HelloWorld'
+
+.field public volatile transient I I
+.field static protected final serialVersionUID 'J' signature "TJ;" = 2147483648
+.field annotation protected 'protected' [[[Lcom/oracle/util/Checksums;
+ .deprecated
+ .signature "[[[Lcom/oracle/util/Checksums;"
+ .attribute foo "foo.txt"
+ .attribute 'foo' "foo.txt"
+.end field
+.field public newline I
+.field public static defaultString 'Ljava/lang/String;'
+
+.method public <init>()V
+ .limit stack 3
+.line 7
+ .var 0 is self LHelloWorld; from 0 to 1
+ aload_0
+ invokenonvirtual java/lang/Object/<init>()V
+ return
+.end method
+
+.method static public main([Ljava/lang/String;)V
+ .limit locals 7
+ .limit stack 10
+ .throws java.lang/RuntimeException
+ .catch java/lang.ClassCastException from cast to 'extra_l' using /extra
+ .signature "([Ljava/lang/String;)V"
+ .stack
+ offset /Input
+ locals Object java/lang/String
+ locals Uninitialized 'End'
+ locals Uninitialized 0
+ locals Top
+ locals Integer
+ locals Float
+ locals Long
+ locals Double
+ locals Null
+ locals UninitializedThis
+ stack Object java/lang/String
+ stack Uninitialized End
+ stack 'Uninitialized' 0
+ stack 'Top'
+ stack Integer
+ stack Float
+ stack Long
+ stack Double
+ stack Null
+ stack UninitializedThis
+ .end stack
+ .stack use 1 locals
+ offset 'extra'
+ .end stack
+ .stack use locals
+ .end stack
+.line 0xd
+ .var 0 is args [Ljava/lang/String;
+ aload_w 0
+ arraylength
+ ifne /Input
+ iconst_1
+ anewarray java/lang/String
+ checkcast [Ljava/lang/String;
+ astore_0
+ aload_0
+ iconst_0
+ ldc "World"
+ dup
+ putstatic HelloWorld.defaultString Ljava/lang/String;
+ aastore
+/Input:
+ iconst_2
+ iconst_3
+ multianewarray [[C 2
+ astore_1
+ aload_1
+ iconst_0
+ aaload
+ astore_2
+ aload_1
+ iconst_1
+ aaload
+ astore_3
+
+<<o:
+ aload_3
+ iconst_0
+ invokestatic HelloWorld/int()I
+ castore
+
+<<\u0020:
+ aload_3
+ dconst_1
+ dconst_0
+ dsub
+ d2i
+ invokestatic HelloWorld/double()D
+ d2i
+ castore
+
+<<!:
+ aload_3
+ lconst_0
+ dup2
+ lxor
+ lconst_1
+ dup2
+ ladd
+ lsub
+ lneg
+ l2i
+ invokestatic HelloWorld/long()J
+ l2i
+ castore
+
+<<H:
+ aload_2
+ fconst_0
+ fconst_1
+ fconst_2
+ dup_x2
+ fdiv
+ fmul
+ f2l
+ l2i
+ swap
+ invokestatic HelloWorld/float(F)F
+ f2i
+ castore
+
+<<e :
+ aload_2
+ iconst_1
+ i2s
+ i2c
+ i2b
+ iconst_1
+ newarray short
+ dup
+ iconst_0
+ iconst_1
+ newarray byte
+ dup
+ iconst_0
+ sipush 0x65
+ bastore
+ iconst_0
+ baload
+ sastore
+ iconst_0
+ saload
+ int2short
+ int2char
+ int2byte
+ castore
+
+ <<l :
+ aload_2
+ iconst_2
+ bipush 0x1b
+*2:
+ iconst_1
+ ishl
+ dup
+ lookupswitch
+ 0: '/lookupswitch'
+ 0x6c: /lookupswitch
+ default: *2
+/lookupswitch:
+ castore
+
+ ldc2_w 2
+ dup2
+ lcmp
+ .set i 4
+ .set 'j' 5
+ .var 4 is i I from 'i++' to End
+ .var 5 is j I signature "I" from i++ to End
+ istore 4
+ goto 1
+i++:
+ iinc 4 1
+1: iconst_0
+ istore_w 5
+ goto_w 2
+j++:
+ iinc_w 5 1
+2: getstatic java/lang/System/out Ljava/io/PrintStream;
+ aload_1
+ iload 4
+ aaload
+ iload_w 5
+ caload
+ invokevirtual java/io/PrintStream/print(C)V
+ iload 5
+ iconst_1
+ if_icmpne $+6
+ jsr extra
+ iload 5
+ iconst_2
+ if_icmplt j++
+ iconst_1
+ iload 4
+ if_icmpgt i++
+
+<<\u00a0:
+ getstatic java/lang/System/out Ljava/io/PrintStream;
+ invokestatic HelloWorld/get"example()LHelloWorld;
+ getfield HelloWorld/newline I
+ invokevirtual java/io/PrintStream/print(C)V
+End:
+ return
+
+extra:
+ astore 6
+ iload 4
+ tableswitch 0 1
+ extra_l
+ extra_string
+ default: 'End'
+ nop
+extra_string:
+ getstatic java/lang/System/out Ljava/io/PrintStream;
+ aload 0
+ iconst_0
+ aaload
+ invokevirtual java/io/PrintStream/print(Ljava/lang/String;)V
+cast:
+ ldc java/lang/String
+ checkcast java/lang/Class
+ pop
+ ldc Ljava/lang/String;
+ checkcast Ljava/lang/Class;
+ pop
+ iconst_1
+ dup
+ newarray boolean
+ checkcast [Z
+ pop
+ newarray 'int'
+ checkcast HelloWorld
+ checkcast LHelloWorld;
+ pop
+extra_l:
+ getstatic java/lang/System/out Ljava/io/PrintStream;
+ dup
+ ldc "\123.\456.\u006c.\n.\r.\t.\f.\b.\".\'.\\"
+ iconst_5
+ invokeinterface java/lang/CharSequence/charAt(I)C 2
+ invokevirtual java/io/PrintStream/print(C)V
+/extra:
+ pop
+ ret 6
+.end method
+
+.method private static get"example()LHelloWorld;
+ .limit locals 3
+ .limit stack 4
+ .catch all from 7 to 53 using 59
+ aconst_null
+ dup
+ dup
+ astore_w 0
+try:
+ goto $+0x11
+finally:
+ astore_w 2
+ putfield HelloWorld/newline I
+ ret_w 2
+ nop
+ aload_0
+ areturn
+ ifnonnull $-2
+ ifnull $+3
+ new HelloWorld
+ dup
+ dup
+ invokespecial HelloWorld/<init>()V
+ astore 0
+ aload 0
+ monitorenter
+ monitorexit
+ new java/lang/RuntimeException
+ dup
+ invokespecial java/lang/RuntimeException/<init>()V
+ athrow
+ aconst_null
+/try:
+ dup
+ aconst_null
+ if_acmpeq $+3
+ areturn
+catch:
+ jsr $+10
+ aload_0
+ dup
+ aconst_null
+ if_acmpne /try
+ areturn
+ astore_1
+ aload_0
+ ldc 10
+ jsr_w finally
+ ret 1
+'single\u0020quoted\u0020label': ; Messes up [@ below if lexed sloppily
+.end method
+
+.method varargs private static int()I
+ .annotation invisible HelloWorld
+ [@ [@ WhatIsThis??? = .annotation ; name, type, exttype
+ I I = 1 ; name, type
+ another-I I = 2
+ Enum e Ljava/util/logging/Level; = FINE
+ .end annotation
+ .annotation
+ s s = "foo"
+ another-s s = "bar"
+ Enum [e Ljava/util/logging/Level; = FINE FINE 'FINE' FINE
+ .end annotation
+ float F = 123.456
+ .end annotation
+ .annotation visibleparam 1 LHelloWorld;
+ x [I = 0x01 0x02 0x03
+ y I = 2
+ .end annotation
+ .annotation invisibleparam 255 HelloWorld
+ a F = 1.2
+ b D = 3.4
+ .end annotation
+ .annotation default
+ I = 0
+ .end annotation
+ .limit locals 4
+ .limit stack 20
+ iconst_1
+ newarray int
+ dup
+ dup
+ instanceof [Z
+ bipush 0x9
+ bipush 0xB
+ iand
+ iconst_5
+ iconst_4
+ dup_x1
+ iconst_m1
+ iadd
+ bipush +-111
+ ineg
+ swap
+ idiv
+ dup_x2
+ dup
+ ishr
+ ishl
+ imul
+ ior
+ bipush -73
+ ixor
+ isub
+ dup
+ iconst_1
+ iadd
+ irem
+ iastore
+ iconst_0
+ iaload
+ istore_0
+ iload_0
+ istore_1
+ iload_1
+ istore_2
+ iload_2
+ istore_3
+ iload_3
+ dup
+ dup
+ dup2_x1
+ if_icmpeq $+33
+ dup
+ dup
+ if_icmpge $+28
+ dup
+ dup
+ if_icmple $+23
+ dup
+ ifle $+19
+ dup
+ ifeq $+15
+ dup
+ iflt $+11
+ dup
+ ifgt $+7
+ dup
+ ifge $+3
+ ireturn
+.end method
+
+.method static private fpstrict double()D
+ .limit locals 7
+ .limit stack 11
+ dconst_1
+ dconst_0
+ dcmpg
+ newarray double
+ dup
+ dconst_0
+ dup2
+ dcmpl
+ ldc2_w 128.
+ ldc2_w -240.221d
+ dneg
+ ldc2_w 158.d
+ dup2
+ dadd
+ dup2_x2
+ drem
+ ddiv
+ pop2
+ dconst_1
+ dmul
+ d2f
+ f2d
+ d2l
+ l2i
+ iconst_2
+ iushr
+ i2d
+ dastore
+ iconst_0
+ daload
+ dstore_0
+ dload_0
+ dstore_1
+ dload_1
+ dstore_2
+ dload_2
+ dstore_3
+ dload_3
+ dstore 4
+ dload 4
+ dstore_w 5
+ dload_w 5
+ dreturn
+.end method
+
+.method static long()J
+ .limit locals 7
+ .limit stack 11
+ iconst_1
+ newarray long
+ dup
+ iconst_0
+ ldc2_w 5718613688
+ ldc2_w 3143486100
+ ldc2_w 0x3
+ ldiv
+ lmul
+ ldc2_w -10000000000
+ lrem
+ ldc_w 0x60
+ i2l
+ lor
+ ldc 0x33
+ i2l
+ land
+ dup2
+ iconst_1
+ lshl
+ iconst_3
+ lshr
+ iconst_3
+ lushr
+ ladd
+ l2d
+ d2l
+ l2f
+ f2l
+ lastore
+ iconst_0
+ laload
+ lstore_0
+ lload_0
+ lstore_1
+ lload_1
+ lstore_2
+ lload_2
+ lstore_3
+ lload_3
+ lstore 4
+ lload 4
+ lstore_w 5
+ lload_w 5
+ lreturn
+.end method
+
+.method private static float(F)F
+ .limit locals 6
+ .limit stack 9
+ iconst_1
+ newarray float
+ dup
+ fload_0
+ dup
+ fcmpg
+ fload_0
+ dup
+ dup
+ dup
+ dup2_x2
+ fadd
+ fsub
+ fneg
+ frem
+ ldc 70
+ i2f
+ fadd
+ fadd
+ swap
+ pop
+ fastore
+ fload_0
+ dup
+ fcmpl
+ faload
+ fstore_0
+ fload_0
+ fstore_1
+ fload_1
+ fstore_2
+ fload_2
+ fstore_3
+ fload_3
+ fstore 4
+ fload 4
+ fstore_w 5
+ fload_w 5
+ freturn
+.end method
+
+.method abstract bridge synthetic 'acc1()V'
+ breakpoint
+.end method
+
+.method native synchronized acc2()V
+.end method
diff --git a/tests/examplefiles/example.java b/tests/examplefiles/example.java
new file mode 100644
index 00000000..78f9d727
--- /dev/null
+++ b/tests/examplefiles/example.java
@@ -0,0 +1,16 @@
+class _PostUnico$deClassá
+{void fo$o() {}
+
+ void PostUnicodeFunctioná() {
+ láb$el:
+ break láb$el;
+
+ }
+}
+
+class áPreUnicode$Class
+{
+ public int $foo;
+ public int á$foo;
+ _PostUnico$deClassá áPreUnicodeFunction() { return null; }
+}
diff --git a/tests/examplefiles/example.jsonld b/tests/examplefiles/example.jsonld
new file mode 100644
index 00000000..48787d75
--- /dev/null
+++ b/tests/examplefiles/example.jsonld
@@ -0,0 +1,27 @@
+{
+ "@context": {
+ "schema": "http://schema.org/",
+ "name": "schema:name",
+ "body": "schema:articleBody",
+ "words": "schema:wordCount",
+ "post": {
+ "@id": "schema:blogPost",
+ "@container": "@index"
+ }
+ },
+ "@id": "http://example.com/",
+ "@type": "schema:Blog",
+ "name": "World Financial News",
+ "post": {
+ "en": {
+ "@id": "http://example.com/posts/1/en",
+ "body": "World commodities were up today with heavy trading of crude oil...",
+ "words": 1539
+ },
+ "de": {
+ "@id": "http://example.com/posts/1/de",
+ "body": "Die Werte an Warenbörsen stiegen im Sog eines starken Handels von Rohöl...",
+ "words": 1204
+ }
+ }
+}
diff --git a/tests/examplefiles/example.kal b/tests/examplefiles/example.kal
new file mode 100644
index 00000000..c05c14ca
--- /dev/null
+++ b/tests/examplefiles/example.kal
@@ -0,0 +1,75 @@
+#!/usr/bin/env kal
+
+# This demo executes GET requests in parallel and in series
+# using `for` loops and `wait for` statements.
+
+# Notice how the serial GET requests always return in order
+# and take longer in total. Parallel requests come back in
+# order of receipt.
+
+http = require 'http'
+
+urls = ['http://www.google.com'
+ 'http://www.apple.com'
+ 'http://www.microsoft.com'
+ 'http://www.nodejs.org'
+ 'http://www.yahoo.com']
+
+# This function does a GET request for each URL in series
+# It will wait for a response from each request before moving on
+# to the next request. Notice the output will be in the same order as the
+# urls variable every time regardless of response time.
+# It is a task rather than a function because it is called asynchronously
+# This allows us to use `return` to implicitly call back
+task series_demo()
+ # The `series` keyword is optional here (for loops are serial by default)
+ total_time = 0
+
+ for series url in urls
+ timer = new Date
+
+ # we use the `safe` keyword because get is a "nonstandard" task
+ # that does not call back with an error argument
+ safe wait for response from http.get url
+
+ delay = new Date() - timer
+ total_time += delay
+
+ print "GET #{url} - #{response.statusCode} - #{response.connection.bytesRead} bytes - #{delay} ms"
+
+  # because we are in a task rather than a function, this actually executes a callback
+ return total_time
+
+# This function does a GET request for each URL in parallel
+# It will NOT wait for a response from each request before moving on
+# to the next request. Notice the output will be determined by the order in which
+# the requests complete!
+task parallel_demo()
+ total_time = 0
+
+ # The `parallel` keyword is only meaningful here because the loop contains
+ # a `wait for` statement (meaning callbacks are used)
+ for parallel url in urls
+ timer = new Date
+
+ # we use the `safe` keyword because get is a "nonstandard" task
+ # that does not call back with an error argument
+ safe wait for response from http.get url
+
+ delay = new Date() - timer
+ total_time += delay
+
+ print "GET #{url} - #{response.statusCode} - #{response.connection.bytesRead} bytes - #{delay}ms"
+
+  # because we are in a task rather than a function, this actually executes a callback
+ return total_time
+
+print 'Series Requests...'
+wait for time1 from series_demo()
+print "Total duration #{time1}ms"
+
+print ''
+
+print 'Parallel Requests...'
+wait for time2 from parallel_demo()
+print "Total duration #{time2}ms"
diff --git a/tests/examplefiles/example.liquid b/tests/examplefiles/example.liquid
new file mode 100644
index 00000000..8f3ea9e9
--- /dev/null
+++ b/tests/examplefiles/example.liquid
@@ -0,0 +1,42 @@
+# This is an example file. Process it with `./pygmentize -O full -f html -o /liquid-example.html example.liquid`.
+
+{% raw %}
+some {{raw}} liquid syntax
+
+{% raw %}
+{% endraw %}
+
+Just regular text - what happens?
+
+{% comment %}My lovely {{comment}} {% comment %}{% endcomment %}
+
+{% custom_tag params: true %}
+{% custom_block my="abc" c = false %}
+ Just usual {{liquid}}.
+{% endcustom_block %}
+
+{% another_tag "my string param" %}
+
+{{ variable | upcase }}
+{{ var.field | textilize | markdownify }}
+{{ var.field.property | textilize | markdownify }}
+{{ 'string' | truncate: 100 param='df"g' }}
+
+{% cycle '1', 2, var %}
+{% cycle 'group1': '1', var, 2 %}
+{% cycle group2: '1', var, 2 %}
+
+{% if a == 'B' %}
+{% elsif a == 'C%}' %}
+{% else %}
+{% endif %}
+
+{% unless not a %}
+{% else %}
+{% endunless %}
+
+{% case a %}
+{% when 'B' %}
+{% when 'C' %}
+{% else %}
+{% endcase %}
\ No newline at end of file
diff --git a/tests/examplefiles/example.ma b/tests/examplefiles/example.ma
new file mode 100644
index 00000000..a8119ea5
--- /dev/null
+++ b/tests/examplefiles/example.ma
@@ -0,0 +1,8 @@
+1 + 1 (* This is a comment *)
+Global`
+SomeNamespace`Foo
+f[x_, y__, 3, z___] := tsneirsnteintie "fosrt" neisnrteiasrn
+E + 3
+Plus[1,Times[2,3]]
+Map[#1 + #2&, SomePairList]
+Plus[1.,-1,-1.,-1.0,]
\ No newline at end of file
diff --git a/tests/examplefiles/example.mq4 b/tests/examplefiles/example.mq4
new file mode 100644
index 00000000..54a5fa60
--- /dev/null
+++ b/tests/examplefiles/example.mq4
@@ -0,0 +1,187 @@
+//+------------------------------------------------------------------+
+//| PeriodConverter.mq4 |
+//| Copyright 2006-2014, MetaQuotes Software Corp. |
+//| http://www.metaquotes.net |
+//+------------------------------------------------------------------+
+#property copyright "2006-2014, MetaQuotes Software Corp."
+#property link "http://www.mql4.com"
+#property description "Period Converter to updated format of history base"
+#property strict
+#property show_inputs
+#include <WinUser32.mqh>
+
+input int InpPeriodMultiplier=3; // Period multiplier factor
+int ExtHandle=-1;
+//+------------------------------------------------------------------+
+//| script program start function |
+//+------------------------------------------------------------------+
+void OnStart()
+ {
+ datetime time0;
+ ulong last_fpos=0;
+ long last_volume=0;
+ int i,start_pos,periodseconds;
+ int hwnd=0,cnt=0;
+//---- History header
+ int file_version=401;
+ string c_copyright;
+ string c_symbol=Symbol();
+ int i_period=Period()*InpPeriodMultiplier;
+ int i_digits=Digits;
+ int i_unused[13];
+ MqlRates rate;
+//---
+ ExtHandle=FileOpenHistory(c_symbol+(string)i_period+".hst",FILE_BIN|FILE_WRITE|FILE_SHARE_WRITE|FILE_SHARE_READ|FILE_ANSI);
+ if(ExtHandle<0)
+ return;
+ c_copyright="(C)opyright 2003, MetaQuotes Software Corp.";
+ ArrayInitialize(i_unused,0);
+//--- write history file header
+ FileWriteInteger(ExtHandle,file_version,LONG_VALUE);
+ FileWriteString(ExtHandle,c_copyright,64);
+ FileWriteString(ExtHandle,c_symbol,12);
+ FileWriteInteger(ExtHandle,i_period,LONG_VALUE);
+ FileWriteInteger(ExtHandle,i_digits,LONG_VALUE);
+ FileWriteInteger(ExtHandle,0,LONG_VALUE);
+ FileWriteInteger(ExtHandle,0,LONG_VALUE);
+ FileWriteArray(ExtHandle,i_unused,0,13);
+//--- write history file
+ periodseconds=i_period*60;
+ start_pos=Bars-1;
+ rate.open=Open[start_pos];
+ rate.low=Low[start_pos];
+ rate.high=High[start_pos];
+ rate.tick_volume=(long)Volume[start_pos];
+ rate.spread=0;
+ rate.real_volume=0;
+ //--- normalize open time
+ rate.time=Time[start_pos]/periodseconds;
+ rate.time*=periodseconds;
+ for(i=start_pos-1; i>=0; i--)
+ {
+ if(IsStopped())
+ break;
+ time0=Time[i];
+ //--- history may be updated
+ if(i==0)
+ {
+ //--- modify index if history was updated
+ if(RefreshRates())
+ i=iBarShift(NULL,0,time0);
+ }
+ //---
+ if(time0>=rate.time+periodseconds || i==0)
+ {
+ if(i==0 && time0<rate.time+periodseconds)
+ {
+ rate.tick_volume+=(long)Volume[0];
+ if(rate.low>Low[0])
+ rate.low=Low[0];
+ if(rate.high<High[0])
+ rate.high=High[0];
+ rate.close=Close[0];
+ }
+ last_fpos=FileTell(ExtHandle);
+ last_volume=(long)Volume[i];
+ FileWriteStruct(ExtHandle,rate);
+ cnt++;
+ if(time0>=rate.time+periodseconds)
+ {
+ rate.time=time0/periodseconds;
+ rate.time*=periodseconds;
+ rate.open=Open[i];
+ rate.low=Low[i];
+ rate.high=High[i];
+ rate.close=Close[i];
+ rate.tick_volume=last_volume;
+ }
+ }
+ else
+ {
+ rate.tick_volume+=(long)Volume[i];
+ if(rate.low>Low[i])
+ rate.low=Low[i];
+ if(rate.high<High[i])
+ rate.high=High[i];
+ rate.close=Close[i];
+ }
+ }
+ FileFlush(ExtHandle);
+ Print(cnt," record(s) written");
+//--- collect incoming ticks
+ datetime last_time=LocalTime()-5;
+ while(!IsStopped())
+ {
+ datetime cur_time=LocalTime();
+ //--- check for new rates
+ if(RefreshRates())
+ {
+ time0=Time[0];
+ FileSeek(ExtHandle,last_fpos,SEEK_SET);
+ //--- is there current bar?
+ if(time0<rate.time+periodseconds)
+ {
+ rate.tick_volume+=(long)Volume[0]-last_volume;
+ last_volume=(long)Volume[0];
+ if(rate.low>Low[0])
+ rate.low=Low[0];
+ if(rate.high<High[0])
+ rate.high=High[0];
+ rate.close=Close[0];
+ }
+ else
+ {
+ //--- no, there is a new bar
+ rate.tick_volume+=(long)Volume[1]-last_volume;
+ if(rate.low>Low[1])
+ rate.low=Low[1];
+ if(rate.high<High[1])
+ rate.high=High[1];
+ //--- write what remains of the previous bar
+ FileWriteStruct(ExtHandle,rate);
+ last_fpos=FileTell(ExtHandle);
+ //----
+ rate.time=time0/periodseconds;
+ rate.time*=periodseconds;
+ rate.open=Open[0];
+ rate.low=Low[0];
+ rate.high=High[0];
+ rate.close=Close[0];
+ rate.tick_volume=(long)Volume[0];
+ last_volume=rate.tick_volume;
+ }
+ //----
+ FileWriteStruct(ExtHandle,rate);
+ FileFlush(ExtHandle);
+ //---
+ if(hwnd==0)
+ {
+ hwnd=WindowHandle(Symbol(),i_period);
+ if(hwnd!=0)
+ Print("Chart window detected");
+ }
+ //--- refresh the window no more often than once every 2 seconds
+ if(hwnd!=0 && cur_time-last_time>=2)
+ {
+ PostMessageA(hwnd,WM_COMMAND,33324,0);
+ last_time=cur_time;
+ }
+ }
+ Sleep(50);
+ }
+//---
+ }
+//+------------------------------------------------------------------+
+//| |
+//+------------------------------------------------------------------+
+void OnDeinit(const int reason)
+ {
+//---
+ if(ExtHandle>=0)
+ {
+ FileClose(ExtHandle);
+ ExtHandle=-1;
+ }
+//---
+ }
+//+------------------------------------------------------------------+ \ No newline at end of file
diff --git a/tests/examplefiles/example.mqh b/tests/examplefiles/example.mqh
new file mode 100644
index 00000000..ee80ed52
--- /dev/null
+++ b/tests/examplefiles/example.mqh
@@ -0,0 +1,123 @@
+//+------------------------------------------------------------------+
+//| Array.mqh |
+//| Copyright 2009-2013, MetaQuotes Software Corp. |
+//| http://www.mql4.com |
+//+------------------------------------------------------------------+
+#include <Object.mqh>
+//+------------------------------------------------------------------+
+//| Class CArray |
+//| Purpose: Base class of dynamic arrays. |
+//| Derives from class CObject. |
+//+------------------------------------------------------------------+
+class CArray : public CObject
+ {
+protected:
+ int m_step_resize; // increment size of the array
+ int m_data_total; // number of elements
+ int m_data_max; // maximum size of the array without memory reallocation
+ int m_sort_mode; // mode of array sorting
+
+public:
+ CArray(void);
+ ~CArray(void);
+ //--- methods of access to protected data
+ int Step(void) const { return(m_step_resize); }
+ bool Step(const int step);
+ int Total(void) const { return(m_data_total); }
+ int Available(void) const { return(m_data_max-m_data_total); }
+ int Max(void) const { return(m_data_max); }
+ bool IsSorted(const int mode=0) const { return(m_sort_mode==mode); }
+ int SortMode(void) const { return(m_sort_mode); }
+ //--- cleaning method
+ void Clear(void) { m_data_total=0; }
+ //--- methods for working with files
+ virtual bool Save(const int file_handle);
+ virtual bool Load(const int file_handle);
+ //--- sorting method
+ void Sort(const int mode=0);
+
+protected:
+ virtual void QuickSort(int beg,int end,const int mode=0) { }
+ };
+//+------------------------------------------------------------------+
+//| Constructor |
+//+------------------------------------------------------------------+
+CArray::CArray(void) : m_step_resize(16),
+ m_data_total(0),
+ m_data_max(0),
+ m_sort_mode(-1)
+ {
+ }
+//+------------------------------------------------------------------+
+//| Destructor |
+//+------------------------------------------------------------------+
+CArray::~CArray(void)
+ {
+ }
+//+------------------------------------------------------------------+
+//| Method Set for variable m_step_resize |
+//+------------------------------------------------------------------+
+bool CArray::Step(const int step)
+ {
+//--- check
+ if(step>0)
+ {
+ m_step_resize=step;
+ return(true);
+ }
+//--- failure
+ return(false);
+ }
+//+------------------------------------------------------------------+
+//| Sorting an array in ascending order |
+//+------------------------------------------------------------------+
+void CArray::Sort(const int mode)
+ {
+//--- check
+ if(IsSorted(mode))
+ return;
+ m_sort_mode=mode;
+ if(m_data_total<=1)
+ return;
+//--- sort
+ QuickSort(0,m_data_total-1,mode);
+ }
+//+------------------------------------------------------------------+
+//| Writing header of array to file |
+//+------------------------------------------------------------------+
+bool CArray::Save(const int file_handle)
+ {
+//--- check handle
+ if(file_handle!=INVALID_HANDLE)
+ {
+ //--- write start marker - 0xFFFFFFFFFFFFFFFF
+ if(FileWriteLong(file_handle,-1)==sizeof(long))
+ {
+ //--- write array type
+ if(FileWriteInteger(file_handle,Type(),INT_VALUE)==INT_VALUE)
+ return(true);
+ }
+ }
+//--- failure
+ return(false);
+ }
+//+------------------------------------------------------------------+
+//| Reading header of array from file |
+//+------------------------------------------------------------------+
+bool CArray::Load(const int file_handle)
+ {
+//--- check handle
+ if(file_handle!=INVALID_HANDLE)
+ {
+ //--- read and check start marker - 0xFFFFFFFFFFFFFFFF
+ if(FileReadLong(file_handle)==-1)
+ {
+ //--- read and check array type
+ if(FileReadInteger(file_handle,INT_VALUE)==Type())
+ return(true);
+ }
+ }
+//--- failure
+ return(false);
+ }
+//+------------------------------------------------------------------+
diff --git a/tests/examplefiles/example.ni b/tests/examplefiles/example.ni
new file mode 100644
index 00000000..32279e80
--- /dev/null
+++ b/tests/examplefiles/example.ni
@@ -0,0 +1,57 @@
+ | | |
+"Informal by Nature"
+[ * * * ]
+by
+[ * * * ]
+David Corbett
+
+[This is a [nested] comment.]
+
+Section 1 - Use option translation
+
+Use maximum tests of at least 100 translates as (-
+@c
+Constant MAX_TESTS = {N}; —). | Section 2
+
+A room has a number called size.
+
+The Kitchen is a room. "A nondescript kitchen.“ The Kitchen has size 2.
+
+When play begins:
+ say "Testing:[line break]";
+ test 0.
+
+To test (N — number): (—
+ if (Test({N}) == (+size of the Kitchen [this should succeed]+)) {-open—brace}
+ print ”Success.^”;
+ {-close-brace} else {
+ print “Failure.^";
+ }
+]; ! You shouldn't end a routine within a phrase definition, but it works.
+[ Unused;
+ #Include "\
+@p \
+"; ! At signs hold no power here.
+! Of course, the file "@p .h" must exist.
+-).
+
+Include (-!% This is not ICL.
+
+[ Test x;
+ if (x) {x++;}
+ {–! Single line comment.}
+@inc x;
+@p At signs.
+...
+@Purpose: ...
+...
+@-...
+@c ...
+@inc x;
+@c
+@c
+ return x;
+];
+@Purpose: ...
+@-------------------------------------------------------------------------------
+-).
diff --git a/tests/examplefiles/example.nix b/tests/examplefiles/example.nix
new file mode 100644
index 00000000..515b686f
--- /dev/null
+++ b/tests/examplefiles/example.nix
@@ -0,0 +1,80 @@
+{ stdenv, fetchurl, fetchgit, openssl, zlib, pcre, libxml2, libxslt, expat
+, rtmp ? false
+, fullWebDAV ? false
+, syslog ? false
+, moreheaders ? false, ...}:
+
+let
+ version = "1.4.4";
+ mainSrc = fetchurl {
+ url = "http://nginx.org/download/nginx-${version}.tar.gz";
+ sha256 = "1f82845mpgmhvm151fhn2cnqjggw9w7cvsqbva9rb320wmc9m63w";
+ };
+
+ rtmp-ext = fetchgit {
+ url = git://github.com/arut/nginx-rtmp-module.git;
+ rev = "1cfb7aeb582789f3b15a03da5b662d1811e2a3f1";
+ sha256 = "03ikfd2l8mzsjwx896l07rdrw5jn7jjfdiyl572yb9jfrnk48fwi";
+ };
+
+ dav-ext = fetchgit {
+ url = git://github.com/arut/nginx-dav-ext-module.git;
+ rev = "54cebc1f21fc13391aae692c6cce672fa7986f9d";
+ sha256 = "1dvpq1fg5rslnl05z8jc39sgnvh3akam9qxfl033akpczq1bh8nq";
+ };
+
+ syslog-ext = fetchgit {
+ url = https://github.com/yaoweibin/nginx_syslog_patch.git;
+ rev = "165affd9741f0e30c4c8225da5e487d33832aca3";
+ sha256 = "14dkkafjnbapp6jnvrjg9ip46j00cr8pqc2g7374z9aj7hrvdvhs";
+ };
+
+ moreheaders-ext = fetchgit {
+ url = https://github.com/agentzh/headers-more-nginx-module.git;
+ rev = "refs/tags/v0.23";
+ sha256 = "12pbjgsxnvcf2ff2i2qdn39q4cm5czlgrng96j8ml4cgxvnbdh39";
+ };
+in
+
+stdenv.mkDerivation rec {
+ name = "nginx-${version}";
+ src = mainSrc;
+
+ buildInputs = [ openssl zlib pcre libxml2 libxslt
+ ] ++ stdenv.lib.optional fullWebDAV expat;
+
+ patches = if syslog then [ "${syslog-ext}/syslog_1.4.0.patch" ] else [];
+
+ configureFlags = [
+ "--with-http_ssl_module"
+ "--with-http_spdy_module"
+ "--with-http_xslt_module"
+ "--with-http_sub_module"
+ "--with-http_dav_module"
+ "--with-http_gzip_static_module"
+ "--with-http_secure_link_module"
+ "--with-ipv6"
+ # Install destination problems
+ # "--with-http_perl_module"
+ ] ++ stdenv.lib.optional rtmp "--add-module=${rtmp-ext}"
+ ++ stdenv.lib.optional fullWebDAV "--add-module=${dav-ext}"
+ ++ stdenv.lib.optional syslog "--add-module=${syslog-ext}"
+ ++ stdenv.lib.optional moreheaders "--add-module=${moreheaders-ext}";
+
+ preConfigure = ''
+ export NIX_CFLAGS_COMPILE="$NIX_CFLAGS_COMPILE -I${libxml2 }/include/libxml2"
+ '';
+
+ # escape example
+ postInstall = ''
+ mv $out/sbin $out/bin ''' ''${
+ ${ if true then ${ "" } else false }
+ '';
+
+ meta = {
+ description = "A reverse proxy and lightweight webserver";
+ maintainers = [ stdenv.lib.maintainers.raskin];
+ platforms = stdenv.lib.platforms.all;
+ inherit version;
+ };
+}
diff --git a/tests/examplefiles/example.pp b/tests/examplefiles/example.pp
new file mode 100644
index 00000000..ea697be2
--- /dev/null
+++ b/tests/examplefiles/example.pp
@@ -0,0 +1,8 @@
+exec { 'grep':
+ command => 'grep "\'" -rI *',
+ path => '/bin:/usr/bin',
+}
+
+node default {
+ notify {"Hello World":;}
+}
diff --git a/tests/examplefiles/example.red b/tests/examplefiles/example.red
new file mode 100644
index 00000000..37c17ef8
--- /dev/null
+++ b/tests/examplefiles/example.red
@@ -0,0 +1,257 @@
+Red [
+ Title: "Red console"
+ Author: ["Nenad Rakocevic" "Kaj de Vos"]
+ File: %console.red
+ Tabs: 4
+ Rights: "Copyright (C) 2012-2013 Nenad Rakocevic. All rights reserved."
+ License: {
+ Distributed under the Boost Software License, Version 1.0.
+ See https://github.com/dockimbel/Red/blob/master/BSL-License.txt
+ }
+ Purpose: "Just some code for testing Pygments colorizer"
+ Language: http://www.red-lang.org/
+]
+
+#system-global [
+ #either OS = 'Windows [
+ #import [
+ "kernel32.dll" stdcall [
+ AttachConsole: "AttachConsole" [
+ processID [integer!]
+ return: [integer!]
+ ]
+ SetConsoleTitle: "SetConsoleTitleA" [
+ title [c-string!]
+ return: [integer!]
+ ]
+ ReadConsole: "ReadConsoleA" [
+ consoleInput [integer!]
+ buffer [byte-ptr!]
+ charsToRead [integer!]
+ numberOfChars [int-ptr!]
+ inputControl [int-ptr!]
+ return: [integer!]
+ ]
+ ]
+ ]
+ line-buffer-size: 16 * 1024
+ line-buffer: allocate line-buffer-size
+ ][
+ #switch OS [
+ MacOSX [
+ #define ReadLine-library "libreadline.dylib"
+ ]
+ #default [
+ #define ReadLine-library "libreadline.so.6"
+ #define History-library "libhistory.so.6"
+ ]
+ ]
+ #import [
+ ReadLine-library cdecl [
+ read-line: "readline" [ ; Read a line from the console.
+ prompt [c-string!]
+ return: [c-string!]
+ ]
+ rl-bind-key: "rl_bind_key" [
+ key [integer!]
+ command [integer!]
+ return: [integer!]
+ ]
+ rl-insert: "rl_insert" [
+ count [integer!]
+ key [integer!]
+ return: [integer!]
+ ]
+ ]
+ #if OS <> 'MacOSX [
+ History-library cdecl [
+ add-history: "add_history" [ ; Add line to the history.
+ line [c-string!]
+ ]
+ ]
+ ]
+ ]
+
+ rl-insert-wrapper: func [
+ [cdecl]
+ count [integer!]
+ key [integer!]
+ return: [integer!]
+ ][
+ rl-insert count key
+ ]
+
+ ]
+]
+
+Windows?: system/platform = 'Windows
+
+read-argument: routine [
+ /local
+ args [str-array!]
+ str [red-string!]
+][
+ if system/args-count <> 2 [
+ SET_RETURN(none-value)
+ exit
+ ]
+ args: system/args-list + 1 ;-- skip binary filename
+ str: simple-io/read-txt args/item
+ SET_RETURN(str)
+]
+
+init-console: routine [
+ str [string!]
+ /local
+ ret
+][
+ #either OS = 'Windows [
+ ;ret: AttachConsole -1
+ ;if zero? ret [print-line "ReadConsole failed!" halt]
+
+ ret: SetConsoleTitle as c-string! string/rs-head str
+ if zero? ret [print-line "SetConsoleTitle failed!" halt]
+ ][
+ rl-bind-key as-integer tab as-integer :rl-insert-wrapper
+ ]
+]
+
+input: routine [
+ prompt [string!]
+ /local
+ len ret str buffer line
+][
+ #either OS = 'Windows [
+ len: 0
+ print as c-string! string/rs-head prompt
+ ret: ReadConsole stdin line-buffer line-buffer-size :len null
+ if zero? ret [print-line "ReadConsole failed!" halt]
+ len: len + 1
+ line-buffer/len: null-byte
+ str: string/load as c-string! line-buffer len
+ ][
+ line: read-line as c-string! string/rs-head prompt
+ if line = null [halt] ; EOF
+
+ #if OS <> 'MacOSX [add-history line]
+
+ str: string/load line 1 + length? line
+; free as byte-ptr! line
+ ]
+ SET_RETURN(str)
+]
+
+count-delimiters: function [
+ buffer [string!]
+ return: [block!]
+][
+ list: copy [0 0]
+ c: none
+
+ foreach c buffer [
+ case [
+ escaped? [
+ escaped?: no
+ ]
+ in-comment? [
+ switch c [
+ #"^/" [in-comment?: no]
+ ]
+ ]
+ 'else [
+ switch c [
+ #"^^" [escaped?: yes]
+ #";" [if zero? list/2 [in-comment?: yes]]
+ #"[" [list/1: list/1 + 1]
+ #"]" [list/1: list/1 - 1]
+ #"{" [list/2: list/2 + 1]
+ #"}" [list/2: list/2 - 1]
+ ]
+ ]
+ ]
+ ]
+ list
+]
+
+do-console: function [][
+ buffer: make string! 10000
+ prompt: red-prompt: "red>> "
+ mode: 'mono
+
+ switch-mode: [
+ mode: case [
+ cnt/1 > 0 ['block]
+ cnt/2 > 0 ['string]
+ 'else [
+ prompt: red-prompt
+ do eval
+ 'mono
+ ]
+ ]
+ prompt: switch mode [
+ block ["[^-"]
+ string ["{^-"]
+ mono [red-prompt]
+ ]
+ ]
+
+ eval: [
+ code: load/all buffer
+
+ unless tail? code [
+ set/any 'result do code
+
+ unless unset? :result [
+ if 67 = length? result: mold/part :result 67 [ ;-- optimized for width = 72
+ clear back tail result
+ append result "..."
+ ]
+ print ["==" result]
+ ]
+ ]
+ clear buffer
+ ]
+
+ while [true][
+ unless tail? line: input prompt [
+ append buffer line
+ cnt: count-delimiters buffer
+
+ either Windows? [
+ remove skip tail buffer -2 ;-- clear extra CR (Windows)
+ ][
+ append buffer lf ;-- Unix
+ ]
+
+ switch mode [
+ block [if cnt/1 <= 0 [do switch-mode]]
+ string [if cnt/2 <= 0 [do switch-mode]]
+ mono [do either any [cnt/1 > 0 cnt/2 > 0][switch-mode][eval]]
+ ]
+ ]
+ ]
+]
+
+q: :quit
+
+if script: read-argument [
+ script: load script
+ either any [
+ script/1 <> 'Red
+ not block? script/2
+ ][
+ print "*** Error: not a Red program!"
+ ][
+ do skip script 2
+ ]
+ quit
+]
+
+init-console "Red Console"
+
+print {
+-=== Red Console alpha version ===-
+(only ASCII input supported)
+}
+
+do-console \ No newline at end of file
diff --git a/tests/examplefiles/example.reds b/tests/examplefiles/example.reds
new file mode 100644
index 00000000..eb92310d
--- /dev/null
+++ b/tests/examplefiles/example.reds
@@ -0,0 +1,150 @@
+Red/System [
+ Title: "Red/System example file"
+ Purpose: "Just some code for testing Pygments colorizer"
+ Language: http://www.red-lang.org/
+]
+
+#include %../common/FPU-configuration.reds
+
+; C types
+
+#define time! long!
+#define clock! long!
+
+date!: alias struct! [
+ second [integer!] ; 0-61 (60?)
+ minute [integer!] ; 0-59
+ hour [integer!] ; 0-23
+
+ day [integer!] ; 1-31
+ month [integer!] ; 0-11
+ year [integer!] ; Since 1900
+
+ weekday [integer!] ; 0-6 since Sunday
+ yearday [integer!] ; 0-365
+ daylight-saving-time? [integer!] ; Negative: unknown
+]
+
+#either OS = 'Windows [
+ #define clocks-per-second 1000
+][
+ ; CLOCKS_PER_SEC value for Syllable, Linux (XSI-conformant systems)
+ ; TODO: check for other systems
+ #define clocks-per-second 1000'000
+]
+
+#import [LIBC-file cdecl [
+
+ ; Error handling
+
+ form-error: "strerror" [ ; Return error description.
+ code [integer!]
+ return: [c-string!]
+ ]
+ print-error: "perror" [ ; Print error to standard error output.
+ string [c-string!]
+ ]
+
+
+ ; Memory management
+
+ make: "calloc" [ ; Allocate zero-filled memory.
+ chunks [size!]
+ size [size!]
+ return: [binary!]
+ ]
+ resize: "realloc" [ ; Resize memory allocation.
+ memory [binary!]
+ size [size!]
+ return: [binary!]
+ ]
+ ]
+
+ JVM!: alias struct! [
+ reserved0 [int-ptr!]
+ reserved1 [int-ptr!]
+ reserved2 [int-ptr!]
+
+ DestroyJavaVM [function! [[JNICALL] vm [JVM-ptr!] return: [jint!]]]
+ AttachCurrentThread [function! [[JNICALL] vm [JVM-ptr!] penv [struct! [p [int-ptr!]]] args [byte-ptr!] return: [jint!]]]
+ DetachCurrentThread [function! [[JNICALL] vm [JVM-ptr!] return: [jint!]]]
+ GetEnv [function! [[JNICALL] vm [JVM-ptr!] penv [struct! [p [int-ptr!]]] version [integer!] return: [jint!]]]
+ AttachCurrentThreadAsDaemon [function! [[JNICALL] vm [JVM-ptr!] penv [struct! [p [int-ptr!]]] args [byte-ptr!] return: [jint!]]]
+]
+
+ ;just some datatypes for testing:
+
+ #some-hash
+ 10-1-2013
+ quit
+
+ ;binary:
+ #{00FF0000}
+ #{00FF0000 FF000000}
+ #{00FF0000 FF000000} ;with tab instead of space
+ 2#{00001111}
+ 64#{/wAAAA==}
+ 64#{/wAAA A==} ;with space inside
+ 64#{/wAAA A==} ;with tab inside
+
+
+ ;string with char
+ {bla ^(ff) foo}
+ {bla ^(( foo}
+ ;some numbers:
+ 12
+ 1'000
+ 1.2
+ FF00FF00h
+
+ ;some tests of hexa number notation with not common ending
+ [ff00h ff00h] ff00h{} FFh"foo" 00h(1 + 2) (AEh)
+
+;normal words:
+foo char
+
+;get-word
+:foo
+
+;lit-word:
+'foo 'foo
+
+;multiple comment tests...
+1 + 1
+comment "aa"
+2 + 2
+comment {aa}
+3 + 3
+comment {a^{}
+4 + 4
+comment {{}}
+5 + 5
+comment {
+ foo: 6
+}
+6 + 6
+comment [foo: 6]
+7 + 7
+comment [foo: "[" ]
+8 + 8
+comment [foo: {^{} ]
+9 + 9
+comment [foo: {boo} ]
+10 + 10
+comment 5-May-2014/11:17:34+2:00
+11 + 11
+
+
+to-integer foo
+foo/(a + 1)/b
+
+call/output reform ['which interpreter] path: copy ""
+
+ version-1.1: 00010001h
+
+ #if type = 'exe [
+ push system/stack/frame ;-- save previous frame pointer
+ system/stack/frame: system/stack/top ;-- @@ reposition frame pointer just after the catch flag
+]
+push CATCH_ALL ;-- exceptions root barrier
+push 0 ;-- keep stack aligned on 64-bit \ No newline at end of file
diff --git a/tests/examplefiles/example.rkt b/tests/examplefiles/example.rkt
index a3e4a29e..acc0328e 100644
--- a/tests/examplefiles/example.rkt
+++ b/tests/examplefiles/example.rkt
@@ -1,5 +1,7 @@
#lang racket
+(require (only-in srfi/13 string-contains))
+
; Single-line comment style.
;; Single-line comment style.
@@ -8,45 +10,259 @@
#|
Multi-line comment style ...
+#|### #| nested |#||| |#
... on multiple lines
|#
-(define (a-function x #:keyword [y 0])
+#;(s-expression comment (one line))
+
+#;
+(s-expression comment
+ (multiple lines))
+
+#! shebang comment
+
+#!/shebang comment
+
+#! shebang \
+comment
+
+#!/shebang \
+comment
+
+;; Uncommented numbers after single-line comments
+;; NEL…133
+;; LS
8232
+;; PS
8233
+
+#reader racket
+(define(a-function x #:keyword [y 0])
(define foo0 'symbol) ; ()
[define foo1 'symbol] ; []
{define foo2 'symbol} ; {}
- (and (append (car '(1 2 3))))
+ (define 100-Continue 'symbol)
+ (and (append (car'(1 2 3))))
(regexp-match? #rx"foobar" "foobar")
- (regexp-match? #px"foobar" "foobar")
- (define a 1))
- (let ([b "foo"])
- (displayln b))
+ (regexp-match? #px"\"foo\\(bar\\)?\"" "foobar")
+ (regexp-match? #rx#"foobar" "foobar")
+ (regexp-match? #px#"foobar" "foobar")
+ (define #csa 1)
+ #Ci (let ([#%A|||b #true C
+\|d "foo"])
+ (displayln #cS #%\ab\ #true\ C\
+\\d||))
(for/list ([x (in-list (list 1 2 (list 3 4)))])
- (cond
- [(pair? x) (car x)]
- [else x])))
+ (cond
+ [(pair? x) (car x)]
+ [else x])))
-;; Literal number examples
+;; Literals
(values
;; #b
- #b1.1
- #b-1.1
- #b1e1
- #b0/1
- #b1/1
- #b1e-1
- #b101
-
+ #b1
+ #b+1
+ #b-1
+ #b.1
+ #b1.
+ #b0.1
+ #b+0.1
+ #b-0.1
+ #b1/10
+ #b+1/10
+ #b-1/10
+ #b1e11
+ #b+1e11
+ #b-1e11
+ #b.1e11
+ #b1.e11
+ #b0.1e11
+ #b+0.1e11
+ #b-0.1e11
+ #b1/10e11
+ #b+1/10e11
+ #b-1/10e11
+ #b+i
+ #b1+i
+ #b+1+i
+ #b-1+i
+ #b.1+i
+ #b1.+i
+ #b0.1+i
+ #b+0.1+i
+ #b-0.1+i
+ #b1/10+i
+ #b+1/10+i
+ #b-1/10+i
+ #b1e11+i
+ #b+1e11+i
+ #b-1e11+i
+ #b1.e11+i
+ #b.1e11+i
+ #b0.1e11+i
+ #b+0.1e11+i
+ #b-0.1e11+i
+ #b1/10e11+i
+ #b+1/10e11+i
+ #b-1/10e11+i
+ #b+1i
+ #b1+1i
+ #b+1+1i
+ #b-1+1i
+ #b1.+1i
+ #b.1+1i
+ #b0.1+1i
+ #b+0.1+1i
+ #b-0.1+1i
+ #b1/10+1i
+ #b+1/10+1i
+ #b-1/10+1i
+ #b1e11+1i
+ #b+1e11+1i
+ #b-1e11+1i
+ #b.1e11+1i
+ #b0.1e11+1i
+ #b+0.1e11+1i
+ #b-0.1e11+1i
+ #b1/10e11+1i
+ #b+1/10e11+1i
+ #b-1/10e11+1i
+ #b+1/10e11i
+ #b1+1/10e11i
+ #b+1+1/10e11i
+ #b-1+1/10e11i
+ #b.1+1/10e11i
+ #b0.1+1/10e11i
+ #b+0.1+1/10e11i
+ #b-0.1+1/10e11i
+ #b1/10+1/10e11i
+ #b+1/10+1/10e11i
+ #b-1/10+1/10e11i
+ #b1e11+1/10e11i
+ #b+1e11+1/10e11i
+ #b-1e11+1/10e11i
+ #b.1e11+1/10e11i
+ #b0.1e11+1/10e11i
+ #b+0.1e11+1/10e11i
+ #b-0.1e11+1/10e11i
+ #b1/10e11+1/10e11i
+ #b+1/10e11+1/10e11i
+ #b-1/10e11+1/10e11i
;; #d
- #d-1.23
- #d1.123
- #d1e3
- #d1e-22
- #d1/2
- #d-1/2
#d1
+ #d+1
#d-1
-
+ #d.1
+ #d1.
+ #d1.2
+ #d+1.2
+ #d-1.2
+ #d1/2
+ #d+1/2
+ #d-1/2
+ #d1e3
+ #d+1e3
+ #d-1e3
+ #d.1e3
+ #d1.e3
+ #d1.2e3
+ #d+1.2e3
+ #d-1.2e3
+ #d1/2e3
+ #d+1/2e3
+ #d-1/2e3
+ #d+i
+ #d1+i
+ #d+1+i
+ #d-1+i
+ #d.1+i
+ #d1.+i
+ #d1.2+i
+ #d+1.2+i
+ #d-1.2+i
+ #d1/2+i
+ #d+1/2+i
+ #d-1/2+i
+ #d1e3+i
+ #d+1e3+i
+ #d-1e3+i
+ #d1.e3+i
+ #d.1e3+i
+ #d1.2e3+i
+ #d+1.2e3+i
+ #d-1.2e3+i
+ #d1/2e3+i
+ #d+1/2e3+i
+ #d-1/2e3+i
+ #d+1i
+ #d1+1i
+ #d+1+1i
+ #d-1+1i
+ #d1.+1i
+ #d.1+1i
+ #d1.2+1i
+ #d+1.2+1i
+ #d-1.2+1i
+ #d1/2+1i
+ #d+1/2+1i
+ #d-1/2+1i
+ #d1e3+1i
+ #d+1e3+1i
+ #d-1e3+1i
+ #d.1e3+1i
+ #d1.2e3+1i
+ #d+1.2e3+1i
+ #d-1.2e3+1i
+ #d1/2e3+1i
+ #d+1/2e3+1i
+ #d-1/2e3+1i
+ #d+1/2e3i
+ #d1+1/2e3i
+ #d+1+1/2e3i
+ #d-1+1/2e3i
+ #d.1+1/2e3i
+ #d1.2+1/2e3i
+ #d+1.2+1/2e3i
+ #d-1.2+1/2e3i
+ #d1/2+1/2e3i
+ #d+1/2+1/2e3i
+ #d-1/2+1/2e3i
+ #d1e3+1/2e3i
+ #d+1e3+1/2e3i
+ #d-1e3+1/2e3i
+ #d.1e3+1/2e3i
+ #d1.2e3+1/2e3i
+ #d+1.2e3+1/2e3i
+ #d-1.2e3+1/2e3i
+ #d1/2e3+1/2e3i
+ #d+1/2e3+1/2e3i
+ #d-1/2e3+1/2e3i
+ ;; Extflonums
+ +nan.t
+ 1t3
+ +1t3
+ -1t3
+ .1t3
+ 1.t3
+ 1.2t3
+ +1.2t3
+ -1.2t3
+ 1/2t3
+ +1/2t3
+ -1/2t3
+ 1#t0
+ 1.#t0
+ .2#t0
+ 1.2#t0
+ 1#/2t0
+ 1/2#t0
+ 1#/2#t0
+ 1#t3
+ 1.#t3
+ .2#t3
+ 1.2#t3
+ 1#/2t3
+ 1/2#t3
+ 1#/2#t3
;; No # reader prefix -- same as #d
-1.23
1.123
@@ -56,7 +272,6 @@ Multi-line comment style ...
-1/2
1
-1
-
;; #e
#e-1.23
#e1.123
@@ -66,7 +281,24 @@ Multi-line comment style ...
#e-1
#e1/2
#e-1/2
-
+ ;; #d#e
+ #d#e-1.23
+ #d#e1.123
+ #d#e1e3
+ #d#e1e-22
+ #d#e1
+ #d#e-1
+ #d#e1/2
+ #d#e-1/2
+ ;; #e#d
+ #e#d-1.23
+ #e#d1.123
+ #e#d1e3
+ #e#d1e-22
+ #e#d1
+ #e#d-1
+ #e#d1/2
+ #e#d-1/2
;; #i always float
#i-1.23
#i1.123
@@ -76,7 +308,126 @@ Multi-line comment style ...
#i-1/2
#i1
#i-1
-
+ ;; Implicitly inexact numbers
+ +nan.0
+ 1#
+ 1.#
+ .2#
+ 1.2#
+ 1#/2
+ 1/2#
+ 1#/2#
+ 1#e3
+ 1.#e3
+ .2#e3
+ 1.2#e3
+ 1#/2e3
+ 1/2#e3
+ 1#/2#e3
+ +nan.0+i
+ 1#+i
+ 1.#+i
+ .2#+i
+ 1.2#+i
+ 1#/2+i
+ 1/2#+i
+ 1#/2#+i
+ 1#e3+i
+ 1.#e3+i
+ .2#e3+i
+ 1.2#e3+i
+ 1#/2e3+i
+ 1/2#e3+i
+ 1#/2#e3+i
+ +nan.0i
+ +1#i
+ +1.#i
+ +.2#i
+ +1.2#i
+ +1#/2i
+ +1/2#i
+ +1#/2#i
+ +1#e3i
+ +1.#e3i
+ +.2#e3i
+ +1.2#e3i
+ +1#/2e3i
+ +1/2#e3i
+ +1#/2#e3i
+ 0+nan.0i
+ 0+1#i
+ 0+1.#i
+ 0+.2#i
+ 0+1.2#i
+ 0+1#/2i
+ 0+1/2#i
+ 0+1#/2#i
+ 0+1#e3i
+ 0+1.#e3i
+ 0+.2#e3i
+ 0+1.2#e3i
+ 0+1#/2e3i
+ 0+1/2#e3i
+ 0+1#/2#e3i
+ 1#/2#e3+nan.0i
+ 1#/2#e3+1#i
+ 1#/2#e3+1.#i
+ 1#/2#e3+.2#i
+ 1#/2#e3+1.2#i
+ 1#/2#e3+1#/2i
+ 1#/2#e3+1/2#i
+ 1#/2#e3+1#/2#i
+ 1#/2#e3+1#e3i
+ 1#/2#e3+1.#e3i
+ 1#/2#e3+.2#e3i
+ 1#/2#e3+1.2#e3i
+ 1#/2#e3+1#/2e3i
+ 1#/2#e3+1/2#e3i
+ 1#/2#e3+1#/2#e3i
+ +nan.0@1
+ 1#@1
+ 1.#@1
+ .2#@1
+ 1.2#@1
+ 1#/2@1
+ 1/2#@1
+ 1#/2#@1
+ 1#e3@1
+ 1.#e3@1
+ .2#e3@1
+ 1.2#e3@1
+ 1#/2e3@1
+ 1/2#e3@1
+ 1#/2#e3@1
+ 1@+nan.0
+ 1@1#
+ 1@1.#
+ 1@.2#
+ 1@1.2#
+ 1@1#/2
+ 1@1/2#
+ 1@1#/2#
+ 1@1#e3
+ 1@1.#e3
+ 1@.2#e3
+ 1@1.2#e3
+ 1@1#/2e3
+ 1@1/2#e3
+ 1@1#/2#e3
+ 1#/2#e3@1#
+ 1#/2#e3@1.#
+ 1#/2#e3@.2#
+ 1#/2#e3@1.2#
+ 1#/2#e3@1#/2
+ 1#/2#e3@1/2#
+ 1#/2#e3@1#/2#
+ 1#/2#e3@1#e3
+ 1#/2#e3@1.#e3
+ 1#/2#e3@.2#e3
+ 1#/2#e3@1.2#e3
+ 1#/2#e3@1#/2e3
+ 1#/2#e3@1/2#e3
+ 1#/2#e3@1#/2#e3
;; #o
#o777.777
#o-777.777
@@ -86,10 +437,307 @@ Multi-line comment style ...
#o-3/7
#o777
#o-777
-
+ #e#o777.777
+ #e#o-777.777
+ #e#o777e777
+ #e#o777e-777
+ #e#o3/7
+ #e#o-3/7
+ #e#o777
+ #e#o-777
+ #i#o777.777
+ #i#o-777.777
+ #i#o777e777
+ #i#o777e-777
+ #i#o3/7
+ #i#o-3/7
+ #i#o777
+ #i#o-777
;; #x
#x-f.f
#xf.f
+ #xfsf
+ #xfs-f
+ #x7/f
+ #x-7/f
#x-f
#xf
+ #e#x-f.f
+ #e#xf.f
+ #e#xfsf
+ #e#xfs-f
+ #e#x7/f
+ #e#x-7/f
+ #e#x-f
+ #e#xf
+ #i#x-f.f
+ #i#xf.f
+ #i#xfsf
+ #i#xfs-f
+ #i#x7/f
+ #i#x-7/f
+ #i#x-f
+ #i#xf
+ ;; Not numbers
+ '-1.23x
+ '1.123x
+ '1e3x
+ '1e-22x
+ '1/2x
+ '-1/2x
+ '1x
+ '-1x
+ '/
+ '1/
+ '/2
+ '1//2
+ '1e3.
+ '1e
+ 'e3
+ '.i
+ '1.2.3
+ '1..2
+ '.1.
+ '@
+ '1@
+ '@2
+ '1@@2
+ '1@2@3
+ '1@2i
+ '1+-2i
+ '1i+2
+ '1i+2i
+ '1+2i+3i
+ '-
+ '--1
+ '+
+ '++1
+ '1/2.3
+ '1#2
+ '1#.2
+ '1.#2
+ '.#2
+ '+nan.t+nan.ti
+ '+nan.t@nan.t
+ ;; Booleans
+ #t
+ #T
+ #true
+ #f
+ #F
+ #false
+ ;; Characters, strings, and byte strings
+ #\
+ #\Null9
+ #\n9
+ #\99
+ #\0009
+ #\u3BB
+ #\u03BB9
+ #\U3BB
+ #\U000003BB9
+ #\λ9
+ "string\
+ \a.\b.\t.\n.\v.\f.\r.\e.\".\'.\\.\1.\123.\1234.\x9.\x30.\x303"
+ "\u9.\u1234.\u12345.\U9.\U00100000.\U001000000"
+ #"byte-string\7\xff\t"
+ #<<HERE STRING
+lorem ipsum
+dolor sit amet
+consectetur HERE STRING
+HERE STRING adipisicing elit
+HERE STRING
+ #|
+HERE STRING
+|#
+ ;; Other literals
+ #(vector)
+ #20()
+ #s[prefab-structure 1 2 3]
+ #&{box}
+ #hash(("a" . 5))
+ #hasheq((a . 5) (b . 7))
+ #hasheqv((a . 5) (b . 7))
+ #'(define x 1)
+ #`(define x #,pi)
+ ;; quote, quasiquote, and unquote
+ 'pi
+ ' pi
+ ''pi
+ '`pi
+ '`,pi
+ ',pi
+ `pi
+ ` pi
+ `'pi
+ ``pi
+ `,pi
+ ` , pi
+ `,'pi
+ `,`pi
+ `,`,pi
+ '(+)
+ ' (+)
+ ''(+)
+ '`(+)
+ ',(+)
+ `(+)
+ ` (+)
+ `'(+)
+ ``(+)
+ `,(+)
+ ` , (+)
+ `,'(+)
+ `,`(+)
+ `,`,(+)
+ #readerracket/base'pi.f
+ '#readerracket/base pi.f
+ #readerracket/base`pi.f
+ `#readerracket/base pi.f
+ #readerracket/base`,pi.f
+ `#readerracket/base,pi.f
+ `,#readerracket/base pi.f
+ #readerracket/base'`,pi.f
+ '#readerracket/base`,pi.f
+ '`#readerracket/base,pi.f
+ '`,#readerracket/base pi.f
+ #readerracket/base'(*)
+ '#readerracket/base(*)
+ #readerracket/base`(*)
+ `#readerracket/base(*)
+ #readerracket/base`,(*)
+ `#readerracket/base,(*)
+ `,#readerracket/base(*)
+ #readerracket/base'`,(*)
+ '#readerracket/base`,(*)
+ '`#readerracket/base,(*)
+ '`,#readerracket/base(*)
+ (quote pi)
+ (quote (quote pi))
+ (quote (quasiquote pi))
+ (quote (quasiquote (unquote pi)))
+ (quote (unquote pi))
+ (quasiquote pi)
+ (quasiquote (quote pi))
+ (quasiquote (quasiquote pi))
+ (quasiquote (unquote pi))
+ (quasiquote (unquote (quote pi)))
+ (quasiquote (unquote (quasiquote pi)))
+ (quasiquote (unquote (quasiquote (unquote pi))))
+ (quote (+))
+ (quote (quote (+)))
+ (quote (quasiquote (+)))
+ (quote (unquote (+)))
+ (quasiquote (+))
+ (quasiquote (quote (+)))
+ (quasiquote (quasiquote (+)))
+ (quasiquote (unquote (+)))
+ (quasiquote (unquote (quote (+))))
+ (quasiquote (unquote (quasiquote (+))))
+ (quasiquote (unquote (quasiquote (unquote (+)))))
+ #reader racket/base (quote pi.f)
+ (quote #reader racket/base pi.f)
+ #reader racket/base (quasiquote pi.f)
+ (quasiquote #reader racket/base pi.f)
+ #reader racket/base (quasiquote (unquote pi.f))
+ (quasiquote #reader racket/base (unquote pi.f))
+ (quasiquote (unquote #reader racket/base pi.f))
+ #reader racket/base (quote (quasiquote (unquote pi.f)))
+ (quote #reader racket/base (quasiquote (unquote pi.f)))
+ (quote (quasiquote #reader racket/base (unquote pi.f)))
+ (quote (quasiquote (unquote #reader racket/base pi.f)))
+ #reader racket/base (quote (*))
+ (quote #reader racket/base (*))
+ #reader racket/base (quasiquote (*))
+ (quasiquote #reader racket/base (*))
+ #reader racket/base (quasiquote (unquote (*)))
+ (quasiquote #reader racket/base (unquote (*)))
+ (quasiquote (unquote #reader racket/base (*)))
+ #reader racket/base (quote (quasiquote (unquote (*))))
+ (quote #reader racket/base (quasiquote (unquote (*))))
+ (quote (quasiquote #reader racket/base (unquote (*))))
+ (quote (quasiquote (unquote #reader racket/base (*))))
+ ;; Make sure non-identifiers work with quotes
+ ' "" pi
+ ' #t pi
+ ' #() pi
+ ' #s(s) pi
+ ' #\u3BB pi
+ ' #\U000003BB pi
+ ' #\space pi
+ ' #\. pi
+ ' #"" pi
+ ' #:kw pi
+ ' #&b pi
+ ' #'(define x 1) pi
+ ' #`(define x #,pi) pi
+ ' #I0 pi
+ ' #E0 pi
+ ' #X0 pi
+ ' #O0 pi
+ ' #D0 pi
+ ' #B0 pi
+ ' #<<EOF
+EOF
+ pi
+ ' #rx"" pi
+ ' #rx#"" pi
+ ' #px"" pi
+ ' #px#"" pi
+ ' #hash() pi
+ ' #hasheq[] pi
+ ' #hasheqv{} pi
+ ' #1(v) pi
)
+
+;; Use the following to generate lists of built-ins and keywords.
+;; Run
+;; (displayln (wrap-lines KEYWORDS))
+;; (displayln (wrap-lines BUILTINS))
+;; and copy the results into RacketLexer._keywords and RacketLexer._builtins.
+
+;; (-> (listof string?) string?)
+;; Appends all the strings together, quoting them as appropriate for Python,
+;; with commas and spaces between them, wrapping at 80 characters, with an
+;; indentation of 8 spaces.
+(define (wrap-lines lst)
+ (define INDENTATION '" ")
+ (define WIDTH '80)
+ (define (wrap-lines* lst done-lines current-line)
+ (if (null? lst)
+ (string-append (foldr string-append "" done-lines) current-line)
+ (let* ([str (first lst)]
+ [wrapped-str (if (regexp-match-exact? '#px"[[:ascii:]]+" str)
+ (string-append "'" str "',")
+ (string-append "u'" str "',"))]
+ [new-line (string-append current-line " " wrapped-str)])
+ (if ((string-length new-line) . >= . WIDTH)
+ (wrap-lines* (rest lst)
+ (append done-lines
+ `(,(string-append current-line "\n")))
+ (string-append INDENTATION wrapped-str))
+ (wrap-lines* (rest lst)
+ done-lines
+ new-line)))))
+ (wrap-lines* lst '() INDENTATION))
+
+;; (-> string? boolean?)
+;; Returns #t if str represents a syntax identifier in the current namespace,
+;; otherwise #f.
+(define (syntax-identifier? str)
+ (with-handlers ([exn? exn?])
+ (not (eval (call-with-input-string str read)))))
+
+(define RACKET-NAMESPACE
+ (parameterize ([current-namespace (make-base-namespace)])
+ (namespace-require 'racket)
+ (current-namespace)))
+
+(define BOUND-IDENTIFIERS
+ (parameterize ([current-namespace RACKET-NAMESPACE])
+ (sort (map symbol->string (namespace-mapped-symbols))
+ string<=?)))
+
+(define-values (KEYWORDS BUILTINS)
+ (parameterize ([current-namespace RACKET-NAMESPACE])
+ (partition syntax-identifier? BOUND-IDENTIFIERS)))
diff --git a/tests/examplefiles/example.sh b/tests/examplefiles/example.sh
new file mode 100644
index 00000000..2112cdd1
--- /dev/null
+++ b/tests/examplefiles/example.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+printf "%d %s\n" 10 "foo"
+printf "%d %s\n" $((10#1)) "bar"
+
+let "m = 10#${1:1:2}"
+echo $m
+
+m=$((10#${1:4:3} + 10#${1:1:3}))
+echo $m
+
+m=$((10#${1:4:3}))
+echo $m
+
+m=$((10#$1))
+echo $m
+
+m=$((10#1))
+echo $m
+
+m=$((10))
+echo $m
diff --git a/tests/examplefiles/example.slim b/tests/examplefiles/example.slim
new file mode 100644
index 00000000..0e209200
--- /dev/null
+++ b/tests/examplefiles/example.slim
@@ -0,0 +1,31 @@
+doctype html
+html
+ head
+ title Slim Examples
+ meta name="keywords" content="template language"
+ meta name="author" content=author
+ javascript:
+ alert('Slim supports embedded javascript!')
+
+ body
+ h1 Markup examples
+
+ #content
+ p This example shows you what a basic Slim file looks like.
+
+ == yield
+
+ - unless items.empty?
+ table
+ - for item in items do
+ tr
+ td.name = item.name
+ td.price = item.price
+ - else
+ p
+ | No items found. Please add some inventory.
+ Thank you!
+
+ div id="footer"
+ = render 'footer'
+ | Copyright (C) #{year} #{author}
diff --git a/tests/examplefiles/example.sls b/tests/examplefiles/example.sls
new file mode 100644
index 00000000..824700e7
--- /dev/null
+++ b/tests/examplefiles/example.sls
@@ -0,0 +1,51 @@
+include:
+ - moosefs
+
+{% for mnt in salt['cmd.run']('ls /dev/data/moose*').split() %}
+/mnt/moose{{ mnt[-1] }}:
+ mount.mounted:
+ - device: {{ mnt }}
+ - fstype: xfs
+ - mkmnt: True
+ file.directory:
+ - user: mfs
+ - group: mfs
+ - require:
+ - user: mfs
+ - group: mfs
+{% endfor %}
+
+/etc/mfshdd.cfg:
+ file.managed:
+ - source: salt://moosefs/mfshdd.cfg
+ - user: root
+ - group: root
+ - mode: 644
+ - template: jinja
+ - require:
+ - pkg: mfs-chunkserver
+
+/etc/mfschunkserver.cfg:
+ file.managed:
+ - source: salt://moosefs/mfschunkserver.cfg
+ - user: root
+ - group: root
+ - mode: 644
+ - template: jinja
+ - require:
+ - pkg: mfs-chunkserver
+
+mfs-chunkserver:
+ pkg:
+ - installed
+mfschunkserver:
+ service:
+ - running
+ - require:
+{% for mnt in salt['cmd.run']('ls /dev/data/moose*') %}
+ - mount: /mnt/moose{{ mnt[-1] }}
+ - file: /mnt/moose{{ mnt[-1] }}
+{% endfor %}
+ - file: /etc/mfschunkserver.cfg
+ - file: /etc/mfshdd.cfg
+ - file: /var/lib/mfs
diff --git a/tests/examplefiles/example.stan b/tests/examplefiles/example.stan
index e936f54a..69c9ac70 100644
--- a/tests/examplefiles/example.stan
+++ b/tests/examplefiles/example.stan
@@ -5,6 +5,14 @@ It is not a real model and will not compile
*/
# also a comment
// also a comment
+functions {
+ void f1(void a, real b) {
+ return 1 / a;
+ }
+ real f2(int a, vector b, real c) {
+ return a + b + c;
+ }
+}
data {
// valid name
int abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_abc;
@@ -19,6 +27,8 @@ data {
positive_ordered[3] wibble;
corr_matrix[3] grault;
cov_matrix[3] garply;
+ cholesky_factor_cov[3] waldo;
+ cholesky_factor_corr[3] waldo2;
real<lower=-1,upper=1> foo1;
real<lower=0> foo2;
@@ -86,6 +96,7 @@ model {
tmp / tmp;
tmp .* tmp;
tmp ./ tmp;
+ tmp ^ tmp;
! tmp;
- tmp;
+ tmp;
@@ -94,15 +105,18 @@ model {
// lp__ should be highlighted
// normal_log as a function
lp__ <- lp__ + normal_log(plugh, 0, 1);
+ increment_log_prob(normal_log(plugh, 0, 1));
// print statement and string literal
print("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_~@#$%^&*`'-+={}[].,;: ");
print("Hello, world!");
print("");
+
+ // reject statement
+ reject("I just don't like it");
}
generated quantities {
real bar1;
bar1 <- foo + 1;
}
-
diff --git a/tests/examplefiles/example.thy b/tests/examplefiles/example.thy
new file mode 100644
index 00000000..abaa1af8
--- /dev/null
+++ b/tests/examplefiles/example.thy
@@ -0,0 +1,751 @@
+(* from Isabelle2013-2 src/HOL/Power.thy; BSD license *)
+
+(* Title: HOL/Power.thy
+ Author: Lawrence C Paulson, Cambridge University Computer Laboratory
+ Copyright 1997 University of Cambridge
+*)
+
+header {* Exponentiation *}
+
+theory Power
+imports Num
+begin
+
+subsection {* Powers for Arbitrary Monoids *}
+
+class power = one + times
+begin
+
+primrec power :: "'a \<Rightarrow> nat \<Rightarrow> 'a" (infixr "^" 80) where
+ power_0: "a ^ 0 = 1"
+ | power_Suc: "a ^ Suc n = a * a ^ n"
+
+notation (latex output)
+ power ("(_\<^bsup>_\<^esup>)" [1000] 1000)
+
+notation (HTML output)
+ power ("(_\<^bsup>_\<^esup>)" [1000] 1000)
+
+text {* Special syntax for squares. *}
+
+abbreviation (xsymbols)
+ power2 :: "'a \<Rightarrow> 'a" ("(_\<^sup>2)" [1000] 999) where
+ "x\<^sup>2 \<equiv> x ^ 2"
+
+notation (latex output)
+ power2 ("(_\<^sup>2)" [1000] 999)
+
+notation (HTML output)
+ power2 ("(_\<^sup>2)" [1000] 999)
+
+end
+
+context monoid_mult
+begin
+
+subclass power .
+
+lemma power_one [simp]:
+ "1 ^ n = 1"
+ by (induct n) simp_all
+
+lemma power_one_right [simp]:
+ "a ^ 1 = a"
+ by simp
+
+lemma power_commutes:
+ "a ^ n * a = a * a ^ n"
+ by (induct n) (simp_all add: mult_assoc)
+
+lemma power_Suc2:
+ "a ^ Suc n = a ^ n * a"
+ by (simp add: power_commutes)
+
+lemma power_add:
+ "a ^ (m + n) = a ^ m * a ^ n"
+ by (induct m) (simp_all add: algebra_simps)
+
+lemma power_mult:
+ "a ^ (m * n) = (a ^ m) ^ n"
+ by (induct n) (simp_all add: power_add)
+
+lemma power2_eq_square: "a\<^sup>2 = a * a"
+ by (simp add: numeral_2_eq_2)
+
+lemma power3_eq_cube: "a ^ 3 = a * a * a"
+ by (simp add: numeral_3_eq_3 mult_assoc)
+
+lemma power_even_eq:
+ "a ^ (2 * n) = (a ^ n)\<^sup>2"
+ by (subst mult_commute) (simp add: power_mult)
+
+lemma power_odd_eq:
+ "a ^ Suc (2*n) = a * (a ^ n)\<^sup>2"
+ by (simp add: power_even_eq)
+
+lemma power_numeral_even:
+ "z ^ numeral (Num.Bit0 w) = (let w = z ^ (numeral w) in w * w)"
+ unfolding numeral_Bit0 power_add Let_def ..
+
+lemma power_numeral_odd:
+ "z ^ numeral (Num.Bit1 w) = (let w = z ^ (numeral w) in z * w * w)"
+ unfolding numeral_Bit1 One_nat_def add_Suc_right add_0_right
+ unfolding power_Suc power_add Let_def mult_assoc ..
+
+lemma funpow_times_power:
+ "(times x ^^ f x) = times (x ^ f x)"
+proof (induct "f x" arbitrary: f)
+ case 0 then show ?case by (simp add: fun_eq_iff)
+next
+ case (Suc n)
+ def g \<equiv> "\<lambda>x. f x - 1"
+ with Suc have "n = g x" by simp
+ with Suc have "times x ^^ g x = times (x ^ g x)" by simp
+ moreover from Suc g_def have "f x = g x + 1" by simp
+ ultimately show ?case by (simp add: power_add funpow_add fun_eq_iff mult_assoc)
+qed
+
+end
+
+context comm_monoid_mult
+begin
+
+lemma power_mult_distrib:
+ "(a * b) ^ n = (a ^ n) * (b ^ n)"
+ by (induct n) (simp_all add: mult_ac)
+
+end
+
+context semiring_numeral
+begin
+
+lemma numeral_sqr: "numeral (Num.sqr k) = numeral k * numeral k"
+ by (simp only: sqr_conv_mult numeral_mult)
+
+lemma numeral_pow: "numeral (Num.pow k l) = numeral k ^ numeral l"
+ by (induct l, simp_all only: numeral_class.numeral.simps pow.simps
+ numeral_sqr numeral_mult power_add power_one_right)
+
+lemma power_numeral [simp]: "numeral k ^ numeral l = numeral (Num.pow k l)"
+ by (rule numeral_pow [symmetric])
+
+end
+
+context semiring_1
+begin
+
+lemma of_nat_power:
+ "of_nat (m ^ n) = of_nat m ^ n"
+ by (induct n) (simp_all add: of_nat_mult)
+
+lemma power_zero_numeral [simp]: "(0::'a) ^ numeral k = 0"
+ by (simp add: numeral_eq_Suc)
+
+lemma zero_power2: "0\<^sup>2 = 0" (* delete? *)
+ by (rule power_zero_numeral)
+
+lemma one_power2: "1\<^sup>2 = 1" (* delete? *)
+ by (rule power_one)
+
+end
+
+context comm_semiring_1
+begin
+
+text {* The divides relation *}
+
+lemma le_imp_power_dvd:
+ assumes "m \<le> n" shows "a ^ m dvd a ^ n"
+proof
+ have "a ^ n = a ^ (m + (n - m))"
+ using `m \<le> n` by simp
+ also have "\<dots> = a ^ m * a ^ (n - m)"
+ by (rule power_add)
+ finally show "a ^ n = a ^ m * a ^ (n - m)" .
+qed
+
+lemma power_le_dvd:
+ "a ^ n dvd b \<Longrightarrow> m \<le> n \<Longrightarrow> a ^ m dvd b"
+ by (rule dvd_trans [OF le_imp_power_dvd])
+
+lemma dvd_power_same:
+ "x dvd y \<Longrightarrow> x ^ n dvd y ^ n"
+ by (induct n) (auto simp add: mult_dvd_mono)
+
+lemma dvd_power_le:
+ "x dvd y \<Longrightarrow> m \<ge> n \<Longrightarrow> x ^ n dvd y ^ m"
+ by (rule power_le_dvd [OF dvd_power_same])
+
+lemma dvd_power [simp]:
+ assumes "n > (0::nat) \<or> x = 1"
+ shows "x dvd (x ^ n)"
+using assms proof
+ assume "0 < n"
+ then have "x ^ n = x ^ Suc (n - 1)" by simp
+ then show "x dvd (x ^ n)" by simp
+next
+ assume "x = 1"
+ then show "x dvd (x ^ n)" by simp
+qed
+
+end
+
+context ring_1
+begin
+
+lemma power_minus:
+ "(- a) ^ n = (- 1) ^ n * a ^ n"
+proof (induct n)
+ case 0 show ?case by simp
+next
+ case (Suc n) then show ?case
+ by (simp del: power_Suc add: power_Suc2 mult_assoc)
+qed
+
+lemma power_minus_Bit0:
+ "(- x) ^ numeral (Num.Bit0 k) = x ^ numeral (Num.Bit0 k)"
+ by (induct k, simp_all only: numeral_class.numeral.simps power_add
+ power_one_right mult_minus_left mult_minus_right minus_minus)
+
+lemma power_minus_Bit1:
+ "(- x) ^ numeral (Num.Bit1 k) = - (x ^ numeral (Num.Bit1 k))"
+ by (simp only: eval_nat_numeral(3) power_Suc power_minus_Bit0 mult_minus_left)
+
+lemma power_neg_numeral_Bit0 [simp]:
+ "neg_numeral k ^ numeral (Num.Bit0 l) = numeral (Num.pow k (Num.Bit0 l))"
+ by (simp only: neg_numeral_def power_minus_Bit0 power_numeral)
+
+lemma power_neg_numeral_Bit1 [simp]:
+ "neg_numeral k ^ numeral (Num.Bit1 l) = neg_numeral (Num.pow k (Num.Bit1 l))"
+ by (simp only: neg_numeral_def power_minus_Bit1 power_numeral pow.simps)
+
+lemma power2_minus [simp]:
+ "(- a)\<^sup>2 = a\<^sup>2"
+ by (rule power_minus_Bit0)
+
+lemma power_minus1_even [simp]:
+ "-1 ^ (2*n) = 1"
+proof (induct n)
+ case 0 show ?case by simp
+next
+ case (Suc n) then show ?case by (simp add: power_add power2_eq_square)
+qed
+
+lemma power_minus1_odd:
+ "-1 ^ Suc (2*n) = -1"
+ by simp
+
+lemma power_minus_even [simp]:
+ "(-a) ^ (2*n) = a ^ (2*n)"
+ by (simp add: power_minus [of a])
+
+end
+
+context ring_1_no_zero_divisors
+begin
+
+lemma field_power_not_zero:
+ "a \<noteq> 0 \<Longrightarrow> a ^ n \<noteq> 0"
+ by (induct n) auto
+
+lemma zero_eq_power2 [simp]:
+ "a\<^sup>2 = 0 \<longleftrightarrow> a = 0"
+ unfolding power2_eq_square by simp
+
+lemma power2_eq_1_iff:
+ "a\<^sup>2 = 1 \<longleftrightarrow> a = 1 \<or> a = - 1"
+ unfolding power2_eq_square by (rule square_eq_1_iff)
+
+end
+
+context idom
+begin
+
+lemma power2_eq_iff: "x\<^sup>2 = y\<^sup>2 \<longleftrightarrow> x = y \<or> x = - y"
+ unfolding power2_eq_square by (rule square_eq_iff)
+
+end
+
+context division_ring
+begin
+
+text {* FIXME reorient or rename to @{text nonzero_inverse_power} *}
+lemma nonzero_power_inverse:
+ "a \<noteq> 0 \<Longrightarrow> inverse (a ^ n) = (inverse a) ^ n"
+ by (induct n)
+ (simp_all add: nonzero_inverse_mult_distrib power_commutes field_power_not_zero)
+
+end
+
+context field
+begin
+
+lemma nonzero_power_divide:
+ "b \<noteq> 0 \<Longrightarrow> (a / b) ^ n = a ^ n / b ^ n"
+ by (simp add: divide_inverse power_mult_distrib nonzero_power_inverse)
+
+end
+
+
+subsection {* Exponentiation on ordered types *}
+
+context linordered_ring (* TODO: move *)
+begin
+
+lemma sum_squares_ge_zero:
+ "0 \<le> x * x + y * y"
+ by (intro add_nonneg_nonneg zero_le_square)
+
+lemma not_sum_squares_lt_zero:
+ "\<not> x * x + y * y < 0"
+ by (simp add: not_less sum_squares_ge_zero)
+
+end
+
+context linordered_semidom
+begin
+
+lemma zero_less_power [simp]:
+ "0 < a \<Longrightarrow> 0 < a ^ n"
+ by (induct n) (simp_all add: mult_pos_pos)
+
+lemma zero_le_power [simp]:
+ "0 \<le> a \<Longrightarrow> 0 \<le> a ^ n"
+ by (induct n) (simp_all add: mult_nonneg_nonneg)
+
+lemma power_mono:
+ "a \<le> b \<Longrightarrow> 0 \<le> a \<Longrightarrow> a ^ n \<le> b ^ n"
+ by (induct n) (auto intro: mult_mono order_trans [of 0 a b])
+
+lemma one_le_power [simp]: "1 \<le> a \<Longrightarrow> 1 \<le> a ^ n"
+ using power_mono [of 1 a n] by simp
+
+lemma power_le_one: "\<lbrakk>0 \<le> a; a \<le> 1\<rbrakk> \<Longrightarrow> a ^ n \<le> 1"
+ using power_mono [of a 1 n] by simp
+
+lemma power_gt1_lemma:
+ assumes gt1: "1 < a"
+ shows "1 < a * a ^ n"
+proof -
+ from gt1 have "0 \<le> a"
+ by (fact order_trans [OF zero_le_one less_imp_le])
+ have "1 * 1 < a * 1" using gt1 by simp
+ also have "\<dots> \<le> a * a ^ n" using gt1
+ by (simp only: mult_mono `0 \<le> a` one_le_power order_less_imp_le
+ zero_le_one order_refl)
+ finally show ?thesis by simp
+qed
+
+lemma power_gt1:
+ "1 < a \<Longrightarrow> 1 < a ^ Suc n"
+ by (simp add: power_gt1_lemma)
+
+lemma one_less_power [simp]:
+ "1 < a \<Longrightarrow> 0 < n \<Longrightarrow> 1 < a ^ n"
+ by (cases n) (simp_all add: power_gt1_lemma)
+
+lemma power_le_imp_le_exp:
+ assumes gt1: "1 < a"
+ shows "a ^ m \<le> a ^ n \<Longrightarrow> m \<le> n"
+proof (induct m arbitrary: n)
+ case 0
+ show ?case by simp
+next
+ case (Suc m)
+ show ?case
+ proof (cases n)
+ case 0
+ with Suc.prems Suc.hyps have "a * a ^ m \<le> 1" by simp
+ with gt1 show ?thesis
+ by (force simp only: power_gt1_lemma
+ not_less [symmetric])
+ next
+ case (Suc n)
+ with Suc.prems Suc.hyps show ?thesis
+ by (force dest: mult_left_le_imp_le
+ simp add: less_trans [OF zero_less_one gt1])
+ qed
+qed
+
+text{*Surely we can strengthen this? It holds for @{text "0<a<1"} too.*}
+lemma power_inject_exp [simp]:
+ "1 < a \<Longrightarrow> a ^ m = a ^ n \<longleftrightarrow> m = n"
+ by (force simp add: order_antisym power_le_imp_le_exp)
+
+text{*Can relax the first premise to @{term "0<a"} in the case of the
+natural numbers.*}
+lemma power_less_imp_less_exp:
+ "1 < a \<Longrightarrow> a ^ m < a ^ n \<Longrightarrow> m < n"
+ by (simp add: order_less_le [of m n] less_le [of "a^m" "a^n"]
+ power_le_imp_le_exp)
+
+lemma power_strict_mono [rule_format]:
+ "a < b \<Longrightarrow> 0 \<le> a \<Longrightarrow> 0 < n \<longrightarrow> a ^ n < b ^ n"
+ by (induct n)
+ (auto simp add: mult_strict_mono le_less_trans [of 0 a b])
+
+text{*Lemma for @{text power_strict_decreasing}*}
+lemma power_Suc_less:
+ "0 < a \<Longrightarrow> a < 1 \<Longrightarrow> a * a ^ n < a ^ n"
+ by (induct n)
+ (auto simp add: mult_strict_left_mono)
+
+lemma power_strict_decreasing [rule_format]:
+ "n < N \<Longrightarrow> 0 < a \<Longrightarrow> a < 1 \<longrightarrow> a ^ N < a ^ n"
+proof (induct N)
+ case 0 then show ?case by simp
+next
+ case (Suc N) then show ?case
+ apply (auto simp add: power_Suc_less less_Suc_eq)
+ apply (subgoal_tac "a * a^N < 1 * a^n")
+ apply simp
+ apply (rule mult_strict_mono) apply auto
+ done
+qed
+
+text{*Proof resembles that of @{text power_strict_decreasing}*}
+lemma power_decreasing [rule_format]:
+ "n \<le> N \<Longrightarrow> 0 \<le> a \<Longrightarrow> a \<le> 1 \<longrightarrow> a ^ N \<le> a ^ n"
+proof (induct N)
+ case 0 then show ?case by simp
+next
+ case (Suc N) then show ?case
+ apply (auto simp add: le_Suc_eq)
+ apply (subgoal_tac "a * a^N \<le> 1 * a^n", simp)
+ apply (rule mult_mono) apply auto
+ done
+qed
+
+lemma power_Suc_less_one:
+ "0 < a \<Longrightarrow> a < 1 \<Longrightarrow> a ^ Suc n < 1"
+ using power_strict_decreasing [of 0 "Suc n" a] by simp
+
+text{*Proof again resembles that of @{text power_strict_decreasing}*}
+lemma power_increasing [rule_format]:
+ "n \<le> N \<Longrightarrow> 1 \<le> a \<Longrightarrow> a ^ n \<le> a ^ N"
+proof (induct N)
+ case 0 then show ?case by simp
+next
+ case (Suc N) then show ?case
+ apply (auto simp add: le_Suc_eq)
+ apply (subgoal_tac "1 * a^n \<le> a * a^N", simp)
+ apply (rule mult_mono) apply (auto simp add: order_trans [OF zero_le_one])
+ done
+qed
+
+text{*Lemma for @{text power_strict_increasing}*}
+lemma power_less_power_Suc:
+ "1 < a \<Longrightarrow> a ^ n < a * a ^ n"
+ by (induct n) (auto simp add: mult_strict_left_mono less_trans [OF zero_less_one])
+
+lemma power_strict_increasing [rule_format]:
+ "n < N \<Longrightarrow> 1 < a \<longrightarrow> a ^ n < a ^ N"
+proof (induct N)
+ case 0 then show ?case by simp
+next
+ case (Suc N) then show ?case
+ apply (auto simp add: power_less_power_Suc less_Suc_eq)
+ apply (subgoal_tac "1 * a^n < a * a^N", simp)
+ apply (rule mult_strict_mono) apply (auto simp add: less_trans [OF zero_less_one] less_imp_le)
+ done
+qed
+
+lemma power_increasing_iff [simp]:
+ "1 < b \<Longrightarrow> b ^ x \<le> b ^ y \<longleftrightarrow> x \<le> y"
+ by (blast intro: power_le_imp_le_exp power_increasing less_imp_le)
+
+lemma power_strict_increasing_iff [simp]:
+ "1 < b \<Longrightarrow> b ^ x < b ^ y \<longleftrightarrow> x < y"
+by (blast intro: power_less_imp_less_exp power_strict_increasing)
+
+lemma power_le_imp_le_base:
+ assumes le: "a ^ Suc n \<le> b ^ Suc n"
+ and ynonneg: "0 \<le> b"
+ shows "a \<le> b"
+proof (rule ccontr)
+ assume "~ a \<le> b"
+ then have "b < a" by (simp only: linorder_not_le)
+ then have "b ^ Suc n < a ^ Suc n"
+ by (simp only: assms power_strict_mono)
+ from le and this show False
+ by (simp add: linorder_not_less [symmetric])
+qed
+
+lemma power_less_imp_less_base:
+ assumes less: "a ^ n < b ^ n"
+ assumes nonneg: "0 \<le> b"
+ shows "a < b"
+proof (rule contrapos_pp [OF less])
+ assume "~ a < b"
+ hence "b \<le> a" by (simp only: linorder_not_less)
+ hence "b ^ n \<le> a ^ n" using nonneg by (rule power_mono)
+ thus "\<not> a ^ n < b ^ n" by (simp only: linorder_not_less)
+qed
+
+lemma power_inject_base:
+ "a ^ Suc n = b ^ Suc n \<Longrightarrow> 0 \<le> a \<Longrightarrow> 0 \<le> b \<Longrightarrow> a = b"
+by (blast intro: power_le_imp_le_base antisym eq_refl sym)
+
+lemma power_eq_imp_eq_base:
+ "a ^ n = b ^ n \<Longrightarrow> 0 \<le> a \<Longrightarrow> 0 \<le> b \<Longrightarrow> 0 < n \<Longrightarrow> a = b"
+ by (cases n) (simp_all del: power_Suc, rule power_inject_base)
+
+lemma power2_le_imp_le:
+ "x\<^sup>2 \<le> y\<^sup>2 \<Longrightarrow> 0 \<le> y \<Longrightarrow> x \<le> y"
+ unfolding numeral_2_eq_2 by (rule power_le_imp_le_base)
+
+lemma power2_less_imp_less:
+ "x\<^sup>2 < y\<^sup>2 \<Longrightarrow> 0 \<le> y \<Longrightarrow> x < y"
+ by (rule power_less_imp_less_base)
+
+lemma power2_eq_imp_eq:
+ "x\<^sup>2 = y\<^sup>2 \<Longrightarrow> 0 \<le> x \<Longrightarrow> 0 \<le> y \<Longrightarrow> x = y"
+ unfolding numeral_2_eq_2 by (erule (2) power_eq_imp_eq_base) simp
+
+end
+
+context linordered_ring_strict
+begin
+
+lemma sum_squares_eq_zero_iff:
+ "x * x + y * y = 0 \<longleftrightarrow> x = 0 \<and> y = 0"
+ by (simp add: add_nonneg_eq_0_iff)
+
+lemma sum_squares_le_zero_iff:
+ "x * x + y * y \<le> 0 \<longleftrightarrow> x = 0 \<and> y = 0"
+ by (simp add: le_less not_sum_squares_lt_zero sum_squares_eq_zero_iff)
+
+lemma sum_squares_gt_zero_iff:
+ "0 < x * x + y * y \<longleftrightarrow> x \<noteq> 0 \<or> y \<noteq> 0"
+ by (simp add: not_le [symmetric] sum_squares_le_zero_iff)
+
+end
+
+context linordered_idom
+begin
+
+lemma power_abs:
+ "abs (a ^ n) = abs a ^ n"
+ by (induct n) (auto simp add: abs_mult)
+
+lemma abs_power_minus [simp]:
+ "abs ((-a) ^ n) = abs (a ^ n)"
+ by (simp add: power_abs)
+
+lemma zero_less_power_abs_iff [simp, no_atp]:
+ "0 < abs a ^ n \<longleftrightarrow> a \<noteq> 0 \<or> n = 0"
+proof (induct n)
+ case 0 show ?case by simp
+next
+ case (Suc n) show ?case by (auto simp add: Suc zero_less_mult_iff)
+qed
+
+lemma zero_le_power_abs [simp]:
+ "0 \<le> abs a ^ n"
+ by (rule zero_le_power [OF abs_ge_zero])
+
+lemma zero_le_power2 [simp]:
+ "0 \<le> a\<^sup>2"
+ by (simp add: power2_eq_square)
+
+lemma zero_less_power2 [simp]:
+ "0 < a\<^sup>2 \<longleftrightarrow> a \<noteq> 0"
+ by (force simp add: power2_eq_square zero_less_mult_iff linorder_neq_iff)
+
+lemma power2_less_0 [simp]:
+ "\<not> a\<^sup>2 < 0"
+ by (force simp add: power2_eq_square mult_less_0_iff)
+
+lemma abs_power2 [simp]:
+ "abs (a\<^sup>2) = a\<^sup>2"
+ by (simp add: power2_eq_square abs_mult abs_mult_self)
+
+lemma power2_abs [simp]:
+ "(abs a)\<^sup>2 = a\<^sup>2"
+ by (simp add: power2_eq_square abs_mult_self)
+
+lemma odd_power_less_zero:
+ "a < 0 \<Longrightarrow> a ^ Suc (2*n) < 0"
+proof (induct n)
+ case 0
+ then show ?case by simp
+next
+ case (Suc n)
+ have "a ^ Suc (2 * Suc n) = (a*a) * a ^ Suc(2*n)"
+ by (simp add: mult_ac power_add power2_eq_square)
+ thus ?case
+ by (simp del: power_Suc add: Suc mult_less_0_iff mult_neg_neg)
+qed
+
+lemma odd_0_le_power_imp_0_le:
+ "0 \<le> a ^ Suc (2*n) \<Longrightarrow> 0 \<le> a"
+ using odd_power_less_zero [of a n]
+ by (force simp add: linorder_not_less [symmetric])
+
+lemma zero_le_even_power'[simp]:
+ "0 \<le> a ^ (2*n)"
+proof (induct n)
+ case 0
+ show ?case by simp
+next
+ case (Suc n)
+ have "a ^ (2 * Suc n) = (a*a) * a ^ (2*n)"
+ by (simp add: mult_ac power_add power2_eq_square)
+ thus ?case
+ by (simp add: Suc zero_le_mult_iff)
+qed
+
+lemma sum_power2_ge_zero:
+ "0 \<le> x\<^sup>2 + y\<^sup>2"
+ by (intro add_nonneg_nonneg zero_le_power2)
+
+lemma not_sum_power2_lt_zero:
+ "\<not> x\<^sup>2 + y\<^sup>2 < 0"
+ unfolding not_less by (rule sum_power2_ge_zero)
+
+lemma sum_power2_eq_zero_iff:
+ "x\<^sup>2 + y\<^sup>2 = 0 \<longleftrightarrow> x = 0 \<and> y = 0"
+ unfolding power2_eq_square by (simp add: add_nonneg_eq_0_iff)
+
+lemma sum_power2_le_zero_iff:
+ "x\<^sup>2 + y\<^sup>2 \<le> 0 \<longleftrightarrow> x = 0 \<and> y = 0"
+ by (simp add: le_less sum_power2_eq_zero_iff not_sum_power2_lt_zero)
+
+lemma sum_power2_gt_zero_iff:
+ "0 < x\<^sup>2 + y\<^sup>2 \<longleftrightarrow> x \<noteq> 0 \<or> y \<noteq> 0"
+ unfolding not_le [symmetric] by (simp add: sum_power2_le_zero_iff)
+
+end
+
+
+subsection {* Miscellaneous rules *}
+
+lemma power_eq_if: "p ^ m = (if m=0 then 1 else p * (p ^ (m - 1)))"
+ unfolding One_nat_def by (cases m) simp_all
+
+lemma power2_sum:
+ fixes x y :: "'a::comm_semiring_1"
+ shows "(x + y)\<^sup>2 = x\<^sup>2 + y\<^sup>2 + 2 * x * y"
+ by (simp add: algebra_simps power2_eq_square mult_2_right)
+
+lemma power2_diff:
+ fixes x y :: "'a::comm_ring_1"
+ shows "(x - y)\<^sup>2 = x\<^sup>2 + y\<^sup>2 - 2 * x * y"
+ by (simp add: ring_distribs power2_eq_square mult_2) (rule mult_commute)
+
+lemma power_0_Suc [simp]:
+ "(0::'a::{power, semiring_0}) ^ Suc n = 0"
+ by simp
+
+text{*It looks plausible as a simprule, but its effect can be strange.*}
+lemma power_0_left:
+ "0 ^ n = (if n = 0 then 1 else (0::'a::{power, semiring_0}))"
+ by (induct n) simp_all
+
+lemma power_eq_0_iff [simp]:
+ "a ^ n = 0 \<longleftrightarrow>
+ a = (0::'a::{mult_zero,zero_neq_one,no_zero_divisors,power}) \<and> n \<noteq> 0"
+ by (induct n)
+ (auto simp add: no_zero_divisors elim: contrapos_pp)
+
+lemma (in field) power_diff:
+ assumes nz: "a \<noteq> 0"
+ shows "n \<le> m \<Longrightarrow> a ^ (m - n) = a ^ m / a ^ n"
+ by (induct m n rule: diff_induct) (simp_all add: nz field_power_not_zero)
+
+text{*Perhaps these should be simprules.*}
+lemma power_inverse:
+ fixes a :: "'a::division_ring_inverse_zero"
+ shows "inverse (a ^ n) = inverse a ^ n"
+apply (cases "a = 0")
+apply (simp add: power_0_left)
+apply (simp add: nonzero_power_inverse)
+done (* TODO: reorient or rename to inverse_power *)
+
+lemma power_one_over:
+ "1 / (a::'a::{field_inverse_zero, power}) ^ n = (1 / a) ^ n"
+ by (simp add: divide_inverse) (rule power_inverse)
+
+lemma power_divide:
+ "(a / b) ^ n = (a::'a::field_inverse_zero) ^ n / b ^ n"
+apply (cases "b = 0")
+apply (simp add: power_0_left)
+apply (rule nonzero_power_divide)
+apply assumption
+done
+
+text {* Simprules for comparisons where common factors can be cancelled. *}
+
+lemmas zero_compare_simps =
+ add_strict_increasing add_strict_increasing2 add_increasing
+ zero_le_mult_iff zero_le_divide_iff
+ zero_less_mult_iff zero_less_divide_iff
+ mult_le_0_iff divide_le_0_iff
+ mult_less_0_iff divide_less_0_iff
+ zero_le_power2 power2_less_0
+
+
+subsection {* Exponentiation for the Natural Numbers *}
+
+lemma nat_one_le_power [simp]:
+ "Suc 0 \<le> i \<Longrightarrow> Suc 0 \<le> i ^ n"
+ by (rule one_le_power [of i n, unfolded One_nat_def])
+
+lemma nat_zero_less_power_iff [simp]:
+ "x ^ n > 0 \<longleftrightarrow> x > (0::nat) \<or> n = 0"
+ by (induct n) auto
+
+lemma nat_power_eq_Suc_0_iff [simp]:
+ "x ^ m = Suc 0 \<longleftrightarrow> m = 0 \<or> x = Suc 0"
+ by (induct m) auto
+
+lemma power_Suc_0 [simp]:
+ "Suc 0 ^ n = Suc 0"
+ by simp
+
+text{*Valid for the naturals, but what if @{text"0<i<1"}?
+Premises cannot be weakened: consider the case where @{term "i=0"},
+@{term "m=1"} and @{term "n=0"}.*}
+lemma nat_power_less_imp_less:
+ assumes nonneg: "0 < (i\<Colon>nat)"
+ assumes less: "i ^ m < i ^ n"
+ shows "m < n"
+proof (cases "i = 1")
+ case True with less power_one [where 'a = nat] show ?thesis by simp
+next
+ case False with nonneg have "1 < i" by auto
+ from power_strict_increasing_iff [OF this] less show ?thesis ..
+qed
+
+lemma power_dvd_imp_le:
+ "i ^ m dvd i ^ n \<Longrightarrow> (1::nat) < i \<Longrightarrow> m \<le> n"
+ apply (rule power_le_imp_le_exp, assumption)
+ apply (erule dvd_imp_le, simp)
+ done
+
+lemma power2_nat_le_eq_le:
+ fixes m n :: nat
+ shows "m\<^sup>2 \<le> n\<^sup>2 \<longleftrightarrow> m \<le> n"
+ by (auto intro: power2_le_imp_le power_mono)
+
+lemma power2_nat_le_imp_le:
+ fixes m n :: nat
+ assumes "m\<^sup>2 \<le> n"
+ shows "m \<le> n"
+ using assms by (cases m) (simp_all add: power2_eq_square)
+
+
+
+subsection {* Code generator tweak *}
+
+lemma power_power_power [code]:
+ "power = power.power (1::'a::{power}) (op *)"
+ unfolding power_def power.power_def ..
+
+declare power.power.simps [code]
+
+code_identifier
+ code_module Power \<rightharpoonup> (SML) Arith and (OCaml) Arith and (Haskell) Arith
+
+end
+
diff --git a/tests/examplefiles/example.todotxt b/tests/examplefiles/example.todotxt
new file mode 100644
index 00000000..55ee5286
--- /dev/null
+++ b/tests/examplefiles/example.todotxt
@@ -0,0 +1,9 @@
+(A) Call Mom @Phone +Family
+(A) 2014-01-08 Schedule annual checkup +Health
+(B) Outline chapter 5 +Novel @Computer
+(C) Add cover sheets @Office +TPSReports
+Plan backyard herb garden @Home
+Pick up milk @GroceryStore
+Research self-publishing services +Novel @Computer
+x 2014-01-10 Download Todo.txt mobile app @Phone
+x 2014-01-10 2014-01-07 Download Todo.txt CLI @Computer
diff --git a/tests/examplefiles/example.weechatlog b/tests/examplefiles/example.weechatlog
index 9f036166..15e8130f 100644
--- a/tests/examplefiles/example.weechatlog
+++ b/tests/examplefiles/example.weechatlog
@@ -6,4 +6,6 @@
2007 Sep 01 00:23:55 -=- Das Topic von &bitlbee lautet: "Welcome to the control channel. Type help for help information."
2007 Sep 01 00:23:55 <root> Welcome to the BitlBee gateway!
2007 Sep 01 00:23:55 <root>
-2007 Sep 01 00:23:55 <root> If you've never used BitlBee before, please do read the help information using the help command. Lots of FAQ's are answered there. \ No newline at end of file
+2007 Sep 01 00:23:55 <root> If you've never used BitlBee before, please do read the help information using the help command. Lots of FAQ's are answered there.
+# check for fixed pathological matching behavior
+1111111111111111111111111111111
diff --git a/tests/examplefiles/exampleScript.cfc b/tests/examplefiles/exampleScript.cfc
new file mode 100644
index 00000000..002acbcd
--- /dev/null
+++ b/tests/examplefiles/exampleScript.cfc
@@ -0,0 +1,241 @@
+<cfscript>
+/**
+********************************************************************************
+ContentBox - A Modular Content Platform
+Copyright 2012 by Luis Majano and Ortus Solutions, Corp
+www.gocontentbox.org | www.luismajano.com | www.ortussolutions.com
+********************************************************************************
+Apache License, Version 2.0
+
+Copyright Since [2012] [Luis Majano and Ortus Solutions,Corp]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+********************************************************************************
+* A generic content service for content objects
+*/
+component extends="coldbox.system.orm.hibernate.VirtualEntityService" singleton{
+
+ // DI
+ property name="settingService" inject="id:settingService@cb";
+ property name="cacheBox" inject="cachebox";
+ property name="log" inject="logbox:logger:{this}";
+ property name="customFieldService" inject="customFieldService@cb";
+ property name="categoryService" inject="categoryService@cb";
+ property name="commentService" inject="commentService@cb";
+ property name="contentVersionService" inject="contentVersionService@cb";
+ property name="authorService" inject="authorService@cb";
+ property name="populator" inject="wirebox:populator";
+ property name="systemUtil" inject="SystemUtil@cb";
+
+ /*
+ * Constructor
+ * @entityName.hint The content entity name to bind this service to.
+ */
+ ContentService function init(entityName="cbContent"){
+ // init it
+ super.init(entityName=arguments.entityName, useQueryCaching=true);
+
+ // Test scope coloring in pygments
+ this.colorTestVar = "Just for testing pygments!";
+ cookie.colorTestVar = "";
+ client.colorTestVar = ""
+ session.colorTestVar = "";
+ application.colorTestVar = "";
+
+ return this;
+ }
+
+ /**
+ * Clear all content caches
+ * @async.hint Run it asynchronously or not, defaults to false
+ */
+ function clearAllCaches(boolean async=false){
+ var settings = settingService.getAllSettings(asStruct=true);
+ // Get appropriate cache provider
+ var cache = cacheBox.getCache( settings.cb_content_cacheName );
+ cache.clearByKeySnippet(keySnippet="cb-content",async=arguments.async);
+ return this;
+ }
+
+ /**
+ * Clear all page wrapper caches
+ * @async.hint Run it asynchronously or not, defaults to false
+ */
+ function clearAllPageWrapperCaches(boolean async=false){
+ var settings = settingService.getAllSettings(asStruct=true);
+ // Get appropriate cache provider
+ var cache = cacheBox.getCache( settings.cb_content_cacheName );
+ cache.clearByKeySnippet(keySnippet="cb-content-pagewrapper",async=arguments.async);
+ return this;
+ }
+
+ /**
+ * Clear page wrapper caches matching a slug partial
+ * @slug.hint The slug partial to clean on
+ * @async.hint Run it asynchronously or not, defaults to false
+ */
+ function clearPageWrapperCaches(required any slug, boolean async=false){
+ var settings = settingService.getAllSettings(asStruct=true);
+ // Get appropriate cache provider
+ var cache = cacheBox.getCache( settings.cb_content_cacheName );
+ cache.clearByKeySnippet(keySnippet="cb-content-pagewrapper-#arguments.slug#",async=arguments.async);
+ return this;
+ }
+
+ /**
+ * Clear a page wrapper cache
+ * @slug.hint The slug to clean
+ * @async.hint Run it asynchronously or not, defaults to false
+ */
+ function clearPageWrapper(required any slug, boolean async=false){
+ var settings = settingService.getAllSettings(asStruct=true);
+ // Get appropriate cache provider
+ var cache = cacheBox.getCache( settings.cb_content_cacheName );
+ cache.clear("cb-content-pagewrapper-#arguments.slug#/");
+ return this;
+ }
+
+ /**
+ * Searches published content with cool parameters; remember, published content only
+ * @searchTerm.hint The search term to search
+ * @max.hint The maximum number of records to paginate
+ * @offset.hint The offset in the pagination
+ * @asQuery.hint Return as query or array of objects, defaults to array of objects
+ * @sortOrder.hint The sorting of the search results, defaults to publishedDate DESC
+ * @isPublished.hint Search for published, non-published or both content objects [true, false, 'all']
+ * @searchActiveContent.hint Search only content titles or both title and active content. Defaults to both.
+ */
+ function searchContent(
+ any searchTerm="",
+ numeric max=0,
+ numeric offset=0,
+ boolean asQuery=false,
+ any sortOrder="publishedDate DESC",
+ any isPublished=true,
+ boolean searchActiveContent=true){
+
+ var results = {};
+ var c = newCriteria();
+
+ // only published content
+ if( isBoolean( arguments.isPublished ) ){
+ // Published bit
+ c.isEq( "isPublished", javaCast( "Boolean", arguments.isPublished ) );
+ // Published eq true evaluate other params
+ if( arguments.isPublished ){
+ c.isLt("publishedDate", now() )
+ .$or( c.restrictions.isNull("expireDate"), c.restrictions.isGT("expireDate", now() ) )
+ .isEq("passwordProtection","");
+ }
+ }
+
+ // Search Criteria
+ if( len( arguments.searchTerm ) ){
+ // like disjunctions
+ c.createAlias("activeContent","ac");
+ // Do we search title and active content or just title?
+ if( arguments.searchActiveContent ){
+ c.$or( c.restrictions.like("title","%#arguments.searchTerm#%"),
+ c.restrictions.like("ac.content", "%#arguments.searchTerm#%") );
+ }
+ else{
+ c.like( "title", "%#arguments.searchTerm#%" );
+ }
+ }
+
+ // run criteria query and projections count
+ results.count = c.count( "contentID" );
+ results.content = c.resultTransformer( c.DISTINCT_ROOT_ENTITY )
+ .list(offset=arguments.offset, max=arguments.max, sortOrder=arguments.sortOrder, asQuery=arguments.asQuery);
+
+ return results;
+ }
+
+/********************************************* PRIVATE *********************************************/
+
+
+ /**
+ * Update the content hits
+ * @contentID.hint The content id to update
+ */
+ private function syncUpdateHits(required contentID){
+ var q = new Query(sql="UPDATE cb_content SET hits = hits + 1 WHERE contentID = #arguments.contentID#").execute();
+ return this;
+ }
+
+
+ private function closureTest(){
+ methodCall(
+ param1,
+ function( arg1, required arg2 ){
+ var settings = settingService.getAllSettings(asStruct=true);
+ // Get appropriate cache provider
+ var cache = cacheBox.getCache( settings.cb_content_cacheName );
+ cache.clear("cb-content-pagewrapper-#arguments.slug#/");
+ return this;
+ },
+ param1
+ );
+ }
+
+ private function StructliteralTest(){
+ return {
+ foo = bar,
+ brad = 'Wood',
+ func = function( arg1, required arg2 ){
+ var settings = settingService.getAllSettings(asStruct=true);
+ // Get appropriate cache provider
+ var cache = cacheBox.getCache( settings.cb_content_cacheName );
+ cache.clear("cb-content-pagewrapper-#arguments.slug#/");
+ return this;
+ },
+ array = [
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 'test',
+ 'testing',
+ 'testerton',
+ {
+ foo = true,
+ brad = false,
+ wood = null
+ }
+ ],
+ last = "final"
+ };
+ }
+
+ private function arrayliteralTest(){
+ return [
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 'test',
+ 'testing',
+ 'testerton',
+ {
+ foo = true,
+ brad = false,
+ wood = null
+ },
+ 'testy-von-testavich'
+ ];
+ }
+
+}
+</cfscript> \ No newline at end of file
diff --git a/tests/examplefiles/exampleTag.cfc b/tests/examplefiles/exampleTag.cfc
new file mode 100644
index 00000000..753bb826
--- /dev/null
+++ b/tests/examplefiles/exampleTag.cfc
@@ -0,0 +1,18 @@
+<cfcomponent>
+
+ <cffunction name="init" access="public" returntype="any">
+ <cfargument name="arg1" type="any" required="true">
+ <cfset this.myVariable = arguments.arg1>
+
+ <cfreturn this>
+ </cffunction>
+
+ <cffunction name="testFunc" access="private" returntype="void">
+ <cfargument name="arg1" type="any" required="false">
+
+ <cfif structKeyExists(arguments, "arg1")>
+ <cfset writeoutput("Argument exists")>
+ </cfif>
+ </cffunction>
+
+</cfcomponent> \ No newline at end of file
diff --git a/tests/examplefiles/example_coq.v b/tests/examplefiles/example_coq.v
new file mode 100644
index 00000000..fd1a7bc8
--- /dev/null
+++ b/tests/examplefiles/example_coq.v
@@ -0,0 +1,4 @@
+Lemma FalseLemma : False <-> False.
+tauto.
+Qed.
+Check FalseLemma.
diff --git a/tests/examplefiles/example_elixir.ex b/tests/examplefiles/example_elixir.ex
index 2e92163d..ddca7f60 100644
--- a/tests/examplefiles/example_elixir.ex
+++ b/tests/examplefiles/example_elixir.ex
@@ -1,363 +1,233 @@
-# We cannot use to_char_list because it depends on inspect,
-# which depends on protocol, which depends on this module.
-import Elixir::Builtin, except: [to_char_list: 1]
-
-defmodule Module do
- require Erlang.ets, as: ETS
-
- @moduledoc """
- This module provides many functions to deal with modules during
- compilation time. It allows a developer to dynamically attach
- documentation, merge data, register attributes and so forth.
-
- After the module is compiled, using many of the functions in
- this module will raise errors, since it is out of their purpose
- to inspect runtime data. Most of the runtime data can be inspected
- via the `__info__(attr)` function attached to each compiled module.
- """
-
- @doc """
- Evalutes the quotes contents in the given module context.
- Raises an error if the module was already compiled.
-
- ## Examples
-
- defmodule Foo do
- contents = quote do: (def sum(a, b), do: a + b)
- Module.eval_quoted __MODULE__, contents, [], __FILE__, __LINE__
- end
-
- Foo.sum(1, 2) #=> 3
- """
- def eval_quoted(module, quoted, binding, filename, line) do
- assert_not_compiled!(:eval_quoted, module)
- { binding, scope } = Erlang.elixir_module.binding_and_scope_for_eval(line, to_char_list(filename), module, binding)
- Erlang.elixir_def.reset_last(module)
- Erlang.elixir.eval_quoted([quoted], binding, line, scope)
- end
-
- @doc """
- Checks if the module is compiled or not.
-
- ## Examples
-
- defmodule Foo do
- Module.compiled?(__MODULE__) #=> false
- end
-
- Module.compiled?(Foo) #=> true
-
- """
- def compiled?(module) do
- table = data_table_for(module)
- table == ETS.info(table, :name)
- end
-
- @doc """
- Reads the data for the given module. This is used
- to read data of uncompiled modules. If the module
- was already compiled, you shoul access the data
- directly by invoking `__info__(:data)` in that module.
-
- ## Examples
-
- defmodule Foo do
- Module.merge_data __MODULE__, value: 1
- Module.read_data __MODULE__ #=> [value: 1]
- end
-
- """
- def read_data(module) do
- assert_not_compiled!(:read_data, module)
- ETS.lookup_element(data_table_for(module), :data, 2)
- end
-
- @doc """
- Reads the data from `module` at the given key `at`.
-
- ## Examples
-
- defmodule Foo do
- Module.merge_data __MODULE__, value: 1
- Module.read_data __MODULE__, :value #=> 1
- end
-
- """
- def read_data(module, at) do
- Orddict.get read_data(module), at
- end
-
- @doc """
- Merge the given data into the module, overriding any
- previous one.
-
- If any of the given data is a registered attribute, it is
- automatically added to the attribute set, instead of marking
- it as data. See register_attribute/2 and add_attribute/3 for
- more info.
-
- ## Examples
-
- defmodule Foo do
- Module.merge_data __MODULE__, value: 1
- end
-
- Foo.__info__(:data) #=> [value: 1]
-
- """
- def merge_data(module, data) do
- assert_not_compiled!(:merge_data, module)
-
- table = data_table_for(module)
- old = ETS.lookup_element(table, :data, 2)
- registered = ETS.lookup_element(table, :registered_attributes, 2)
-
- { attrs, new } = Enum.partition data, fn({k,_}) -> List.member?(registered, k) end
- Enum.each attrs, fn({k,v}) -> add_attribute(module, k, v) end
- ETS.insert(table, { :data, Orddict.merge(old, new) })
- end
-
- @doc """
- Attaches documentation to a given function. It expects
- the module the function belongs to, the line (a non negative
- integer), the kind (def or defmacro), a tuple representing
- the function and its arity and the documentation, which should
- be either a binary or a boolean.
-
- ## Examples
-
- defmodule MyModule do
- Module.add_doc(__MODULE__, __LINE__ + 1, :def, { :version, 0 }, "Manually added docs")
- def version, do: 1
- end
-
- """
- def add_doc(module, line, kind, tuple, doc) when
- is_binary(doc) or is_boolean(doc) do
- assert_not_compiled!(:add_doc, module)
- case kind do
- match: :defp
- :warn
- else:
- table = docs_table_for(module)
- ETS.insert(table, { tuple, line, kind, doc })
- :ok
- end
- end
+# Numbers
+0b0101011
+1234 ; 0x1A ; 0xbeef ; 0763 ; 0o123
+3.14 ; 5.0e21 ; 0.5e-12
+100_000_000
+
+# these are not valid numbers
+0b012 ; 0xboar ; 0o888
+0B01 ; 0XAF ; 0O123
+
+# Characters
+?a ; ?1 ; ?\n ; ?\s ; ?\c ; ? ; ?,
+?\x{12} ; ?\x{abcd}
+?\x34 ; ?\xF
+
+# these show that only the first digit is part of the character
+?\123 ; ?\12 ; ?\7
+
+# Atoms
+:this ; :that
+:'complex atom'
+:"with' \"\" 'quotes"
+:" multi
+ line ' \s \123 \xff
+atom"
+:... ; :<<>> ; :%{} ; :% ; :{}
+:++; :--; :*; :~~~; :::
+:% ; :. ; :<-
+
+# Strings
+"Hello world"
+"Interspersed \x{ff} codes \7 \8 \65 \016 and \t\s\\s\z\+ \\ escapes"
+"Quotes ' inside \" \123 the \"\" \xF \\xF string \\\" end"
+"Multiline
+ string"
+
+# Char lists
+'this is a list'
+'escapes \' \t \\\''
+'Multiline
+ char
+ list
+'
+
+# Binaries
+<<1, 2, 3>>
+<<"hello"::binary, c :: utf8, x::[4, unit(2)]>> = "hello™1"
+
+# Sigils
+~r/this + i\s "a" regex/
+~R'this + i\s "a" regex too'
+~w(hello #{ ["has" <> "123", '\c\d', "\123 interpol" | []] } world)s
+~W(hello #{no "123" \c\d \123 interpol} world)s
+
+~s{Escapes terminators \{ and \}, but no {balancing} # outside of sigil here }
+
+~S"No escapes \s\t\n and no #{interpolation}"
+
+:"atoms work #{"to" <> "o"}"
+
+# Operators
+x = 1 + 2.0 * 3
+y = true and false; z = false or true
+... = 144
+... == !x && y || z
+"hello" |> String.upcase |> String.downcase()
+{^z, a} = {true, x}
+
+# Free operators (added in 1.0.0)
+p ~>> f = bind(p, f)
+p1 ~> p2 = pair_right(p1, p2)
+p1 <~ p2 = pair_left(p1, p2)
+p1 <~> p2 = pair_both(p1, p2)
+p |~> f = map(p, f)
+p1 <|> p2 = either(p1, p2)
+
+# Lists, tuples, maps, keywords
+[1, :a, 'hello'] ++ [2, 3]
+[:head | [?t, ?a, ?i, ?l]]
+
+{:one, 2.0, "three"}
+
+[...: "this", <<>>: "is", %{}: "a keyword", %: "list", {}: "too"]
+["this is an atom too": 1, "so is this": 2]
+[option: "value", key: :word]
+[++: "operator", ~~~: :&&&]
+
+map = %{shortcut: "syntax"}
+%{map | "update" => "me"}
+%{ 12 => 13, :weird => ['thing'] }
+
+# Comprehensions
+for x <- 1..10, x < 5, do: {x, x}
+pixels = "12345678"
+for << <<r::4, g::4, b::4, a::size(4)>> <- pixels >> do
+ [r, {g, %{"b" => a}}]
+end
+
+# String interpolation
+"String #{inspect "interpolation"} is quite #{1+4+7} difficult"
+
+# Identifiers
+abc_123 = 1
+_018OP = 2
+A__0 == 3
+
+# Modules
+defmodule Long.Module.Name do
+ @moduledoc "Simple module docstring"
@doc """
- Checks if a function was defined, regardless if it is
- a macro or a private function. Use function_defined?/3
- to assert for an specific type.
-
- ## Examples
-
- defmodule Example do
- Module.function_defined? __MODULE__, { :version, 0 } #=> false
- def version, do: 1
- Module.function_defined? __MODULE__, { :version, 0 } #=> true
- end
-
+ Multiline docstring
+ "with quotes"
+ and #{ inspect %{"interpolation" => "in" <> "action"} }
+ now with #{ {:a, 'tuple'} }
+ and #{ inspect {
+ :tuple,
+ %{ with: "nested #{ inspect %{ :interpolation => %{} } }" }
+ } }
"""
- def function_defined?(module, tuple) when is_tuple(tuple) do
- assert_not_compiled!(:function_defined?, module)
- table = function_table_for(module)
- ETS.lookup(table, tuple) != []
- end
+ defstruct [:a, :name, :height]
- @doc """
- Checks if a function was defined and also for its `kind`.
- `kind` can be either :def, :defp or :defmacro.
+ @doc ~S'''
+ No #{interpolation} of any kind.
+ \000 \x{ff}
- ## Examples
-
- defmodule Example do
- Module.function_defined? __MODULE__, { :version, 0 }, :defp #=> false
- def version, do: 1
- Module.function_defined? __MODULE__, { :version, 0 }, :defp #=> false
- end
-
- """
- def function_defined?(module, tuple, kind) do
- List.member? defined_functions(module, kind), tuple
- end
-
- @doc """
- Return all functions defined in the given module.
-
- ## Examples
-
- defmodule Example do
- def version, do: 1
- Module.defined_functions __MODULE__ #=> [{:version,1}]
- end
-
- """
- def defined_functions(module) do
- assert_not_compiled!(:defined_functions, module)
- table = function_table_for(module)
- lc { tuple, _, _ } in ETS.tab2list(table), do: tuple
- end
-
- @doc """
- Returns all functions defined in te given module according
- to its kind.
-
- ## Examples
-
- defmodule Example do
- def version, do: 1
- Module.defined_functions __MODULE__, :def #=> [{:version,1}]
- Module.defined_functions __MODULE__, :defp #=> []
- end
-
- """
- def defined_functions(module, kind) do
- assert_not_compiled!(:defined_functions, module)
- table = function_table_for(module)
- entry = kind_to_entry(kind)
- ETS.lookup_element(table, entry, 2)
- end
-
- @doc """
- Adds a compilation callback hook that is invoked
- exactly before the module is compiled.
-
- This callback is useful when used with `use` as a mechanism
- to clean up any internal data in the module before it is compiled.
-
- ## Examples
-
- Imagine you are creating a module/library that is meant for
- external usage called `MyLib`. It could be defined as:
-
- defmodule MyLib do
- def __using__(target) do
- Module.merge_data target, some_data: true
- Module.add_compile_callback(target, __MODULE__, :__callback__)
- end
-
- defmacro __callback__(target) do
- value = Orddict.get(Module.read_data(target), :some_data, [])
- quote do: (def my_lib_value, do: unquote(value))
- end
- end
-
- And a module could use `MyLib` with:
-
- defmodule App do
- use ModuleTest::ToBeUsed
- end
-
- In the example above, `MyLib` defines a data to the target. This data
- can be updated throughout the module definition and therefore, the final
- value of the data can only be compiled using a compiation callback,
- which will read the final value of :some_data and compile to a function.
- """
- def add_compile_callback(module, target, fun // :__compiling__) do
- assert_not_compiled!(:add_compile_callback, module)
- new = { target, fun }
- table = data_table_for(module)
- old = ETS.lookup_element(table, :compile_callbacks, 2)
- ETS.insert(table, { :compile_callbacks, [new|old] })
- end
-
- @doc """
- Adds an Erlang attribute to the given module with the given
- key and value. The same attribute can be added more than once.
-
- ## Examples
-
- defmodule MyModule do
- Module.add_attribute __MODULE__, :custom_threshold_for_lib, 10
- end
-
- """
- def add_attribute(module, key, value) when is_atom(key) do
- assert_not_compiled!(:add_attribute, module)
- table = data_table_for(module)
- attrs = ETS.lookup_element(table, :attributes, 2)
- ETS.insert(table, { :attributes, [{key, value}|attrs] })
- end
-
- @doc """
- Deletes all attributes that matches the given key.
-
- ## Examples
-
- defmodule MyModule do
- Module.add_attribute __MODULE__, :custom_threshold_for_lib, 10
- Module.delete_attribute __MODULE__, :custom_threshold_for_lib
- end
-
- """
- def delete_attribute(module, key) when is_atom(key) do
- assert_not_compiled!(:delete_attribute, module)
- table = data_table_for(module)
- attrs = ETS.lookup_element(table, :attributes, 2)
- final = lc {k,v} in attrs, k != key, do: {k,v}
- ETS.insert(table, { :attributes, final })
- end
-
- @doc """
- Registers an attribute. This allows a developer to use the data API
- but Elixir will register the data as an attribute automatically.
- By default, `vsn`, `behavior` and other Erlang attributes are
- automatically registered.
-
- ## Examples
-
- defmodule MyModule do
- Module.register_attribute __MODULE__, :custom_threshold_for_lib
- @custom_threshold_for_lib 10
- end
-
- """
- def register_attribute(module, new) do
- assert_not_compiled!(:register_attribute, module)
- table = data_table_for(module)
- old = ETS.lookup_element(table, :registered_attributes, 2)
- ETS.insert(table, { :registered_attributes, [new|old] })
- end
+ \n #{\x{ff}}
+ '''
+ def func(a, b \\ []), do: :ok
@doc false
- # Used internally to compile documentation. This function
- # is private and must be used only internally.
- def compile_doc(module, line, kind, pair) do
- case read_data(module, :doc) do
- match: nil
- # We simply discard nil
- match: doc
- result = add_doc(module, line, kind, pair, doc)
- merge_data(module, doc: nil)
- result
- end
+ def __before_compile__(_) do
+ :ok
end
+end
- ## Helpers
+# Structs
+defmodule Second.Module do
+ s = %Long.Module.Name{name: "Silly"}
+ %Long.Module.Name{s | height: {192, :cm}}
+ ".. #{%Long.Module.Name{s | height: {192, :cm}}} .."
+end
- defp kind_to_entry(:def), do: :public
- defp kind_to_entry(:defp), do: :private
- defp kind_to_entry(:defmacro), do: :macros
+# Types, pseudo-vars, attributes
+defmodule M do
+ @custom_attr :some_constant
- defp to_char_list(list) when is_list(list), do: list
- defp to_char_list(bin) when is_binary(bin), do: binary_to_list(bin)
+ @before_compile Long.Module.Name
- defp data_table_for(module) do
- list_to_atom Erlang.lists.concat([:d, module])
- end
+ @typedoc "This is a type"
+ @type typ :: integer
- defp function_table_for(module) do
- list_to_atom Erlang.lists.concat([:f, module])
- end
-
- defp docs_table_for(module) do
- list_to_atom Erlang.lists.concat([:o, module])
- end
-
- defp assert_not_compiled!(fun, module) do
- compiled?(module) ||
- raise ArgumentError, message:
- "could not call #{fun} on module #{module} because it was already compiled"
- end
-end \ No newline at end of file
+ @typedoc """
+ Another type
+ """
+ @opaque typtyp :: 1..10
+
+ @spec func(typ, typtyp) :: :ok | :fail
+ def func(a, b) do
+ a || b || :ok || :fail
+ Path.expand("..", __DIR__)
+ IO.inspect __ENV__
+ __NOTAPSEUDOVAR__ = 11
+ __MODULE__.func(b, a)
+ end
+
+ defmacro m() do
+ __CALLER__
+ end
+end
+
+# Functions
+anon = fn x, y, z ->
+ fn(a, b, c) ->
+ &(x + y - z * a / &1 + b + div(&2, c))
+ end
+end
+
+&Set.put(&1, &2) ; & Set.put(&1, &2) ; &( Set.put(&1, &1) )
+
+# Function calls
+anon.(1, 2, 3); self; hd([1,2,3])
+Kernel.spawn(fn -> :ok end)
+IO.ANSI.black
+
+# Control flow
+if :this do
+ :that
+else
+ :otherwise
+end
+
+pid = self
+receive do
+ {:EXIT, _} -> :done
+ {^pid, :_} -> nil
+ after 100 -> :no_luck
+end
+
+case __ENV__.line do
+ x when is_integer(x) -> x
+ x when x in 1..12 -> -x
+end
+
+cond do
+ false -> "too bad"
+ 4 > 5 -> "oops"
+ true -> nil
+end
+
+# Lexical scope modifiers
+import Kernel, except: [spawn: 1, +: 2, /: 2, Unless: 2]
+alias Long.Module.Name, as: N0men123_and4
+use Bitwise
+
+4 &&& 5
+2 <<< 3
+
+# Protocols
+defprotocol Useless do
+ def func1(this)
+ def func2(that)
+end
+
+defimpl Useless, for: Atom do
+end
+
+# Exceptions
+defmodule NotAnError do
+ defexception [:message]
+end
+
+raise NotAnError, message: "This is not an error"
diff --git a/tests/examplefiles/hash_syntax.rb b/tests/examplefiles/hash_syntax.rb
new file mode 100644
index 00000000..35b27723
--- /dev/null
+++ b/tests/examplefiles/hash_syntax.rb
@@ -0,0 +1,5 @@
+{ :old_syntax => 'ok' }
+{ 'strings as key' => 'should be ok' }
+{ new_syntax: 'broken until now' }
+{ withoutunderscore: 'should be ok' }
+{ _underscoreinfront: 'might be ok, if I understand the pygments code correctly' }
diff --git a/tests/examplefiles/hello.at b/tests/examplefiles/hello.at
new file mode 100644
index 00000000..23af2f2d
--- /dev/null
+++ b/tests/examplefiles/hello.at
@@ -0,0 +1,6 @@
+def me := object: {
+ def name := "Kevin";
+ def sayHello(peerName) {
+ system.println(peerName + " says hello!");
+ };
+};
diff --git a/tests/examplefiles/hello.golo b/tests/examplefiles/hello.golo
new file mode 100644
index 00000000..7e8ca214
--- /dev/null
+++ b/tests/examplefiles/hello.golo
@@ -0,0 +1,5 @@
+module hello.World
+
+function main = |args| {
+ println("Hello world!")
+}
diff --git a/tests/examplefiles/hello.lsl b/tests/examplefiles/hello.lsl
new file mode 100644
index 00000000..61697e7f
--- /dev/null
+++ b/tests/examplefiles/hello.lsl
@@ -0,0 +1,12 @@
+default
+{
+ state_entry()
+ {
+ llSay(0, "Hello, Avatar!");
+ }
+
+ touch_start(integer total_number)
+ {
+ llSay(0, "Touched.");
+ }
+}
diff --git a/tests/examplefiles/File.hy b/tests/examplefiles/hybris_File.hy
index 9c86c641..9c86c641 100644
--- a/tests/examplefiles/File.hy
+++ b/tests/examplefiles/hybris_File.hy
diff --git a/tests/examplefiles/mg_sample.pro b/tests/examplefiles/idl_sample.pro
index 814d510d..814d510d 100644
--- a/tests/examplefiles/mg_sample.pro
+++ b/tests/examplefiles/idl_sample.pro
diff --git a/tests/examplefiles/iex_example b/tests/examplefiles/iex_example
new file mode 100644
index 00000000..22407e4e
--- /dev/null
+++ b/tests/examplefiles/iex_example
@@ -0,0 +1,23 @@
+iex> :" multi
+...> line ' \s \123 \x20
+...> atom"
+:" multi\n line ' S \natom"
+
+iex(1)> <<"hello"::binary, c :: utf8, x::[4, unit(2)]>> = "hello™1"
+"hello™1"
+
+iex(2)> c
+8482
+
+iex> 1 + :atom
+** (ArithmeticError) bad argument in arithmetic expression
+ :erlang.+(1, :atom)
+
+iex(3)> 1 +
+...(3)> 2 +
+...(3)> 3
+6
+
+iex> IO.puts "Hello world"
+Hello world
+:ok
diff --git a/tests/examplefiles/import.hs b/tests/examplefiles/import.hs
deleted file mode 100644
index 09058ae6..00000000
--- a/tests/examplefiles/import.hs
+++ /dev/null
@@ -1,4 +0,0 @@
-import "mtl" Control.Monad.Trans
-
-main :: IO ()
-main = putStrLn "hello world"
diff --git a/tests/examplefiles/inet_pton6.dg b/tests/examplefiles/inet_pton6.dg
index 4104b3e7..3813d5b8 100644
--- a/tests/examplefiles/inet_pton6.dg
+++ b/tests/examplefiles/inet_pton6.dg
@@ -1,5 +1,5 @@
-re = import!
-sys = import!
+import '/re'
+import '/sys'
# IPv6address = hexpart [ ":" IPv4address ]
@@ -20,7 +20,7 @@ addrv6 = re.compile $ r'(?i)(?:{})(?::{})?$'.format hexpart addrv4
#
# :return: a decimal integer
#
-base_n = (q digits) -> foldl (x y) -> (x * q + y) 0 digits
+base_n = q digits -> foldl (x y -> x * q + y) 0 digits
# Parse a sequence of hexadecimal numbers
@@ -29,7 +29,7 @@ base_n = (q digits) -> foldl (x y) -> (x * q + y) 0 digits
#
# :return: an iterable of Python ints
#
-unhex = q -> q and map p -> (int p 16) (q.split ':')
+unhex = q -> q and map (p -> int p 16) (q.split ':')
# Parse an IPv6 address as specified in RFC 4291.
@@ -39,33 +39,33 @@ unhex = q -> q and map p -> (int p 16) (q.split ':')
# :return: an integer which, written in binary form, points to the same node.
#
inet_pton6 = address ->
- raise $ ValueError 'not a valid IPv6 address' if not (match = addrv6.match address)
+ not (match = addrv6.match address) => raise $ ValueError 'not a valid IPv6 address'
start, end, *ipv4 = match.groups!
is_ipv4 = not $ None in ipv4
shift = (7 - start.count ':' - 2 * is_ipv4) * 16
- raise $ ValueError 'not a valid IPv6 address' if (end is None and shift) or shift < 0
+ (end is None and shift) or shift < 0 => raise $ ValueError 'not a valid IPv6 address'
hexaddr = (base_n 0x10000 (unhex start) << shift) + base_n 0x10000 (unhex $ end or '')
- (hexaddr << 32) + base_n 0x100 (map int ipv4) if is_ipv4 else hexaddr
+ if (is_ipv4 => (hexaddr << 32) + base_n 0x100 (map int ipv4)) (otherwise => hexaddr)
-inet6_type = q -> switch
- not q = 'unspecified'
- q == 1 = 'loopback'
- (q >> 32) == 0x000000000000ffff = 'IPv4-mapped'
- (q >> 64) == 0xfe80000000000000 = 'link-local'
- (q >> 120) != 0x00000000000000ff = 'general unicast'
- (q >> 112) % (1 << 4) == 0x0000000000000000 = 'multicast w/ reserved scope value'
- (q >> 112) % (1 << 4) == 0x000000000000000f = 'multicast w/ reserved scope value'
- (q >> 112) % (1 << 4) == 0x0000000000000001 = 'interface-local multicast'
- (q >> 112) % (1 << 4) == 0x0000000000000004 = 'admin-local multicast'
- (q >> 112) % (1 << 4) == 0x0000000000000005 = 'site-local multicast'
- (q >> 112) % (1 << 4) == 0x0000000000000008 = 'organization-local multicast'
- (q >> 112) % (1 << 4) == 0x000000000000000e = 'global multicast'
- (q >> 112) % (1 << 4) != 0x0000000000000002 = 'multicast w/ unknown scope value'
- (q >> 24) % (1 << 112) == 0x00000000000001ff = 'solicited-node multicast'
- True = 'link-local multicast'
+inet6_type = q -> if
+ q == 0 => 'unspecified'
+ q == 1 => 'loopback'
+ (q >> 32) == 0x000000000000ffff => 'IPv4-mapped'
+ (q >> 64) == 0xfe80000000000000 => 'link-local'
+ (q >> 120) != 0x00000000000000ff => 'general unicast'
+ (q >> 112) % (1 << 4) == 0x0000000000000000 => 'multicast w/ reserved scope value'
+ (q >> 112) % (1 << 4) == 0x000000000000000f => 'multicast w/ reserved scope value'
+ (q >> 112) % (1 << 4) == 0x0000000000000001 => 'interface-local multicast'
+ (q >> 112) % (1 << 4) == 0x0000000000000004 => 'admin-local multicast'
+ (q >> 112) % (1 << 4) == 0x0000000000000005 => 'site-local multicast'
+ (q >> 112) % (1 << 4) == 0x0000000000000008 => 'organization-local multicast'
+ (q >> 112) % (1 << 4) == 0x000000000000000e => 'global multicast'
+ (q >> 112) % (1 << 4) != 0x0000000000000002 => 'multicast w/ unknown scope value'
+ (q >> 24) % (1 << 112) == 0x00000000000001ff => 'solicited-node multicast'
+ otherwise => 'link-local multicast'
-print $ (x -> (inet6_type x, hex x)) $ inet_pton6 $ sys.stdin.read!.strip!
+print $ (x -> inet6_type x, hex x) $ inet_pton6 $ sys.stdin.read!.strip!
diff --git a/tests/examplefiles/interp.scala b/tests/examplefiles/interp.scala
new file mode 100644
index 00000000..4131b75e
--- /dev/null
+++ b/tests/examplefiles/interp.scala
@@ -0,0 +1,10 @@
+val n = 123;
+val a = s"n=$n";
+val a2 = s"n=$n''";
+val b = s"""n=$n""";
+val c = f"n=$n%f";
+val d = f"""n=$n%f""";
+val d2 = s"""a"""";
+val e = s"abc\u00e9";
+val f = s"a${n}b";
+val g = s"a${n + 1}b";
diff --git a/tests/examplefiles/language.hy b/tests/examplefiles/language.hy
new file mode 100644
index 00000000..9768c39c
--- /dev/null
+++ b/tests/examplefiles/language.hy
@@ -0,0 +1,165 @@
+;;;; This contains some of the core Hy functions used
+;;;; to make functional programming slightly easier.
+;;;;
+
+
+(defn _numeric-check [x]
+ (if (not (numeric? x))
+ (raise (TypeError (.format "{0!r} is not a number" x)))))
+
+(defn cycle [coll]
+ "Yield an infinite repetition of the items in coll"
+ (setv seen [])
+ (for [x coll]
+ (yield x)
+ (.append seen x))
+ (while seen
+ (for [x seen]
+ (yield x))))
+
+(defn dec [n]
+ "Decrement n by 1"
+ (_numeric-check n)
+ (- n 1))
+
+(defn distinct [coll]
+ "Return a generator from the original collection with duplicates
+ removed"
+ (let [[seen []] [citer (iter coll)]]
+ (for [val citer]
+ (if (not_in val seen)
+ (do
+ (yield val)
+ (.append seen val))))))
+
+(defn drop [count coll]
+ "Drop `count` elements from `coll` and yield back the rest"
+ (let [[citer (iter coll)]]
+ (try (for [i (range count)]
+ (next citer))
+ (catch [StopIteration]))
+ citer))
+
+(defn even? [n]
+ "Return true if n is an even number"
+ (_numeric-check n)
+ (= (% n 2) 0))
+
+(defn filter [pred coll]
+ "Return all elements from `coll` that pass `pred`"
+ (let [[citer (iter coll)]]
+ (for [val citer]
+ (if (pred val)
+ (yield val)))))
+
+(defn inc [n]
+ "Increment n by 1"
+ (_numeric-check n)
+ (+ n 1))
+
+(defn instance? [klass x]
+ (isinstance x klass))
+
+(defn iterable? [x]
+ "Return true if x is iterable"
+ (try (do (iter x) true)
+ (catch [Exception] false)))
+
+(defn iterate [f x]
+ (setv val x)
+ (while true
+ (yield val)
+ (setv val (f val))))
+
+(defn iterator? [x]
+ "Return true if x is an iterator"
+ (try (= x (iter x))
+ (catch [TypeError] false)))
+
+(defn neg? [n]
+ "Return true if n is < 0"
+ (_numeric-check n)
+ (< n 0))
+
+(defn none? [x]
+ "Return true if x is None"
+ (is x None))
+
+(defn numeric? [x]
+ (import numbers)
+ (instance? numbers.Number x))
+
+(defn nth [coll index]
+ "Return nth item in collection or sequence, counting from 0"
+ (if (not (neg? index))
+ (if (iterable? coll)
+ (try (first (list (take 1 (drop index coll))))
+ (catch [IndexError] None))
+ (try (get coll index)
+ (catch [IndexError] None)))
+ None))
+
+(defn odd? [n]
+ "Return true if n is an odd number"
+ (_numeric-check n)
+ (= (% n 2) 1))
+
+(defn pos? [n]
+ "Return true if n is > 0"
+ (_numeric_check n)
+ (> n 0))
+
+(defn remove [pred coll]
+ "Return coll with elements removed that pass `pred`"
+ (let [[citer (iter coll)]]
+ (for [val citer]
+ (if (not (pred val))
+ (yield val)))))
+
+(defn repeat [x &optional n]
+ "Yield x forever or optionally n times"
+ (if (none? n)
+ (setv dispatch (fn [] (while true (yield x))))
+ (setv dispatch (fn [] (for [_ (range n)] (yield x)))))
+ (dispatch))
+
+(defn repeatedly [func]
+ "Yield result of running func repeatedly"
+ (while true
+ (yield (func))))
+
+(defn take [count coll]
+ "Take `count` elements from `coll`, or the whole set if the total
+ number of entries in `coll` is less than `count`."
+ (let [[citer (iter coll)]]
+ (for [_ (range count)]
+ (yield (next citer)))))
+
+(defn take-nth [n coll]
+ "Return every nth member of coll
+ raises ValueError for (not (pos? n))"
+ (if (pos? n)
+ (let [[citer (iter coll)] [skip (dec n)]]
+ (for [val citer]
+ (yield val)
+ (for [_ (range skip)]
+ (next citer))))
+ (raise (ValueError "n must be positive"))))
+
+(defn take-while [pred coll]
+ "Take all elements while `pred` is true"
+ (let [[citer (iter coll)]]
+ (for [val citer]
+ (if (pred val)
+ (yield val)
+ (break)))))
+
+(defn zero? [n]
+ "Return true if n is 0"
+ (_numeric_check n)
+ (= n 0))
+
+(def *exports* ["cycle" "dec" "distinct" "drop" "even?" "filter" "inc"
+ "instance?" "iterable?" "iterate" "iterator?" "neg?"
+ "none?" "nth" "numeric?" "odd?" "pos?" "remove" "repeat"
+ "repeatedly" "take" "take_nth" "take_while" "zero?"])
diff --git a/tests/examplefiles/limbo.b b/tests/examplefiles/limbo.b
new file mode 100644
index 00000000..e55a0a62
--- /dev/null
+++ b/tests/examplefiles/limbo.b
@@ -0,0 +1,456 @@
+implement Ninewin;
+include "sys.m";
+ sys: Sys;
+include "draw.m";
+ draw: Draw;
+ Image, Display, Pointer: import draw;
+include "arg.m";
+include "keyboard.m";
+include "tk.m";
+include "wmclient.m";
+ wmclient: Wmclient;
+ Window: import wmclient;
+include "sh.m";
+ sh: Sh;
+
+# run a p9 graphics program (default rio) under inferno wm,
+# making available to it:
+# /dev/winname - naming the current inferno window (changing on resize)
+# /dev/mouse - pointer file + resize events; write to change position
+# /dev/cursor - change appearance of cursor.
+# /dev/draw - inferno draw device
+# /dev/cons - read keyboard events, write to 9win stdout.
+
+Ninewin: module {
+ init: fn(ctxt: ref Draw->Context, argv: list of string);
+};
+winname: string;
+
+init(ctxt: ref Draw->Context, argv: list of string)
+{
+ size := Draw->Point(500, 500);
+ sys = load Sys Sys->PATH;
+ draw = load Draw Draw->PATH;
+ wmclient = load Wmclient Wmclient->PATH;
+ wmclient->init();
+ sh = load Sh Sh->PATH;
+
+ buts := Wmclient->Resize;
+ if(ctxt == nil){
+ ctxt = wmclient->makedrawcontext();
+ buts = Wmclient->Plain;
+ }
+ arg := load Arg Arg->PATH;
+ arg->init(argv);
+ arg->setusage("9win [-s] [-x width] [-y height]");
+ exportonly := 0;
+ while(((opt := arg->opt())) != 0){
+ case opt {
+ 's' =>
+ exportonly = 1;
+ 'x' =>
+ size.x = int arg->earg();
+ 'y' =>
+ size.y = int arg->earg();
+ * =>
+ arg->usage();
+ }
+ }
+ if(size.x < 1 || size.y < 1)
+ arg->usage();
+ argv = arg->argv();
+ if(argv != nil && hd argv == "-s"){
+ exportonly = 1;
+ argv = tl argv;
+ }
+ if(argv == nil && !exportonly)
+ argv = "rio" :: nil;
+ if(argv != nil && exportonly){
+ sys->fprint(sys->fildes(2), "9win: no command allowed with -s flag\n");
+ raise "fail:usage";
+ }
+ title := "9win";
+ if(!exportonly)
+ title += " " + hd argv;
+ w := wmclient->window(ctxt, title, buts);
+ w.reshape(((0, 0), size));
+ w.onscreen(nil);
+ if(w.image == nil){
+ sys->fprint(sys->fildes(2), "9win: cannot get image to draw on\n");
+ raise "fail:no window";
+ }
+
+ sys->pctl(Sys->FORKNS|Sys->NEWPGRP, nil);
+ ld := "/n/9win";
+ if(sys->bind("#s", ld, Sys->MREPL) == -1 &&
+ sys->bind("#s", ld = "/n/local", Sys->MREPL) == -1){
+ sys->fprint(sys->fildes(2), "9win: cannot bind files: %r\n");
+ raise "fail:error";
+ }
+ w.startinput("kbd" :: "ptr" :: nil);
+ spawn ptrproc(rq := chan of Sys->Rread, ptr := chan[10] of ref Pointer, reshape := chan[1] of int);
+
+
+ fwinname := sys->file2chan(ld, "winname");
+ fconsctl := sys->file2chan(ld, "consctl");
+ fcons := sys->file2chan(ld, "cons");
+ fmouse := sys->file2chan(ld, "mouse");
+ fcursor := sys->file2chan(ld, "cursor");
+ if(!exportonly){
+ spawn run(sync := chan of string, w.ctl, ld, argv);
+ if((e := <-sync) != nil){
+ sys->fprint(sys->fildes(2), "9win: %s", e);
+ raise "fail:error";
+ }
+ }
+ spawn serveproc(w, rq, fwinname, fconsctl, fcons, fmouse, fcursor);
+ if(!exportonly){
+ # handle events synchronously so that we don't get a "killed" message
+ # from the shell.
+ handleevents(w, ptr, reshape);
+ }else{
+ spawn handleevents(w, ptr, reshape);
+ sys->bind(ld, "/dev", Sys->MBEFORE);
+ export(sys->fildes(0), w.ctl);
+ }
+}
+
+handleevents(w: ref Window, ptr: chan of ref Pointer, reshape: chan of int)
+{
+ for(;;)alt{
+ c := <-w.ctxt.ctl or
+ c = <-w.ctl =>
+ e := w.wmctl(c);
+ if(e != nil)
+ sys->fprint(sys->fildes(2), "9win: ctl error: %s\n", e);
+ if(e == nil && c != nil && c[0] == '!'){
+ alt{
+ reshape <-= 1 =>
+ ;
+ * =>
+ ;
+ }
+ winname = nil;
+ }
+ p := <-w.ctxt.ptr =>
+ if(w.pointer(*p) == 0){
+ # XXX would block here if client isn't reading mouse... but we do want to
+ # exert back-pressure, which conflicts.
+ alt{
+ ptr <-= p =>
+ ;
+ * =>
+ ; # sys->fprint(sys->fildes(2), "9win: discarding mouse event\n");
+ }
+ }
+ }
+}
+
+serveproc(w: ref Window, mouserq: chan of Sys->Rread, fwinname, fconsctl, fcons, fmouse, fcursor: ref Sys->FileIO)
+{
+ winid := 0;
+ krc: list of Sys->Rread;
+ ks: string;
+
+ for(;;)alt {
+ c := <-w.ctxt.kbd =>
+ ks[len ks] = inf2p9key(c);
+ if(krc != nil){
+ hd krc <-= (array of byte ks, nil);
+ ks = nil;
+ krc = tl krc;
+ }
+ (nil, d, nil, wc) := <-fcons.write =>
+ if(wc != nil){
+ sys->write(sys->fildes(1), d, len d);
+ wc <-= (len d, nil);
+ }
+ (nil, nil, nil, rc) := <-fcons.read =>
+ if(rc != nil){
+ if(ks != nil){
+ rc <-= (array of byte ks, nil);
+ ks = nil;
+ }else
+ krc = rc :: krc;
+ }
+ (offset, nil, nil, rc) := <-fwinname.read =>
+ if(rc != nil){
+ if(winname == nil){
+ winname = sys->sprint("noborder.9win.%d", winid++);
+ if(w.image.name(winname, 1) == -1){
+ sys->fprint(sys->fildes(2), "9win: namewin %q failed: %r", winname);
+ rc <-= (nil, "namewin failure");
+ break;
+ }
+ }
+ d := array of byte winname;
+ if(offset < len d)
+ d = d[offset:];
+ else
+ d = nil;
+ rc <-= (d, nil);
+ }
+ (nil, nil, nil, wc) := <-fwinname.write =>
+ if(wc != nil)
+ wc <-= (-1, "permission denied");
+ (nil, nil, nil, rc) := <-fconsctl.read =>
+ if(rc != nil)
+ rc <-= (nil, "permission denied");
+ (nil, d, nil, wc) := <-fconsctl.write =>
+ if(wc != nil){
+ if(string d != "rawon")
+ wc <-= (-1, "cannot change console mode");
+ else
+ wc <-= (len d, nil);
+ }
+ (nil, nil, nil, rc) := <-fmouse.read =>
+ if(rc != nil)
+ mouserq <-= rc;
+ (nil, d, nil, wc) := <-fmouse.write =>
+ if(wc != nil){
+ e := cursorset(w, string d);
+ if(e == nil)
+ wc <-= (len d, nil);
+ else
+ wc <-= (-1, e);
+ }
+ (nil, nil, nil, rc) := <-fcursor.read =>
+ if(rc != nil)
+ rc <-= (nil, "permission denied");
+ (nil, d, nil, wc) := <-fcursor.write =>
+ if(wc != nil){
+ e := cursorswitch(w, d);
+ if(e == nil)
+ wc <-= (len d, nil);
+ else
+ wc <-= (-1, e);
+ }
+ }
+}
+
+ptrproc(rq: chan of Sys->Rread, ptr: chan of ref Pointer, reshape: chan of int)
+{
+ rl: list of Sys->Rread;
+ c := ref Pointer(0, (0, 0), 0);
+ for(;;){
+ ch: int;
+ alt{
+ p := <-ptr =>
+ ch = 'm';
+ c = p;
+ <-reshape =>
+ ch = 'r';
+ rc := <-rq =>
+ rl = rc :: rl;
+ continue;
+ }
+ if(rl == nil)
+ rl = <-rq :: rl;
+ hd rl <-= (sys->aprint("%c%11d %11d %11d %11d ", ch, c.xy.x, c.xy.y, c.buttons, c.msec), nil);
+ rl = tl rl;
+ }
+}
+
+cursorset(w: ref Window, m: string): string
+{
+ if(m == nil || m[0] != 'm')
+ return "invalid mouse message";
+ x := int m[1:];
+ for(i := 1; i < len m; i++)
+ if(m[i] == ' '){
+ while(m[i] == ' ')
+ i++;
+ break;
+ }
+ if(i == len m)
+ return "invalid mouse message";
+ y := int m[i:];
+ return w.wmctl(sys->sprint("ptr %d %d", x, y));
+}
+
+cursorswitch(w: ref Window, d: array of byte): string
+{
+ Hex: con "0123456789abcdef";
+ if(len d != 2*4+64)
+ return w.wmctl("cursor");
+ hot := Draw->Point(bglong(d, 0*4), bglong(d, 1*4));
+ s := sys->sprint("cursor %d %d 16 32 ", hot.x, hot.y);
+ for(i := 2*4; i < len d; i++){
+ c := int d[i];
+ s[len s] = Hex[c >> 4];
+ s[len s] = Hex[c & 16rf];
+ }
+ return w.wmctl(s);
+}
+
+run(sync, ctl: chan of string, ld: string, argv: list of string)
+{
+ Rcmeta: con "|<>&^*[]?();";
+ sys->pctl(Sys->FORKNS, nil);
+ if(sys->bind("#₪", "/srv", Sys->MCREATE) == -1){
+ sync <-= sys->sprint("cannot bind srv device: %r");
+ exit;
+ }
+ srvname := "/srv/9win."+string sys->pctl(0, nil); # XXX do better.
+ fd := sys->create(srvname, Sys->ORDWR, 8r600);
+ if(fd == nil){
+ sync <-= sys->sprint("cannot create %s: %r", srvname);
+ exit;
+ }
+ sync <-= nil;
+ spawn export(fd, ctl);
+ sh->run(nil, "os" ::
+ "rc" :: "-c" ::
+ "mount "+srvname+" /mnt/term;"+
+ "rm "+srvname+";"+
+ "bind -b /mnt/term"+ld+" /dev;"+
+ "bind /mnt/term/dev/draw /dev/draw ||"+
+ "bind -a /mnt/term/dev /dev;"+
+ quotedc("cd"::"/mnt/term"+cwd()::nil, Rcmeta)+";"+
+ quotedc(argv, Rcmeta)+";"::
+ nil
+ );
+}
+
+export(fd: ref Sys->FD, ctl: chan of string)
+{
+ sys->export(fd, "/", Sys->EXPWAIT);
+ ctl <-= "exit";
+}
+
+inf2p9key(c: int): int
+{
+ KF: import Keyboard;
+
+ P9KF: con 16rF000;
+ Spec: con 16rF800;
+ Khome: con P9KF|16r0D;
+ Kup: con P9KF|16r0E;
+ Kpgup: con P9KF|16r0F;
+ Kprint: con P9KF|16r10;
+ Kleft: con P9KF|16r11;
+ Kright: con P9KF|16r12;
+ Kdown: con Spec|16r00;
+ Kview: con Spec|16r00;
+ Kpgdown: con P9KF|16r13;
+ Kins: con P9KF|16r14;
+ Kend: con P9KF|16r18;
+ Kalt: con P9KF|16r15;
+ Kshift: con P9KF|16r16;
+ Kctl: con P9KF|16r17;
+
+ case c {
+ Keyboard->LShift =>
+ return Kshift;
+ Keyboard->LCtrl =>
+ return Kctl;
+ Keyboard->LAlt =>
+ return Kalt;
+ Keyboard->Home =>
+ return Khome;
+ Keyboard->End =>
+ return Kend;
+ Keyboard->Up =>
+ return Kup;
+ Keyboard->Down =>
+ return Kdown;
+ Keyboard->Left =>
+ return Kleft;
+ Keyboard->Right =>
+ return Kright;
+ Keyboard->Pgup =>
+ return Kpgup;
+ Keyboard->Pgdown =>
+ return Kpgdown;
+ Keyboard->Ins =>
+ return Kins;
+
+ # function keys
+ KF|1 or
+ KF|2 or
+ KF|3 or
+ KF|4 or
+ KF|5 or
+ KF|6 or
+ KF|7 or
+ KF|8 or
+ KF|9 or
+ KF|10 or
+ KF|11 or
+ KF|12 =>
+ return (c - KF) + P9KF;
+ }
+ return c;
+}
+
+cwd(): string
+{
+ return sys->fd2path(sys->open(".", Sys->OREAD));
+}
+
+# from string.b, waiting for declaration to be uncommented.
+quotedc(argv: list of string, cl: string): string
+{
+ s := "";
+ while (argv != nil) {
+ arg := hd argv;
+ for (i := 0; i < len arg; i++) {
+ c := arg[i];
+ if (c == ' ' || c == '\t' || c == '\n' || c == '\'' || in(c, cl))
+ break;
+ }
+ if (i < len arg || arg == nil) {
+ s += "'" + arg[0:i];
+ for (; i < len arg; i++) {
+ if (arg[i] == '\'')
+ s[len s] = '\'';
+ s[len s] = arg[i];
+ }
+ s[len s] = '\'';
+ } else
+ s += arg;
+ if (tl argv != nil)
+ s[len s] = ' ';
+ argv = tl argv;
+ }
+ return s;
+}
+
+in(c: int, s: string): int
+{
+ n := len s;
+ if(n == 0)
+ return 0;
+ ans := 0;
+ negate := 0;
+ if(s[0] == '^') {
+ negate = 1;
+ s = s[1:];
+ n--;
+ }
+ for(i := 0; i < n; i++) {
+ if(s[i] == '-' && i > 0 && i < n-1) {
+ if(c >= s[i-1] && c <= s[i+1]) {
+ ans = 1;
+ break;
+ }
+ i++;
+ }
+ else
+ if(c == s[i]) {
+ ans = 1;
+ break;
+ }
+ }
+ if(negate)
+ ans = !ans;
+
+ # just to showcase labels
+skip:
+ return ans;
+}
+
+bglong(d: array of byte, i: int): int
+{
+ return int d[i] | (int d[i+1]<<8) | (int d[i+2]<<16) | (int d[i+3]<<24);
+}
diff --git a/tests/examplefiles/livescript-demo.ls b/tests/examplefiles/livescript-demo.ls
index 2ff68c63..03cbcc99 100644
--- a/tests/examplefiles/livescript-demo.ls
+++ b/tests/examplefiles/livescript-demo.ls
@@ -7,7 +7,9 @@ dashes-identifiers = ->
underscores_i$d = ->
/regexp1/
//regexp2//g
- 'strings' and "strings" and \strings
+ 'strings' and "strings" and \strings and \#$-"\'strings
+
+another-word-list = <[ more words ]>
[2 til 10]
|> map (* 2)
diff --git a/tests/examplefiles/main.cmake b/tests/examplefiles/main.cmake
index dac3da43..71dc3ce7 100644
--- a/tests/examplefiles/main.cmake
+++ b/tests/examplefiles/main.cmake
@@ -1,3 +1,5 @@
+CMAKE_MINIMUM_REQUIRED(VERSION 2.6 FATAL_ERROR)
+
SET( SOURCES back.c io.c main.c )
MESSAGE( ${SOURCES} ) # three arguments, prints "back.cio.cmain.c"
MESSAGE( "${SOURCES}" ) # one argument, prints "back.c;io.c;main.c"
diff --git a/tests/examplefiles/matlab_sample b/tests/examplefiles/matlab_sample
index 4f61afe8..bb00b517 100644
--- a/tests/examplefiles/matlab_sample
+++ b/tests/examplefiles/matlab_sample
@@ -28,3 +28,7 @@ y = exp(x);
{%
a block comment
%}
+
+function no_arg_func
+fprintf('%s\n', 'function with no args')
+end
diff --git a/tests/examplefiles/modula2_test_cases.def b/tests/examplefiles/modula2_test_cases.def
new file mode 100644
index 00000000..ce86a55b
--- /dev/null
+++ b/tests/examplefiles/modula2_test_cases.def
@@ -0,0 +1,354 @@
+(* Test Cases for Modula-2 Lexer *)
+
+(* Notes:
+ (1) Without dialect option nor embedded dialect tag, the lexer operates in
+ fallback mode, recognising the *combined* literals, punctuation symbols
+ and operators of all supported dialects, and the *combined* reserved
+ words and builtins of PIM Modula-2, ISO Modula-2 and Modula-2 R10.
+ (2) If multiple embedded dialect tags are present, the lexer will use the
+ first valid tag and ignore any subsequent dialect tags in the file.
+ (3) An embedded dialect tag overrides any command line dialect option. *)
+
+
+(* Testing command line dialect option *)
+
+(* for PIM Modula-2 : pygmentize -O full,dialect=m2pim ...
+ for ISO Modula-2 : pygmentize -O full,dialect=m2iso ...
+ for Modula-2 R10 : pygmentize -O full,dialect=m2r10 ...
+ for Objective Modula-2 : pygmentize -O full,dialect=objm2 ... *)
+
+(* for Aglet extensions : pygmentize -O full,dialect=m2iso+aglet ...
+ for GNU extensions : pygmentize -O full,dialect=m2pim+gm2 ...
+ for p1 extensions : pygmentize -O full,dialect=m2iso+p1 ...
+ for XDS extensions : pygmentize -O full,dialect=m2iso+xds ... *)
+
+
+(* Testing embedded dialect tags *)
+
+(* !m2pim*) (* <-- remove whitespace before ! for PIM Modula-2 *)
+(* !m2iso*) (* <-- remove whitespace before ! for ISO Modula-2 *)
+(* !m2r10*) (* <-- remove whitespace before ! for Modula-2 R10 *)
+(* !objm2*) (* <-- remove whitespace before ! for Objective Modula-2 *)
+
+(* !m2iso+aglet*) (* <-- remove whitespace before ! for Aglet extensions *)
+(* !m2pim+gm2*) (* <-- remove whitespace before ! for GNU extensions *)
+(* !m2iso+p1*) (* <-- remove whitespace before ! for p1 extensions *)
+(* !m2iso+xds*) (* <-- remove whitespace before ! for XDS extensions *)
+
+
+(* Dialect Indicating Names *)
+
+(* recognised names should be highlighted *)
+
+QUALIFIED (* PIM and ISO *)
+
+PACKEDSET (* ISO only *)
+
+ARGLIST (* M2 R10 and ObjM2 *)
+
+BYCOPY (* ObjM2 only *)
+
+BITSET8 (* Aglet, GNU and M2 R10 *)
+
+__FILE__ (* GNU only *)
+
+BCD (* p1 and M2 R10 *)
+
+SEQ (* XDS only *)
+
+
+(* Literal Tests *)
+
+(* recognised literals should be rendered as one unit
+ unrecognised literals should be rendered as error *)
+
+ch := 'a'; ch := "a"; (* all dialects *)
+ch := 0u20; unich := 0u2038 (* M2 R10 *)
+
+s := 'The cat said "meow!".';
+s := "It is eight O'clock.";
+
+
+n := 123; n = 1000000; (* all dialects *)
+n := 123; n = 1'000'000; (* M2 R10 *)
+
+n := 0b0110; n:= 0b0110'1100'0111; (* M2 R10 *)
+n := 0xFF00; n:= 0xDEAD'BEEF'0F00; (* M2 R10 *)
+
+r := 1.23; r := 1000000.000001; (* all dialects *)
+r := 1.23; r := 1'000'000.000'001; (* M2 R10 *)
+
+r := 1.234E6; r:= 1.234E-6; r := 1.234567E1000; (* PIM + ISO *)
+r := 1.234e6; r:= 1.234e-6; r := 1.234'567e1'000; (* M2 R10 *)
+
+ch := 0377C; n := 0377B; n := 07FF0H; (* ISO + PIM *)
+
+
+(* Non-Alphabetic Operator Tests *)
+
+(* supported operators should be rendered as one unit
+ unsupported operators should be rendered as errors *)
+
+a := b + c - d * e / f; (* all dialects *)
+
+SetDiff := A \ B; (* M2 R10 *)
+
+dotProduct := v1 *. v2; catArray := array1 +> array2; (* M2 R10 *)
+
+bool := a = b; bool := a > b; bool := a < b;
+bool := a # b; bool := a >= b; bool := a <= b;
+
+bool := a <> b; (* PIM + ISO *)
+
+bool := a == b; (* M2 R10 *)
+
+(*&*) IF a & b THEN ... END; (* PIM + ISO *)
+
+(*~*) IF ~ b THEN ... END; (* PIM + ISO *)
+
+(*::*) int := real :: INTEGER; (* M2 R10 *)
+
+(*++*) FOR i++ IN range DO ... END; (* M2 R10 *)
+(*--*) FOR i-- IN range DO ... END; (* M2 R10 *)
+
+(*^*) next := this^.next; (* all dialects *)
+(*@*) next := this@.next; (* ISO *)
+
+(*`*) str := `NSString alloc init; (* ObjM2 *)
+
+
+(* Punctuation Tests *)
+
+(* supported punctuation should be rendered as one unit
+ unsupported punctuation should be rendered as an error *)
+
+(*.*) Foo.Bar.Baz; (*..*) TYPE Sign = [-1..1] OF INTEGER;
+
+(*|:*) CASE foo OF | 1 : bar | 2 : bam | 3 : boo END;
+(*!:*) CASE foo OF 1 : bar ! 2 : bam ! 3 : boo END; (* ISO *)
+
+(*[]()*) array[n] := foo();
+
+(*{}*) CONST Bar = { 1, 2, 3 };
+
+(*?*) TPROPERTIES = isCollection, isIndexed | isRigid?; (* M2 R10 *)
+
+(*~*) CONST ~ isFoobar = Foo AND Bar; (* M2 R10 *)
+(*->*) isFoobar -> PROCEDURE [ABS]; (* M2 R10 *)
+
+(*<<>>*) GENLIB Foo FROM Template FOR Bar = <<ARRAY OF CHAR>> END; (* M2 R10 *)
+
+
+(* Single Line Comment Test *)
+
+(* should be rendered as comment if supported, as error if unsupported *)
+
+// This is a single line comment (M2 R10 + ObjM2)
+
+
+(* Pragma Delimiter Tests *)
+
+(* PIM style pragma should be rendered as pragma in PIM dialects,
+ as multiline comment in all other dialects. *)
+
+(*$INLINE*) (* PIM *)
+
+(* ISO style pragma should be rendered as error in PIM dialects,
+ as pragma in all other dialects. *)
+
+<*INLINE*> (* all other dialects *)
+
+
+(* Operator Substitution Test When in Algol mode *)
+
+IF foo # bar THEN ... END; (* # should be rendered as not equal symbol *)
+
+IF foo >= bar THEN ... END; (* >= should be rendered as not less symbol *)
+
+IF foo <= bar THEN ... END; (* <= should be rendered as not greater symbol *)
+
+IF foo == bar THEN ... END; (* == should be rendered as identity symbol *)
+
+dotProduct := v1 *. v2; (* *. should be rendered as dot product symbol *)
+
+
+(* Reserved Words and Builtins Test *)
+
+(* supported reserved words and builtins should be highlighted *)
+
+(* reserved words common to all dialects *)
+
+AND ARRAY BEGIN BY CASE CONST DEFINITION DIV DO ELSE ELSIF END EXIT FOR FROM
+IF IMPLEMENTATION IMPORT IN LOOP MOD MODULE NOT OF OR POINTER PROCEDURE
+RECORD REPEAT RETURN SET THEN TO TYPE UNTIL VAR WHILE
+
+(* builtins common to all dialects *)
+
+ABS BOOLEAN CARDINAL CHAR CHR FALSE INTEGER LONGINT LONGREAL
+MAX MIN NIL ODD ORD REAL TRUE
+
+(* pseudo builtins common to all dialects *)
+
+ADDRESS BYTE WORD ADR
+
+
+(* additional reserved words for PIM *)
+
+EXPORT QUALIFIED WITH
+
+(* additional builtins for PIM *)
+
+BITSET CAP DEC DISPOSE EXCL FLOAT HALT HIGH INC INCL NEW NIL PROC SIZE TRUNC VAL
+
+(* additional pseudo-builtins for PIM *)
+
+SYSTEM PROCESS TSIZE NEWPROCESS TRANSFER
+
+
+(* additional reserved words for ISO 10514-1 *)
+
+EXCEPT EXPORT FINALLY FORWARD PACKEDSET QUALIFIED REM RETRY WITH
+
+(* additional reserved words for ISO 10514-2 & ISO 10514-3 *)
+
+ABSTRACT AS CLASS GUARD INHERIT OVERRIDE READONLY REVEAL TRACED UNSAFEGUARDED
+
+(* additional builtins for ISO 10514-1 *)
+
+BITSET CAP CMPLX COMPLEX DEC DISPOSE EXCL FLOAT HALT HIGH IM INC INCL INT
+INTERRUPTIBLE LENGTH LFLOAT LONGCOMPLEX NEW PROC PROTECTION RE SIZE TRUNC
+UNINTERRUPTIBLE VAL
+
+(* additional builtins for ISO 10514-2 & ISO 10514-3 *)
+
+CREATE DESTROY EMPTY ISMEMBER SELF
+
+
+(* additional pseudo-builtins for ISO *)
+
+(* SYSTEM *)
+SYSTEM BITSPERLOC LOCSPERBYTE LOCSPERWORD LOC ADDADR SUBADR DIFADR MAKEADR
+ADR ROTATE SHIFT CAST TSIZE
+
+(* COROUTINES *)
+COROUTINES ATTACH COROUTINE CURRENT DETACH HANDLER INTERRUPTSOURCE IOTRANSFER
+IsATTACHED LISTEN NEWCOROUTINE PROT TRANSFER
+
+(* EXCEPTIONS *)
+EXCEPTIONS AllocateSource CurrentNumber ExceptionNumber ExceptionSource
+GetMessage IsCurrentSource IsExceptionalExecution RAISE
+
+(* TERMINATION *)
+TERMINATION IsTerminating HasHalted
+
+(* M2EXCEPTION *)
+M2EXCEPTION M2Exceptions M2Exception IsM2Exception indexException rangeException
+caseSelectException invalidLocation functionException wholeValueException
+wholeDivException realValueException realDivException complexValueException
+complexDivException protException sysException coException exException
+
+
+(* additional reserved words for M2 R10 *)
+
+ALIAS ARGLIST BLUEPRINT COPY GENLIB INDETERMINATE NEW NONE OPAQUE REFERENTIAL
+RELEASE RETAIN
+
+(* with symbolic assembler language extension *)
+ASM REG
+
+(* additional builtins for M2 R10 *)
+
+CARDINAL COUNT EMPTY EXISTS INSERT LENGTH LONGCARD OCTET PTR PRED READ READNEW
+REMOVE RETRIEVE SORT STORE SUBSET SUCC TLIMIT TMAX TMIN TRUE TSIZE UNICHAR
+WRITE WRITEF
+
+(* additional pseudo-builtins for M2 R10 *)
+
+(* TPROPERTIES *)
+TPROPERTIES PROPERTY LITERAL TPROPERTY TLITERAL TBUILTIN TDYN TREFC TNIL
+TBASE TPRECISION TMAXEXP TMINEXP
+
+(* CONVERSION *)
+CONVERSION TSXFSIZE SXF VAL
+
+(* UNSAFE *)
+UNSAFE CAST INTRINSIC AVAIL ADD SUB ADDC SUBC FETCHADD FETCHSUB SHL SHR ASHR
+ROTL ROTR ROTLC ROTRC BWNOT BWAND BWOR BWXOR BWNAND BWNOR SETBIT TESTBIT
+LSBIT MSBIT CSBITS BAIL HALT TODO FFI ADDR VARGLIST VARGC
+
+(* ATOMIC *)
+ATOMIC INTRINSIC AVAIL SWAP CAS INC DEC BWAND BWNAND BWOR BWXOR
+
+(* COMPILER *)
+COMPILER DEBUG MODNAME PROCNAME LINENUM DEFAULT HASH
+
+(* ASSEMBLER *)
+ASSEMBLER REGISTER SETREG GETREG CODE
+
+
+(* standard library ADT identifiers for M2 R10 *)
+
+(* rendered as builtins when dialect is set to Modula-2 R10,
+   this can be turned off with the option treat_stdlib_adts_as_builtins=off *)
+BCD LONGBCD BITSET SHORTBITSET LONGBITSET LONGLONGBITSET COMPLEX LONGCOMPLEX
+SHORTCARD LONGLONGCARD SHORTINT LONGLONGINT POSINT SHORTPOSINT LONGPOSINT
+LONGLONGPOSINT BITSET8 BITSET16 BITSET32 BITSET64 BITSET128 BS8 BS16 BS32
+BS64 BS128 CARDINAL8 CARDINAL16 CARDINAL32 CARDINAL64 CARDINAL128 CARD8
+CARD16 CARD32 CARD64 CARD128 INTEGER8 INTEGER16 INTEGER32 INTEGER64
+INTEGER128 INT8 INT16 INT32 INT64 INT128 STRING UNISTRING
+
+
+(* additional reserved words for ObjM2 *)
+
+(* Note: ObjM2 is a superset of M2 R10 *)
+
+BYCOPY BYREF CLASS CONTINUE CRITICAL INOUT METHOD ON OPTIONAL OUT PRIVATE
+PROTECTED PROTOCOL PUBLIC SUPER TRY
+
+(* additional builtins for ObjM2 *)
+
+OBJECT NO YES
+
+
+(* additional builtins for Aglet Extensions to ISO *)
+
+BITSET8 BITSET16 BITSET32 CARDINAL8 CARDINAL16 CARDINAL32 INTEGER8 INTEGER16
+INTEGER32
+
+
+(* additional reserved words for GNU Extensions to PIM *)
+
+ASM __ATTRIBUTE__ __BUILTIN__ __COLUMN__ __DATE__ __FILE__ __FUNCTION__
+__LINE__ __MODULE__ VOLATILE
+
+(* additional builtins for GNU Extensions to PIM *)
+
+BITSET8 BITSET16 BITSET32 CARDINAL8 CARDINAL16 CARDINAL32 CARDINAL64 COMPLEX32
+COMPLEX64 COMPLEX96 COMPLEX128 INTEGER8 INTEGER16 INTEGER32 INTEGER64 REAL8
+REAL16 REAL32 REAL96 REAL128 THROW
+
+
+(* additional pseudo-builtins for p1 Extensions to ISO *)
+
+BCD
+
+
+(* additional reserved words for XDS Extensions to ISO *)
+
+SEQ
+
+(* additional builtins for XDS Extensions to ISO *)
+
+ASH ASSERT DIFFADR_TYPE ENTIER INDEX LEN LONGCARD SHORTCARD SHORTINT
+
+(* additional pseudo-builtins for XDS Extensions to ISO *)
+
+(* SYSTEM *)
+PROCESS NEWPROCESS BOOL8 BOOL16 BOOL32 CARD8 CARD16 CARD32 INT8 INT16 INT32
+REF MOVE FILL GET PUT CC int unsigned size_t void
+
+(* COMPILER *)
+COMPILER OPTION EQUATION
+
+
+(* end of file *) \ No newline at end of file
diff --git a/tests/examplefiles/objc_example.m b/tests/examplefiles/objc_example.m
index 67b33022..f3f85f65 100644
--- a/tests/examplefiles/objc_example.m
+++ b/tests/examplefiles/objc_example.m
@@ -1,32 +1,179 @@
-#import "Somefile.h"
+// Test various types of includes
+#import <Foundation/Foundation.h>
+# import <AppKit/AppKit.h>
+#import "stdio.h"
+#\
+ import \
+ "stdlib.h"
+# /*line1*/ \
+import /* line 2 */ \
+"stdlib.h" // line 3
-@implementation ABC
+// Commented out code with preprocessor
+#if 0
+#define MY_NUMBER 3
+#endif
-- (id)a:(B)b {
- return 1;
+ #\
+ if 1
+#define TEST_NUMBER 3
+#endif
+
+// Empty preprocessor
+#
+
+// Class forward declaration
+@class MyClass;
+
+// Empty classes
+@interface EmptyClass
+@end
+@interface EmptyClass2
+{
+}
+@end
+@interface EmptyClass3 : EmptyClass2
+{
+}
+@end
+
+// Custom class inheriting from built-in
+@interface MyClass : NSObject
+{
+@public
+ NSString *myString;
+ __weak NSString *_weakString;
+@protected
+ NSTextField *_textField;
+@private
+ NSDate *privateDate;
}
+// Various property attributes
+@property(copy, readwrite, nonatomic) NSString *myString;
+@property(weak) NSString *weakString;
+@property(retain, strong, atomic) IBOutlet NSTextField *textField;
+
+// Class methods
++ (void)classMethod1:(NSString *)arg;
++ (void)classMethod2:(NSString *) arg; // Test space before arg
+
@end
-@implementation ABC
+typedef id B;
-- (void)xyz;
+#pragma mark MyMarker
+// MyClass.m
+// Class extension to declare private property
+@interface MyClass ()
+@property(retain) NSDate *privateDate;
+- (void)hiddenMethod;
@end
-NSDictionary *dictionary = [NSDictionary dictionaryWithObjectsAndKeys:
- @"quattuor", @"four", @"quinque", @"five", @"sex", @"six", nil];
+// Special category
+@interface MyClass (Special)
+@property(retain) NSDate *specialDate;
+@end
+
+@implementation MyClass
+@synthesize myString;
+@synthesize privateDate;
+
+- (id)a:(B)b {
+ /**
+ * C-style comment
+ */
+
+ // Selector keywords/types
+ SEL someMethod = @selector(hiddenMethod);
+
+ // Boolean types
+ Boolean b1 = FALSE;
+ BOOL b2 = NO;
+ bool b3 = true;
+ /**
+ * Number literals
+ */
+ // Int Literal
+ NSNumber *n1 = @( 1 );
+ // Method call
+ NSNumber *n2 = @( [b length] );
+ // Define variable
+ NSNumber *n3 = @( TEST_NUMBER );
+    // Arithmetic expression
+ NSNumber *n4 = @(1 + 2);
+ // From variable
+ int myInt = 5;
+ NSNumber *n5 = @(myInt);
+    // Nested expression
+ NSNumber *n6 = @(1 + (2 + 6.0));
+ // Bool literal
+ NSNumber *n7 = @NO;
+ // Bool expression
+ NSNumber *n8 = @(YES);
+ // Character
+ NSNumber *n9 = @'a';
+ // int
+ NSNumber *n10 = @123;
+ // unsigned
+ NSNumber *n11 = @1234U;
+ // long
+ NSNumber *n12 = @1234567890L;
+ // float
+ NSNumber *n13 = @3.14F;
+ // double
+    NSNumber *n14 = @3.14;
+
+ // Array literals
+ NSArray *arr = @[ @"1", @"2" ];
+ arr = @[ @[ @"1", @"2" ], [arr lastObject] ];
+ [arr lastObject];
+ [@[ @"1", @"2" ] lastObject];
+
+ // Dictionary literals
+ NSDictionary *d = @{ @"key": @"value" };
+ [[d allKeys] lastObject];
+ [[@{ @"key": @"value" } allKeys] lastObject];
+ d = @{ @"key": @{ @"key": @"value" } };
-NSString *key;
-for (key in dictionary) {
- NSLog(@"English: %@, Latin: %@", key, [dictionary valueForKey:key]);
+ [self hiddenMethod];
+ [b length];
+ [privateDate class];
+
+ NSDictionary *dictionary = [NSDictionary dictionaryWithObjectsAndKeys:
+ @"1", @"one", @"2", @"two", @"3", @"three", nil];
+
+ NSString *key;
+ for (key in dictionary) {
+ NSLog(@"Number: %@, Word: %@", key, [dictionary valueForKey:key]);
+ }
+
+ // Blocks
+ int (^myBlock)(int arg1, int arg2);
+ NSString *(^myName)(NSString *) = ^(NSString *value) {
+ return value;
+ };
+
+ return nil;
}
-// Literals
-NSArray *a = @[ @"1", @"2" ];
+- (void)hiddenMethod {
+ // Synchronized block
+ @synchronized(self) {
+ [myString retain];
+ [myString release];
+ }
+}
-NSDictionary *d = @{ @"key": @"value" };
++ (void)classMethod1:(NSString *)arg {}
++ (void)classMethod2:(NSString *) arg
+{
+ // Autorelease pool block
+ @autoreleasepool {
+ NSLog(@"Hello, World!");
+ }
+}
-NSNumber *n1 = @( 1 );
-NSNumber *n2 = @( [a length] );
+@end
diff --git a/tests/examplefiles/objc_example2.m b/tests/examplefiles/objc_example2.m
deleted file mode 100644
index 8cd9b060..00000000
--- a/tests/examplefiles/objc_example2.m
+++ /dev/null
@@ -1,24 +0,0 @@
-// MyClass.h
-@interface MyClass : NSObject
-{
- NSString *value;
- NSTextField *textField;
-@private
- NSDate *lastModifiedDate;
-}
-@property(copy, readwrite) NSString *value;
-@property(retain) IBOutlet NSTextField *textField;
-@end
-
-// MyClass.m
-// Class extension to declare private property
-@interface MyClass ()
-@property(retain) NSDate *lastModifiedDate;
-@end
-
-@implementation MyClass
-@synthesize value;
-@synthesize textField;
-@synthesize lastModifiedDate;
-// implementation continues
-@end
diff --git a/tests/examplefiles/example.p b/tests/examplefiles/openedge_example
index e8c17e33..e8c17e33 100644
--- a/tests/examplefiles/example.p
+++ b/tests/examplefiles/openedge_example
diff --git a/tests/examplefiles/pawn_example b/tests/examplefiles/pawn_example
new file mode 100644
index 00000000..ee2ecca2
--- /dev/null
+++ b/tests/examplefiles/pawn_example
@@ -0,0 +1,25 @@
+{include.i}
+{nested.i {include.i}}
+
+&SCOPED-DEFINE MY_NAME "Abe"
+
+DEF VAR i AS INT NO-UNDO.
+i = 0xABE + 1337 / (1 * 1.00)
+
+def var clowercasetest as char no-undo.
+DEF VAR vardashtest AS DATETIME-TZ NO-UNDO.
+
+DEFINE TEMP-TABLE ttNames NO-UNDO
+ FIELD cName AS CHAR
+ INDEX IXPK_ttNames IS PRIMARY UNIQUE cName.
+
+/* One-line comment */
+/* Two-line
+ Comment */
+
+CREATE ttNames.
+ASSIGN ttNames.cName = {&MY_NAME}.
+
+FOR EACH ttNames:
+ MESSAGE "Hello, " + ttNames.cName + '!' VIEW-AS ALERT-BOX.
+END.
diff --git a/tests/examplefiles/pycon_test.pycon b/tests/examplefiles/pycon_test.pycon
index ff702864..9c4fc3d3 100644
--- a/tests/examplefiles/pycon_test.pycon
+++ b/tests/examplefiles/pycon_test.pycon
@@ -9,6 +9,9 @@ KeyboardInterrupt
>>> 1/0
Traceback (most recent call last):
-...
+ ...
ZeroDivisionError
+>>> 1/0 # this used to swallow the traceback
+Traceback (most recent call last):
+ ...
diff --git a/tests/examplefiles/qbasic_example b/tests/examplefiles/qbasic_example
new file mode 100644
index 00000000..27041af6
--- /dev/null
+++ b/tests/examplefiles/qbasic_example
@@ -0,0 +1,2 @@
+10 print RIGHT$("hi there", 5)
+20 goto 10
diff --git a/tests/examplefiles/r6rs-comments.scm b/tests/examplefiles/r6rs-comments.scm
new file mode 100644
index 00000000..cd5c3636
--- /dev/null
+++ b/tests/examplefiles/r6rs-comments.scm
@@ -0,0 +1,23 @@
+#!r6rs
+
+#|
+
+ The FACT procedure computes the factorial
+
+ of a non-negative integer.
+
+|#
+
+(define fact
+
+ (lambda (n)
+
+ ;; base case
+
+ (if (= n 0)
+
+ #;(= n 1)
+
+ 1 ; identity of *
+
+ (* n (fact (- n 1))))))
diff --git a/tests/examplefiles/resourcebundle_demo b/tests/examplefiles/resourcebundle_demo
new file mode 100644
index 00000000..e1daa56a
--- /dev/null
+++ b/tests/examplefiles/resourcebundle_demo
@@ -0,0 +1,9 @@
+root:table {
+ usage:string { "Usage: genrb [Options] files" }
+ version:int { 122 }
+ errorcodes:array {
+ :string { "Invalid argument" }
+ :string { "File not found" }
+ :string { "\x00 \r \t \n \u1234" }
+ }
+}
diff --git a/tests/examplefiles/robotframework.txt b/tests/examplefiles/robotframework_test.txt
index 63ba63e6..0d8179c0 100644
--- a/tests/examplefiles/robotframework.txt
+++ b/tests/examplefiles/robotframework_test.txt
@@ -6,6 +6,7 @@ Test Setup Keyword argument argument with ${VARIABLE}
*** Variables ***
${VARIABLE} Variable value
@{LIST} List variable here
+&{DICT} Key1=Value1 Key2=Value2
*** Test Cases ***
Keyword-driven example
diff --git a/tests/examplefiles/rql-queries.rql b/tests/examplefiles/rql-queries.rql
new file mode 100644
index 00000000..1d86df3c
--- /dev/null
+++ b/tests/examplefiles/rql-queries.rql
@@ -0,0 +1,34 @@
+Any N, N2 where N is Note, N2 is Note, N a_faire_par P1, P1 nom 'john', N2 a_faire_par P2, P2 nom 'jane' ;
+DISTINCT Any N, D, C, T, A ORDERBY D DESC LIMIT 40 where N is Note, N diem D, W is Workcase, W concerned_by N, N cost C, N text T, N author A, N diem <= today
+Bookmark B WHERE B owned_by G, G eid 5;
+Any X WHERE E eid 22762, NOT E is_in X, X modification_date D ORDERBY D DESC LIMIT 41;
+Any A, R, SUB ORDERBY R WHERE A is "Workcase", S is Division, S concerned_by A, A subject SUB, S eid 85, A ref R;
+Any D, T, L WHERE D is Document, A concerned_by D,A eid 14533, D title T, D location L;
+Any N,A,B,C,D ORDERBY A DESC WHERE N is Note, W concerned_by N, W eid 14533, N diem A,N author B,N text C,N cost D;
+Any X ORDERBY D DESC LIMIT 41 WHERE E eid 18134, NOT E concerned_by X, X modification_date D
+DISTINCT Any N, D, C, T, A ORDERBY D ASC LIMIT 40 WHERE N is Note, N diem D, P is Person, N to_be_contacted_by G, N cost C, N text T, N author A, G login "john";
+INSERT Person X: X surname "Doe", X firstname "John";
+Workcase W where W ref "ABCD12";
+Workcase W where W ref LIKE "AB%";
+Any X WHERE X X eid 53
+Any X WHERE X Document X occurence_of F, F class C, C name 'Comics' X owned_by U, U login 'syt' X available true
+Person P WHERE P work_for P, S name 'Acme', P interested_by T, T name 'training'
+Note N WHERE N written_on D, D day> (today -10), N written_by P, P name 'joe' or P name 'jack'
+Person P WHERE (P interested_by T, T name 'training') or (P city 'Paris')
+Any N, P WHERE X is Person, X name N, X first_name P
+String N, P WHERE X is Person, X name N, X first_name P
+INSERT Person X: X name 'widget'
+INSERT Person X, Person Y: X name 'foo', Y name 'nice', X friend Y
+INSERT Person X: X name 'foo', X friend Y WHERE name 'nice'
+SET X name 'bar', X first_name 'original' where X is Person X name 'foo'
+SET X know Y WHERE X friend Y
+DELETE Person X WHERE X name 'foo'
+DELETE X friend Y WHERE X is Person, X name 'foo'
+Any X WHERE X name LIKE '%lt'
+Any X WHERE X name IN ( 'joe', 'jack', 'william', 'averell')
+Any X, V WHERE X concerns P, P eid 42, X corrected_in V?
+Any C, P WHERE C is Card, P? documented_by C
+Point P where P abs X, P ord Y, P value X+Y
+Document X where X class C, C name 'Cartoon', X owned_by U, U login 'joe', X available true
+(Any X WHERE X is Document) UNION (Any X WHERE X is File)
+Any A,B WHERE A creation_date B WITH A BEING (Any X WHERE X is Document) UNION (Any X WHERE X is File)
diff --git a/tests/examplefiles/rust_example.rs b/tests/examplefiles/rust_example.rs
deleted file mode 100644
index 1c0a70c3..00000000
--- a/tests/examplefiles/rust_example.rs
+++ /dev/null
@@ -1,233 +0,0 @@
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// based on:
-// http://shootout.alioth.debian.org/u32/benchmark.php?test=nbody&lang=java
-
-extern mod std;
-
-use core::os;
-
-// Using sqrt from the standard library is way slower than using libc
-// directly even though std just calls libc, I guess it must be
-// because the the indirection through another dynamic linker
-// stub. Kind of shocking. Might be able to make it faster still with
-// an llvm intrinsic.
-#[nolink]
-extern mod libc {
- #[legacy_exports];
- fn sqrt(n: float) -> float;
-}
-
-fn main() {
- let args = os::args();
- let args = if os::getenv(~"RUST_BENCH").is_some() {
- ~[~"", ~"4000000"]
- } else if args.len() <= 1u {
- ~[~"", ~"100000"]
- } else {
- args
- };
- let n = int::from_str(args[1]).get();
- let mut bodies: ~[Body::props] = NBodySystem::make();
- io::println(fmt!("%f", NBodySystem::energy(bodies)));
- let mut i = 0;
- while i < n {
- NBodySystem::advance(bodies, 0.01);
- i += 1;
- }
- io::println(fmt!("%f", NBodySystem::energy(bodies)));
-}
-
-mod NBodySystem {
- use Body;
-
- pub fn make() -> ~[Body::props] {
- let mut bodies: ~[Body::props] =
- ~[Body::sun(),
- Body::jupiter(),
- Body::saturn(),
- Body::uranus(),
- Body::neptune()];
-
- let mut px = 0.0;
- let mut py = 0.0;
- let mut pz = 0.0;
-
- let mut i = 0;
- while i < 5 {
- px += bodies[i].vx * bodies[i].mass;
- py += bodies[i].vy * bodies[i].mass;
- pz += bodies[i].vz * bodies[i].mass;
-
- i += 1;
- }
-
- // side-effecting
- Body::offset_momentum(&mut bodies[0], px, py, pz);
-
- return bodies;
- }
-
- pub fn advance(bodies: &mut [Body::props], dt: float) {
- let mut i = 0;
- while i < 5 {
- let mut j = i + 1;
- while j < 5 {
- advance_one(&mut bodies[i],
- &mut bodies[j], dt);
- j += 1;
- }
-
- i += 1;
- }
-
- i = 0;
- while i < 5 {
- move_(&mut bodies[i], dt);
- i += 1;
- }
- }
-
- pub fn advance_one(bi: &mut Body::props,
- bj: &mut Body::props,
- dt: float) unsafe {
- let dx = bi.x - bj.x;
- let dy = bi.y - bj.y;
- let dz = bi.z - bj.z;
-
- let dSquared = dx * dx + dy * dy + dz * dz;
-
- let distance = ::libc::sqrt(dSquared);
- let mag = dt / (dSquared * distance);
-
- bi.vx -= dx * bj.mass * mag;
- bi.vy -= dy * bj.mass * mag;
- bi.vz -= dz * bj.mass * mag;
-
- bj.vx += dx * bi.mass * mag;
- bj.vy += dy * bi.mass * mag;
- bj.vz += dz * bi.mass * mag;
- }
-
- pub fn move_(b: &mut Body::props, dt: float) {
- b.x += dt * b.vx;
- b.y += dt * b.vy;
- b.z += dt * b.vz;
- }
-
- pub fn energy(bodies: &[Body::props]) -> float unsafe {
- let mut dx;
- let mut dy;
- let mut dz;
- let mut distance;
- let mut e = 0.0;
-
- let mut i = 0;
- while i < 5 {
- e +=
- 0.5 * bodies[i].mass *
- (bodies[i].vx * bodies[i].vx + bodies[i].vy * bodies[i].vy
- + bodies[i].vz * bodies[i].vz);
-
- let mut j = i + 1;
- while j < 5 {
- dx = bodies[i].x - bodies[j].x;
- dy = bodies[i].y - bodies[j].y;
- dz = bodies[i].z - bodies[j].z;
-
- distance = ::libc::sqrt(dx * dx + dy * dy + dz * dz);
- e -= bodies[i].mass * bodies[j].mass / distance;
-
- j += 1;
- }
-
- i += 1;
- }
- return e;
-
- }
-}
-
-mod Body {
- use Body;
-
- pub const PI: float = 3.141592653589793;
- pub const SOLAR_MASS: float = 39.478417604357432;
- // was 4 * PI * PI originally
- pub const DAYS_PER_YEAR: float = 365.24;
-
- pub type props =
- {mut x: float,
- mut y: float,
- mut z: float,
- mut vx: float,
- mut vy: float,
- mut vz: float,
- mass: float};
-
- pub fn jupiter() -> Body::props {
- return {mut x: 4.84143144246472090e+00,
- mut y: -1.16032004402742839e+00,
- mut z: -1.03622044471123109e-01,
- mut vx: 1.66007664274403694e-03 * DAYS_PER_YEAR,
- mut vy: 7.69901118419740425e-03 * DAYS_PER_YEAR,
- mut vz: -6.90460016972063023e-05 * DAYS_PER_YEAR,
- mass: 9.54791938424326609e-04 * SOLAR_MASS};
- }
-
- pub fn saturn() -> Body::props {
- return {mut x: 8.34336671824457987e+00,
- mut y: 4.12479856412430479e+00,
- mut z: -4.03523417114321381e-01,
- mut vx: -2.76742510726862411e-03 * DAYS_PER_YEAR,
- mut vy: 4.99852801234917238e-03 * DAYS_PER_YEAR,
- mut vz: 2.30417297573763929e-05 * DAYS_PER_YEAR,
- mass: 2.85885980666130812e-04 * SOLAR_MASS};
- }
-
- pub fn uranus() -> Body::props {
- return {mut x: 1.28943695621391310e+01,
- mut y: -1.51111514016986312e+01,
- mut z: -2.23307578892655734e-01,
- mut vx: 2.96460137564761618e-03 * DAYS_PER_YEAR,
- mut vy: 2.37847173959480950e-03 * DAYS_PER_YEAR,
- mut vz: -2.96589568540237556e-05 * DAYS_PER_YEAR,
- mass: 4.36624404335156298e-05 * SOLAR_MASS};
- }
-
- pub fn neptune() -> Body::props {
- return {mut x: 1.53796971148509165e+01,
- mut y: -2.59193146099879641e+01,
- mut z: 1.79258772950371181e-01,
- mut vx: 2.68067772490389322e-03 * DAYS_PER_YEAR,
- mut vy: 1.62824170038242295e-03 * DAYS_PER_YEAR,
- mut vz: -9.51592254519715870e-05 * DAYS_PER_YEAR,
- mass: 5.15138902046611451e-05 * SOLAR_MASS};
- }
-
- pub fn sun() -> Body::props {
- return {mut x: 0.0,
- mut y: 0.0,
- mut z: 0.0,
- mut vx: 0.0,
- mut vy: 0.0,
- mut vz: 0.0,
- mass: SOLAR_MASS};
- }
-
- pub fn offset_momentum(props: &mut Body::props,
- px: float, py: float, pz: float) {
- props.vx = -px / SOLAR_MASS;
- props.vy = -py / SOLAR_MASS;
- props.vz = -pz / SOLAR_MASS;
- }
-
-}
diff --git a/tests/examplefiles/scope.cirru b/tests/examplefiles/scope.cirru
new file mode 100644
index 00000000..d3bd8f16
--- /dev/null
+++ b/tests/examplefiles/scope.cirru
@@ -0,0 +1,211 @@
+
+-- demo
+
+define a (read cd) $ if (> a cd)
+ print demo
+ print "not demo"
+
+say $ print a $ save $ b $ x $ c 8
+
+print fun
+
+-- test on folding
+
+a $
+
+b $ c
+
+d $ e $ f
+
+g $ h $ i j $ k $
+
+-- test on comma
+
+print (, a)
+ a
+ , b
+ , c (, d)
+
+-- test on HTML
+
+doctype
+
+html
+ head
+ title $ = Cirru
+ script (:defer) $ :src build/build.js
+ link (:rel stylesheet) $ :href css/page.css
+ link (:rel icon)
+ :href http://logo.cirru.org/cirru-32x32.png?v=3
+ body
+ textarea.demo.source $ :placeholder "Source Code"
+ textarea.demo.target $ :placeholder "Compiled Data"
+ @insert ../html/ga.html
+
+-- test on indentation
+
+a $ b $ c
+
+e f
+ (g)
+ h
+
+-- test on parentheses
+
+3 4 (1) 4
+
+((((1))))
+
+x
+
+-- test on quotes
+
+a b c d
+
+"a b c d"
+
+"a b \" c d"
+
+"a b" "c d"
+
+-- test on unfolding
+
+set
+ add 1 $
+ , x y
+ add 5 $
+ add 2
+
+-- test on HTML attributes
+
+div
+ div
+ :class a
+ div
+ :class a b c d
+
+ div
+ :class a (@ b) (@ c) d
+
+ div
+ :class a
+ @if (@ b)
+ div b
+ div c
+ div
+ :class a
+ @if (@ b) b c
+
+-- test on helpers
+
+@if (@call a b) (div) (span)
+
+@each members
+ div (@ name)
+
+@each a
+ div (@ b)
+ @each c
+ div (@ d)
+
+-- test on HTML structure
+
+@rich more
+ #demo-more-box
+ #demo-more
+ :data-lang-text demo-more
+ #demo-more-list
+ @each room
+ .demo-more-room
+ span.demo-name
+ @ topic
+ span.demo-join
+ :data-lang-text demo-join
+ :data-id (@ id)
+
+-- test on bool
+
+print #true
+print #false
+print #yes
+print #no
+print #t
+print #f
+
+-- test on Cirru js
+
+set a 1
+set a (= "This is a string")
+set b #t
+
+-- this is comment
+
+number 1.4
+string x
+regex ^\s$
+regex "^\\s-\"$"
+sentence this is a string
+
+array 1 2 3 (= nothing) #t (= #t)
+
+set c (array 1 (= nothing))
+
+set d $ object (a (= google))
+ b (= reader)
+ c 1
+ d $ array 1 2 (= string)
+
+1 c
+-1 c
+
+:b d
+.log console a 2
+.log console
+
+set demo $ object
+ call $ \ x (.log console x) (. this call)
+. demo (.call 1) (.call 4)
+
+=.x d 3
+
+set d null
+
+new Array 1 2 3
+
+set x (:length c)
+set str (= str)
+set c (.toUpperCase str)
+
+\ x (+ x 1)
+\ (x y) (+ x y)
+\ x (set aa 1) (+ aa x)
+
+set f (\ x (+ x 1))
+
++ a 1 2
++= a 1
+
+> 1 2 3
+
+if (> 2 1) (+ a 1)
+else 2
+
+if (> a 2)
+ .log console (= "large")
+elseif (> a 1)
+ .log console (= "still good")
+else
+ .log console (= "so so")
+
+set a $ if (> 2 1) #t #f
+
+switch a
+ 1 (.log console 1)
+ 2 (.log console 2)
+ else (.log console (= "something else"))
+
+set a $ array 2 +3 -4
+for (a x i) (.log console x i)
+
+set a 0
+while (< a 10) (+= a 1) (.log console a)
diff --git a/tests/examplefiles/simple.md b/tests/examplefiles/simple.croc
index 8f12771a..8f12771a 100644
--- a/tests/examplefiles/simple.md
+++ b/tests/examplefiles/simple.croc
diff --git a/tests/examplefiles/sparql.rq b/tests/examplefiles/sparql.rq
new file mode 100644
index 00000000..caedfd14
--- /dev/null
+++ b/tests/examplefiles/sparql.rq
@@ -0,0 +1,23 @@
+# This is a test SPARQL query
+
+PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+PREFIX ex: <http://example.org/>
+PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+PREFIX dcterms: <http://purl.org/dc/terms/>
+
+SELECT ?person (COUNT(?nick) AS ?nickCount) {
+ ?person foaf:nick ?nick ;
+ foaf:lastName "Smith" ;
+ foaf:age "21"^^xsd:int ;
+ ex:title 'Mr' ; # single-quoted string
+ ex:height 1.80 ; # float
+ ex:distanceToSun +1.4e8 ; # float with exponent
+ ex:ownsACat true ;
+ dcterms:description "Someone with a cat called \"cat\"."@en .
+ OPTIONAL { ?person foaf:isPrimaryTopicOf ?page }
+ OPTIONAL { ?person foaf:name ?name
+ { ?person foaf:depiction ?img }
+ UNION
+ { ?person foaf:firstName ?firstN } }
+ FILTER ( bound(?page) || bound(?img) || bound(?firstN) )
+} GROUP BY ?person ORDER BY ?img
diff --git a/tests/examplefiles/subr.el b/tests/examplefiles/subr.el
new file mode 100644
index 00000000..deadca6e
--- /dev/null
+++ b/tests/examplefiles/subr.el
@@ -0,0 +1,4868 @@
+;;; subr.el --- basic lisp subroutines for Emacs -*- coding: utf-8; lexical-binding:t -*-
+
+;; Copyright (C) 1985-1986, 1992, 1994-1995, 1999-2015 Free Software
+;; Foundation, Inc.
+
+;; Maintainer: emacs-devel@gnu.org
+;; Keywords: internal
+;; Package: emacs
+
+;; This file is part of GNU Emacs.
+
+;; GNU Emacs is free software: you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation, either version 3 of the License, or
+;; (at your option) any later version.
+
+;; GNU Emacs is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+;; GNU General Public License for more details.
+
+;; You should have received a copy of the GNU General Public License
+;; along with GNU Emacs. If not, see <http://www.gnu.org/licenses/>.
+
+;;; Commentary:
+
+;;; Code:
+
+;; Beware: while this file has tag `utf-8', before it's compiled, it gets
+;; loaded as "raw-text", so non-ASCII chars won't work right during bootstrap.
+
+(defmacro declare-function (_fn _file &optional _arglist _fileonly)
+ "Tell the byte-compiler that function FN is defined, in FILE.
+Optional ARGLIST is the argument list used by the function.
+The FILE argument is not used by the byte-compiler, but by the
+`check-declare' package, which checks that FILE contains a
+definition for FN. ARGLIST is used by both the byte-compiler
+and `check-declare' to check for consistency.
+
+FILE can be either a Lisp file (in which case the \".el\"
+extension is optional), or a C file. C files are expanded
+relative to the Emacs \"src/\" directory. Lisp files are
+searched for using `locate-library', and if that fails they are
+expanded relative to the location of the file containing the
+declaration. A FILE with an \"ext:\" prefix is an external file.
+`check-declare' will check such files if they are found, and skip
+them without error if they are not.
+
+FILEONLY non-nil means that `check-declare' will only check that
+FILE exists, not that it defines FN. This is intended for
+function-definitions that `check-declare' does not recognize, e.g.
+`defstruct'.
+
+To specify a value for FILEONLY without passing an argument list,
+set ARGLIST to t. This is necessary because nil means an
+empty argument list, rather than an unspecified one.
+
+Note that for the purposes of `check-declare', this statement
+must be the first non-whitespace on a line.
+
+For more information, see Info node `(elisp)Declaring Functions'."
+ ;; Does nothing - byte-compile-declare-function does the work.
+ nil)
+
+
+;;;; Basic Lisp macros.
+
+(defalias 'not 'null)
+
+(defmacro noreturn (form)
+ "Evaluate FORM, expecting it not to return.
+If FORM does return, signal an error."
+ (declare (debug t))
+ `(prog1 ,form
+ (error "Form marked with `noreturn' did return")))
+
+(defmacro 1value (form)
+ "Evaluate FORM, expecting a constant return value.
+This is the global do-nothing version. There is also `testcover-1value'
+that complains if FORM ever does return differing values."
+ (declare (debug t))
+ form)
+
+(defmacro def-edebug-spec (symbol spec)
+ "Set the `edebug-form-spec' property of SYMBOL according to SPEC.
+Both SYMBOL and SPEC are unevaluated. The SPEC can be:
+0 (instrument no arguments); t (instrument all arguments);
+a symbol (naming a function with an Edebug specification); or a list.
+The elements of the list describe the argument types; see
+Info node `(elisp)Specification List' for details."
+ `(put (quote ,symbol) 'edebug-form-spec (quote ,spec)))
+
+(defmacro lambda (&rest cdr)
+ "Return a lambda expression.
+A call of the form (lambda ARGS DOCSTRING INTERACTIVE BODY) is
+self-quoting; the result of evaluating the lambda expression is the
+expression itself. The lambda expression may then be treated as a
+function, i.e., stored as the function value of a symbol, passed to
+`funcall' or `mapcar', etc.
+
+ARGS should take the same form as an argument list for a `defun'.
+DOCSTRING is an optional documentation string.
+ If present, it should describe how to call the function.
+ But documentation strings are usually not useful in nameless functions.
+INTERACTIVE should be a call to the function `interactive', which see.
+It may also be omitted.
+BODY should be a list of Lisp expressions.
+
+\(fn ARGS [DOCSTRING] [INTERACTIVE] BODY)"
+ (declare (doc-string 2) (indent defun)
+ (debug (&define lambda-list
+ [&optional stringp]
+ [&optional ("interactive" interactive)]
+ def-body)))
+ ;; Note that this definition should not use backquotes; subr.el should not
+ ;; depend on backquote.el.
+ (list 'function (cons 'lambda cdr)))
+
+(defmacro setq-local (var val)
+ "Set variable VAR to value VAL in current buffer."
+ ;; Can't use backquote here, it's too early in the bootstrap.
+ (list 'set (list 'make-local-variable (list 'quote var)) val))
+
+(defmacro defvar-local (var val &optional docstring)
+ "Define VAR as a buffer-local variable with default value VAL.
+Like `defvar' but additionally marks the variable as being automatically
+buffer-local wherever it is set."
+ (declare (debug defvar) (doc-string 3))
+ ;; Can't use backquote here, it's too early in the bootstrap.
+ (list 'progn (list 'defvar var val docstring)
+ (list 'make-variable-buffer-local (list 'quote var))))
+
+(defun apply-partially (fun &rest args)
+ "Return a function that is a partial application of FUN to ARGS.
+ARGS is a list of the first N arguments to pass to FUN.
+The result is a new function which does the same as FUN, except that
+the first N arguments are fixed at the values with which this function
+was called."
+ (lambda (&rest args2)
+ (apply fun (append args args2))))
+
+(defmacro push (newelt place)
+ "Add NEWELT to the list stored in the generalized variable PLACE.
+This is morally equivalent to (setf PLACE (cons NEWELT PLACE)),
+except that PLACE is only evaluated once (after NEWELT)."
+ (declare (debug (form gv-place)))
+ (if (symbolp place)
+ ;; Important special case, to avoid triggering GV too early in
+ ;; the bootstrap.
+ (list 'setq place
+ (list 'cons newelt place))
+ (require 'macroexp)
+ (macroexp-let2 macroexp-copyable-p v newelt
+ (gv-letplace (getter setter) place
+ (funcall setter `(cons ,v ,getter))))))
+
+(defmacro pop (place)
+ "Return the first element of PLACE's value, and remove it from the list.
+PLACE must be a generalized variable whose value is a list.
+If the value is nil, `pop' returns nil but does not actually
+change the list."
+ (declare (debug (gv-place)))
+ ;; We use `car-safe' here instead of `car' because the behavior is the same
+ ;; (if it's not a cons cell, the `cdr' would have signaled an error already),
+ ;; but `car-safe' is total, so the byte-compiler can safely remove it if the
+ ;; result is not used.
+ `(car-safe
+ ,(if (symbolp place)
+ ;; So we can use `pop' in the bootstrap before `gv' can be used.
+ (list 'prog1 place (list 'setq place (list 'cdr place)))
+ (gv-letplace (getter setter) place
+ (macroexp-let2 macroexp-copyable-p x getter
+ `(prog1 ,x ,(funcall setter `(cdr ,x))))))))
+
+(defmacro when (cond &rest body)
+ "If COND yields non-nil, do BODY, else return nil.
+When COND yields non-nil, eval BODY forms sequentially and return
+value of last one, or nil if there are none.
+
+\(fn COND BODY...)"
+ (declare (indent 1) (debug t))
+ (list 'if cond (cons 'progn body)))
+
+(defmacro unless (cond &rest body)
+ "If COND yields nil, do BODY, else return nil.
+When COND yields nil, eval BODY forms sequentially and return
+value of last one, or nil if there are none.
+
+\(fn COND BODY...)"
+ (declare (indent 1) (debug t))
+ (cons 'if (cons cond (cons nil body))))
+
+(defmacro dolist (spec &rest body)
+ "Loop over a list.
+Evaluate BODY with VAR bound to each car from LIST, in turn.
+Then evaluate RESULT to get return value, default nil.
+
+\(fn (VAR LIST [RESULT]) BODY...)"
+ (declare (indent 1) (debug ((symbolp form &optional form) body)))
+ ;; It would be cleaner to create an uninterned symbol,
+ ;; but that uses a lot more space when many functions in many files
+ ;; use dolist.
+ ;; FIXME: This cost disappears in byte-compiled lexical-binding files.
+ (let ((temp '--dolist-tail--))
+ ;; This is not a reliable test, but it does not matter because both
+ ;; semantics are acceptable, tho one is slightly faster with dynamic
+ ;; scoping and the other is slightly faster (and has cleaner semantics)
+ ;; with lexical scoping.
+ (if lexical-binding
+ `(let ((,temp ,(nth 1 spec)))
+ (while ,temp
+ (let ((,(car spec) (car ,temp)))
+ ,@body
+ (setq ,temp (cdr ,temp))))
+ ,@(cdr (cdr spec)))
+ `(let ((,temp ,(nth 1 spec))
+ ,(car spec))
+ (while ,temp
+ (setq ,(car spec) (car ,temp))
+ ,@body
+ (setq ,temp (cdr ,temp)))
+ ,@(if (cdr (cdr spec))
+ `((setq ,(car spec) nil) ,@(cdr (cdr spec))))))))
+
+(defmacro dotimes (spec &rest body)
+ "Loop a certain number of times.
+Evaluate BODY with VAR bound to successive integers running from 0,
+inclusive, to COUNT, exclusive. Then evaluate RESULT to get
+the return value (nil if RESULT is omitted).
+
+\(fn (VAR COUNT [RESULT]) BODY...)"
+ (declare (indent 1) (debug dolist))
+ ;; It would be cleaner to create an uninterned symbol,
+ ;; but that uses a lot more space when many functions in many files
+ ;; use dotimes.
+ ;; FIXME: This cost disappears in byte-compiled lexical-binding files.
+ (let ((temp '--dotimes-limit--)
+ (start 0)
+ (end (nth 1 spec)))
+ ;; This is not a reliable test, but it does not matter because both
+ ;; semantics are acceptable, tho one is slightly faster with dynamic
+ ;; scoping and the other has cleaner semantics.
+ (if lexical-binding
+ (let ((counter '--dotimes-counter--))
+ `(let ((,temp ,end)
+ (,counter ,start))
+ (while (< ,counter ,temp)
+ (let ((,(car spec) ,counter))
+ ,@body)
+ (setq ,counter (1+ ,counter)))
+ ,@(if (cddr spec)
+ ;; FIXME: This let often leads to "unused var" warnings.
+ `((let ((,(car spec) ,counter)) ,@(cddr spec))))))
+ `(let ((,temp ,end)
+ (,(car spec) ,start))
+ (while (< ,(car spec) ,temp)
+ ,@body
+ (setq ,(car spec) (1+ ,(car spec))))
+ ,@(cdr (cdr spec))))))
+
+(defmacro declare (&rest _specs)
+ "Do not evaluate any arguments, and return nil.
+If a `declare' form appears as the first form in the body of a
+`defun' or `defmacro' form, SPECS specifies various additional
+information about the function or macro; these go into effect
+during the evaluation of the `defun' or `defmacro' form.
+
+The possible values of SPECS are specified by
+`defun-declarations-alist' and `macro-declarations-alist'.
+
+For more information, see info node `(elisp)Declare Form'."
+ ;; FIXME: edebug spec should pay attention to defun-declarations-alist.
+ nil)
+
+(defmacro ignore-errors (&rest body)
+ "Execute BODY; if an error occurs, return nil.
+Otherwise, return result of last form in BODY.
+See also `with-demoted-errors' that does something similar
+without silencing all errors."
+ (declare (debug t) (indent 0))
+ `(condition-case nil (progn ,@body) (error nil)))
+
+;;;; Basic Lisp functions.
+
+(defun ignore (&rest _ignore)
+ "Do nothing and return nil.
+This function accepts any number of arguments, but ignores them."
+ (interactive)
+ nil)
+
+;; Signal a compile-error if the first arg is missing.
+(defun error (&rest args)
+ "Signal an error, making error message by passing all args to `format'.
+In Emacs, the convention is that error messages start with a capital
+letter but *do not* end with a period. Please follow this convention
+for the sake of consistency."
+ (declare (advertised-calling-convention (string &rest args) "23.1"))
+ (signal 'error (list (apply 'format args))))
+
+(defun user-error (format &rest args)
+ "Signal a pilot error, making error message by passing all args to `format'.
+In Emacs, the convention is that error messages start with a capital
+letter but *do not* end with a period. Please follow this convention
+for the sake of consistency.
+This is just like `error' except that `user-error's are expected to be the
+result of an incorrect manipulation on the part of the user, rather than the
+result of an actual problem."
+ (signal 'user-error (list (apply #'format format args))))
+
+(defun define-error (name message &optional parent)
+ "Define NAME as a new error signal.
+MESSAGE is a string that will be output to the echo area if such an error
+is signaled without being caught by a `condition-case'.
+PARENT is either a signal or a list of signals from which it inherits.
+Defaults to `error'."
+ (unless parent (setq parent 'error))
+ (let ((conditions
+ (if (consp parent)
+ (apply #'append
+ (mapcar (lambda (parent)
+ (cons parent
+ (or (get parent 'error-conditions)
+ (error "Unknown signal `%s'" parent))))
+ parent))
+ (cons parent (get parent 'error-conditions)))))
+ (put name 'error-conditions
+ (delete-dups (copy-sequence (cons name conditions))))
+ (when message (put name 'error-message message))))
+
+;; We put this here instead of in frame.el so that it's defined even on
+;; systems where frame.el isn't loaded.
+(defun frame-configuration-p (object)
+ "Return non-nil if OBJECT seems to be a frame configuration.
+Any list whose car is `frame-configuration' is assumed to be a frame
+configuration."
+ (and (consp object)
+ (eq (car object) 'frame-configuration)))
+
+
+;;;; List functions.
+
+(defsubst caar (x)
+ "Return the car of the car of X."
+ (car (car x)))
+
+(defsubst cadr (x)
+ "Return the car of the cdr of X."
+ (car (cdr x)))
+
+(defsubst cdar (x)
+ "Return the cdr of the car of X."
+ (cdr (car x)))
+
+(defsubst cddr (x)
+ "Return the cdr of the cdr of X."
+ (cdr (cdr x)))
+
+(defun last (list &optional n)
+ "Return the last link of LIST. Its car is the last element.
+If LIST is nil, return nil.
+If N is non-nil, return the Nth-to-last link of LIST.
+If N is bigger than the length of LIST, return LIST."
+ (if n
+ (and (>= n 0)
+ (let ((m (safe-length list)))
+ (if (< n m) (nthcdr (- m n) list) list)))
+ (and list
+ (nthcdr (1- (safe-length list)) list))))
+
+(defun butlast (list &optional n)
+ "Return a copy of LIST with the last N elements removed.
+If N is omitted or nil, the last element is removed from the
+copy."
+ (if (and n (<= n 0)) list
+ (nbutlast (copy-sequence list) n)))
+
+(defun nbutlast (list &optional n)
+ "Modifies LIST to remove the last N elements.
+If N is omitted or nil, remove the last element."
+ (let ((m (length list)))
+ (or n (setq n 1))
+ (and (< n m)
+ (progn
+ (if (> n 0) (setcdr (nthcdr (- (1- m) n) list) nil))
+ list))))
+
+(defun zerop (number)
+ "Return t if NUMBER is zero."
+ ;; Used to be in C, but it's pointless since (= 0 n) is faster anyway because
+ ;; = has a byte-code.
+ (declare (compiler-macro (lambda (_) `(= 0 ,number))))
+ (= 0 number))
+
+(defun delete-dups (list)
+ "Destructively remove `equal' duplicates from LIST.
+Store the result in LIST and return it. LIST must be a proper list.
+Of several `equal' occurrences of an element in LIST, the first
+one is kept."
+ (let ((tail list))
+ (while tail
+ (setcdr tail (delete (car tail) (cdr tail)))
+ (setq tail (cdr tail))))
+ list)
+
+;; See http://lists.gnu.org/archive/html/emacs-devel/2013-05/msg00204.html
+(defun delete-consecutive-dups (list &optional circular)
+ "Destructively remove `equal' consecutive duplicates from LIST.
+First and last elements are considered consecutive if CIRCULAR is
+non-nil."
+ (let ((tail list) last)
+ (while (consp tail)
+ (if (equal (car tail) (cadr tail))
+ (setcdr tail (cddr tail))
+ (setq last (car tail)
+ tail (cdr tail))))
+ (if (and circular
+ (cdr list)
+ (equal last (car list)))
+ (nbutlast list)
+ list)))
+
+(defun number-sequence (from &optional to inc)
+ "Return a sequence of numbers from FROM to TO (both inclusive) as a list.
+INC is the increment used between numbers in the sequence and defaults to 1.
+So, the Nth element of the list is (+ FROM (* N INC)) where N counts from
+zero. TO is only included if there is an N for which TO = FROM + N * INC.
+If TO is nil or numerically equal to FROM, return (FROM).
+If INC is positive and TO is less than FROM, or INC is negative
+and TO is larger than FROM, return nil.
+If INC is zero and TO is neither nil nor numerically equal to
+FROM, signal an error.
+
+This function is primarily designed for integer arguments.
+Nevertheless, FROM, TO and INC can be integer or float. However,
+floating point arithmetic is inexact. For instance, depending on
+the machine, it may quite well happen that
+\(number-sequence 0.4 0.6 0.2) returns the one element list (0.4),
+whereas (number-sequence 0.4 0.8 0.2) returns a list with three
+elements. Thus, if some of the arguments are floats and one wants
+to make sure that TO is included, one may have to explicitly write
+TO as (+ FROM (* N INC)) or use a variable whose value was
+computed with this exact expression. Alternatively, you can,
+of course, also replace TO with a slightly larger value
+\(or a slightly more negative value if INC is negative)."
+ (if (or (not to) (= from to))
+ (list from)
+ (or inc (setq inc 1))
+ (when (zerop inc) (error "The increment can not be zero"))
+ (let (seq (n 0) (next from))
+ (if (> inc 0)
+ (while (<= next to)
+ (setq seq (cons next seq)
+ n (1+ n)
+ next (+ from (* n inc))))
+ (while (>= next to)
+ (setq seq (cons next seq)
+ n (1+ n)
+ next (+ from (* n inc)))))
+ (nreverse seq))))
+
+(defun copy-tree (tree &optional vecp)
+ "Make a copy of TREE.
+If TREE is a cons cell, this recursively copies both its car and its cdr.
+Contrast to `copy-sequence', which copies only along the cdrs. With second
+argument VECP, this copies vectors as well as conses."
+ (if (consp tree)
+ (let (result)
+ (while (consp tree)
+ (let ((newcar (car tree)))
+ (if (or (consp (car tree)) (and vecp (vectorp (car tree))))
+ (setq newcar (copy-tree (car tree) vecp)))
+ (push newcar result))
+ (setq tree (cdr tree)))
+ (nconc (nreverse result) tree))
+ (if (and vecp (vectorp tree))
+ (let ((i (length (setq tree (copy-sequence tree)))))
+ (while (>= (setq i (1- i)) 0)
+ (aset tree i (copy-tree (aref tree i) vecp)))
+ tree)
+ tree)))
+
+;;;; Various list-search functions.
+
+(defun assoc-default (key alist &optional test default)
+ "Find object KEY in a pseudo-alist ALIST.
+ALIST is a list of conses or objects. Each element
+ (or the element's car, if it is a cons) is compared with KEY by
+ calling TEST, with two arguments: (i) the element or its car,
+ and (ii) KEY.
+If that is non-nil, the element matches; then `assoc-default'
+ returns the element's cdr, if it is a cons, or DEFAULT if the
+ element is not a cons.
+
+If no element matches, the value is nil.
+If TEST is omitted or nil, `equal' is used."
+ (let (found (tail alist) value)
+ (while (and tail (not found))
+ (let ((elt (car tail)))
+ (when (funcall (or test 'equal) (if (consp elt) (car elt) elt) key)
+ (setq found t value (if (consp elt) (cdr elt) default))))
+ (setq tail (cdr tail)))
+ value))
+
+(defun assoc-ignore-case (key alist)
+ "Like `assoc', but ignores differences in case and text representation.
+KEY must be a string. Upper-case and lower-case letters are treated as equal.
+Unibyte strings are converted to multibyte for comparison."
+ (declare (obsolete assoc-string "22.1"))
+ (assoc-string key alist t))
+
+(defun assoc-ignore-representation (key alist)
+ "Like `assoc', but ignores differences in text representation.
+KEY must be a string.
+Unibyte strings are converted to multibyte for comparison."
+ (declare (obsolete assoc-string "22.1"))
+ (assoc-string key alist nil))
+
+(defun member-ignore-case (elt list)
+ "Like `member', but ignore differences in case and text representation.
+ELT must be a string. Upper-case and lower-case letters are treated as equal.
+Unibyte strings are converted to multibyte for comparison.
+Non-strings in LIST are ignored."
+ (while (and list
+ (not (and (stringp (car list))
+ (eq t (compare-strings elt 0 nil (car list) 0 nil t)))))
+ (setq list (cdr list)))
+ list)
+
+(defun assq-delete-all (key alist)
+ "Delete from ALIST all elements whose car is `eq' to KEY.
+Return the modified alist.
+Elements of ALIST that are not conses are ignored."
+ (while (and (consp (car alist))
+ (eq (car (car alist)) key))
+ (setq alist (cdr alist)))
+ (let ((tail alist) tail-cdr)
+ (while (setq tail-cdr (cdr tail))
+ (if (and (consp (car tail-cdr))
+ (eq (car (car tail-cdr)) key))
+ (setcdr tail (cdr tail-cdr))
+ (setq tail tail-cdr))))
+ alist)
+
+(defun rassq-delete-all (value alist)
+ "Delete from ALIST all elements whose cdr is `eq' to VALUE.
+Return the modified alist.
+Elements of ALIST that are not conses are ignored."
+ (while (and (consp (car alist))
+ (eq (cdr (car alist)) value))
+ (setq alist (cdr alist)))
+ (let ((tail alist) tail-cdr)
+ (while (setq tail-cdr (cdr tail))
+ (if (and (consp (car tail-cdr))
+ (eq (cdr (car tail-cdr)) value))
+ (setcdr tail (cdr tail-cdr))
+ (setq tail tail-cdr))))
+ alist)
+
+(defun alist-get (key alist &optional default remove)
+ "Get the value associated to KEY in ALIST.
+DEFAULT is the value to return if KEY is not found in ALIST.
+REMOVE, if non-nil, means that when setting this element, we should
+remove the entry if the new value is `eql' to DEFAULT."
+ (ignore remove) ;;Silence byte-compiler.
+ (let ((x (assq key alist)))
+ (if x (cdr x) default)))
+
+(defun remove (elt seq)
+ "Return a copy of SEQ with all occurrences of ELT removed.
+SEQ must be a list, vector, or string. The comparison is done with `equal'."
+ (if (nlistp seq)
+ ;; If SEQ isn't a list, there's no need to copy SEQ because
+ ;; `delete' will return a new object.
+ (delete elt seq)
+ (delete elt (copy-sequence seq))))
+
+(defun remq (elt list)
+ "Return LIST with all occurrences of ELT removed.
+The comparison is done with `eq'. Contrary to `delq', this does not use
+side-effects, and the argument LIST is not modified."
+ (while (and (eq elt (car list)) (setq list (cdr list))))
+ (if (memq elt list)
+ (delq elt (copy-sequence list))
+ list))
+
+;;;; Keymap support.
+
+(defun kbd (keys)
+ "Convert KEYS to the internal Emacs key representation.
+KEYS should be a string constant in the format used for
+saving keyboard macros (see `edmacro-mode')."
+ ;; Don't use a defalias, since the `pure' property is only true for
+ ;; the calling convention of `kbd'.
+ (read-kbd-macro keys))
+(put 'kbd 'pure t)
+
+(defun undefined ()
+ "Beep to tell the user this binding is undefined."
+ (interactive)
+ (ding)
+ (message "%s is undefined" (key-description (this-single-command-keys)))
+ (setq defining-kbd-macro nil)
+ (force-mode-line-update)
+ ;; If this is a down-mouse event, don't reset prefix-arg;
+ ;; pass it to the command run by the up event.
+ (setq prefix-arg
+ (when (memq 'down (event-modifiers last-command-event))
+ current-prefix-arg)))
+
+;; Prevent the \{...} documentation construct
+;; from mentioning keys that run this command.
+(put 'undefined 'suppress-keymap t)
+
+(defun suppress-keymap (map &optional nodigits)
+ "Make MAP override all normally self-inserting keys to be undefined.
+Normally, as an exception, digits and minus-sign are set to make prefix args,
+but optional second arg NODIGITS non-nil treats them like other chars."
+ (define-key map [remap self-insert-command] 'undefined)
+ (or nodigits
+ (let (loop)
+ (define-key map "-" 'negative-argument)
+ ;; Make plain numbers do numeric args.
+ (setq loop ?0)
+ (while (<= loop ?9)
+ (define-key map (char-to-string loop) 'digit-argument)
+ (setq loop (1+ loop))))))
+
+(defun make-composed-keymap (maps &optional parent)
+ "Construct a new keymap composed of MAPS and inheriting from PARENT.
+When looking up a key in the returned map, the key is looked in each
+keymap of MAPS in turn until a binding is found.
+If no binding is found in MAPS, the lookup continues in PARENT, if non-nil.
+As always with keymap inheritance, a nil binding in MAPS overrides
+any corresponding binding in PARENT, but it does not override corresponding
+bindings in other keymaps of MAPS.
+MAPS can be a list of keymaps or a single keymap.
+PARENT if non-nil should be a keymap."
+ `(keymap
+ ,@(if (keymapp maps) (list maps) maps)
+ ,@parent))
+
+(defun define-key-after (keymap key definition &optional after)
+ "Add binding in KEYMAP for KEY => DEFINITION, right after AFTER's binding.
+This is like `define-key' except that the binding for KEY is placed
+just after the binding for the event AFTER, instead of at the beginning
+of the map. Note that AFTER must be an event type (like KEY), NOT a command
+\(like DEFINITION).
+
+If AFTER is t or omitted, the new binding goes at the end of the keymap.
+AFTER should be a single event type--a symbol or a character, not a sequence.
+
+Bindings are always added before any inherited map.
+
+The order of bindings in a keymap only matters when it is used as
+a menu, so this function is not useful for non-menu keymaps."
+ (unless after (setq after t))
+ (or (keymapp keymap)
+ (signal 'wrong-type-argument (list 'keymapp keymap)))
+ (setq key
+ (if (<= (length key) 1) (aref key 0)
+ (setq keymap (lookup-key keymap
+ (apply 'vector
+ (butlast (mapcar 'identity key)))))
+ (aref key (1- (length key)))))
+ (let ((tail keymap) done inserted)
+ (while (and (not done) tail)
+ ;; Delete any earlier bindings for the same key.
+ (if (eq (car-safe (car (cdr tail))) key)
+ (setcdr tail (cdr (cdr tail))))
+ ;; If we hit an included map, go down that one.
+ (if (keymapp (car tail)) (setq tail (car tail)))
+ ;; When we reach AFTER's binding, insert the new binding after.
+ ;; If we reach an inherited keymap, insert just before that.
+ ;; If we reach the end of this keymap, insert at the end.
+ (if (or (and (eq (car-safe (car tail)) after)
+ (not (eq after t)))
+ (eq (car (cdr tail)) 'keymap)
+ (null (cdr tail)))
+ (progn
+ ;; Stop the scan only if we find a parent keymap.
+ ;; Keep going past the inserted element
+ ;; so we can delete any duplications that come later.
+ (if (eq (car (cdr tail)) 'keymap)
+ (setq done t))
+ ;; Don't insert more than once.
+ (or inserted
+ (setcdr tail (cons (cons key definition) (cdr tail))))
+ (setq inserted t)))
+ (setq tail (cdr tail)))))
+
+(defun map-keymap-sorted (function keymap)
+ "Implement `map-keymap' with sorting.
+Don't call this function; it is for internal use only."
+ (let (list)
+ (map-keymap (lambda (a b) (push (cons a b) list))
+ keymap)
+ (setq list (sort list
+ (lambda (a b)
+ (setq a (car a) b (car b))
+ (if (integerp a)
+ (if (integerp b) (< a b)
+ t)
+ (if (integerp b) t
+ ;; string< also accepts symbols.
+ (string< a b))))))
+ (dolist (p list)
+ (funcall function (car p) (cdr p)))))
+
+(defun keymap--menu-item-binding (val)
+ "Return the binding part of a menu-item."
+ (cond
+ ((not (consp val)) val) ;Not a menu-item.
+ ((eq 'menu-item (car val))
+ (let* ((binding (nth 2 val))
+ (plist (nthcdr 3 val))
+ (filter (plist-get plist :filter)))
+ (if filter (funcall filter binding)
+ binding)))
+ ((and (consp (cdr val)) (stringp (cadr val)))
+ (cddr val))
+ ((stringp (car val))
+ (cdr val))
+ (t val))) ;Not a menu-item either.
+
+(defun keymap--menu-item-with-binding (item binding)
+ "Build a menu-item like ITEM but with its binding changed to BINDING."
+ (cond
+ ((not (consp item)) binding) ;Not a menu-item.
+ ((eq 'menu-item (car item))
+ (setq item (copy-sequence item))
+ (let ((tail (nthcdr 2 item)))
+ (setcar tail binding)
+ ;; Remove any potential filter.
+ (if (plist-get (cdr tail) :filter)
+ (setcdr tail (plist-put (cdr tail) :filter nil))))
+ item)
+ ((and (consp (cdr item)) (stringp (cadr item)))
+ (cons (car item) (cons (cadr item) binding)))
+ (t (cons (car item) binding))))
+
+(defun keymap--merge-bindings (val1 val2)
+ "Merge bindings VAL1 and VAL2."
+ (let ((map1 (keymap--menu-item-binding val1))
+ (map2 (keymap--menu-item-binding val2)))
+ (if (not (and (keymapp map1) (keymapp map2)))
+ ;; There's nothing to merge: val1 takes precedence.
+ val1
+ (let ((map (list 'keymap map1 map2))
+ (item (if (keymapp val1) (if (keymapp val2) nil val2) val1)))
+ (keymap--menu-item-with-binding item map)))))
+
+(defun keymap-canonicalize (map)
+ "Return a simpler equivalent keymap.
+This resolves inheritance and redefinitions. The returned keymap
+should behave identically to a copy of KEYMAP w.r.t `lookup-key'
+and use in active keymaps and menus.
+Subkeymaps may be modified but are not canonicalized."
+ ;; FIXME: Problem with the difference between a nil binding
+ ;; that hides a binding in an inherited map and a nil binding that's ignored
+ ;; to let some further binding visible. Currently a nil binding hides all.
+ ;; FIXME: we may want to carefully (re)order elements in case they're
+ ;; menu-entries.
+ (let ((bindings ())
+ (ranges ())
+ (prompt (keymap-prompt map)))
+ (while (keymapp map)
+ (setq map (map-keymap ;; -internal
+ (lambda (key item)
+ (if (consp key)
+ ;; Treat char-ranges specially.
+ (push (cons key item) ranges)
+ (push (cons key item) bindings)))
+ map)))
+ ;; Create the new map.
+ (setq map (funcall (if ranges 'make-keymap 'make-sparse-keymap) prompt))
+ (dolist (binding ranges)
+ ;; Treat char-ranges specially. FIXME: need to merge as well.
+ (define-key map (vector (car binding)) (cdr binding)))
+ ;; Process the bindings starting from the end.
+ (dolist (binding (prog1 bindings (setq bindings ())))
+ (let* ((key (car binding))
+ (oldbind (assq key bindings)))
+ (push (if (not oldbind)
+ ;; The normal case: no duplicate bindings.
+ binding
+ ;; This is the second binding for this key.
+ (setq bindings (delq oldbind bindings))
+ (cons key (keymap--merge-bindings (cdr binding)
+ (cdr oldbind))))
+ bindings)))
+ (nconc map bindings)))
+
+(put 'keyboard-translate-table 'char-table-extra-slots 0)
+
+(defun keyboard-translate (from to)
+ "Translate character FROM to TO on the current terminal.
+This function creates a `keyboard-translate-table' if necessary
+and then modifies one entry in it."
+ (or (char-table-p keyboard-translate-table)
+ (setq keyboard-translate-table
+ (make-char-table 'keyboard-translate-table nil)))
+ (aset keyboard-translate-table from to))
+
+;;;; Key binding commands.
+
+(defun global-set-key (key command)
+ "Give KEY a global binding as COMMAND.
+COMMAND is the command definition to use; usually it is
+a symbol naming an interactively-callable function.
+KEY is a key sequence; noninteractively, it is a string or vector
+of characters or event types, and non-ASCII characters with codes
+above 127 (such as ISO Latin-1) can be included if you use a vector.
+
+Note that if KEY has a local binding in the current buffer,
+that local binding will continue to shadow any global binding
+that you make with this function."
+ (interactive "KSet key globally: \nCSet key %s to command: ")
+ (or (vectorp key) (stringp key)
+ (signal 'wrong-type-argument (list 'arrayp key)))
+ (define-key (current-global-map) key command))
+
+(defun local-set-key (key command)
+ "Give KEY a local binding as COMMAND.
+COMMAND is the command definition to use; usually it is
+a symbol naming an interactively-callable function.
+KEY is a key sequence; noninteractively, it is a string or vector
+of characters or event types, and non-ASCII characters with codes
+above 127 (such as ISO Latin-1) can be included if you use a vector.
+
+The binding goes in the current buffer's local map, which in most
+cases is shared with all other buffers in the same major mode."
+ (interactive "KSet key locally: \nCSet key %s locally to command: ")
+ (let ((map (current-local-map)))
+ (or map
+ (use-local-map (setq map (make-sparse-keymap))))
+ (or (vectorp key) (stringp key)
+ (signal 'wrong-type-argument (list 'arrayp key)))
+ (define-key map key command)))
+
+(defun global-unset-key (key)
+ "Remove global binding of KEY.
+KEY is a string or vector representing a sequence of keystrokes."
+ (interactive "kUnset key globally: ")
+ (global-set-key key nil))
+
+(defun local-unset-key (key)
+ "Remove local binding of KEY.
+KEY is a string or vector representing a sequence of keystrokes."
+ (interactive "kUnset key locally: ")
+ (if (current-local-map)
+ (local-set-key key nil))
+ nil)
+
+;;;; substitute-key-definition and its subroutines.
+
+(defvar key-substitution-in-progress nil
+ "Used internally by `substitute-key-definition'.")
+
+(defun substitute-key-definition (olddef newdef keymap &optional oldmap prefix)
+ "Replace OLDDEF with NEWDEF for any keys in KEYMAP now defined as OLDDEF.
+In other words, OLDDEF is replaced with NEWDEF where ever it appears.
+Alternatively, if optional fourth argument OLDMAP is specified, we redefine
+in KEYMAP as NEWDEF those keys which are defined as OLDDEF in OLDMAP.
+
+If you don't specify OLDMAP, you can usually get the same results
+in a cleaner way with command remapping, like this:
+ (define-key KEYMAP [remap OLDDEF] NEWDEF)
+\n(fn OLDDEF NEWDEF KEYMAP &optional OLDMAP)"
+ ;; Don't document PREFIX in the doc string because we don't want to
+ ;; advertise it. It's meant for recursive calls only. Here's its
+ ;; meaning
+
+ ;; If optional argument PREFIX is specified, it should be a key
+ ;; prefix, a string. Redefined bindings will then be bound to the
+ ;; original key, with PREFIX added at the front.
+ (or prefix (setq prefix ""))
+ (let* ((scan (or oldmap keymap))
+ (prefix1 (vconcat prefix [nil]))
+ (key-substitution-in-progress
+ (cons scan key-substitution-in-progress)))
+ ;; Scan OLDMAP, finding each char or event-symbol that
+ ;; has any definition, and act on it with hack-key.
+ (map-keymap
+ (lambda (char defn)
+ (aset prefix1 (length prefix) char)
+ (substitute-key-definition-key defn olddef newdef prefix1 keymap))
+ scan)))
+
+(defun substitute-key-definition-key (defn olddef newdef prefix keymap)
+ (let (inner-def skipped menu-item)
+ ;; Find the actual command name within the binding.
+ (if (eq (car-safe defn) 'menu-item)
+ (setq menu-item defn defn (nth 2 defn))
+ ;; Skip past menu-prompt.
+ (while (stringp (car-safe defn))
+ (push (pop defn) skipped))
+ ;; Skip past cached key-equivalence data for menu items.
+ (if (consp (car-safe defn))
+ (setq defn (cdr defn))))
+ (if (or (eq defn olddef)
+ ;; Compare with equal if definition is a key sequence.
+ ;; That is useful for operating on function-key-map.
+ (and (or (stringp defn) (vectorp defn))
+ (equal defn olddef)))
+ (define-key keymap prefix
+ (if menu-item
+ (let ((copy (copy-sequence menu-item)))
+ (setcar (nthcdr 2 copy) newdef)
+ copy)
+ (nconc (nreverse skipped) newdef)))
+ ;; Look past a symbol that names a keymap.
+ (setq inner-def
+ (or (indirect-function defn t) defn))
+ ;; For nested keymaps, we use `inner-def' rather than `defn' so as to
+ ;; avoid autoloading a keymap. This is mostly done to preserve the
+ ;; original non-autoloading behavior of pre-map-keymap times.
+ (if (and (keymapp inner-def)
+ ;; Avoid recursively scanning
+ ;; where KEYMAP does not have a submap.
+ (let ((elt (lookup-key keymap prefix)))
+ (or (null elt) (natnump elt) (keymapp elt)))
+ ;; Avoid recursively rescanning keymap being scanned.
+ (not (memq inner-def key-substitution-in-progress)))
+ ;; If this one isn't being scanned already, scan it now.
+ (substitute-key-definition olddef newdef keymap inner-def prefix)))))
+
+
+;;;; The global keymap tree.
+
+;; global-map, esc-map, and ctl-x-map have their values set up in
+;; keymap.c; we just give them docstrings here.
+
+(defvar global-map nil
+ "Default global keymap mapping Emacs keyboard input into commands.
+The value is a keymap which is usually (but not necessarily) Emacs's
+global map.")
+
+(defvar esc-map nil
+ "Default keymap for ESC (meta) commands.
+The normal global definition of the character ESC indirects to this keymap.")
+
+(defvar ctl-x-map nil
+ "Default keymap for C-x commands.
+The normal global definition of the character C-x indirects to this keymap.")
+
+(defvar ctl-x-4-map (make-sparse-keymap)
+ "Keymap for subcommands of C-x 4.")
+(defalias 'ctl-x-4-prefix ctl-x-4-map)
+(define-key ctl-x-map "4" 'ctl-x-4-prefix)
+
+(defvar ctl-x-5-map (make-sparse-keymap)
+ "Keymap for frame commands.")
+(defalias 'ctl-x-5-prefix ctl-x-5-map)
+(define-key ctl-x-map "5" 'ctl-x-5-prefix)
+
+
+;;;; Event manipulation functions.
+
+(defconst listify-key-sequence-1 (logior 128 ?\M-\C-@))
+
+(defun listify-key-sequence (key)
+ "Convert a key sequence to a list of events."
+ (if (vectorp key)
+ (append key nil)
+ (mapcar (function (lambda (c)
+ (if (> c 127)
+ (logxor c listify-key-sequence-1)
+ c)))
+ key)))
+
+(defun eventp (obj)
+ "True if the argument is an event object."
+ (when obj
+ (or (integerp obj)
+ (and (symbolp obj) obj (not (keywordp obj)))
+ (and (consp obj) (symbolp (car obj))))))
+
+(defun event-modifiers (event)
+ "Return a list of symbols representing the modifier keys in event EVENT.
+The elements of the list may include `meta', `control',
+`shift', `hyper', `super', `alt', `click', `double', `triple', `drag',
+and `down'.
+EVENT may be an event or an event type. If EVENT is a symbol
+that has never been used in an event that has been read as input
+in the current Emacs session, then this function may fail to include
+the `click' modifier."
+ (let ((type event))
+ (if (listp type)
+ (setq type (car type)))
+ (if (symbolp type)
+ ;; Don't read event-symbol-elements directly since we're not
+ ;; sure the symbol has already been parsed.
+ (cdr (internal-event-symbol-parse-modifiers type))
+ (let ((list nil)
+ (char (logand type (lognot (logior ?\M-\^@ ?\C-\^@ ?\S-\^@
+ ?\H-\^@ ?\s-\^@ ?\A-\^@)))))
+ (if (not (zerop (logand type ?\M-\^@)))
+ (push 'meta list))
+ (if (or (not (zerop (logand type ?\C-\^@)))
+ (< char 32))
+ (push 'control list))
+ (if (or (not (zerop (logand type ?\S-\^@)))
+ (/= char (downcase char)))
+ (push 'shift list))
+ (or (zerop (logand type ?\H-\^@))
+ (push 'hyper list))
+ (or (zerop (logand type ?\s-\^@))
+ (push 'super list))
+ (or (zerop (logand type ?\A-\^@))
+ (push 'alt list))
+ list))))
+
+(defun event-basic-type (event)
+ "Return the basic type of the given event (all modifiers removed).
+The value is a printing character (not upper case) or a symbol.
+EVENT may be an event or an event type. If EVENT is a symbol
+that has never been used in an event that has been read as input
+in the current Emacs session, then this function may return nil."
+ (if (consp event)
+ (setq event (car event)))
+ (if (symbolp event)
+ (car (get event 'event-symbol-elements))
+ (let* ((base (logand event (1- ?\A-\^@)))
+ (uncontrolled (if (< base 32) (logior base 64) base)))
+ ;; There are some numbers that are invalid characters and
+ ;; cause `downcase' to get an error.
+ (condition-case ()
+ (downcase uncontrolled)
+ (error uncontrolled)))))
+
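+;; For illustration, with a concrete (arbitrary) key event:
+;;   (event-modifiers ?\C-\M-a)  ; => (control meta)
+;;   (event-basic-type ?\C-\M-a) ; => ?a
+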
+(defsubst mouse-movement-p (object)
+ "Return non-nil if OBJECT is a mouse movement event."
+ (eq (car-safe object) 'mouse-movement))
+
+(defun mouse-event-p (object)
+ "Return non-nil if OBJECT is a mouse click event."
+ ;; Is this really correct?  Maybe remove mouse-movement?
+ (memq (event-basic-type object) '(mouse-1 mouse-2 mouse-3 mouse-movement)))
+
+(defun event-start (event)
+ "Return the starting position of EVENT.
+EVENT should be a mouse click, drag, or key press event. If
+EVENT is nil, the value of `posn-at-point' is used instead.
+
+The following accessor functions are used to access the elements
+of the position:
+
+`posn-window': The window the event is in.
+`posn-area': A symbol identifying the area the event occurred in,
+or nil if the event occurred in the text area.
+`posn-point': The buffer position of the event.
+`posn-x-y': The pixel-based coordinates of the event.
+`posn-col-row': The estimated column and row corresponding to the
+position of the event.
+`posn-actual-col-row': The actual column and row corresponding to the
+position of the event.
+`posn-string': The string object of the event, which is either
+nil or (STRING . POSITION).
+`posn-image': The image object of the event, if any.
+`posn-object': The image or string object of the event, if any.
+`posn-timestamp': The time the event occurred, in milliseconds.
+
+For more information, see Info node `(elisp)Click Events'."
+ (if (consp event) (nth 1 event)
+ (or (posn-at-point)
+ (list (selected-window) (point) '(0 . 0) 0))))
+
+(defun event-end (event)
+ "Return the ending position of EVENT.
+EVENT should be a click, drag, or key press event.
+
+See `event-start' for a description of the value returned."
+ (if (consp event) (nth (if (consp (nth 2 event)) 2 1) event)
+ (or (posn-at-point)
+ (list (selected-window) (point) '(0 . 0) 0))))
+
+(defsubst event-click-count (event)
+ "Return the multi-click count of EVENT, a click or drag event.
+The return value is a positive integer."
+ (if (and (consp event) (integerp (nth 2 event))) (nth 2 event) 1))
+
+;;;; Extracting fields of the positions in an event.
+
+(defun posnp (obj)
+ "Return non-nil if OBJ appears to be a valid `posn' object specifying a window.
+If OBJ is a valid `posn' object, but specifies a frame rather
+than a window, return nil."
+ ;; FIXME: Correct the behavior of this function so that all valid
+ ;; `posn' objects are recognized, after updating other code that
+ ;; depends on its present behavior.
+ (and (windowp (car-safe obj))
+ (atom (car-safe (setq obj (cdr obj)))) ;AREA-OR-POS.
+ (integerp (car-safe (car-safe (setq obj (cdr obj))))) ;XOFFSET.
+ (integerp (car-safe (cdr obj))))) ;TIMESTAMP.
+
+(defsubst posn-window (position)
+ "Return the window in POSITION.
+POSITION should be a list of the form returned by the `event-start'
+and `event-end' functions."
+ (nth 0 position))
+
+(defsubst posn-area (position)
+ "Return the window area recorded in POSITION, or nil for the text area.
+POSITION should be a list of the form returned by the `event-start'
+and `event-end' functions."
+ (let ((area (if (consp (nth 1 position))
+ (car (nth 1 position))
+ (nth 1 position))))
+ (and (symbolp area) area)))
+
+(defun posn-point (position)
+ "Return the buffer location in POSITION.
+POSITION should be a list of the form returned by the `event-start'
+and `event-end' functions.
+Return nil if POSITION does not correspond to any buffer location (e.g.
+a click on a scroll bar)."
+ (or (nth 5 position)
+ (let ((pt (nth 1 position)))
+ (or (car-safe pt)
+ ;; Apparently this can also be `vertical-scroll-bar' (bug#13979).
+ (if (integerp pt) pt)))))
+
+(defun posn-set-point (position)
+ "Move point to POSITION.
+Select the corresponding window as well."
+ (if (not (windowp (posn-window position)))
+ (error "Position not in text area of window"))
+ (select-window (posn-window position))
+ (if (numberp (posn-point position))
+ (goto-char (posn-point position))))
+
+(defsubst posn-x-y (position)
+ "Return the x and y coordinates in POSITION.
+The return value has the form (X . Y), where X and Y are given in
+pixels. POSITION should be a list of the form returned by
+`event-start' and `event-end'."
+ (nth 2 position))
+
+(declare-function scroll-bar-scale "scroll-bar" (num-denom whole))
+
+(defun posn-col-row (position)
+ "Return the nominal column and row in POSITION, measured in characters.
+The column and row values are approximations calculated from the x
+and y coordinates in POSITION and the frame's default character width
+and default line height, including spacing.
+For a scroll-bar event, the result column is 0, and the row
+corresponds to the vertical position of the click in the scroll bar.
+POSITION should be a list of the form returned by the `event-start'
+and `event-end' functions."
+ (let* ((pair (posn-x-y position))
+ (frame-or-window (posn-window position))
+ (frame (if (framep frame-or-window)
+ frame-or-window
+ (window-frame frame-or-window)))
+ (window (when (windowp frame-or-window) frame-or-window))
+ (area (posn-area position)))
+ (cond
+ ((null frame-or-window)
+ '(0 . 0))
+ ((eq area 'vertical-scroll-bar)
+ (cons 0 (scroll-bar-scale pair (1- (window-height window)))))
+ ((eq area 'horizontal-scroll-bar)
+ (cons (scroll-bar-scale pair (window-width window)) 0))
+ (t
+ ;; FIXME: This should take line-spacing properties on
+ ;; newlines into account.
+ (let* ((spacing (when (display-graphic-p frame)
+ (or (with-current-buffer
+ (window-buffer (frame-selected-window frame))
+ line-spacing)
+ (frame-parameter frame 'line-spacing)))))
+ (cond ((floatp spacing)
+ (setq spacing (truncate (* spacing
+ (frame-char-height frame)))))
+ ((null spacing)
+ (setq spacing 0)))
+ (cons (/ (car pair) (frame-char-width frame))
+ (/ (cdr pair) (+ (frame-char-height frame) spacing))))))))
+
+(defun posn-actual-col-row (position)
+ "Return the window row number in POSITION and character number in that row.
+
+Return nil if POSITION does not contain the actual position; in that case
+\`posn-col-row' can be used to get approximate values.
+POSITION should be a list of the form returned by the `event-start'
+and `event-end' functions.
+
+This function does not account for the width on display, like the
+number of visual columns taken by a TAB or image. If you need
+the coordinates of POSITION in character units, you should use
+\`posn-col-row', not this function."
+ (nth 6 position))
+
+(defsubst posn-timestamp (position)
+ "Return the timestamp of POSITION.
+POSITION should be a list of the form returned by the `event-start'
+and `event-end' functions."
+ (nth 3 position))
+
+(defun posn-string (position)
+ "Return the string object of POSITION.
+Value is a cons (STRING . STRING-POS), or nil if not a string.
+POSITION should be a list of the form returned by the `event-start'
+and `event-end' functions."
+ (let ((x (nth 4 position)))
+ ;; Apparently this can also be `handle' or `below-handle' (bug#13979).
+ (when (consp x) x)))
+
+(defsubst posn-image (position)
+ "Return the image object of POSITION.
+Value is a list (image ...), or nil if not an image.
+POSITION should be a list of the form returned by the `event-start'
+and `event-end' functions."
+ (nth 7 position))
+
+(defsubst posn-object (position)
+ "Return the object (image or string) of POSITION.
+Value is a list (image ...) for an image object, a cons cell
+\(STRING . STRING-POS) for a string object, and nil for a buffer position.
+POSITION should be a list of the form returned by the `event-start'
+and `event-end' functions."
+ (or (posn-image position) (posn-string position)))
+
+(defsubst posn-object-x-y (position)
+ "Return the x and y coordinates relative to the object of POSITION.
+The return value has the form (DX . DY), where DX and DY are
+given in pixels. POSITION should be a list of the form returned
+by `event-start' and `event-end'."
+ (nth 8 position))
+
+(defsubst posn-object-width-height (position)
+ "Return the pixel width and height of the object of POSITION.
+The return value has the form (WIDTH . HEIGHT). POSITION should
+be a list of the form returned by `event-start' and `event-end'."
+ (nth 9 position))
+
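+;; Sketch of how these accessors are typically combined.  EVENT stands
+;; for a mouse event obtained elsewhere (e.g. from `read-event'):
+;;   (let ((posn (event-start EVENT)))
+;;     (list (posn-window posn) (posn-point posn) (posn-x-y posn)))
+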
+
+;;;; Obsolescent names for functions.
+
+(define-obsolete-function-alias 'window-dot 'window-point "22.1")
+(define-obsolete-function-alias 'set-window-dot 'set-window-point "22.1")
+(define-obsolete-function-alias 'read-input 'read-string "22.1")
+(define-obsolete-function-alias 'show-buffer 'set-window-buffer "22.1")
+(define-obsolete-function-alias 'eval-current-buffer 'eval-buffer "22.1")
+(define-obsolete-function-alias 'string-to-int 'string-to-number "22.1")
+
+(make-obsolete 'forward-point "use (+ (point) N) instead." "23.1")
+(make-obsolete 'buffer-has-markers-at nil "24.3")
+
+(defun insert-string (&rest args)
+ "Mocklisp-compatibility insert function.
+Like the function `insert' except that any argument that is a number
+is converted into a string by expressing it in decimal."
+ (declare (obsolete insert "22.1"))
+ (dolist (el args)
+ (insert (if (integerp el) (number-to-string el) el))))
+
+(defun makehash (&optional test)
+ (declare (obsolete make-hash-table "22.1"))
+ (make-hash-table :test (or test 'eql)))
+
+(defun log10 (x)
+ "Return (log X 10), the log base 10 of X."
+ (declare (obsolete log "24.4"))
+ (log x 10))
+
+;; These are used by VM and some old programs
+(defalias 'focus-frame 'ignore "")
+(make-obsolete 'focus-frame "it does nothing." "22.1")
+(defalias 'unfocus-frame 'ignore "")
+(make-obsolete 'unfocus-frame "it does nothing." "22.1")
+(make-obsolete 'make-variable-frame-local
+ "explicitly check for a frame-parameter instead." "22.2")
+(set-advertised-calling-convention
+ 'all-completions '(string collection &optional predicate) "23.1")
+(set-advertised-calling-convention 'unintern '(name obarray) "23.3")
+(set-advertised-calling-convention 'indirect-function '(object) "25.1")
+(set-advertised-calling-convention 'redirect-frame-focus '(frame focus-frame) "24.3")
+(set-advertised-calling-convention 'decode-char '(ch charset) "21.4")
+(set-advertised-calling-convention 'encode-char '(ch charset) "21.4")
+
+;;;; Obsolescence declarations for variables, and aliases.
+
+;; Special "default-FOO" variables which contain the default value of
+;; the "FOO" variable are nasty. Their implementation is brittle, and
+;; slows down several unrelated variable operations; furthermore, they
+;; can lead to really odd behavior if you decide to make them
+;; buffer-local.
+
+;; Not used at all in Emacs, last time I checked:
+(make-obsolete-variable 'default-mode-line-format 'mode-line-format "23.2")
+(make-obsolete-variable 'default-header-line-format 'header-line-format "23.2")
+(make-obsolete-variable 'default-line-spacing 'line-spacing "23.2")
+(make-obsolete-variable 'default-abbrev-mode 'abbrev-mode "23.2")
+(make-obsolete-variable 'default-ctl-arrow 'ctl-arrow "23.2")
+(make-obsolete-variable 'default-truncate-lines 'truncate-lines "23.2")
+(make-obsolete-variable 'default-left-margin 'left-margin "23.2")
+(make-obsolete-variable 'default-tab-width 'tab-width "23.2")
+(make-obsolete-variable 'default-case-fold-search 'case-fold-search "23.2")
+(make-obsolete-variable 'default-left-margin-width 'left-margin-width "23.2")
+(make-obsolete-variable 'default-right-margin-width 'right-margin-width "23.2")
+(make-obsolete-variable 'default-left-fringe-width 'left-fringe-width "23.2")
+(make-obsolete-variable 'default-right-fringe-width 'right-fringe-width "23.2")
+(make-obsolete-variable 'default-fringes-outside-margins 'fringes-outside-margins "23.2")
+(make-obsolete-variable 'default-scroll-bar-width 'scroll-bar-width "23.2")
+(make-obsolete-variable 'default-vertical-scroll-bar 'vertical-scroll-bar "23.2")
+(make-obsolete-variable 'default-indicate-empty-lines 'indicate-empty-lines "23.2")
+(make-obsolete-variable 'default-indicate-buffer-boundaries 'indicate-buffer-boundaries "23.2")
+(make-obsolete-variable 'default-fringe-indicator-alist 'fringe-indicator-alist "23.2")
+(make-obsolete-variable 'default-fringe-cursor-alist 'fringe-cursor-alist "23.2")
+(make-obsolete-variable 'default-scroll-up-aggressively 'scroll-up-aggressively "23.2")
+(make-obsolete-variable 'default-scroll-down-aggressively 'scroll-down-aggressively "23.2")
+(make-obsolete-variable 'default-fill-column 'fill-column "23.2")
+(make-obsolete-variable 'default-cursor-type 'cursor-type "23.2")
+(make-obsolete-variable 'default-cursor-in-non-selected-windows 'cursor-in-non-selected-windows "23.2")
+(make-obsolete-variable 'default-buffer-file-coding-system 'buffer-file-coding-system "23.2")
+(make-obsolete-variable 'default-major-mode 'major-mode "23.2")
+(make-obsolete-variable 'default-enable-multibyte-characters
+ "use enable-multibyte-characters or set-buffer-multibyte instead" "23.2")
+
+(make-obsolete-variable 'define-key-rebound-commands nil "23.2")
+(make-obsolete-variable 'redisplay-end-trigger-functions 'jit-lock-register "23.1")
+(make-obsolete-variable 'deferred-action-list 'post-command-hook "24.1")
+(make-obsolete-variable 'deferred-action-function 'post-command-hook "24.1")
+(make-obsolete-variable 'redisplay-dont-pause nil "24.5")
+(make-obsolete 'window-redisplay-end-trigger nil "23.1")
+(make-obsolete 'set-window-redisplay-end-trigger nil "23.1")
+
+(make-obsolete 'process-filter-multibyte-p nil "23.1")
+(make-obsolete 'set-process-filter-multibyte nil "23.1")
+
+;; Lisp manual only updated in 22.1.
+(define-obsolete-variable-alias 'executing-macro 'executing-kbd-macro
+ "before 19.34")
+
+(define-obsolete-variable-alias 'x-lost-selection-hooks
+ 'x-lost-selection-functions "22.1")
+(define-obsolete-variable-alias 'x-sent-selection-hooks
+ 'x-sent-selection-functions "22.1")
+
+;; This was introduced in 21.4 for pre-unicode unification. That
+;; usage was rendered obsolete in 23.1 which uses Unicode internally.
+;; Other uses are possible, so this variable is not _really_ obsolete,
+;; but Stefan insists on marking it so.
+(make-obsolete-variable 'translation-table-for-input nil "23.1")
+
+(defvaralias 'messages-buffer-max-lines 'message-log-max)
+
+;;;; Alternate names for functions - these are not being phased out.
+
+(defalias 'send-string 'process-send-string)
+(defalias 'send-region 'process-send-region)
+(defalias 'string= 'string-equal)
+(defalias 'string< 'string-lessp)
+(defalias 'move-marker 'set-marker)
+(defalias 'rplaca 'setcar)
+(defalias 'rplacd 'setcdr)
+(defalias 'beep 'ding) ;preserve lingual purity
+(defalias 'indent-to-column 'indent-to)
+(defalias 'backward-delete-char 'delete-backward-char)
+(defalias 'search-forward-regexp (symbol-function 're-search-forward))
+(defalias 'search-backward-regexp (symbol-function 're-search-backward))
+(defalias 'int-to-string 'number-to-string)
+(defalias 'store-match-data 'set-match-data)
+(defalias 'chmod 'set-file-modes)
+(defalias 'mkdir 'make-directory)
+;; These are the XEmacs names:
+(defalias 'point-at-eol 'line-end-position)
+(defalias 'point-at-bol 'line-beginning-position)
+
+(defalias 'user-original-login-name 'user-login-name)
+
+
+;;;; Hook manipulation functions.
+
+(defun add-hook (hook function &optional append local)
+ "Add to the value of HOOK the function FUNCTION.
+FUNCTION is not added if already present.
+FUNCTION is added (if necessary) at the beginning of the hook list
+unless the optional argument APPEND is non-nil, in which case
+FUNCTION is added at the end.
+
+The optional fourth argument, LOCAL, if non-nil, says to modify
+the hook's buffer-local value rather than its global value.
+This makes the hook buffer-local, and it makes t a member of the
+buffer-local value. That acts as a flag to run the hook
+functions of the global value as well as in the local value.
+
+HOOK should be a symbol, and FUNCTION may be any valid function. If
+HOOK is void, it is first set to nil. If HOOK's value is a single
+function, it is changed to a list of functions."
+ (or (boundp hook) (set hook nil))
+ (or (default-boundp hook) (set-default hook nil))
+ (if local (unless (local-variable-if-set-p hook)
+ (set (make-local-variable hook) (list t)))
+ ;; Detect the case where make-local-variable was used on a hook
+ ;; and do what we used to do.
+ (unless (and (consp (symbol-value hook)) (memq t (symbol-value hook)))
+ (setq local t)))
+ (let ((hook-value (if local (symbol-value hook) (default-value hook))))
+ ;; If the hook value is a single function, turn it into a list.
+ (when (or (not (listp hook-value)) (functionp hook-value))
+ (setq hook-value (list hook-value)))
+ ;; Do the actual addition if necessary
+ (unless (member function hook-value)
+ (when (stringp function)
+ (setq function (purecopy function)))
+ (setq hook-value
+ (if append
+ (append hook-value (list function))
+ (cons function hook-value))))
+ ;; Set the actual variable
+ (if local
+ (progn
+ ;; If HOOK isn't a permanent local,
+ ;; but FUNCTION wants to survive a change of modes,
+ ;; mark HOOK as partially permanent.
+ (and (symbolp function)
+ (get function 'permanent-local-hook)
+ (not (get hook 'permanent-local))
+ (put hook 'permanent-local 'permanent-local-hook))
+ (set hook hook-value))
+ (set-default hook hook-value))))
+
+(defun remove-hook (hook function &optional local)
+ "Remove from the value of HOOK the function FUNCTION.
+HOOK should be a symbol, and FUNCTION may be any valid function. If
+FUNCTION isn't the value of HOOK, or if FUNCTION doesn't appear in the
+list of hooks to run in HOOK, nothing is done.  See `add-hook'.
+
+The optional third argument, LOCAL, if non-nil, says to modify
+the hook's buffer-local value rather than its default value."
+ (or (boundp hook) (set hook nil))
+ (or (default-boundp hook) (set-default hook nil))
+ ;; Do nothing if LOCAL is t but this hook has no local binding.
+ (unless (and local (not (local-variable-p hook)))
+ ;; Detect the case where make-local-variable was used on a hook
+ ;; and do what we used to do.
+ (when (and (local-variable-p hook)
+ (not (and (consp (symbol-value hook))
+ (memq t (symbol-value hook)))))
+ (setq local t))
+ (let ((hook-value (if local (symbol-value hook) (default-value hook))))
+ ;; Remove the function, for both the list and the non-list cases.
+ (if (or (not (listp hook-value)) (eq (car hook-value) 'lambda))
+ (if (equal hook-value function) (setq hook-value nil))
+ (setq hook-value (delete function (copy-sequence hook-value))))
+ ;; If the function is on the global hook, we need to shadow it locally
+ ;;(when (and local (member function (default-value hook))
+ ;; (not (member (cons 'not function) hook-value)))
+ ;; (push (cons 'not function) hook-value))
+ ;; Set the actual variable
+ (if (not local)
+ (set-default hook hook-value)
+ (if (equal hook-value '(t))
+ (kill-local-variable hook)
+ (set hook hook-value))))))
+
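+;; Typical pairing, for illustration; the hook and function are examples,
+;; not something installed by this file:
+;;   (add-hook 'text-mode-hook #'turn-on-auto-fill)
+;;   (remove-hook 'text-mode-hook #'turn-on-auto-fill)
+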
+(defmacro letrec (binders &rest body)
+ "Bind variables according to BINDERS then eval BODY.
+The value of the last form in BODY is returned.
+Each element of BINDERS is a list (SYMBOL VALUEFORM) which binds
+SYMBOL to the value of VALUEFORM.
+All symbols are bound before the VALUEFORMs are evalled."
+ ;; Only useful in lexical-binding mode.
+ ;; As a special-form, we could implement it more efficiently (and cleanly,
+ ;; making the vars actually unbound during evaluation of the binders).
+ (declare (debug let) (indent 1))
+ `(let ,(mapcar #'car binders)
+ ,@(mapcar (lambda (binder) `(setq ,@binder)) binders)
+ ,@body))
+
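+;; Minimal sketch of mutually recursive bindings with `letrec' (only
+;; meaningful under lexical binding, as noted above):
+;;   (letrec ((evenp* (lambda (n) (if (zerop n) t (funcall oddp* (1- n)))))
+;;            (oddp*  (lambda (n) (if (zerop n) nil (funcall evenp* (1- n))))))
+;;     (funcall evenp* 10))  ; => t
+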
+(defmacro with-wrapper-hook (hook args &rest body)
+ "Run BODY, using wrapper functions from HOOK with additional ARGS.
+HOOK is an abnormal hook. Each hook function in HOOK \"wraps\"
+around the preceding ones, like a set of nested `around' advices.
+
+Each hook function should accept an argument list consisting of a
+function FUN, followed by the additional arguments in ARGS.
+
+The first hook function in HOOK is passed a FUN that, if it is called
+with arguments ARGS, performs BODY (i.e., the default operation).
+The FUN passed to each successive hook function is defined based
+on the preceding hook functions; if called with arguments ARGS,
+it does what the `with-wrapper-hook' call would do if the
+preceding hook functions were the only ones present in HOOK.
+
+Each hook function may call its FUN argument as many times as it wishes,
+including never. In that case, such a hook function acts to replace
+the default definition altogether, and any preceding hook functions.
+Of course, a subsequent hook function may do the same thing.
+
+Each hook function definition is used to construct the FUN passed
+to the next hook function, if any. The last (or \"outermost\")
+FUN is then called once."
+ (declare (indent 2) (debug (form sexp body))
+ (obsolete "use a <foo>-function variable modified by `add-function'."
+ "24.4"))
+ ;; We need those two gensyms because CL's lexical scoping is not available
+ ;; for function arguments :-(
+ (let ((funs (make-symbol "funs"))
+ (global (make-symbol "global"))
+ (argssym (make-symbol "args"))
+ (runrestofhook (make-symbol "runrestofhook")))
+ ;; Since the hook is a wrapper, the loop has to be done via
+ ;; recursion: a given hook function will call its parameter in order to
+ ;; continue looping.
+ `(letrec ((,runrestofhook
+ (lambda (,funs ,global ,argssym)
+ ;; `funs' holds the functions left on the hook and `global'
+ ;; holds the functions left on the global part of the hook
+ ;; (in case the hook is local).
+ (if (consp ,funs)
+ (if (eq t (car ,funs))
+ (funcall ,runrestofhook
+ (append ,global (cdr ,funs)) nil ,argssym)
+ (apply (car ,funs)
+ (apply-partially
+ (lambda (,funs ,global &rest ,argssym)
+ (funcall ,runrestofhook ,funs ,global ,argssym))
+ (cdr ,funs) ,global)
+ ,argssym))
+ ;; Once there are no more functions on the hook, run
+ ;; the original body.
+ (apply (lambda ,args ,@body) ,argssym)))))
+ (funcall ,runrestofhook ,hook
+ ;; The global part of the hook, if any.
+ ,(if (symbolp hook)
+ `(if (local-variable-p ',hook)
+ (default-value ',hook)))
+ (list ,@args)))))
+
+(defun add-to-list (list-var element &optional append compare-fn)
+ "Add ELEMENT to the value of LIST-VAR if it isn't there yet.
+The test for presence of ELEMENT is done with `equal', or with
+COMPARE-FN if that's non-nil.
+If ELEMENT is added, it is added at the beginning of the list,
+unless the optional argument APPEND is non-nil, in which case
+ELEMENT is added at the end.
+
+The return value is the new value of LIST-VAR.
+
+This is handy to add some elements to configuration variables,
+but please do not abuse it in Elisp code, where you are usually
+better off using `push' or `cl-pushnew'.
+
+If you want to use `add-to-list' on a variable that is not
+defined until a certain package is loaded, you should put the
+call to `add-to-list' into a hook function that will be run only
+after loading the package. `eval-after-load' provides one way to
+do this. In some cases other hooks, such as major mode hooks,
+can do the job."
+ (declare
+ (compiler-macro
+ (lambda (exp)
+ ;; FIXME: Something like this could be used for `set' as well.
+ (if (or (not (eq 'quote (car-safe list-var)))
+ (special-variable-p (cadr list-var))
+ (not (macroexp-const-p append)))
+ exp
+ (let* ((sym (cadr list-var))
+ (append (eval append))
+ (msg (format "`add-to-list' can't use lexical var `%s'; use `push' or `cl-pushnew'"
+ sym))
+ ;; Big ugly hack so we only output a warning during
+ ;; byte-compilation, and so we can use
+ ;; byte-compile-not-lexical-var-p to silence the warning
+ ;; when a defvar has been seen but not yet executed.
+ (warnfun (lambda ()
+ ;; FIXME: We should also emit a warning for let-bound
+ ;; variables with dynamic binding.
+ (when (assq sym byte-compile--lexical-environment)
+ (byte-compile-log-warning msg t :error))))
+ (code
+ (macroexp-let2 macroexp-copyable-p x element
+ `(if ,(if compare-fn
+ (progn
+ (require 'cl-lib)
+ `(cl-member ,x ,sym :test ,compare-fn))
+ ;; For bootstrapping reasons, don't rely on
+ ;; cl--compiler-macro-member for the base case.
+ `(member ,x ,sym))
+ ,sym
+ ,(if append
+ `(setq ,sym (append ,sym (list ,x)))
+ `(push ,x ,sym))))))
+ (if (not (macroexp--compiling-p))
+ code
+ `(progn
+ (macroexp--funcall-if-compiled ',warnfun)
+ ,code)))))))
+ (if (cond
+ ((null compare-fn)
+ (member element (symbol-value list-var)))
+ ((eq compare-fn 'eq)
+ (memq element (symbol-value list-var)))
+ ((eq compare-fn 'eql)
+ (memql element (symbol-value list-var)))
+ (t
+ (let ((lst (symbol-value list-var)))
+ (while (and lst
+ (not (funcall compare-fn element (car lst))))
+ (setq lst (cdr lst)))
+ lst)))
+ (symbol-value list-var)
+ (set list-var
+ (if append
+ (append (symbol-value list-var) (list element))
+ (cons element (symbol-value list-var))))))
+
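+;; For illustration; `load-path' is a stock special variable and the
+;; directory is an arbitrary example:
+;;   (add-to-list 'load-path "/tmp/my-lisp")    ; prepend if absent
+;;   (add-to-list 'load-path "/tmp/my-lisp" t)  ; append instead
+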
+
+(defun add-to-ordered-list (list-var element &optional order)
+ "Add ELEMENT to the value of LIST-VAR if it isn't there yet.
+The test for presence of ELEMENT is done with `eq'.
+
+The resulting list is reordered so that the elements are in the
+order given by each element's numeric list order. Elements
+without a numeric list order are placed at the end of the list.
+
+If the third optional argument ORDER is a number (integer or
+float), set the element's list order to the given value. If
+ORDER is nil or omitted, do not change the numeric order of
+ELEMENT. If ORDER has any other value, remove the numeric order
+of ELEMENT if it has one.
+
+The list order for each element is stored in LIST-VAR's
+`list-order' property.
+
+The return value is the new value of LIST-VAR."
+ (let ((ordering (get list-var 'list-order)))
+ (unless ordering
+ (put list-var 'list-order
+ (setq ordering (make-hash-table :weakness 'key :test 'eq))))
+ (when order
+ (puthash element (and (numberp order) order) ordering))
+ (unless (memq element (symbol-value list-var))
+ (set list-var (cons element (symbol-value list-var))))
+ (set list-var (sort (symbol-value list-var)
+ (lambda (a b)
+ (let ((oa (gethash a ordering))
+ (ob (gethash b ordering)))
+ (if (and oa ob)
+ (< oa ob)
+ oa)))))))
+
+(defun add-to-history (history-var newelt &optional maxelt keep-all)
+ "Add NEWELT to the history list stored in the variable HISTORY-VAR.
+Return the new history list.
+If MAXELT is non-nil, it specifies the maximum length of the history.
+Otherwise, the maximum history length is the value of the `history-length'
+property on symbol HISTORY-VAR, if set, or the value of the `history-length'
+variable.
+Remove duplicates of NEWELT if `history-delete-duplicates' is non-nil.
+If optional fourth arg KEEP-ALL is non-nil, add NEWELT to history even
+if it is empty or a duplicate."
+ (unless maxelt
+ (setq maxelt (or (get history-var 'history-length)
+ history-length)))
+ (let ((history (symbol-value history-var))
+ tail)
+ (when (and (listp history)
+ (or keep-all
+ (not (stringp newelt))
+ (> (length newelt) 0))
+ (or keep-all
+ (not (equal (car history) newelt))))
+ (if history-delete-duplicates
+ (setq history (delete newelt history)))
+ (setq history (cons newelt history))
+ (when (integerp maxelt)
+ (if (= 0 maxelt)
+ (setq history nil)
+ (setq tail (nthcdr (1- maxelt) history))
+ (when (consp tail)
+ (setcdr tail nil)))))
+ (set history-var history)))
+
+
+;;;; Mode hooks.
+
+(defvar delay-mode-hooks nil
+ "If non-nil, `run-mode-hooks' should delay running the hooks.")
+(defvar delayed-mode-hooks nil
+ "List of delayed mode hooks waiting to be run.")
+(make-variable-buffer-local 'delayed-mode-hooks)
+(put 'delay-mode-hooks 'permanent-local t)
+
+(defvar change-major-mode-after-body-hook nil
+ "Normal hook run in major mode functions, before the mode hooks.")
+
+(defvar after-change-major-mode-hook nil
+ "Normal hook run at the very end of major mode functions.")
+
+(defun run-mode-hooks (&rest hooks)
+ "Run mode hooks `delayed-mode-hooks' and HOOKS, or delay HOOKS.
+If the variable `delay-mode-hooks' is non-nil, does not run any hooks,
+just adds the HOOKS to the list `delayed-mode-hooks'.
+Otherwise, runs hooks in the sequence: `change-major-mode-after-body-hook',
+`delayed-mode-hooks' (in reverse order), HOOKS, and finally
+`after-change-major-mode-hook'. Major mode functions should use
+this instead of `run-hooks' when running their FOO-mode-hook."
+ (if delay-mode-hooks
+ ;; Delaying case.
+ (dolist (hook hooks)
+ (push hook delayed-mode-hooks))
+ ;; Normal case, just run the hook as before plus any delayed hooks.
+ (setq hooks (nconc (nreverse delayed-mode-hooks) hooks))
+ (setq delayed-mode-hooks nil)
+ (apply 'run-hooks (cons 'change-major-mode-after-body-hook hooks))
+ (run-hooks 'after-change-major-mode-hook)))
+
+(defmacro delay-mode-hooks (&rest body)
+ "Execute BODY, but delay any `run-mode-hooks'.
+These hooks will be executed by the first following call to
+`run-mode-hooks' that occurs outside any `delay-mode-hooks' form.
+Only affects hooks run in the current buffer."
+ (declare (debug t) (indent 0))
+ `(progn
+ (make-local-variable 'delay-mode-hooks)
+ (let ((delay-mode-hooks t))
+ ,@body)))
+
+;; PUBLIC: find if the current mode derives from another.
+
+(defun derived-mode-p (&rest modes)
+ "Non-nil if the current major mode is derived from one of MODES.
+Uses the `derived-mode-parent' property of the symbol to trace backwards."
+ (let ((parent major-mode))
+ (while (and (not (memq parent modes))
+ (setq parent (get parent 'derived-mode-parent))))
+ parent))
+
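+;; For example (the result depends on the current buffer's major mode):
+;;   (derived-mode-p 'prog-mode 'text-mode)
+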
+;;;; Minor modes.
+
+;; If a minor mode is not defined with define-minor-mode,
+;; add it here explicitly.
+;; isearch-mode is deliberately excluded, since you should
+;; not call it yourself.
+(defvar minor-mode-list '(auto-save-mode auto-fill-mode abbrev-mode
+ overwrite-mode view-mode
+ hs-minor-mode)
+ "List of all minor mode functions.")
+
+(defun add-minor-mode (toggle name &optional keymap after toggle-fun)
+ "Register a new minor mode.
+
+This is an XEmacs-compatibility function. Use `define-minor-mode' instead.
+
+TOGGLE is a symbol which is the name of a buffer-local variable that
+is toggled on or off to say whether the minor mode is active or not.
+
+NAME specifies what will appear in the mode line when the minor mode
+is active. NAME should be either a string starting with a space, or a
+symbol whose value is such a string.
+
+Optional KEYMAP is the keymap for the minor mode that will be added
+to `minor-mode-map-alist'.
+
+Optional AFTER specifies that TOGGLE should be added after AFTER
+in `minor-mode-alist'.
+
+Optional TOGGLE-FUN is an interactive function to toggle the mode.
+It defaults to (and should by convention be) TOGGLE.
+
+If TOGGLE has a non-nil `:included' property, an entry for the mode is
+included in the mode-line minor mode menu.
+If TOGGLE has a `:menu-tag', that is used for the menu item's label."
+ (unless (memq toggle minor-mode-list)
+ (push toggle minor-mode-list))
+
+ (unless toggle-fun (setq toggle-fun toggle))
+ (unless (eq toggle-fun toggle)
+ (put toggle :minor-mode-function toggle-fun))
+ ;; Add the name to the minor-mode-alist.
+ (when name
+ (let ((existing (assq toggle minor-mode-alist)))
+ (if existing
+ (setcdr existing (list name))
+ (let ((tail minor-mode-alist) found)
+ (while (and tail (not found))
+ (if (eq after (caar tail))
+ (setq found tail)
+ (setq tail (cdr tail))))
+ (if found
+ (let ((rest (cdr found)))
+ (setcdr found nil)
+ (nconc found (list (list toggle name)) rest))
+ (push (list toggle name) minor-mode-alist))))))
+ ;; Add the toggle to the minor-modes menu if requested.
+ (when (get toggle :included)
+ (define-key mode-line-mode-menu
+ (vector toggle)
+ (list 'menu-item
+ (concat
+ (or (get toggle :menu-tag)
+ (if (stringp name) name (symbol-name toggle)))
+ (let ((mode-name (if (symbolp name) (symbol-value name))))
+ (if (and (stringp mode-name) (string-match "[^ ]+" mode-name))
+ (concat " (" (match-string 0 mode-name) ")"))))
+ toggle-fun
+ :button (cons :toggle toggle))))
+
+ ;; Add the map to the minor-mode-map-alist.
+ (when keymap
+ (let ((existing (assq toggle minor-mode-map-alist)))
+ (if existing
+ (setcdr existing keymap)
+ (let ((tail minor-mode-map-alist) found)
+ (while (and tail (not found))
+ (if (eq after (caar tail))
+ (setq found tail)
+ (setq tail (cdr tail))))
+ (if found
+ (let ((rest (cdr found)))
+ (setcdr found nil)
+ (nconc found (list (cons toggle keymap)) rest))
+ (push (cons toggle keymap) minor-mode-map-alist)))))))
+
+;;;; Load history
+
+(defsubst autoloadp (object)
+ "Non-nil if OBJECT is an autoload."
+ (eq 'autoload (car-safe object)))
+
+;; (defun autoload-type (object)
+;; "Returns the type of OBJECT or `function' or `command' if the type is nil.
+;; OBJECT should be an autoload object."
+;; (when (autoloadp object)
+;; (let ((type (nth 3 object)))
+;; (cond ((null type) (if (nth 2 object) 'command 'function))
+;; ((eq 'keymap t) 'macro)
+;; (type)))))
+
+;; (defalias 'autoload-file #'cadr
+;; "Return the name of the file from which AUTOLOAD will be loaded.
+;; \n\(fn AUTOLOAD)")
+
+(defun symbol-file (symbol &optional type)
+ "Return the name of the file that defined SYMBOL.
+The value is normally an absolute file name. It can also be nil,
+if the definition is not associated with any file. If SYMBOL
+specifies an autoloaded function, the value can be a relative
+file name without extension.
+
+If TYPE is nil, then any kind of definition is acceptable. If
+TYPE is `defun', `defvar', or `defface', that specifies function
+definition, variable definition, or face definition only."
+ (if (and (or (null type) (eq type 'defun))
+ (symbolp symbol)
+ (autoloadp (symbol-function symbol)))
+ (nth 1 (symbol-function symbol))
+ (let ((files load-history)
+ file)
+ (while files
+ (if (if type
+ (if (eq type 'defvar)
+ ;; Variables are present just as their names.
+ (member symbol (cdr (car files)))
+ ;; Other types are represented as (TYPE . NAME).
+ (member (cons type symbol) (cdr (car files))))
+ ;; We accept all types, so look for variable def
+ ;; and then for any other kind.
+ (or (member symbol (cdr (car files)))
+ (rassq symbol (cdr (car files)))))
+ (setq file (car (car files)) files nil))
+ (setq files (cdr files)))
+ file)))
+
+(defun locate-library (library &optional nosuffix path interactive-call)
+ "Show the precise file name of Emacs library LIBRARY.
+LIBRARY should be a relative file name of the library, a string.
+It can omit the suffix (a.k.a. file-name extension) if NOSUFFIX is
+nil (which is the default, see below).
+This command searches the directories in `load-path' like `\\[load-library]'
+to find the file that `\\[load-library] RET LIBRARY RET' would load.
+Optional second arg NOSUFFIX non-nil means don't add suffixes `load-suffixes'
+to the specified name LIBRARY.
+
+If the optional third arg PATH is specified, that list of directories
+is used instead of `load-path'.
+
+When called from a program, the file name is normally returned as a
+string. When run interactively, the argument INTERACTIVE-CALL is t,
+and the file name is displayed in the echo area."
+ (interactive (list (completing-read "Locate library: "
+ (apply-partially
+ 'locate-file-completion-table
+ load-path (get-load-suffixes)))
+ nil nil
+ t))
+ (let ((file (locate-file library
+ (or path load-path)
+ (append (unless nosuffix (get-load-suffixes))
+ load-file-rep-suffixes))))
+ (if interactive-call
+ (if file
+ (message "Library is file %s" (abbreviate-file-name file))
+ (message "No library %s in search path" library)))
+ file))
+
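+;; Non-interactive use, for illustration:
+;;   (locate-library "subr")  ; => absolute name of subr.el or subr.elc
+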
+
+;;;; Process stuff.
+
+(defun process-lines (program &rest args)
+ "Execute PROGRAM with ARGS, returning its output as a list of lines.
+Signal an error if the program returns with a non-zero exit status."
+ (with-temp-buffer
+ (let ((status (apply 'call-process program nil (current-buffer) nil args)))
+ (unless (eq status 0)
+ (error "%s exited with status %s" program status))
+ (goto-char (point-min))
+ (let (lines)
+ (while (not (eobp))
+ (setq lines (cons (buffer-substring-no-properties
+ (line-beginning-position)
+ (line-end-position))
+ lines))
+ (forward-line 1))
+ (nreverse lines)))))
+
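+;; For example (assumes an `ls' executable is available in PATH):
+;;   (process-lines "ls" "-a")  ; => ("." ".." ...)
+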
+(defun process-live-p (process)
+ "Returns non-nil if PROCESS is alive.
+A process is considered alive if its status is `run', `open',
+`listen', `connect' or `stop'. Value is nil if PROCESS is not a
+process."
+ (and (processp process)
+ (memq (process-status process)
+ '(run open listen connect stop))))
+
+;; compatibility
+
+(make-obsolete
+ 'process-kill-without-query
+ "use `process-query-on-exit-flag' or `set-process-query-on-exit-flag'."
+ "22.1")
+(defun process-kill-without-query (process &optional _flag)
+ "Say no query needed if PROCESS is running when Emacs is exited.
+Optional second argument if non-nil says to require a query.
+Value is t if a query was formerly required."
+ (let ((old (process-query-on-exit-flag process)))
+ (set-process-query-on-exit-flag process nil)
+ old))
+
+(defun process-kill-buffer-query-function ()
+ "Ask before killing a buffer that has a running process."
+ (let ((process (get-buffer-process (current-buffer))))
+ (or (not process)
+ (not (memq (process-status process) '(run stop open listen)))
+ (not (process-query-on-exit-flag process))
+ (yes-or-no-p
+ (format "Buffer %S has a running process; kill it? "
+ (buffer-name (current-buffer)))))))
+
+(add-hook 'kill-buffer-query-functions 'process-kill-buffer-query-function)
+
+;; process plist management
+
+(defun process-get (process propname)
+ "Return the value of PROCESS' PROPNAME property.
+This is the last value stored with `(process-put PROCESS PROPNAME VALUE)'."
+ (plist-get (process-plist process) propname))
+
+(defun process-put (process propname value)
+ "Change PROCESS' PROPNAME property to VALUE.
+It can be retrieved with `(process-get PROCESS PROPNAME)'."
+ (set-process-plist process
+ (plist-put (process-plist process) propname value)))
+
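+;; Sketch of the plist round trip.  PROC stands for a live process object
+;; obtained elsewhere; the property name is an arbitrary example:
+;;   (process-put PROC 'my-tag 42)
+;;   (process-get PROC 'my-tag)  ; => 42
+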
+
+;;;; Input and display facilities.
+
+(defconst read-key-empty-map (make-sparse-keymap))
+
+(defvar read-key-delay 0.01) ;Fast enough for 100Hz repeat rate, hopefully.
+
+(defun read-key (&optional prompt)
+ "Read a key from the keyboard.
+Unlike `read-event', this will not return a raw event but will instead
+obey the input decoding and translations usually done by `read-key-sequence'.
+So escape sequences and keyboard encoding are taken into account.
+When there's an ambiguity because the key looks like the prefix of
+some sort of escape sequence, the ambiguity is resolved via `read-key-delay'."
+ ;; This overriding-terminal-local-map binding also happens to
+ ;; disable quail's input methods, so although read-key-sequence
+ ;; always inherits the input method, in practice read-key does not
+ ;; inherit the input method (at least not if it's based on quail).
+ (let ((overriding-terminal-local-map nil)
+ (overriding-local-map read-key-empty-map)
+ (echo-keystrokes 0)
+ (old-global-map (current-global-map))
+ (timer (run-with-idle-timer
+ ;; Wait long enough that Emacs has the time to receive and
+ ;; process all the raw events associated with the single-key.
+ ;; But don't wait too long, or the user may find the delay
+ ;; annoying (or keep hitting more keys which may then get
+ ;; lost or misinterpreted).
+ ;; This is only relevant for keys which Emacs perceives as
+ ;; "prefixes", such as C-x (because of the C-x 8 map in
+ ;; key-translate-table and the C-x @ map in function-key-map)
+ ;; or ESC (because of terminal escape sequences in
+ ;; input-decode-map).
+ read-key-delay t
+ (lambda ()
+ (let ((keys (this-command-keys-vector)))
+ (unless (zerop (length keys))
+ ;; `keys' is non-empty, so the user has hit at least
+ ;; one key; there's no point waiting any longer, even
+ ;; though read-key-sequence thinks we should wait
+ ;; for more input to decide how to interpret the
+ ;; current input.
+ (throw 'read-key keys)))))))
+ (unwind-protect
+ (progn
+ (use-global-map
+ (let ((map (make-sparse-keymap)))
+ ;; Don't hide the menu-bar and tool-bar entries.
+ (define-key map [menu-bar] (lookup-key global-map [menu-bar]))
+ (define-key map [tool-bar]
+ ;; This hack avoids evaluating the :filter (Bug#9922).
+ (or (cdr (assq 'tool-bar global-map))
+ (lookup-key global-map [tool-bar])))
+ map))
+ (let* ((keys
+ (catch 'read-key (read-key-sequence-vector prompt nil t)))
+ (key (aref keys 0)))
+ (if (and (> (length keys) 1)
+ (memq key '(mode-line header-line
+ left-fringe right-fringe)))
+ (aref keys 1)
+ key)))
+ (cancel-timer timer)
+ (use-global-map old-global-map))))
+
+(defvar read-passwd-map
+ ;; BEWARE: `defconst' would purecopy it, breaking the sharing with
+ ;; minibuffer-local-map along the way!
+ (let ((map (make-sparse-keymap)))
+ (set-keymap-parent map minibuffer-local-map)
+ (define-key map "\C-u" #'delete-minibuffer-contents) ;bug#12570
+ map)
+ "Keymap used while reading passwords.")
+
+(defun read-passwd (prompt &optional confirm default)
+ "Read a password, prompting with PROMPT, and return it.
+If optional CONFIRM is non-nil, read the password twice to make sure.
+Optional DEFAULT is a default password to use instead of empty input.
+
+This function echoes `.' for each character that the user types.
+You could let-bind `read-hide-char' to another hiding character, though.
+
+Once the caller uses the password, it can erase the password
+by doing (clear-string STRING)."
+ (if confirm
+ (let (success)
+ (while (not success)
+ (let ((first (read-passwd prompt nil default))
+ (second (read-passwd "Confirm password: " nil default)))
+ (if (equal first second)
+ (progn
+ (and (arrayp second) (clear-string second))
+ (setq success first))
+ (and (arrayp first) (clear-string first))
+ (and (arrayp second) (clear-string second))
+ (message "Password not repeated accurately; please start over")
+ (sit-for 1))))
+ success)
+ (let ((hide-chars-fun
+ (lambda (beg end _len)
+ (clear-this-command-keys)
+ (setq beg (min end (max (minibuffer-prompt-end)
+ beg)))
+ (dotimes (i (- end beg))
+ (put-text-property (+ i beg) (+ 1 i beg)
+ 'display (string (or read-hide-char ?.))))))
+ minibuf)
+ (minibuffer-with-setup-hook
+ (lambda ()
+ (setq minibuf (current-buffer))
+ ;; Turn off electricity.
+ (setq-local post-self-insert-hook nil)
+ (setq-local buffer-undo-list t)
+ (setq-local select-active-regions nil)
+ (use-local-map read-passwd-map)
+ (setq-local inhibit-modification-hooks nil) ;bug#15501.
+ (setq-local show-paren-mode nil) ;bug#16091.
+ (add-hook 'after-change-functions hide-chars-fun nil 'local))
+ (unwind-protect
+ (let ((enable-recursive-minibuffers t)
+ (read-hide-char (or read-hide-char ?.)))
+ (read-string prompt nil t default)) ; t = "no history"
+ (when (buffer-live-p minibuf)
+ (with-current-buffer minibuf
+ ;; Not sure why but it seems that there might be cases where the
+ ;; minibuffer is not always properly reset later on, so undo
+ ;; whatever we've done here (bug#11392).
+ (remove-hook 'after-change-functions hide-chars-fun 'local)
+ (kill-local-variable 'post-self-insert-hook)
+ ;; And of course, don't keep the sensitive data around.
+ (erase-buffer))))))))
+
+(defun read-number (prompt &optional default)
+ "Read a numeric value in the minibuffer, prompting with PROMPT.
+DEFAULT specifies a default value to return if the user just types RET.
+The value of DEFAULT is inserted into PROMPT.
+This function is used by the `interactive' code letter `n'."
+ (let ((n nil)
+ (default1 (if (consp default) (car default) default)))
+ (when default1
+ (setq prompt
+ (if (string-match "\\(\\):[ \t]*\\'" prompt)
+ (replace-match (format " (default %s)" default1) t t prompt 1)
+ (replace-regexp-in-string "[ \t]*\\'"
+ (format " (default %s) " default1)
+ prompt t t))))
+ (while
+ (progn
+ (let ((str (read-from-minibuffer
+ prompt nil nil nil nil
+ (when default
+ (if (consp default)
+ (mapcar 'number-to-string (delq nil default))
+ (number-to-string default))))))
+ (condition-case nil
+ (setq n (cond
+ ((zerop (length str)) default1)
+ ((stringp str) (read str))))
+ (error nil)))
+ (unless (numberp n)
+ (message "Please enter a number.")
+ (sit-for 1)
+ t)))
+ n))
+
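+;; For example (prompts in the minibuffer; 10 is returned if the user
+;; just hits RET):
+;;   (read-number "How many: " 10)
+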
+(defun read-char-choice (prompt chars &optional inhibit-keyboard-quit)
+ "Read and return one of CHARS, prompting for PROMPT.
+Any input that is not one of CHARS is ignored.
+
+If optional argument INHIBIT-KEYBOARD-QUIT is non-nil, ignore
+keyboard-quit events while waiting for a valid input."
+ (unless (consp chars)
+ (error "Called `read-char-choice' without valid char choices"))
+ (let (char done show-help (helpbuf " *Char Help*"))
+ (let ((cursor-in-echo-area t)
+ (executing-kbd-macro executing-kbd-macro)
+ (esc-flag nil))
+ (save-window-excursion ; in case we call help-form-show
+ (while (not done)
+ (unless (get-text-property 0 'face prompt)
+ (setq prompt (propertize prompt 'face 'minibuffer-prompt)))
+ (setq char (let ((inhibit-quit inhibit-keyboard-quit))
+ (read-key prompt)))
+ (and show-help (buffer-live-p (get-buffer helpbuf))
+ (kill-buffer helpbuf))
+ (cond
+ ((not (numberp char)))
+ ;; If caller has set help-form, that's enough.
+ ;; They don't explicitly have to add help-char to chars.
+ ((and help-form
+ (eq char help-char)
+ (setq show-help t)
+ (help-form-show)))
+ ((memq char chars)
+ (setq done t))
+ ((and executing-kbd-macro (= char -1))
+ ;; read-event returns -1 if we are in a kbd macro and
+ ;; there are no more events in the macro. Attempt to
+ ;; get an event interactively.
+ (setq executing-kbd-macro nil))
+ ((not inhibit-keyboard-quit)
+ (cond
+ ((and (null esc-flag) (eq char ?\e))
+ (setq esc-flag t))
+ ((memq char '(?\C-g ?\e))
+ (keyboard-quit))))))))
+ ;; Display the question with the answer. But without cursor-in-echo-area.
+ (message "%s%s" prompt (char-to-string char))
+ char))
+
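+;; For example, restricting input to three characters:
+;;   (read-char-choice "Continue? (y, n, or q) " '(?y ?n ?q))
+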
+(defun sit-for (seconds &optional nodisp obsolete)
+ "Redisplay, then wait for SECONDS seconds. Stop when input is available.
+SECONDS may be a floating-point value.
+\(On operating systems that do not support waiting for fractions of a
+second, floating-point values are rounded down to the nearest integer.)
+
+If optional arg NODISP is t, don't redisplay, just wait for input.
+Redisplay does not happen if input is available before it starts.
+
+Value is t if waited the full time with no input arriving, and nil otherwise.
+
+An obsolete, but still supported form is
+\(sit-for SECONDS &optional MILLISECONDS NODISP)
+where the optional arg MILLISECONDS specifies an additional wait period,
+in milliseconds; this was useful when Emacs was built without
+floating point support."
+ (declare (advertised-calling-convention (seconds &optional nodisp) "22.1"))
+ ;; This used to be implemented in C until the following discussion:
+ ;; http://lists.gnu.org/archive/html/emacs-devel/2006-07/msg00401.html
+ ;; Then it was moved here using an implementation based on an idle timer,
+ ;; which was then replaced by the use of read-event.
+ (if (numberp nodisp)
+ (setq seconds (+ seconds (* 1e-3 nodisp))
+ nodisp obsolete)
+ (if obsolete (setq nodisp obsolete)))
+ (cond
+ (noninteractive
+ (sleep-for seconds)
+ t)
+ ((input-pending-p t)
+ nil)
+ ((<= seconds 0)
+ (or nodisp (redisplay)))
+ (t
+ (or nodisp (redisplay))
+ ;; FIXME: we should not read-event here at all, because it's much too
+ ;; difficult to reliably "undo" a read-event by pushing it onto
+ ;; unread-command-events.
+ ;; For bug#14782, we need read-event to do the keyboard-coding-system
+ ;; decoding (hence non-nil as second arg under POSIX ttys).
+ ;; For bug#15614, we need read-event not to inherit-input-method.
+ ;; So we temporarily suspend input-method-function.
+ (let ((read (let ((input-method-function nil))
+ (read-event nil t seconds))))
+ (or (null read)
+ (progn
+ ;; https://lists.gnu.org/archive/html/emacs-devel/2006-10/msg00394.html
+ ;; We want `read' to appear in the next command's this-command-event
+ ;; but not in the current one.
+ ;; By pushing (cons t read), we indicate that `read' has not
+ ;; yet been recorded in this-command-keys, so it will be recorded
+ ;; next time it's read.
+ ;; And indeed the `seconds' argument to read-event correctly
+ ;; prevented recording this event in the current command's
+ ;; this-command-keys.
+ (push (cons t read) unread-command-events)
+ nil))))))
+
+;; Behind display-popup-menus-p test.
+(declare-function x-popup-dialog "menu.c" (position contents &optional header))
+
+(defun y-or-n-p (prompt)
+ "Ask user a \"y or n\" question. Return t if answer is \"y\".
+PROMPT is the string to display to ask the question. It should
+end in a space; `y-or-n-p' adds \"(y or n) \" to it.
+
+No confirmation of the answer is requested; a single character is
+enough. SPC also means yes, and DEL means no.
+
+To be precise, this function translates user input into responses
+by consulting the bindings in `query-replace-map'; see the
+documentation of that variable for more information. In this
+case, the useful bindings are `act', `skip', `recenter',
+`scroll-up', `scroll-down', and `quit'.
+An `act' response means yes, and a `skip' response means no.
+A `quit' response means to invoke `keyboard-quit'.
+If the user enters `recenter', `scroll-up', or `scroll-down'
+responses, perform the requested window recentering or scrolling
+and ask again.
+
+Under a windowing system a dialog box will be used if `last-nonmenu-event'
+is nil and `use-dialog-box' is non-nil."
+ ;; ¡Beware! When I tried to edebug this code, Emacs got into a weird state
+ ;; where all the keys were unbound (i.e. it somehow got triggered
+ ;; within read-key, apparently). I had to kill it.
+ (let ((answer 'recenter)
+ (padded (lambda (prompt &optional dialog)
+ (let ((l (length prompt)))
+ (concat prompt
+ (if (or (zerop l) (eq ?\s (aref prompt (1- l))))
+ "" " ")
+ (if dialog "" "(y or n) "))))))
+ (cond
+ (noninteractive
+ (setq prompt (funcall padded prompt))
+ (let ((temp-prompt prompt))
+ (while (not (memq answer '(act skip)))
+ (let ((str (read-string temp-prompt)))
+ (cond ((member str '("y" "Y")) (setq answer 'act))
+ ((member str '("n" "N")) (setq answer 'skip))
+ (t (setq temp-prompt (concat "Please answer y or n. "
+ prompt))))))))
+ ((and (display-popup-menus-p)
+ (listp last-nonmenu-event)
+ use-dialog-box)
+ (setq prompt (funcall padded prompt t)
+ answer (x-popup-dialog t `(,prompt ("Yes" . act) ("No" . skip)))))
+ (t
+ (setq prompt (funcall padded prompt))
+ (while
+ (let* ((scroll-actions '(recenter scroll-up scroll-down
+ scroll-other-window scroll-other-window-down))
+ (key
+ (let ((cursor-in-echo-area t))
+ (when minibuffer-auto-raise
+ (raise-frame (window-frame (minibuffer-window))))
+ (read-key (propertize (if (memq answer scroll-actions)
+ prompt
+ (concat "Please answer y or n. "
+ prompt))
+ 'face 'minibuffer-prompt)))))
+ (setq answer (lookup-key query-replace-map (vector key) t))
+ (cond
+ ((memq answer '(skip act)) nil)
+ ((eq answer 'recenter)
+ (recenter) t)
+ ((eq answer 'scroll-up)
+ (ignore-errors (scroll-up-command)) t)
+ ((eq answer 'scroll-down)
+ (ignore-errors (scroll-down-command)) t)
+ ((eq answer 'scroll-other-window)
+ (ignore-errors (scroll-other-window)) t)
+ ((eq answer 'scroll-other-window-down)
+ (ignore-errors (scroll-other-window-down)) t)
+ ((or (memq answer '(exit-prefix quit)) (eq key ?\e))
+ (signal 'quit nil) t)
+ (t t)))
+ (ding)
+ (discard-input))))
+ (let ((ret (eq answer 'act)))
+ (unless noninteractive
+ (message "%s%c" prompt (if ret ?y ?n)))
+ ret)))
+
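+;; For example:
+;;   (when (y-or-n-p "Save all buffers? ")
+;;     (save-some-buffers t))
+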
+
+;;; Atomic change groups.
+
+(defmacro atomic-change-group (&rest body)
+ "Perform BODY as an atomic change group.
+This means that if BODY exits abnormally,
+all of its changes to the current buffer are undone.
+This works regardless of whether undo is enabled in the buffer.
+
+This mechanism is transparent to ordinary use of undo;
+if undo is enabled in the buffer and BODY succeeds, the
+user can undo the change normally."
+ (declare (indent 0) (debug t))
+ (let ((handle (make-symbol "--change-group-handle--"))
+ (success (make-symbol "--change-group-success--")))
+ `(let ((,handle (prepare-change-group))
+ ;; Don't truncate any undo data in the middle of this.
+ (undo-outer-limit nil)
+ (undo-limit most-positive-fixnum)
+ (undo-strong-limit most-positive-fixnum)
+ (,success nil))
+ (unwind-protect
+ (progn
+ ;; This is inside the unwind-protect because
+ ;; it enables undo if that was disabled; we need
+ ;; to make sure that it gets disabled again.
+ (activate-change-group ,handle)
+ ,@body
+ (setq ,success t))
+ ;; Either of these functions will disable undo
+ ;; if it was disabled before.
+ (if ,success
+ (accept-change-group ,handle)
+ (cancel-change-group ,handle))))))
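+
+;; Sketch of a typical `atomic-change-group' call: if any form in the body
+;; signals an error, every change made so far in the group is undone.  The
+;; deliberate error below is purely illustrative:
+;;
+;;   (atomic-change-group
+;;     (insert "first edit\n")
+;;     (insert "second edit\n")
+;;     (error "Abort"))          ; both inserts are rolled back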
+
+(defun prepare-change-group (&optional buffer)
+ "Return a handle for the current buffer's state, for a change group.
+If you specify BUFFER, make a handle for BUFFER's state instead.
+
+Pass the handle to `activate-change-group' afterward to initiate
+the actual changes of the change group.
+
+To finish the change group, call either `accept-change-group' or
+`cancel-change-group' passing the same handle as argument. Call
+`accept-change-group' to accept the changes in the group as final;
+call `cancel-change-group' to undo them all. You should use
+`unwind-protect' to make sure the group is always finished. The call
+to `activate-change-group' should be inside the `unwind-protect'.
+Once you finish the group, don't use the handle again--don't try to
+finish the same group twice. For a simple example of correct use, see
+the source code of `atomic-change-group'.
+
+The handle records only the specified buffer. To make a multibuffer
+change group, call this function once for each buffer you want to
+cover, then use `nconc' to combine the returned values, like this:
+
+ (nconc (prepare-change-group buffer-1)
+ (prepare-change-group buffer-2))
+
+You can then activate that multibuffer change group with a single
+call to `activate-change-group' and finish it with a single call
+to `accept-change-group' or `cancel-change-group'."
+
+ (if buffer
+ (list (cons buffer (with-current-buffer buffer buffer-undo-list)))
+ (list (cons (current-buffer) buffer-undo-list))))
+
+(defun activate-change-group (handle)
+ "Activate a change group made with `prepare-change-group' (which see)."
+ (dolist (elt handle)
+ (with-current-buffer (car elt)
+ (if (eq buffer-undo-list t)
+ (setq buffer-undo-list nil)))))
+
+(defun accept-change-group (handle)
+ "Finish a change group made with `prepare-change-group' (which see).
+This finishes the change group by accepting its changes as final."
+ (dolist (elt handle)
+ (with-current-buffer (car elt)
+ (if (eq (cdr elt) t)
+ (setq buffer-undo-list t)))))
+
+(defun cancel-change-group (handle)
+ "Finish a change group made with `prepare-change-group' (which see).
+This finishes the change group by reverting all of its changes."
+ (dolist (elt handle)
+ (with-current-buffer (car elt)
+ (setq elt (cdr elt))
+ (save-restriction
+ ;; Widen buffer temporarily so if the buffer was narrowed within
+ ;; the body of `atomic-change-group' all changes can be undone.
+ (widen)
+ (let ((old-car
+ (if (consp elt) (car elt)))
+ (old-cdr
+ (if (consp elt) (cdr elt))))
+ ;; Temporarily truncate the undo log at ELT.
+ (when (consp elt)
+ (setcar elt nil) (setcdr elt nil))
+ (unless (eq last-command 'undo) (undo-start))
+ ;; Make sure there's no confusion.
+ (when (and (consp elt) (not (eq elt (last pending-undo-list))))
+ (error "Undoing to some unrelated state"))
+ ;; Undo it all.
+ (save-excursion
+ (while (listp pending-undo-list) (undo-more 1)))
+ ;; Reset the modified cons cell ELT to its original content.
+ (when (consp elt)
+ (setcar elt old-car)
+ (setcdr elt old-cdr))
+ ;; Revert the undo info to what it was when we grabbed the state.
+ (setq buffer-undo-list elt))))))
+
+;;;; Display-related functions.
+
+;; For compatibility.
+(define-obsolete-function-alias 'redraw-modeline
+ 'force-mode-line-update "24.3")
+
+(defun momentary-string-display (string pos &optional exit-char message)
+ "Momentarily display STRING in the buffer at POS.
+Display remains until next event is input.
+If POS is a marker, only its position is used; its buffer is ignored.
+Optional third arg EXIT-CHAR can be a character, event or event
+description list. EXIT-CHAR defaults to SPC. If the input is
+EXIT-CHAR it is swallowed; otherwise it is then available as
+input (as a command if nothing else).
+Display MESSAGE (optional fourth arg) in the echo area.
+If MESSAGE is nil, instructions to type EXIT-CHAR are displayed there."
+ (or exit-char (setq exit-char ?\s))
+ (let ((ol (make-overlay pos pos))
+ (str (copy-sequence string)))
+ (unwind-protect
+ (progn
+ (save-excursion
+ (overlay-put ol 'after-string str)
+ (goto-char pos)
+ ;; To avoid trouble with out-of-bounds position
+ (setq pos (point))
+ ;; If the string end is off screen, recenter now.
+ (if (<= (window-end nil t) pos)
+ (recenter (/ (window-height) 2))))
+ (message (or message "Type %s to continue editing.")
+ (single-key-description exit-char))
+ (let ((event (read-key)))
+ ;; `exit-char' can be an event, or an event description list.
+ (or (eq event exit-char)
+ (eq event (event-convert-list exit-char))
+ (setq unread-command-events
+ (append (this-single-command-raw-keys))))))
+ (delete-overlay ol))))
+
+
+;;;; Overlay operations
+
+(defun copy-overlay (o)
+ "Return a copy of overlay O."
+ (let ((o1 (if (overlay-buffer o)
+ (make-overlay (overlay-start o) (overlay-end o)
+ ;; FIXME: there's no easy way to find the
+ ;; insertion-type of the two markers.
+ (overlay-buffer o))
+ (let ((o1 (make-overlay (point-min) (point-min))))
+ (delete-overlay o1)
+ o1)))
+ (props (overlay-properties o)))
+ (while props
+ (overlay-put o1 (pop props) (pop props)))
+ o1))
+
+(defun remove-overlays (&optional beg end name val)
+ "Clear BEG and END of overlays whose property NAME has value VAL.
+Overlays might be moved and/or split.
+BEG and END default respectively to the beginning and end of buffer."
+ ;; This speeds up the loops over overlays.
+ (unless beg (setq beg (point-min)))
+ (unless end (setq end (point-max)))
+ (overlay-recenter end)
+ (if (< end beg)
+ (setq beg (prog1 end (setq end beg))))
+ (save-excursion
+ (dolist (o (overlays-in beg end))
+ (when (eq (overlay-get o name) val)
+ ;; Either push this overlay outside beg...end
+ ;; or split it to exclude beg...end
+ ;; or delete it entirely (if it is contained in beg...end).
+ (if (< (overlay-start o) beg)
+ (if (> (overlay-end o) end)
+ (progn
+ (move-overlay (copy-overlay o)
+ (overlay-start o) beg)
+ (move-overlay o end (overlay-end o)))
+ (move-overlay o (overlay-start o) beg))
+ (if (> (overlay-end o) end)
+ (move-overlay o end (overlay-end o))
+ (delete-overlay o)))))))
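+
+;; Sketch of pairing `remove-overlays' with overlays tagged by a property;
+;; the property name `my-note' is just an illustrative symbol:
+;;
+;;   (let ((ov (make-overlay (point-min) (min (point-max) 10))))
+;;     (overlay-put ov 'my-note t)
+;;     ;; Later, delete (or trim) every overlay carrying that tag:
+;;     (remove-overlays nil nil 'my-note t))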
+
+;;;; Miscellanea.
+
+(defvar suspend-hook nil
+ "Normal hook run by `suspend-emacs', before suspending.")
+
+(defvar suspend-resume-hook nil
+ "Normal hook run by `suspend-emacs', after Emacs is continued.")
+
+(defvar temp-buffer-show-hook nil
+ "Normal hook run by `with-output-to-temp-buffer' after displaying the buffer.
+When the hook runs, the temporary buffer is current, and the window it
+was displayed in is selected.")
+
+(defvar temp-buffer-setup-hook nil
+ "Normal hook run by `with-output-to-temp-buffer' at the start.
+When the hook runs, the temporary buffer is current.
+This hook is normally set up with a function to put the buffer in Help
+mode.")
+
+(defconst user-emacs-directory
+ (if (eq system-type 'ms-dos)
+ ;; MS-DOS cannot have initial dot.
+ "~/_emacs.d/"
+ "~/.emacs.d/")
+ "Directory beneath which additional per-user Emacs-specific files are placed.
+Various programs in Emacs store information in this directory.
+Note that this should end with a directory separator.
+See also `locate-user-emacs-file'.")
+
+;;;; Misc. useful functions.
+
+(defsubst buffer-narrowed-p ()
+ "Return non-nil if the current buffer is narrowed."
+ (/= (- (point-max) (point-min)) (buffer-size)))
+
+(defun find-tag-default-bounds ()
+ "Determine the boundaries of the default tag, based on text at point.
+Return a cons cell with the beginning and end of the found tag.
+If there is no plausible default, return nil."
+ (let (from to bound)
+ (when (or (progn
+ ;; Look at text around `point'.
+ (save-excursion
+ (skip-syntax-backward "w_") (setq from (point)))
+ (save-excursion
+ (skip-syntax-forward "w_") (setq to (point)))
+ (> to from))
+ ;; Look between `line-beginning-position' and `point'.
+ (save-excursion
+ (and (setq bound (line-beginning-position))
+ (skip-syntax-backward "^w_" bound)
+ (> (setq to (point)) bound)
+ (skip-syntax-backward "w_")
+ (setq from (point))))
+ ;; Look between `point' and `line-end-position'.
+ (save-excursion
+ (and (setq bound (line-end-position))
+ (skip-syntax-forward "^w_" bound)
+ (< (setq from (point)) bound)
+ (skip-syntax-forward "w_")
+ (setq to (point)))))
+ (cons from to))))
+
+(defun find-tag-default ()
+ "Determine default tag to search for, based on text at point.
+If there is no plausible default, return nil."
+ (let ((bounds (find-tag-default-bounds)))
+ (when bounds
+ (buffer-substring-no-properties (car bounds) (cdr bounds)))))
+
+(defun find-tag-default-as-regexp ()
+ "Return regexp that matches the default tag at point.
+If there is no tag at point, return nil.
+
+When in a major mode that does not provide its own
+`find-tag-default-function', return a regexp that matches the
+symbol at point exactly."
+ (let ((tag (funcall (or find-tag-default-function
+ (get major-mode 'find-tag-default-function)
+ 'find-tag-default))))
+ (if tag (regexp-quote tag))))
+
+(defun find-tag-default-as-symbol-regexp ()
+ "Return regexp that matches the default tag at point as symbol.
+If there is no tag at point, return nil.
+
+When in a major mode that does not provide its own
+`find-tag-default-function', return a regexp that matches the
+symbol at point exactly."
+ (let ((tag-regexp (find-tag-default-as-regexp)))
+ (if (and tag-regexp
+ (eq (or find-tag-default-function
+ (get major-mode 'find-tag-default-function)
+ 'find-tag-default)
+ 'find-tag-default))
+ (format "\\_<%s\\_>" tag-regexp)
+ tag-regexp)))
+
+(defun play-sound (sound)
+ "SOUND is a list of the form `(sound KEYWORD VALUE...)'.
+The following keywords are recognized:
+
+ :file FILE - read sound data from FILE. If FILE isn't an
+absolute file name, it is searched in `data-directory'.
+
+ :data DATA - read sound data from string DATA.
+
+Exactly one of :file or :data must be present.
+
+ :volume VOL - set volume to VOL. VOL must be an integer in the
+range 0..100 or a float in the range 0..1.0. If not specified,
+don't change the volume setting of the sound device.
+
+ :device DEVICE - play sound on DEVICE. If not specified,
+a system-dependent default device name is used.
+
+Note: :data and :device are currently not supported on Windows."
+ (if (fboundp 'play-sound-internal)
+ (play-sound-internal sound)
+ (error "This Emacs binary lacks sound support")))
+
+(declare-function w32-shell-dos-semantics "w32-fns" nil)
+
+(defun shell-quote-argument (argument)
+ "Quote ARGUMENT for passing as argument to an inferior shell."
+ (cond
+ ((eq system-type 'ms-dos)
+ ;; Quote using double quotes, but escape any existing quotes in
+ ;; the argument with backslashes.
+ (let ((result "")
+ (start 0)
+ end)
+ (if (or (null (string-match "[^\"]" argument))
+ (< (match-end 0) (length argument)))
+ (while (string-match "[\"]" argument start)
+ (setq end (match-beginning 0)
+ result (concat result (substring argument start end)
+ "\\" (substring argument end (1+ end)))
+ start (1+ end))))
+ (concat "\"" result (substring argument start) "\"")))
+
+ ((and (eq system-type 'windows-nt) (w32-shell-dos-semantics))
+
+ ;; First, quote argument so that CommandLineToArgvW will
+ ;; understand it. See
+ ;; http://msdn.microsoft.com/en-us/library/17w5ykft%28v=vs.85%29.aspx
+ ;; After we perform that level of quoting, escape shell
+ ;; metacharacters so that cmd won't mangle our argument. If the
+ ;; argument contains no double quote characters, we can just
+ ;; surround it with double quotes. Otherwise, we need to prefix
+ ;; each shell metacharacter with a caret.
+
+ (setq argument
+ ;; escape backslashes at end of string
+ (replace-regexp-in-string
+ "\\(\\\\*\\)$"
+ "\\1\\1"
+ ;; escape backslashes and quotes in string body
+ (replace-regexp-in-string
+ "\\(\\\\*\\)\""
+ "\\1\\1\\\\\""
+ argument)))
+
+ (if (string-match "[%!\"]" argument)
+ (concat
+ "^\""
+ (replace-regexp-in-string
+ "\\([%!()\"<>&|^]\\)"
+ "^\\1"
+ argument)
+ "^\"")
+ (concat "\"" argument "\"")))
+
+ (t
+ (if (equal argument "")
+ "''"
+ ;; Quote everything except POSIX filename characters.
+ ;; This should be safe enough even for really weird shells.
+ (replace-regexp-in-string
+ "\n" "'\n'"
+ (replace-regexp-in-string "[^-0-9a-zA-Z_./\n]" "\\\\\\&" argument))))
+ ))
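+
+;; Sketch of building a shell command with `shell-quote-argument'; the file
+;; name is a placeholder.  Quoting protects spaces and shell metacharacters:
+;;
+;;   (shell-command (concat "ls -l " (shell-quote-argument "my file;.txt")))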
+
+(defun string-or-null-p (object)
+ "Return t if OBJECT is a string or nil.
+Otherwise, return nil."
+ (or (stringp object) (null object)))
+
+(defun booleanp (object)
+ "Return t if OBJECT is one of the two canonical boolean values: t or nil.
+Otherwise, return nil."
+ (and (memq object '(nil t)) t))
+
+(defun special-form-p (object)
+ "Non-nil if and only if OBJECT is a special form."
+ (if (and (symbolp object) (fboundp object))
+ (setq object (indirect-function object t)))
+ (and (subrp object) (eq (cdr (subr-arity object)) 'unevalled)))
+
+(defun macrop (object)
+ "Non-nil if and only if OBJECT is a macro."
+ (let ((def (indirect-function object t)))
+ (when (consp def)
+ (or (eq 'macro (car def))
+ (and (autoloadp def) (memq (nth 4 def) '(macro t)))))))
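+
+;; Quick illustrations of the two predicates above:
+;;
+;;   (special-form-p 'if)     ; => non-nil  (`if' is a special form)
+;;   (macrop 'when)           ; => non-nil  (`when' is a macro)
+;;   (macrop 'car)            ; => nil      (`car' is an ordinary function)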
+
+(defun field-at-pos (pos)
+ "Return the field at position POS, taking stickiness etc into account."
+ (let ((raw-field (get-char-property (field-beginning pos) 'field)))
+ (if (eq raw-field 'boundary)
+ (get-char-property (1- (field-end pos)) 'field)
+ raw-field)))
+
+(defun sha1 (object &optional start end binary)
+ "Return the SHA1 (Secure Hash Algorithm) of an OBJECT.
+OBJECT is either a string or a buffer. Optional arguments START and
+END are character positions specifying which portion of OBJECT for
+computing the hash. If BINARY is non-nil, return a string in binary
+form."
+ (secure-hash 'sha1 object start end binary))
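+
+;; Example: hashing a literal string (the result is a 40-character hex
+;; digest unless BINARY is non-nil):
+;;
+;;   (sha1 "hello world")
+;;   ;; => "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed"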
+
+(defun function-get (f prop &optional autoload)
+ "Return the value of property PROP of function F.
+If AUTOLOAD is non-nil and F is autoloaded, try to autoload it
+in the hope that it will set PROP. If AUTOLOAD is `macro', only do it
+if it's an autoloaded macro."
+ (let ((val nil))
+ (while (and (symbolp f)
+ (null (setq val (get f prop)))
+ (fboundp f))
+ (let ((fundef (symbol-function f)))
+ (if (and autoload (autoloadp fundef)
+ (not (equal fundef
+ (autoload-do-load fundef f
+ (if (eq autoload 'macro)
+ 'macro)))))
+ nil ;Re-try `get' on the same `f'.
+ (setq f fundef))))
+ val))
+
+;;;; Support for yanking and text properties.
+;; Why here in subr.el rather than in simple.el? --Stef
+
+(defvar yank-handled-properties)
+(defvar yank-excluded-properties)
+
+(defun remove-yank-excluded-properties (start end)
+ "Process text properties between START and END, inserted for a `yank'.
+Perform the handling specified by `yank-handled-properties', then
+remove properties specified by `yank-excluded-properties'."
+ (let ((inhibit-read-only t))
+ (dolist (handler yank-handled-properties)
+ (let ((prop (car handler))
+ (fun (cdr handler))
+ (run-start start))
+ (while (< run-start end)
+ (let ((value (get-text-property run-start prop))
+ (run-end (next-single-property-change
+ run-start prop nil end)))
+ (funcall fun value run-start run-end)
+ (setq run-start run-end)))))
+ (if (eq yank-excluded-properties t)
+ (set-text-properties start end nil)
+ (remove-list-of-text-properties start end yank-excluded-properties))))
+
+(defvar yank-undo-function)
+
+(defun insert-for-yank (string)
+ "Call `insert-for-yank-1' repetitively for each `yank-handler' segment.
+
+See `insert-for-yank-1' for more details."
+ (let (to)
+ (while (setq to (next-single-property-change 0 'yank-handler string))
+ (insert-for-yank-1 (substring string 0 to))
+ (setq string (substring string to))))
+ (insert-for-yank-1 string))
+
+(defun insert-for-yank-1 (string)
+ "Insert STRING at point for the `yank' command.
+This function is like `insert', except it honors the variables
+`yank-handled-properties' and `yank-excluded-properties', and the
+`yank-handler' text property.
+
+Properties listed in `yank-handled-properties' are processed,
+then those listed in `yank-excluded-properties' are discarded.
+
+If STRING has a non-nil `yank-handler' property on its first
+character, the normal insert behavior is altered. The value of
+the `yank-handler' property must be a list of one to four
+elements, of the form (FUNCTION PARAM NOEXCLUDE UNDO).
+FUNCTION, if non-nil, should be a function of one argument, an
+ object to insert; it is called instead of `insert'.
+PARAM, if present and non-nil, replaces STRING as the argument to
+ FUNCTION or `insert'; e.g. if FUNCTION is `yank-rectangle', PARAM
+ may be a list of strings to insert as a rectangle.
+If NOEXCLUDE is present and non-nil, the normal removal of
+ `yank-excluded-properties' is not performed; instead FUNCTION is
+ responsible for the removal. This may be necessary if FUNCTION
+ adjusts point before or after inserting the object.
+UNDO, if present and non-nil, should be a function to be called
+ by `yank-pop' to undo the insertion of the current object. It is
+ given two arguments, the start and end of the region. FUNCTION
+ may set `yank-undo-function' to override UNDO."
+ (let* ((handler (and (stringp string)
+ (get-text-property 0 'yank-handler string)))
+ (param (or (nth 1 handler) string))
+ (opoint (point))
+ (inhibit-read-only inhibit-read-only)
+ end)
+
+ (setq yank-undo-function t)
+ (if (nth 0 handler) ; FUNCTION
+ (funcall (car handler) param)
+ (insert param))
+ (setq end (point))
+
+ ;; Prevent read-only properties from interfering with the
+ ;; following text property changes.
+ (setq inhibit-read-only t)
+
+ (unless (nth 2 handler) ; NOEXCLUDE
+ (remove-yank-excluded-properties opoint end))
+
+ ;; If last inserted char has properties, mark them as rear-nonsticky.
+ (if (and (> end opoint)
+ (text-properties-at (1- end)))
+ (put-text-property (1- end) end 'rear-nonsticky t))
+
+ (if (eq yank-undo-function t) ; not set by FUNCTION
+ (setq yank-undo-function (nth 3 handler))) ; UNDO
+ (if (nth 4 handler) ; COMMAND
+ (setq this-command (nth 4 handler)))))
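+
+;; Sketch of the `yank-handler' protocol described above: the handler
+;; function is called instead of `insert', here upcasing the yanked text.
+;; The lambda is purely illustrative:
+;;
+;;   (insert-for-yank
+;;    (propertize "hello" 'yank-handler
+;;                (list (lambda (s) (insert (upcase s))))))
+;;   ;; inserts "HELLO" at point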
+
+(defun insert-buffer-substring-no-properties (buffer &optional start end)
+ "Insert before point a substring of BUFFER, without text properties.
+BUFFER may be a buffer or a buffer name.
+Arguments START and END are character positions specifying the substring.
+They default to the values of (point-min) and (point-max) in BUFFER."
+ (let ((opoint (point)))
+ (insert-buffer-substring buffer start end)
+ (let ((inhibit-read-only t))
+ (set-text-properties opoint (point) nil))))
+
+(defun insert-buffer-substring-as-yank (buffer &optional start end)
+ "Insert before point a part of BUFFER, stripping some text properties.
+BUFFER may be a buffer or a buffer name.
+Arguments START and END are character positions specifying the substring.
+They default to the values of (point-min) and (point-max) in BUFFER.
+Before insertion, process text properties according to
+`yank-handled-properties' and `yank-excluded-properties'."
+ ;; Since the buffer text should not normally have yank-handler properties,
+ ;; there is no need to handle them here.
+ (let ((opoint (point)))
+ (insert-buffer-substring buffer start end)
+ (remove-yank-excluded-properties opoint (point))))
+
+(defun yank-handle-font-lock-face-property (face start end)
+ "If `font-lock-defaults' is nil, apply FACE as a `face' property.
+START and END denote the start and end of the text to act on.
+Do nothing if FACE is nil."
+ (and face
+ (null font-lock-defaults)
+ (put-text-property start end 'face face)))
+
+;; This removes `mouse-face' properties in *Help* buffer buttons:
+;; http://lists.gnu.org/archive/html/emacs-devel/2002-04/msg00648.html
+(defun yank-handle-category-property (category start end)
+ "Apply property category CATEGORY's properties between START and END."
+ (when category
+ (let ((start2 start))
+ (while (< start2 end)
+ (let ((end2 (next-property-change start2 nil end))
+ (original (text-properties-at start2)))
+ (set-text-properties start2 end2 (symbol-plist category))
+ (add-text-properties start2 end2 original)
+ (setq start2 end2))))))
+
+
+;;;; Synchronous shell commands.
+
+(defun start-process-shell-command (name buffer &rest args)
+ "Start a program in a subprocess. Return the process object for it.
+NAME is the name for the process. It is modified if necessary to make it unique.
+BUFFER is the buffer (or buffer name) to associate with the process.
+ Process output goes at the end of that buffer, unless you specify
+ an output stream or filter function to handle the output.
+ BUFFER may also be nil, meaning that this process is not associated
+ with any buffer.
+COMMAND is the shell command to run.
+
+An old calling convention accepted any number of arguments after COMMAND,
+which were just concatenated to COMMAND. This is still supported but strongly
+discouraged."
+ (declare (advertised-calling-convention (name buffer command) "23.1"))
+ ;; We used to use `exec' to replace the shell with the command,
+ ;; but that failed to handle (...) and semicolon, etc.
+ (start-process name buffer shell-file-name shell-command-switch
+ (mapconcat 'identity args " ")))
+
+(defun start-file-process-shell-command (name buffer &rest args)
+ "Start a program in a subprocess. Return the process object for it.
+Similar to `start-process-shell-command', but calls `start-file-process'."
+ (declare (advertised-calling-convention (name buffer command) "23.1"))
+ (start-file-process
+ name buffer
+ (if (file-remote-p default-directory) "/bin/sh" shell-file-name)
+ (if (file-remote-p default-directory) "-c" shell-command-switch)
+ (mapconcat 'identity args " ")))
+
+(defun call-process-shell-command (command &optional infile buffer display
+ &rest args)
+ "Execute the shell command COMMAND synchronously in separate process.
+The remaining arguments are optional.
+The program's input comes from file INFILE (nil means `/dev/null').
+Insert output in BUFFER before point; t means current buffer;
+ nil for BUFFER means discard it; 0 means discard and don't wait.
+BUFFER can also have the form (REAL-BUFFER STDERR-FILE); in that case,
+REAL-BUFFER says what to do with standard output, as above,
+while STDERR-FILE says what to do with standard error in the child.
+STDERR-FILE may be nil (discard standard error output),
+t (mix it with ordinary output), or a file name string.
+
+Fourth arg DISPLAY non-nil means redisplay buffer as output is inserted.
+Wildcards and redirection are handled as usual in the shell.
+
+If BUFFER is 0, `call-process-shell-command' returns immediately with value nil.
+Otherwise it waits for COMMAND to terminate and returns a numeric exit
+status or a signal description string.
+If you quit, the process is killed with SIGINT, or SIGKILL if you quit again.
+
+An old calling convention accepted any number of arguments after DISPLAY,
+which were just concatenated to COMMAND. This is still supported but strongly
+discouraged."
+ (declare (advertised-calling-convention
+ (command &optional infile buffer display) "24.5"))
+ ;; We used to use `exec' to replace the shell with the command,
+ ;; but that failed to handle (...) and semicolon, etc.
+ (call-process shell-file-name
+ infile buffer display
+ shell-command-switch
+ (mapconcat 'identity (cons command args) " ")))
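+
+;; Sketch of a synchronous shell call whose output lands in the current
+;; (temporary) buffer; "echo hello" is a trivial placeholder command:
+;;
+;;   (with-temp-buffer
+;;     (call-process-shell-command "echo hello" nil t)
+;;     (buffer-string))       ; => "hello\n"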
+
+(defun process-file-shell-command (command &optional infile buffer display
+ &rest args)
+ "Process files synchronously in a separate process.
+Similar to `call-process-shell-command', but calls `process-file'."
+ (declare (advertised-calling-convention
+ (command &optional infile buffer display) "24.5"))
+ (process-file
+ (if (file-remote-p default-directory) "/bin/sh" shell-file-name)
+ infile buffer display
+ (if (file-remote-p default-directory) "-c" shell-command-switch)
+ (mapconcat 'identity (cons command args) " ")))
+
+;;;; Lisp macros to do various things temporarily.
+
+(defmacro track-mouse (&rest body)
+ "Evaluate BODY with mouse movement events enabled.
+Within a `track-mouse' form, mouse motion generates input events that
+ you can read with `read-event'.
+Normally, mouse motion is ignored."
+ (declare (debug t) (indent 0))
+ `(internal--track-mouse (lambda () ,@body)))
+
+(defmacro with-current-buffer (buffer-or-name &rest body)
+ "Execute the forms in BODY with BUFFER-OR-NAME temporarily current.
+BUFFER-OR-NAME must be a buffer or the name of an existing buffer.
+The value returned is the value of the last form in BODY. See
+also `with-temp-buffer'."
+ (declare (indent 1) (debug t))
+ `(save-current-buffer
+ (set-buffer ,buffer-or-name)
+ ,@body))
+
+(defun internal--before-with-selected-window (window)
+ (let ((other-frame (window-frame window)))
+ (list window (selected-window)
+ ;; Selecting a window on another frame also changes that
+ ;; frame's frame-selected-window. We must save&restore it.
+ (unless (eq (selected-frame) other-frame)
+ (frame-selected-window other-frame))
+ ;; Also remember the top-frame if on ttys.
+ (unless (eq (selected-frame) other-frame)
+ (tty-top-frame other-frame)))))
+
+(defun internal--after-with-selected-window (state)
+ ;; First reset frame-selected-window.
+ (when (window-live-p (nth 2 state))
+ ;; We don't use set-frame-selected-window because it does not
+ ;; pass the `norecord' argument to Fselect_window.
+ (select-window (nth 2 state) 'norecord)
+ (and (frame-live-p (nth 3 state))
+ (not (eq (tty-top-frame) (nth 3 state)))
+ (select-frame (nth 3 state) 'norecord)))
+ ;; Then reset the actual selected-window.
+ (when (window-live-p (nth 1 state))
+ (select-window (nth 1 state) 'norecord)))
+
+(defmacro with-selected-window (window &rest body)
+ "Execute the forms in BODY with WINDOW as the selected window.
+The value returned is the value of the last form in BODY.
+
+This macro saves and restores the selected window, as well as the
+selected window of each frame. It does not change the order of
+recently selected windows. If the previously selected window of
+some frame is no longer live at the end of BODY, that frame's
+selected window is left alone. If the selected window is no
+longer live, then whatever window is selected at the end of BODY
+remains selected.
+
+This macro uses `save-current-buffer' to save and restore the
+current buffer, since otherwise its normal operation could
+potentially make a different buffer current. It does not alter
+the buffer list ordering."
+ (declare (indent 1) (debug t))
+ `(let ((save-selected-window--state
+ (internal--before-with-selected-window ,window)))
+ (save-current-buffer
+ (unwind-protect
+ (progn (select-window (car save-selected-window--state) 'norecord)
+ ,@body)
+ (internal--after-with-selected-window save-selected-window--state)))))
+
+(defmacro with-selected-frame (frame &rest body)
+ "Execute the forms in BODY with FRAME as the selected frame.
+The value returned is the value of the last form in BODY.
+
+This macro saves and restores the selected frame, and changes the
+order of neither the recently selected windows nor the buffers in
+the buffer list."
+ (declare (indent 1) (debug t))
+ (let ((old-frame (make-symbol "old-frame"))
+ (old-buffer (make-symbol "old-buffer")))
+ `(let ((,old-frame (selected-frame))
+ (,old-buffer (current-buffer)))
+ (unwind-protect
+ (progn (select-frame ,frame 'norecord)
+ ,@body)
+ (when (frame-live-p ,old-frame)
+ (select-frame ,old-frame 'norecord))
+ (when (buffer-live-p ,old-buffer)
+ (set-buffer ,old-buffer))))))
+
+(defmacro save-window-excursion (&rest body)
+ "Execute BODY, then restore previous window configuration.
+This macro saves the window configuration on the selected frame,
+executes BODY, then calls `set-window-configuration' to restore
+the saved window configuration. The return value is the last
+form in BODY. The window configuration is also restored if BODY
+exits nonlocally.
+
+BEWARE: Most uses of this macro introduce bugs.
+E.g. it should not be used to try and prevent some code from opening
+a new window, since that window may sometimes appear in another frame,
+in which case `save-window-excursion' cannot help."
+ (declare (indent 0) (debug t))
+ (let ((c (make-symbol "wconfig")))
+ `(let ((,c (current-window-configuration)))
+ (unwind-protect (progn ,@body)
+ (set-window-configuration ,c)))))
+
+(defun internal-temp-output-buffer-show (buffer)
+ "Internal function for `with-output-to-temp-buffer'."
+ (with-current-buffer buffer
+ (set-buffer-modified-p nil)
+ (goto-char (point-min)))
+
+ (if temp-buffer-show-function
+ (funcall temp-buffer-show-function buffer)
+ (with-current-buffer buffer
+ (let* ((window
+ (let ((window-combination-limit
+ ;; When `window-combination-limit' equals
+ ;; `temp-buffer' or `temp-buffer-resize' and
+ ;; `temp-buffer-resize-mode' is enabled in this
+ ;; buffer bind it to t so resizing steals space
+ ;; preferably from the window that was split.
+ (if (or (eq window-combination-limit 'temp-buffer)
+ (and (eq window-combination-limit
+ 'temp-buffer-resize)
+ temp-buffer-resize-mode))
+ t
+ window-combination-limit)))
+ (display-buffer buffer)))
+ (frame (and window (window-frame window))))
+ (when window
+ (unless (eq frame (selected-frame))
+ (make-frame-visible frame))
+ (setq minibuffer-scroll-window window)
+ (set-window-hscroll window 0)
+ ;; Don't try this with NOFORCE non-nil!
+ (set-window-start window (point-min) t)
+ ;; This should not be necessary.
+ (set-window-point window (point-min))
+ ;; Run `temp-buffer-show-hook', with the chosen window selected.
+ (with-selected-window window
+ (run-hooks 'temp-buffer-show-hook))))))
+ ;; Return nil.
+ nil)
+
+;; Doc is very similar to with-temp-buffer-window.
+(defmacro with-output-to-temp-buffer (bufname &rest body)
+ "Bind `standard-output' to buffer BUFNAME, eval BODY, then show that buffer.
+
+This construct makes buffer BUFNAME empty before running BODY.
+It does not make the buffer current for BODY.
+Instead it binds `standard-output' to that buffer, so that output
+generated with `prin1' and similar functions in BODY goes into
+the buffer.
+
+At the end of BODY, this marks buffer BUFNAME unmodified and displays
+it in a window, but does not select it. The normal way to do this is
+by calling `display-buffer', then running `temp-buffer-show-hook'.
+However, if `temp-buffer-show-function' is non-nil, it calls that
+function instead (and does not run `temp-buffer-show-hook'). The
+function gets one argument, the buffer to display.
+
+The return value of `with-output-to-temp-buffer' is the value of the
+last form in BODY. If BODY does not finish normally, the buffer
+BUFNAME is not displayed.
+
+This runs the hook `temp-buffer-setup-hook' before BODY,
+with the buffer BUFNAME temporarily current. It runs the hook
+`temp-buffer-show-hook' after displaying buffer BUFNAME, with that
+buffer temporarily current, and the window that was used to display it
+temporarily selected. But it doesn't run `temp-buffer-show-hook'
+if it uses `temp-buffer-show-function'.
+
+By default, the setup hook puts the buffer into Help mode before running BODY.
+If BODY does not change the major mode, the show hook makes the buffer
+read-only, and scans it for function and variable names to make them into
+clickable cross-references.
+
+See the related form `with-temp-buffer-window'."
+ (declare (debug t))
+ (let ((old-dir (make-symbol "old-dir"))
+ (buf (make-symbol "buf")))
+ `(let* ((,old-dir default-directory)
+ (,buf
+ (with-current-buffer (get-buffer-create ,bufname)
+ (prog1 (current-buffer)
+ (kill-all-local-variables)
+ ;; FIXME: delete_all_overlays
+ (setq default-directory ,old-dir)
+ (setq buffer-read-only nil)
+ (setq buffer-file-name nil)
+ (setq buffer-undo-list t)
+ (let ((inhibit-read-only t)
+ (inhibit-modification-hooks t))
+ (erase-buffer)
+ (run-hooks 'temp-buffer-setup-hook)))))
+ (standard-output ,buf))
+ (prog1 (progn ,@body)
+ (internal-temp-output-buffer-show ,buf)))))
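+
+;; Sketch of `with-output-to-temp-buffer'; the buffer name "*demo*" is a
+;; placeholder.  `prin1'/`princ' output goes to that buffer, which is then
+;; displayed but not selected:
+;;
+;;   (with-output-to-temp-buffer "*demo*"
+;;     (prin1 '(1 2 3))
+;;     (terpri)
+;;     (princ "done"))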
+
+(defmacro with-temp-file (file &rest body)
+ "Create a new buffer, evaluate BODY there, and write the buffer to FILE.
+The value returned is the value of the last form in BODY.
+See also `with-temp-buffer'."
+ (declare (indent 1) (debug t))
+ (let ((temp-file (make-symbol "temp-file"))
+ (temp-buffer (make-symbol "temp-buffer")))
+ `(let ((,temp-file ,file)
+ (,temp-buffer
+ (get-buffer-create (generate-new-buffer-name " *temp file*"))))
+ (unwind-protect
+ (prog1
+ (with-current-buffer ,temp-buffer
+ ,@body)
+ (with-current-buffer ,temp-buffer
+ (write-region nil nil ,temp-file nil 0)))
+ (and (buffer-name ,temp-buffer)
+ (kill-buffer ,temp-buffer))))))
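+
+;; Sketch of writing a file with `with-temp-file'; the path is a placeholder:
+;;
+;;   (with-temp-file "/tmp/with-temp-file-demo.txt"
+;;     (insert "line 1\n")
+;;     (insert "line 2\n"))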
+
+(defmacro with-temp-message (message &rest body)
+ "Display MESSAGE temporarily if non-nil while BODY is evaluated.
+The original message is restored to the echo area after BODY has finished.
+The value returned is the value of the last form in BODY.
+MESSAGE is written to the message log buffer if `message-log-max' is non-nil.
+If MESSAGE is nil, the echo area and message log buffer are unchanged.
+Use a MESSAGE of \"\" to temporarily clear the echo area."
+ (declare (debug t) (indent 1))
+ (let ((current-message (make-symbol "current-message"))
+ (temp-message (make-symbol "with-temp-message")))
+ `(let ((,temp-message ,message)
+ (,current-message))
+ (unwind-protect
+ (progn
+ (when ,temp-message
+ (setq ,current-message (current-message))
+ (message "%s" ,temp-message))
+ ,@body)
+ (and ,temp-message
+ (if ,current-message
+ (message "%s" ,current-message)
+ (message nil)))))))
+
+(defmacro with-temp-buffer (&rest body)
+ "Create a temporary buffer, and evaluate BODY there like `progn'.
+See also `with-temp-file' and `with-output-to-string'."
+ (declare (indent 0) (debug t))
+ (let ((temp-buffer (make-symbol "temp-buffer")))
+ `(let ((,temp-buffer (generate-new-buffer " *temp*")))
+ ;; FIXME: kill-buffer can change current-buffer in some odd cases.
+ (with-current-buffer ,temp-buffer
+ (unwind-protect
+ (progn ,@body)
+ (and (buffer-name ,temp-buffer)
+ (kill-buffer ,temp-buffer)))))))
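+
+;; Sketch: using a scratch buffer for string surgery and discarding it:
+;;
+;;   (with-temp-buffer
+;;     (insert "hello")
+;;     (upcase (buffer-string)))   ; => "HELLO"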
+
+(defmacro with-silent-modifications (&rest body)
+ "Execute BODY, pretending it does not modify the buffer.
+If BODY performs real modifications to the buffer's text, other
+than cosmetic ones, undo data may become corrupted.
+
+This macro will run BODY normally, but doesn't count its buffer
+modifications as being buffer modifications. This affects things
+like `buffer-modified-p', checking whether the file is locked by
+someone else, running buffer modification hooks, and other things
+of that nature.
+
+Typically used around modifications of text-properties which do
+not really affect the buffer's content."
+ (declare (debug t) (indent 0))
+ (let ((modified (make-symbol "modified")))
+ `(let* ((,modified (buffer-modified-p))
+ (buffer-undo-list t)
+ (inhibit-read-only t)
+ (inhibit-modification-hooks t))
+ (unwind-protect
+ (progn
+ ,@body)
+ (unless ,modified
+ (restore-buffer-modified-p nil))))))
+
+(defmacro with-output-to-string (&rest body)
+ "Execute BODY, return the text it sent to `standard-output', as a string."
+ (declare (indent 0) (debug t))
+ `(let ((standard-output
+ (get-buffer-create (generate-new-buffer-name " *string-output*"))))
+ (unwind-protect
+ (progn
+ (let ((standard-output standard-output))
+ ,@body)
+ (with-current-buffer standard-output
+ (buffer-string)))
+ (kill-buffer standard-output))))
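+
+;; Sketch: capturing `standard-output' as a string:
+;;
+;;   (with-output-to-string
+;;     (princ "pi is about ")
+;;     (prin1 3.14))
+;;   ;; => "pi is about 3.14"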
+
+(defmacro with-local-quit (&rest body)
+ "Execute BODY, allowing quits to terminate BODY but not escape further.
+When a quit terminates BODY, `with-local-quit' returns nil but
+requests another quit. That quit will be processed as soon as quitting
+is allowed once again. (Immediately, if `inhibit-quit' is nil.)"
+ (declare (debug t) (indent 0))
+ `(condition-case nil
+ (let ((inhibit-quit nil))
+ ,@body)
+ (quit (setq quit-flag t)
+ ;; This call is to give a chance to handle quit-flag
+ ;; in case inhibit-quit is nil.
+ ;; Without this, it will not be handled until the next function
+ ;; call, and that might allow it to exit through a condition-case
+ ;; that intends to handle the quit signal next time.
+ (eval '(ignore nil)))))
+
+(defmacro while-no-input (&rest body)
+ "Execute BODY only as long as there's no pending input.
+If input arrives, that ends the execution of BODY,
+and `while-no-input' returns t. Quitting makes it return nil.
+If BODY finishes, `while-no-input' returns whatever value BODY produced."
+ (declare (debug t) (indent 0))
+ (let ((catch-sym (make-symbol "input")))
+ `(with-local-quit
+ (catch ',catch-sym
+ (let ((throw-on-input ',catch-sym))
+ (or (input-pending-p)
+ (progn ,@body)))))))
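+
+;; Sketch of `while-no-input': the (cheap, illustrative) loop below is
+;; abandoned as soon as the user types something, in which case the form
+;; returns t instead of the body's value:
+;;
+;;   (while-no-input
+;;     (dotimes (_ 100000))
+;;     'finished)                ; => `finished' if no input arrived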
+
+(defmacro condition-case-unless-debug (var bodyform &rest handlers)
+ "Like `condition-case' except that it does not prevent debugging.
+More specifically, if `debug-on-error' is set, then the debugger will be
+invoked even if this catches the signal."
+ (declare (debug condition-case) (indent 2))
+ `(condition-case ,var
+ ,bodyform
+ ,@(mapcar (lambda (handler)
+ `((debug ,@(if (listp (car handler)) (car handler)
+ (list (car handler))))
+ ,@(cdr handler)))
+ handlers)))
+
+(define-obsolete-function-alias 'condition-case-no-debug
+ 'condition-case-unless-debug "24.1")
+
+(defmacro with-demoted-errors (format &rest body)
+ "Run BODY and demote any errors to simple messages.
+FORMAT is a string passed to `message' to format any error message.
+It should contain a single %-sequence; e.g., \"Error: %S\".
+
+If `debug-on-error' is non-nil, run BODY without catching its errors.
+This is to be used around code which is not expected to signal an error
+but which should be robust in the unexpected case that an error is signaled.
+
+For backward compatibility, if FORMAT is not a constant string, it
+is assumed to be part of BODY, in which case the message format
+used is \"Error: %S\"."
+ (declare (debug t) (indent 1))
+ (let ((err (make-symbol "err"))
+ (format (if (and (stringp format) body) format
+ (prog1 "Error: %S"
+ (if format (push format body))))))
+ `(condition-case-unless-debug ,err
+ ,(macroexp-progn body)
+ (error (message ,format ,err) nil))))
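+
+;; Sketch of `with-demoted-errors': the error below is reported as a
+;; message rather than signaled, and the form returns nil:
+;;
+;;   (with-demoted-errors "Error in demo code: %S"
+;;     (error "boom"))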
+
+(defmacro combine-after-change-calls (&rest body)
+ "Execute BODY, but don't call the after-change functions till the end.
+If BODY makes changes in the buffer, they are recorded
+and the functions on `after-change-functions' are called several times
+when BODY is finished.
+The return value is the value of the last form in BODY.
+
+If `before-change-functions' is non-nil, then calls to the after-change
+functions can't be deferred, so in that case this macro has no effect.
+
+Do not alter `after-change-functions' or `before-change-functions'
+in BODY."
+ (declare (indent 0) (debug t))
+ `(unwind-protect
+ (let ((combine-after-change-calls t))
+ . ,body)
+ (combine-after-change-execute)))
+
+(defmacro with-case-table (table &rest body)
+ "Execute the forms in BODY with TABLE as the current case table.
+The value returned is the value of the last form in BODY."
+ (declare (indent 1) (debug t))
+ (let ((old-case-table (make-symbol "table"))
+ (old-buffer (make-symbol "buffer")))
+ `(let ((,old-case-table (current-case-table))
+ (,old-buffer (current-buffer)))
+ (unwind-protect
+ (progn (set-case-table ,table)
+ ,@body)
+ (with-current-buffer ,old-buffer
+ (set-case-table ,old-case-table))))))
+
+(defmacro with-file-modes (modes &rest body)
+ "Execute BODY with default file permissions temporarily set to MODES.
+MODES is as for `set-default-file-modes'."
+ (declare (indent 1) (debug t))
+ (let ((umask (make-symbol "umask")))
+ `(let ((,umask (default-file-modes)))
+ (unwind-protect
+ (progn
+ (set-default-file-modes ,modes)
+ ,@body)
+ (set-default-file-modes ,umask)))))
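+
+;; Sketch: create a private directory regardless of the ambient umask;
+;; the path is a placeholder:
+;;
+;;   (with-file-modes #o700
+;;     (make-directory "/tmp/private-demo" t))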
+
+
+;;; Matching and match data.
+
+(defvar save-match-data-internal)
+
+;; We use save-match-data-internal as the local variable because
+;; that works ok in practice (people should not use that variable elsewhere).
+;; We used to use an uninterned symbol; the compiler handles that properly
+;; now, but it generates slower code.
+(defmacro save-match-data (&rest body)
+ "Execute the BODY forms, restoring the global value of the match data.
+The value returned is the value of the last form in BODY."
+ ;; It is better not to use backquote here,
+ ;; because that makes a bootstrapping problem
+ ;; if you need to recompile all the Lisp files using interpreted code.
+ (declare (indent 0) (debug t))
+ (list 'let
+ '((save-match-data-internal (match-data)))
+ (list 'unwind-protect
+ (cons 'progn body)
+ ;; It is safe to free (evaporate) markers immediately here,
+ ;; as Lisp programs should not copy from save-match-data-internal.
+ '(set-match-data save-match-data-internal 'evaporate))))
+
+(defun match-string (num &optional string)
+ "Return string of text matched by last search.
+NUM specifies which parenthesized expression in the last regexp.
+ Value is nil if NUMth pair didn't match, or there were fewer than NUM pairs.
+Zero means the entire text matched by the whole regexp or whole string.
+STRING should be given if the last search was by `string-match' on STRING.
+If STRING is nil, the current buffer should be the same buffer
+the search/match was performed in."
+ (if (match-beginning num)
+ (if string
+ (substring string (match-beginning num) (match-end num))
+ (buffer-substring (match-beginning num) (match-end num)))))
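+
+;; Sketch of `string-match' plus `match-string' (STRING must be passed
+;; again, since only match positions are recorded):
+;;
+;;   (let ((s "foo-42"))
+;;     (when (string-match "\\([a-z]+\\)-\\([0-9]+\\)" s)
+;;       (list (match-string 1 s) (match-string 2 s))))
+;;   ;; => ("foo" "42")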
+
+(defun match-string-no-properties (num &optional string)
+ "Return string of text matched by last search, without text properties.
+NUM specifies which parenthesized expression in the last regexp.
+ Value is nil if NUMth pair didn't match, or there were fewer than NUM pairs.
+Zero means the entire text matched by the whole regexp or whole string.
+STRING should be given if the last search was by `string-match' on STRING.
+If STRING is nil, the current buffer should be the same buffer
+the search/match was performed in."
+ (if (match-beginning num)
+ (if string
+ (substring-no-properties string (match-beginning num)
+ (match-end num))
+ (buffer-substring-no-properties (match-beginning num)
+ (match-end num)))))
+
+
+(defun match-substitute-replacement (replacement
+ &optional fixedcase literal string subexp)
+ "Return REPLACEMENT as it will be inserted by `replace-match'.
+In other words, all back-references in the form `\\&' and `\\N'
+are substituted with actual strings matched by the last search.
+Optional FIXEDCASE, LITERAL, STRING and SUBEXP have the same
+meaning as for `replace-match'."
+ (let ((match (match-string 0 string)))
+ (save-match-data
+ (set-match-data (mapcar (lambda (x)
+ (if (numberp x)
+ (- x (match-beginning 0))
+ x))
+ (match-data t)))
+ (replace-match replacement fixedcase literal match subexp))))
+
+
+(defun looking-back (regexp &optional limit greedy)
+ "Return non-nil if text before point matches regular expression REGEXP.
+Like `looking-at' except matches before point, and is slower.
+LIMIT if non-nil speeds up the search by specifying a minimum
+starting position, to avoid checking matches that would start
+before LIMIT.
+
+If GREEDY is non-nil, extend the match backwards as far as
+possible, stopping when a single additional previous character
+cannot be part of a match for REGEXP. When the match is
+extended, its starting position is allowed to occur before
+LIMIT.
+
+As a general recommendation, try to avoid using `looking-back'
+wherever possible, since it is slow."
+ (let ((start (point))
+ (pos
+ (save-excursion
+ (and (re-search-backward (concat "\\(?:" regexp "\\)\\=") limit t)
+ (point)))))
+ (if (and greedy pos)
+ (save-restriction
+ (narrow-to-region (point-min) start)
+ (while (and (> pos (point-min))
+ (save-excursion
+ (goto-char pos)
+ (backward-char 1)
+ (looking-at (concat "\\(?:" regexp "\\)\\'"))))
+ (setq pos (1- pos)))
+ (save-excursion
+ (goto-char pos)
+ (looking-at (concat "\\(?:" regexp "\\)\\'")))))
+ (not (null pos))))
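+
+;; Sketch of `looking-back' in a throwaway buffer; LIMIT bounds how far
+;; back the (slow) search may start:
+;;
+;;   (with-temp-buffer
+;;     (insert "foo-bar")
+;;     (looking-back "-bar" (- (point) 4)))   ; => t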
+
+(defsubst looking-at-p (regexp)
+ "\
+Same as `looking-at' except this function does not change the match data."
+ (let ((inhibit-changing-match-data t))
+ (looking-at regexp)))
+
+(defsubst string-match-p (regexp string &optional start)
+ "\
+Same as `string-match' except this function does not change the match data."
+ (let ((inhibit-changing-match-data t))
+ (string-match regexp string start)))
+
+(defun subregexp-context-p (regexp pos &optional start)
+ "Return non-nil if POS is in a normal subregexp context in REGEXP.
+A subregexp context is one where a sub-regexp can appear.
+A non-subregexp context is for example within brackets, or within a
+repetition bounds operator `\\=\\{...\\}', or right after a `\\'.
+If START is non-nil, it should be a position in REGEXP, smaller
+than POS, and known to be in a subregexp context."
+ ;; Here's one possible implementation, with the great benefit that it
+ ;; reuses the regexp-matcher's own parser, so it understands all the
+ ;; details of the syntax. A disadvantage is that it needs to match the
+ ;; error string.
+ (condition-case err
+ (progn
+ (string-match (substring regexp (or start 0) pos) "")
+ t)
+ (invalid-regexp
+ (not (member (cadr err) '("Unmatched [ or [^"
+ "Unmatched \\{"
+ "Trailing backslash")))))
+ ;; An alternative implementation:
+ ;; (defconst re-context-re
+ ;; (let* ((harmless-ch "[^\\[]")
+ ;; (harmless-esc "\\\\[^{]")
+ ;; (class-harmless-ch "[^][]")
+ ;; (class-lb-harmless "[^]:]")
+ ;; (class-lb-colon-maybe-charclass ":\\([a-z]+:]\\)?")
+ ;; (class-lb (concat "\\[\\(" class-lb-harmless
+ ;; "\\|" class-lb-colon-maybe-charclass "\\)"))
+ ;; (class
+ ;; (concat "\\[^?]?"
+ ;; "\\(" class-harmless-ch
+ ;; "\\|" class-lb "\\)*"
+ ;; "\\[?]")) ; special handling for bare [ at end of re
+ ;; (braces "\\\\{[0-9,]+\\\\}"))
+ ;; (concat "\\`\\(" harmless-ch "\\|" harmless-esc
+ ;; "\\|" class "\\|" braces "\\)*\\'"))
+ ;; "Matches any prefix that corresponds to a normal subregexp context.")
+ ;; (string-match re-context-re (substring regexp (or start 0) pos))
+ )
+
+;;;; split-string
+
+(defconst split-string-default-separators "[ \f\t\n\r\v]+"
+ "The default value of separators for `split-string'.
+
+A regexp matching strings of whitespace. May be locale-dependent
+\(as yet unimplemented). Should not match non-breaking spaces.
+
+Warning: binding this to a different value and using it as default is
+likely to have undesired semantics.")
+
+;; The specification says that if both SEPARATORS and OMIT-NULLS are
+;; defaulted, OMIT-NULLS should be treated as t. Simplifying the logical
+;; expression leads to the equivalent implementation that if SEPARATORS
+;; is defaulted, OMIT-NULLS is treated as t.
+(defun split-string (string &optional separators omit-nulls trim)
+ "Split STRING into substrings bounded by matches for SEPARATORS.
+
+The beginning and end of STRING, and each match for SEPARATORS, are
+splitting points. The substrings matching SEPARATORS are removed, and
+the substrings between the splitting points are collected as a list,
+which is returned.
+
+If SEPARATORS is non-nil, it should be a regular expression matching text
+which separates, but is not part of, the substrings. If nil it defaults to
+`split-string-default-separators', normally \"[ \\f\\t\\n\\r\\v]+\", and
+OMIT-NULLS is forced to t.
+
+If OMIT-NULLS is t, zero-length substrings are omitted from the list (so
+that for the default value of SEPARATORS leading and trailing whitespace
+are effectively trimmed). If nil, all zero-length substrings are retained,
+which correctly parses CSV format, for example.
+
+If TRIM is non-nil, it should be a regular expression to match
+text to trim from the beginning and end of each substring. If trimming
+makes the substring empty, it is treated as null.
+
+If you want to trim whitespace from the substrings, the reliably correct
+way is using TRIM. Making SEPARATORS match that whitespace gives incorrect
+results when there is whitespace at the start or end of STRING. If you
+see such calls to `split-string', please fix them.
+
+Note that the effect of `(split-string STRING)' is the same as
+`(split-string STRING split-string-default-separators t)'. In the rare
+case that you wish to retain zero-length substrings when splitting on
+whitespace, use `(split-string STRING split-string-default-separators)'.
+
+Modifies the match data; use `save-match-data' if necessary."
+ (let* ((keep-nulls (not (if separators omit-nulls t)))
+ (rexp (or separators split-string-default-separators))
+ (start 0)
+ this-start this-end
+ notfirst
+ (list nil)
+ (push-one
+ ;; Push the substring in range THIS-START to THIS-END
+ ;; onto LIST, trimming it and perhaps discarding it.
+ (lambda ()
+ (when trim
+ ;; Discard the trim from start of this substring.
+ (let ((tem (string-match trim string this-start)))
+ (and (eq tem this-start)
+ (setq this-start (match-end 0)))))
+
+ (when (or keep-nulls (< this-start this-end))
+ (let ((this (substring string this-start this-end)))
+
+ ;; Discard the trim from end of this substring.
+ (when trim
+ (let ((tem (string-match (concat trim "\\'") this 0)))
+ (and tem (< tem (length this))
+ (setq this (substring this 0 tem)))))
+
+ ;; Trimming could make it empty; check again.
+ (when (or keep-nulls (> (length this) 0))
+ (push this list)))))))
+
+ (while (and (string-match rexp string
+ (if (and notfirst
+ (= start (match-beginning 0))
+ (< start (length string)))
+ (1+ start) start))
+ (< start (length string)))
+ (setq notfirst t)
+ (setq this-start start this-end (match-beginning 0)
+ start (match-end 0))
+
+ (funcall push-one))
+
+ ;; Handle the substring at the end of STRING.
+ (setq this-start start this-end (length string))
+ (funcall push-one)
+
+ (nreverse list)))
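+
+;; A few illustrative calls covering the SEPARATORS/OMIT-NULLS/TRIM cases
+;; described above:
+;;
+;;   (split-string "  two   words ")          ; => ("two" "words")
+;;   (split-string "a,b,,c" ",")              ; => ("a" "b" "" "c")
+;;   (split-string " a , b " "," t "[ ]+")    ; => ("a" "b")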
+
+(defun combine-and-quote-strings (strings &optional separator)
+ "Concatenate the STRINGS, adding the SEPARATOR (default \" \").
+This tries to quote the strings to avoid ambiguity such that
+ (split-string-and-unquote (combine-and-quote-strings strs)) == strs
+Only some SEPARATORs will work properly."
+ (let* ((sep (or separator " "))
+ (re (concat "[\\\"]" "\\|" (regexp-quote sep))))
+ (mapconcat
+ (lambda (str)
+ (if (string-match re str)
+ (concat "\"" (replace-regexp-in-string "[\\\"]" "\\\\\\&" str) "\"")
+ str))
+ strings sep)))
+
+(defun split-string-and-unquote (string &optional separator)
+ "Split the STRING into a list of strings.
+It understands Emacs Lisp quoting within STRING, such that
+ (split-string-and-unquote (combine-and-quote-strings strs)) == strs
+The SEPARATOR regexp defaults to \"\\s-+\"."
+ (let ((sep (or separator "\\s-+"))
+ (i (string-match "\"" string)))
+ (if (null i)
+ (split-string string sep t) ; no quoting: easy
+ (append (unless (eq i 0) (split-string (substring string 0 i) sep t))
+ (let ((rfs (read-from-string string i)))
+ (cons (car rfs)
+ (split-string-and-unquote (substring string (cdr rfs))
+ sep)))))))
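+
+;; Round-trip sketch for the two helpers above; the argument list is a
+;; made-up compiler invocation:
+;;
+;;   (combine-and-quote-strings '("gcc" "-o" "my prog" "main.c"))
+;;   ;; => "gcc -o \"my prog\" main.c"
+;;   (split-string-and-unquote "gcc -o \"my prog\" main.c")
+;;   ;; => ("gcc" "-o" "my prog" "main.c")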
+
+
+;;;; Replacement in strings.
+
+(defun subst-char-in-string (fromchar tochar string &optional inplace)
+ "Replace FROMCHAR with TOCHAR in STRING each time it occurs.
+Unless optional argument INPLACE is non-nil, return a new string."
+ (let ((i (length string))
+ (newstr (if inplace string (copy-sequence string))))
+ (while (> i 0)
+ (setq i (1- i))
+ (if (eq (aref newstr i) fromchar)
+ (aset newstr i tochar)))
+ newstr))
+
+(defun replace-regexp-in-string (regexp rep string &optional
+ fixedcase literal subexp start)
+ "Replace all matches for REGEXP with REP in STRING.
+
+Return a new string containing the replacements.
+
+Optional arguments FIXEDCASE, LITERAL and SUBEXP are like the
+arguments with the same names of function `replace-match'. If START
+is non-nil, start replacements at that index in STRING.
+
+REP is either a string used as the NEWTEXT arg of `replace-match' or a
+function. If it is a function, it is called with the actual text of each
+match, and its value is used as the replacement text. When REP is called,
+the match data are the result of matching REGEXP against a substring
+of STRING.
+
+To replace only the first match (if any), make REGEXP match up to \\'
+and replace a sub-expression, e.g.
+ (replace-regexp-in-string \"\\\\(foo\\\\).*\\\\'\" \"bar\" \" foo foo\" nil nil 1)
+ => \" bar foo\""
+
+ ;; To avoid excessive consing from multiple matches in long strings,
+ ;; don't just call `replace-match' continually. Walk down the
+ ;; string looking for matches of REGEXP and building up a (reversed)
+ ;; list MATCHES. This comprises segments of STRING which weren't
+ ;; matched interspersed with replacements for segments that were.
+ ;; [For a `large' number of replacements it's more efficient to
+ ;; operate in a temporary buffer; we can't tell from the function's
+ ;; args whether to choose the buffer-based implementation, though it
+ ;; might be reasonable to do so for long enough STRING.]
+ (let ((l (length string))
+ (start (or start 0))
+ matches str mb me)
+ (save-match-data
+ (while (and (< start l) (string-match regexp string start))
+ (setq mb (match-beginning 0)
+ me (match-end 0))
+ ;; If we matched the empty string, make sure we advance by one char
+ (when (= me mb) (setq me (min l (1+ mb))))
+ ;; Generate a replacement for the matched substring.
+ ;; Operate only on the substring to minimize string consing.
+ ;; Set up match data for the substring for replacement;
+ ;; presumably this is likely to be faster than munging the
+ ;; match data directly in Lisp.
+ (string-match regexp (setq str (substring string mb me)))
+ (setq matches
+ (cons (replace-match (if (stringp rep)
+ rep
+ (funcall rep (match-string 0 str)))
+ fixedcase literal str subexp)
+ (cons (substring string start mb) ; unmatched prefix
+ matches)))
+ (setq start me))
+ ;; Reconstruct a string from the pieces.
+ (setq matches (cons (substring string start l) matches)) ; leftover
+ (apply #'concat (nreverse matches)))))
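+
+;; Two sketches: a literal replacement, and a REP function that receives
+;; the matched text:
+;;
+;;   (replace-regexp-in-string "[aeiou]" "_" "emacs lisp")
+;;   ;; => "_m_cs l_sp"
+;;   (replace-regexp-in-string "[0-9]+"
+;;                             (lambda (m) (number-to-string (1+ (string-to-number m))))
+;;                             "v1.9")
+;;   ;; => "v2.10"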
+
+(defun string-prefix-p (prefix string &optional ignore-case)
+ "Return non-nil if PREFIX is a prefix of STRING.
+If IGNORE-CASE is non-nil, the comparison is done without paying attention
+to case differences."
+ (let ((prefix-length (length prefix)))
+ (if (> prefix-length (length string)) nil
+ (eq t (compare-strings prefix 0 prefix-length string
+ 0 prefix-length ignore-case)))))
+
+(defun string-suffix-p (suffix string &optional ignore-case)
+ "Return non-nil if SUFFIX is a suffix of STRING.
+If IGNORE-CASE is non-nil, the comparison is done without paying
+attention to case differences."
+ (let ((start-pos (- (length string) (length suffix))))
+ (and (>= start-pos 0)
+ (eq t (compare-strings suffix nil nil
+ string start-pos nil ignore-case)))))
+
+(defun bidi-string-mark-left-to-right (str)
+ "Return a string that can be safely inserted in left-to-right text.
+
+Normally, inserting a string with right-to-left (RTL) script into
+a buffer may cause some subsequent text to be displayed as part
+of the RTL segment (usually this affects punctuation characters).
+This function returns a string which displays as STR but forces
+subsequent text to be displayed as left-to-right.
+
+If STR contains any RTL character, this function returns a string
+consisting of STR followed by an invisible left-to-right mark
+\(LRM) character. Otherwise, it returns STR."
+ (unless (stringp str)
+ (signal 'wrong-type-argument (list 'stringp str)))
+ (if (string-match "\\cR" str)
+ (concat str (propertize (string ?\x200e) 'invisible t))
+ str))
+
+;;;; Specifying things to do later.
+
+(defun load-history-regexp (file)
+ "Form a regexp to find FILE in `load-history'.
+FILE, a string, is described in the function `eval-after-load'."
+ (if (file-name-absolute-p file)
+ (setq file (file-truename file)))
+ (concat (if (file-name-absolute-p file) "\\`" "\\(\\`\\|/\\)")
+ (regexp-quote file)
+ (if (file-name-extension file)
+ ""
+ ;; Note: regexp-opt can't be used here, since we need to call
+ ;; this before Emacs has been fully started. 2006-05-21
+ (concat "\\(" (mapconcat 'regexp-quote load-suffixes "\\|") "\\)?"))
+ "\\(" (mapconcat 'regexp-quote jka-compr-load-suffixes "\\|")
+ "\\)?\\'"))
+
+(defun load-history-filename-element (file-regexp)
+ "Get the first elt of `load-history' whose car matches FILE-REGEXP.
+Return nil if there isn't one."
+ (let* ((loads load-history)
+ (load-elt (and loads (car loads))))
+ (save-match-data
+ (while (and loads
+ (or (null (car load-elt))
+ (not (string-match file-regexp (car load-elt)))))
+ (setq loads (cdr loads)
+ load-elt (and loads (car loads)))))
+ load-elt))
+
+(put 'eval-after-load 'lisp-indent-function 1)
+(defun eval-after-load (file form)
+ "Arrange that if FILE is loaded, FORM will be run immediately afterwards.
+If FILE is already loaded, evaluate FORM right now.
+FORM can be an Elisp expression (in which case it's passed to `eval'),
+or a function (in which case it's passed to `funcall' with no argument).
+
+If a matching file is loaded again, FORM will be evaluated again.
+
+If FILE is a string, it may be either an absolute or a relative file
+name, and may have an extension (e.g. \".el\") or may lack one, and
+additionally may or may not have an extension denoting a compressed
+format (e.g. \".gz\").
+
+When FILE is absolute, this first converts it to a true name by chasing
+symbolic links. Only a file of this name (see next paragraph regarding
+extensions) will trigger the evaluation of FORM. When FILE is relative,
+a file whose absolute true name ends in FILE will trigger evaluation.
+
+When FILE lacks an extension, a file name with any extension will trigger
+evaluation. Otherwise, its extension must match FILE's. A further
+extension for a compressed format (e.g. \".gz\") on FILE will not affect
+this name matching.
+
+Alternatively, FILE can be a feature (i.e. a symbol), in which case FORM
+is evaluated at the end of any file that `provide's this feature.
+If the feature is provided when evaluating code not associated with a
+file, FORM is evaluated immediately after the provide statement.
+
+Usually FILE is just a library name like \"font-lock\" or a feature name
+like 'font-lock.
+
+This function makes or adds to an entry on `after-load-alist'."
+ (declare (compiler-macro
+ (lambda (whole)
+ (if (eq 'quote (car-safe form))
+ ;; Quote with lambda so the compiler can look inside.
+ `(eval-after-load ,file (lambda () ,(nth 1 form)))
+ whole))))
+ ;; Add this FORM into after-load-alist (regardless of whether we'll be
+ ;; evaluating it now).
+ (let* ((regexp-or-feature
+ (if (stringp file)
+ (setq file (purecopy (load-history-regexp file)))
+ file))
+ (elt (assoc regexp-or-feature after-load-alist))
+ (func
+ (if (functionp form) form
+ ;; Try to use the "current" lexical/dynamic mode for `form'.
+ (eval `(lambda () ,form) lexical-binding))))
+ (unless elt
+ (setq elt (list regexp-or-feature))
+ (push elt after-load-alist))
+ ;; Is there an already loaded file whose name (or `provide' name)
+ ;; matches FILE?
+ (prog1 (if (if (stringp file)
+ (load-history-filename-element regexp-or-feature)
+ (featurep file))
+ (funcall func))
+ (let ((delayed-func
+ (if (not (symbolp regexp-or-feature)) func
+ ;; For features, the after-load-alist elements get run when
+ ;; `provide' is called rather than at the end of the file.
+ ;; So add an indirection to make sure that `func' is really run
+ ;; "after-load" in case the provide call happens early.
+ (lambda ()
+ (if (not load-file-name)
+ ;; Not being provided from a file, run func right now.
+ (funcall func)
+ (let ((lfn load-file-name)
+ ;; Don't use letrec, because equal (in
+ ;; add/remove-hook) would get trapped in a cycle.
+ (fun (make-symbol "eval-after-load-helper")))
+ (fset fun (lambda (file)
+ (when (equal file lfn)
+ (remove-hook 'after-load-functions fun)
+ (funcall func))))
+ (add-hook 'after-load-functions fun 'append)))))))
+ ;; Add FORM to the element unless it's already there.
+ (unless (member delayed-func (cdr elt))
+ (nconc elt (list delayed-func)))))))
+
+(defmacro with-eval-after-load (file &rest body)
+ "Execute BODY after FILE is loaded.
+FILE is normally a feature name, but it can also be a file name,
+in case that file does not provide any feature."
+ (declare (indent 1) (debug t))
+ `(eval-after-load ,file (lambda () ,@body)))
+
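+;; Illustrative sketch (not part of the original subr.el): deferring a
+;; setting until a library is loaded.  The feature name `my-lib' and the
+;; variable `my-lib-option' are hypothetical; the body runs only if a
+;; library providing `my-lib' is loaded later (or immediately, if it
+;; already has been).
+(with-eval-after-load 'my-lib
+  (setq my-lib-option t))
+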
+(defvar after-load-functions nil
+ "Special hook run after loading a file.
+Each function there is called with a single argument, the absolute
+name of the file just loaded.")
+
+(defun do-after-load-evaluation (abs-file)
+ "Evaluate all `eval-after-load' forms, if any, for ABS-FILE.
+ABS-FILE, a string, should be the absolute true name of a file just loaded.
+This function is called directly from the C code."
+ ;; Run the relevant eval-after-load forms.
+ (dolist (a-l-element after-load-alist)
+ (when (and (stringp (car a-l-element))
+ (string-match-p (car a-l-element) abs-file))
+ ;; discard the file name regexp
+ (mapc #'funcall (cdr a-l-element))))
+ ;; Complain when the user uses obsolete files.
+ (when (save-match-data
+ (and (string-match "/obsolete/\\([^/]*\\)\\'" abs-file)
+ (not (equal "loaddefs.el" (match-string 1 abs-file)))))
+ ;; Maybe we should just use display-warning? This seems yucky...
+ (let* ((file (file-name-nondirectory abs-file))
+ (msg (format "Package %s is obsolete!"
+ (substring file 0
+ (string-match "\\.elc?\\>" file)))))
+ ;; Cribbed from cl--compiling-file.
+ (if (and (boundp 'byte-compile--outbuffer)
+ (bufferp (symbol-value 'byte-compile--outbuffer))
+ (equal (buffer-name (symbol-value 'byte-compile--outbuffer))
+ " *Compiler Output*"))
+ ;; Don't warn about obsolete files using other obsolete files.
+ (unless (and (stringp byte-compile-current-file)
+ (string-match-p "/obsolete/[^/]*\\'"
+ (expand-file-name
+ byte-compile-current-file
+ byte-compile-root-dir)))
+ (byte-compile-log-warning msg))
+ (run-with-timer 0 nil
+ (lambda (msg)
+ (message "%s" msg))
+ msg))))
+
+ ;; Finally, run any other hook.
+ (run-hook-with-args 'after-load-functions abs-file))
+
+(defun eval-next-after-load (file)
+ "Read the following input sexp, and run it whenever FILE is loaded.
+This makes or adds to an entry on `after-load-alist'.
+FILE should be the name of a library, with no directory name."
+ (declare (obsolete eval-after-load "23.2"))
+ (eval-after-load file (read)))
+
+
+(defun display-delayed-warnings ()
+ "Display delayed warnings from `delayed-warnings-list'.
+Used from `delayed-warnings-hook' (which see)."
+ (dolist (warning (nreverse delayed-warnings-list))
+ (apply 'display-warning warning))
+ (setq delayed-warnings-list nil))
+
+(defun collapse-delayed-warnings ()
+ "Remove duplicates from `delayed-warnings-list'.
+Collapse identical adjacent warnings into one (plus count).
+Used from `delayed-warnings-hook' (which see)."
+ (let ((count 1)
+ collapsed warning)
+ (while delayed-warnings-list
+ (setq warning (pop delayed-warnings-list))
+ (if (equal warning (car delayed-warnings-list))
+ (setq count (1+ count))
+ (when (> count 1)
+ (setcdr warning (cons (format "%s [%d times]" (cadr warning) count)
+ (cddr warning)))
+ (setq count 1))
+ (push warning collapsed)))
+ (setq delayed-warnings-list (nreverse collapsed))))
+
+;; At present this is only used for Emacs internals.
+;; Ref http://lists.gnu.org/archive/html/emacs-devel/2012-02/msg00085.html
+(defvar delayed-warnings-hook '(collapse-delayed-warnings
+ display-delayed-warnings)
+ "Normal hook run to process and display delayed warnings.
+By default, this hook contains functions to consolidate the
+warnings listed in `delayed-warnings-list', display them, and set
+`delayed-warnings-list' back to nil.")
+
+(defun delay-warning (type message &optional level buffer-name)
+ "Display a delayed warning.
+Aside from going through `delayed-warnings-list', this is equivalent
+to `display-warning'."
+ (push (list type message level buffer-name) delayed-warnings-list))
+
+
+;;;; invisibility specs
+
+(defun add-to-invisibility-spec (element)
+ "Add ELEMENT to `buffer-invisibility-spec'.
+See documentation for `buffer-invisibility-spec' for the kind of elements
+that can be added."
+ (if (eq buffer-invisibility-spec t)
+ (setq buffer-invisibility-spec (list t)))
+ (setq buffer-invisibility-spec
+ (cons element buffer-invisibility-spec)))
+
+(defun remove-from-invisibility-spec (element)
+ "Remove ELEMENT from `buffer-invisibility-spec'."
+ (if (consp buffer-invisibility-spec)
+ (setq buffer-invisibility-spec
+ (delete element buffer-invisibility-spec))))
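+
+;; Illustrative sketch (not part of the original subr.el): hiding a stretch
+;; of text through `buffer-invisibility-spec'.  The symbol `demo' is
+;; arbitrary; the form returns the temporary buffer's resulting
+;; invisibility spec, (demo t).
+(with-temp-buffer
+  (insert "visible hidden")
+  (add-to-invisibility-spec 'demo)
+  (put-text-property 9 15 'invisible 'demo)
+  buffer-invisibility-spec)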
+
+;;;; Syntax tables.
+
+(defmacro with-syntax-table (table &rest body)
+ "Evaluate BODY with syntax table of current buffer set to TABLE.
+The syntax table of the current buffer is saved, BODY is evaluated, and the
+saved table is restored, even in case of an abnormal exit.
+Value is what BODY returns."
+ (declare (debug t) (indent 1))
+ (let ((old-table (make-symbol "table"))
+ (old-buffer (make-symbol "buffer")))
+ `(let ((,old-table (syntax-table))
+ (,old-buffer (current-buffer)))
+ (unwind-protect
+ (progn
+ (set-syntax-table ,table)
+ ,@body)
+ (save-current-buffer
+ (set-buffer ,old-buffer)
+ (set-syntax-table ,old-table))))))
+
+(defun make-syntax-table (&optional oldtable)
+ "Return a new syntax table.
+Create a syntax table which inherits from OLDTABLE (if non-nil) or
+from `standard-syntax-table' otherwise."
+ (let ((table (make-char-table 'syntax-table nil)))
+ (set-char-table-parent table (or oldtable (standard-syntax-table)))
+ table))
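+
+;; Illustrative sketch (not part of the original subr.el): a derived syntax
+;; table in which angle brackets act as parentheses, used temporarily via
+;; `with-syntax-table' to step over one balanced group.  The buffer text is
+;; arbitrary; the form returns 16, the position just past the closing ">".
+(with-temp-buffer
+  (insert "<foo <bar> baz>")
+  (goto-char (point-min))
+  (let ((table (make-syntax-table)))
+    (modify-syntax-entry ?< "(>" table)
+    (modify-syntax-entry ?> ")<" table)
+    (with-syntax-table table
+      (forward-sexp)
+      (point))))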
+
+(defun syntax-after (pos)
+ "Return the raw syntax descriptor for the char after POS.
+If POS is outside the buffer's accessible portion, return nil."
+ (unless (or (< pos (point-min)) (>= pos (point-max)))
+ (let ((st (if parse-sexp-lookup-properties
+ (get-char-property pos 'syntax-table))))
+ (if (consp st) st
+ (aref (or st (syntax-table)) (char-after pos))))))
+
+(defun syntax-class (syntax)
+ "Return the code for the syntax class described by SYNTAX.
+
+SYNTAX should be a raw syntax descriptor; the return value is an
+integer which encodes the corresponding syntax class. See Info
+node `(elisp)Syntax Table Internals' for a list of codes.
+
+If SYNTAX is nil, return nil."
+ (and syntax (logand (car syntax) 65535)))
+
+;; Utility motion commands
+
+;; Whitespace
+
+(defun forward-whitespace (arg)
+ "Move point to the end of the next sequence of whitespace chars.
+Each such sequence may be a single newline, or a sequence of
+consecutive space and/or tab characters.
+With prefix argument ARG, do it ARG times if positive, or move
+backwards ARG times if negative."
+ (interactive "^p")
+ (if (natnump arg)
+ (re-search-forward "[ \t]+\\|\n" nil 'move arg)
+ (while (< arg 0)
+ (if (re-search-backward "[ \t]+\\|\n" nil 'move)
+ (or (eq (char-after (match-beginning 0)) ?\n)
+ (skip-chars-backward " \t")))
+ (setq arg (1+ arg)))))
+
+;; Symbols
+
+(defun forward-symbol (arg)
+ "Move point to the next position that is the end of a symbol.
+A symbol is any sequence of characters that are in either the
+word constituent or symbol constituent syntax class.
+With prefix argument ARG, do it ARG times if positive, or move
+backwards ARG times if negative."
+ (interactive "^p")
+ (if (natnump arg)
+ (re-search-forward "\\(\\sw\\|\\s_\\)+" nil 'move arg)
+ (while (< arg 0)
+ (if (re-search-backward "\\(\\sw\\|\\s_\\)+" nil 'move)
+ (skip-syntax-backward "w_"))
+ (setq arg (1+ arg)))))
+
+;; Syntax blocks
+
+(defun forward-same-syntax (&optional arg)
+ "Move point past all characters with the same syntax class.
+With prefix argument ARG, do it ARG times if positive, or move
+backwards ARG times if negative."
+ (interactive "^p")
+ (or arg (setq arg 1))
+ (while (< arg 0)
+ (skip-syntax-backward
+ (char-to-string (char-syntax (char-before))))
+ (setq arg (1+ arg)))
+ (while (> arg 0)
+ (skip-syntax-forward (char-to-string (char-syntax (char-after))))
+ (setq arg (1- arg))))
+
+
+;;;; Text clones
+
+(defvar text-clone--maintaining nil)
+
+(defun text-clone--maintain (ol1 after beg end &optional _len)
+ "Propagate the changes made under the overlay OL1 to the other clones.
+This is used on the `modification-hooks' property of text clones."
+ (when (and after (not undo-in-progress)
+ (not text-clone--maintaining)
+ (overlay-start ol1))
+ (let ((margin (if (overlay-get ol1 'text-clone-spreadp) 1 0)))
+ (setq beg (max beg (+ (overlay-start ol1) margin)))
+ (setq end (min end (- (overlay-end ol1) margin)))
+ (when (<= beg end)
+ (save-excursion
+ (when (overlay-get ol1 'text-clone-syntax)
+ ;; Check content of the clone's text.
+ (let ((cbeg (+ (overlay-start ol1) margin))
+ (cend (- (overlay-end ol1) margin)))
+ (goto-char cbeg)
+ (save-match-data
+ (if (not (re-search-forward
+ (overlay-get ol1 'text-clone-syntax) cend t))
+ ;; Mark the overlay for deletion.
+ (setq end cbeg)
+ (when (< (match-end 0) cend)
+ ;; Shrink the clone at its end.
+ (setq end (min end (match-end 0)))
+ (move-overlay ol1 (overlay-start ol1)
+ (+ (match-end 0) margin)))
+ (when (> (match-beginning 0) cbeg)
+ ;; Shrink the clone at its beginning.
+ (setq beg (max (match-beginning 0) beg))
+ (move-overlay ol1 (- (match-beginning 0) margin)
+ (overlay-end ol1)))))))
+ ;; Now go ahead and update the clones.
+ (let ((head (- beg (overlay-start ol1)))
+ (tail (- (overlay-end ol1) end))
+ (str (buffer-substring beg end))
+ (nothing-left t)
+ (text-clone--maintaining t))
+ (dolist (ol2 (overlay-get ol1 'text-clones))
+ (let ((oe (overlay-end ol2)))
+ (unless (or (eq ol1 ol2) (null oe))
+ (setq nothing-left nil)
+ (let ((mod-beg (+ (overlay-start ol2) head)))
+ ;;(overlay-put ol2 'modification-hooks nil)
+ (goto-char (- (overlay-end ol2) tail))
+ (unless (> mod-beg (point))
+ (save-excursion (insert str))
+ (delete-region mod-beg (point)))
+ ;;(overlay-put ol2 'modification-hooks '(text-clone--maintain))
+ ))))
+ (if nothing-left (delete-overlay ol1))))))))
+
+(defun text-clone-create (start end &optional spreadp syntax)
+ "Create a text clone of START...END at point.
+Text clones are chunks of text that are automatically kept identical:
+changes done to one of the clones will be immediately propagated to the other.
+
+The buffer's content at point is assumed to be already identical to
+the one between START and END.
+If SYNTAX is provided it's a regexp that describes the possible text of
+the clones; the clone will be shrunk or killed if necessary to ensure that
+its text matches the regexp.
+If SPREADP is non-nil it indicates that text inserted before/after the
+clone should be incorporated in the clone."
+ ;; To deal with SPREADP we can either use an overlay with `nil t' along
+ ;; with insert-(behind|in-front-of)-hooks or use a slightly larger overlay
+ ;; (with a one-char margin at each end) with `t nil'.
+ ;; We opted for a larger overlay because it behaves better in the case
+ ;; where the clone is reduced to the empty string (we want the overlay to
+ ;; stay when the clone's content is the empty string and we want to use
+ ;; `evaporate' to make sure those overlays get deleted when needed).
+ ;;
+ (let* ((pt-end (+ (point) (- end start)))
+ (start-margin (if (or (not spreadp) (bobp) (<= start (point-min)))
+ 0 1))
+ (end-margin (if (or (not spreadp)
+ (>= pt-end (point-max))
+ (>= start (point-max)))
+ 0 1))
+ ;; FIXME: Reuse overlays at point to extend dups!
+ (ol1 (make-overlay (- start start-margin) (+ end end-margin) nil t))
+ (ol2 (make-overlay (- (point) start-margin) (+ pt-end end-margin) nil t))
+ (dups (list ol1 ol2)))
+ (overlay-put ol1 'modification-hooks '(text-clone--maintain))
+ (when spreadp (overlay-put ol1 'text-clone-spreadp t))
+ (when syntax (overlay-put ol1 'text-clone-syntax syntax))
+ ;;(overlay-put ol1 'face 'underline)
+ (overlay-put ol1 'evaporate t)
+ (overlay-put ol1 'text-clones dups)
+ ;;
+ (overlay-put ol2 'modification-hooks '(text-clone--maintain))
+ (when spreadp (overlay-put ol2 'text-clone-spreadp t))
+ (when syntax (overlay-put ol2 'text-clone-syntax syntax))
+ ;;(overlay-put ol2 'face 'underline)
+ (overlay-put ol2 'evaporate t)
+ (overlay-put ol2 'text-clones dups)))
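+
+;; Illustrative sketch (not part of the original subr.el): two clones of
+;; "alpha" in a temporary buffer; an edit inside the first clone is
+;; mirrored into the second by `text-clone--maintain'.  The buffer text is
+;; arbitrary; the form should return "al-pha al-pha".
+(with-temp-buffer
+  (insert "alpha alpha")
+  (goto-char 7)                     ; point at the second "alpha"
+  (text-clone-create 1 6)           ; clone it against the first "alpha"
+  (goto-char 3)
+  (insert "-")                      ; edit strictly inside the first clone
+  (buffer-string))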
+
+;;;; Mail user agents.
+
+;; Here we include just enough for other packages to be able
+;; to define them.
+
+(defun define-mail-user-agent (symbol composefunc sendfunc
+ &optional abortfunc hookvar)
+ "Define a symbol to identify a mail-sending package for `mail-user-agent'.
+
+SYMBOL can be any Lisp symbol. Its function definition and/or
+value as a variable do not matter for this usage; we use only certain
+properties on its property list, to encode the rest of the arguments.
+
+COMPOSEFUNC is a program-callable function that composes an outgoing
+mail message buffer. This function should set up the basics of the
+buffer without requiring user interaction. It should populate the
+standard mail headers, leaving the `to:' and `subject:' headers blank
+by default.
+
+COMPOSEFUNC should accept several optional arguments--the same
+arguments that `compose-mail' takes. See that function's documentation.
+
+SENDFUNC is the command a user would run to send the message.
+
+Optional ABORTFUNC is the command a user would run to abort the
+message. For mail packages that don't have a separate abort function,
+this can be `kill-buffer' (the equivalent of omitting this argument).
+
+Optional HOOKVAR is a hook variable that gets run before the message
+is actually sent. Callers that use the `mail-user-agent' may
+install a hook function temporarily on this hook variable.
+If HOOKVAR is nil, `mail-send-hook' is used.
+
+The properties used on SYMBOL are `composefunc', `sendfunc',
+`abortfunc', and `hookvar'."
+ (put symbol 'composefunc composefunc)
+ (put symbol 'sendfunc sendfunc)
+ (put symbol 'abortfunc (or abortfunc 'kill-buffer))
+ (put symbol 'hookvar (or hookvar 'mail-send-hook)))
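+
+;; Illustrative sketch (not part of the original subr.el): registering a
+;; hypothetical mail package.  `my-mua-compose' and `my-mua-send' are
+;; placeholder names; only symbol properties are stored here, so the
+;; functions need not exist until `mail-user-agent' is actually used.
+(define-mail-user-agent 'my-mua 'my-mua-compose 'my-mua-send)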
+
+(defvar called-interactively-p-functions nil
+ "Special hook called to skip special frames in `called-interactively-p'.
+The functions are called with 3 arguments: (I FRAME1 FRAME2),
+where FRAME1 is a \"current frame\", FRAME2 is the next frame,
+I is the index of the frame after FRAME2. It should return nil
+if those frames don't seem special; otherwise, it should return
+the number of frames to skip (minus 1).")
+
+(defconst internal--funcall-interactively
+ (symbol-function 'funcall-interactively))
+
+(defun called-interactively-p (&optional kind)
+ "Return t if the containing function was called by `call-interactively'.
+If KIND is `interactive', then only return t if the call was made
+interactively by the user, i.e. not in `noninteractive' mode nor
+when `executing-kbd-macro'.
+If KIND is `any', on the other hand, it will return t for any kind of
+interactive call, including being called as the binding of a key or
+from a keyboard macro, even in `noninteractive' mode.
+
+This function is very brittle; it may fail to return the intended result when
+the code is debugged, advised, or instrumented in some form. Some macros and
+special forms (such as `condition-case') may also sometimes wrap their bodies
+in a `lambda', so any call to `called-interactively-p' from those bodies will
+indicate whether that lambda (rather than the surrounding function) was called
+interactively.
+
+Instead of using this function, it is cleaner and more reliable to give your
+function an extra optional argument whose `interactive' spec specifies
+non-nil unconditionally (\"p\" is a good way to do this), or via
+\(not (or executing-kbd-macro noninteractive)).
+
+The only known proper use of `interactive' for KIND is in deciding
+whether to display a helpful message, or how to display it. If you're
+thinking of using it for any other purpose, it is quite likely that
+you're making a mistake. Think: what do you want to do when the
+command is called from a keyboard macro?"
+ (declare (advertised-calling-convention (kind) "23.1"))
+ (when (not (and (eq kind 'interactive)
+ (or executing-kbd-macro noninteractive)))
+ (let* ((i 1) ;; 0 is the called-interactively-p frame.
+ frame nextframe
+ (get-next-frame
+ (lambda ()
+ (setq frame nextframe)
+ (setq nextframe (backtrace-frame i 'called-interactively-p))
+ ;; (message "Frame %d = %S" i nextframe)
+ (setq i (1+ i)))))
+ (funcall get-next-frame) ;; Get the first frame.
+ (while
+ ;; FIXME: The edebug and advice handling should be made modular and
+ ;; provided directly by edebug.el and nadvice.el.
+ (progn
+ ;; frame =(backtrace-frame i-2)
+ ;; nextframe=(backtrace-frame i-1)
+ (funcall get-next-frame)
+ ;; `pcase' would be a fairly good fit here, but it sometimes moves
+ ;; branches within local functions, which then messes up the
+ ;; `backtrace-frame' data we get,
+ (or
+ ;; Skip special forms (from non-compiled code).
+ (and frame (null (car frame)))
+ ;; Skip also `interactive-p' (because we don't want to know if
+ ;; interactive-p was called interactively but if its caller was)
+ ;; and `byte-code' (idem; this appears in subexpressions of things
+ ;; like condition-case, which are wrapped in a separate bytecode
+ ;; chunk).
+ ;; FIXME: For lexical-binding code, this is much worse,
+ ;; because the frames look like "byte-code -> funcall -> #[...]",
+ ;; which is not a reliable signature.
+ (memq (nth 1 frame) '(interactive-p byte-code))
+ ;; Skip package-specific stack-frames.
+ (let ((skip (run-hook-with-args-until-success
+ 'called-interactively-p-functions
+ i frame nextframe)))
+ (pcase skip
+ (`nil nil)
+ (`0 t)
+ (_ (setq i (+ i skip -1)) (funcall get-next-frame)))))))
+ ;; Now `frame' should be "the function from which we were called".
+ (pcase (cons frame nextframe)
+ ;; No subr calls `interactive-p', so we can rule that out.
+ (`((,_ ,(pred (lambda (f) (subrp (indirect-function f)))) . ,_) . ,_) nil)
+ ;; In case #<subr funcall-interactively> without going through the
+ ;; `funcall-interactively' symbol (bug#3984).
+ (`(,_ . (t ,(pred (lambda (f)
+ (eq internal--funcall-interactively
+ (indirect-function f))))
+ . ,_))
+ t)))))
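+
+;; Illustrative sketch (not part of the original subr.el): the pattern the
+;; docstring above recommends instead of `called-interactively-p' -- an
+;; extra optional argument whose `interactive' spec ("p") is always
+;; non-nil.  The command name is hypothetical.
+(defun my-report-buffer-size (&optional interactively)
+  "Return the size of the current buffer.
+When called interactively, also show it in the echo area."
+  (interactive "p")
+  (let ((size (buffer-size)))
+    (when interactively
+      (message "Buffer size: %d characters" size))
+    size))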
+
+(defun interactive-p ()
+ "Return t if the containing function was run directly by user input.
+This means that the function was called with `call-interactively'
+\(which includes being called as the binding of a key)
+and input is currently coming from the keyboard (not a keyboard macro),
+and Emacs is not running in batch mode (`noninteractive' is nil).
+
+The only known proper use of `interactive-p' is in deciding whether to
+display a helpful message, or how to display it. If you're thinking
+of using it for any other purpose, it is quite likely that you're
+making a mistake. Think: what do you want to do when the command is
+called from a keyboard macro or in batch mode?
+
+To test whether your function was called with `call-interactively',
+either (i) add an extra optional argument and give it an `interactive'
+spec that specifies non-nil unconditionally (such as \"p\"); or (ii)
+use `called-interactively-p'."
+ (declare (obsolete called-interactively-p "23.2"))
+ (called-interactively-p 'interactive))
+
+(defun internal-push-keymap (keymap symbol)
+ (let ((map (symbol-value symbol)))
+ (unless (memq keymap map)
+ (unless (memq 'add-keymap-witness (symbol-value symbol))
+ (setq map (make-composed-keymap nil (symbol-value symbol)))
+ (push 'add-keymap-witness (cdr map))
+ (set symbol map))
+ (push keymap (cdr map)))))
+
+(defun internal-pop-keymap (keymap symbol)
+ (let ((map (symbol-value symbol)))
+ (when (memq keymap map)
+ (setf (cdr map) (delq keymap (cdr map))))
+ (let ((tail (cddr map)))
+ (and (or (null tail) (keymapp tail))
+ (eq 'add-keymap-witness (nth 1 map))
+ (set symbol tail)))))
+
+(define-obsolete-function-alias
+ 'set-temporary-overlay-map 'set-transient-map "24.4")
+
+(defun set-transient-map (map &optional keep-pred on-exit)
+ "Set MAP as a temporary keymap taking precedence over other keymaps.
+Normally, MAP is used only once, to look up the very next key.
+However, if the optional argument KEEP-PRED is t, MAP stays
+active if a key from MAP is used. KEEP-PRED can also be a
+function of no arguments: it is called from `pre-command-hook' and
+if it returns non-nil, then MAP stays active.
+
+Optional arg ON-EXIT, if non-nil, specifies a function that is
+called, with no arguments, after MAP is deactivated.
+
+This uses `overriding-terminal-local-map' which takes precedence over all other
+keymaps. As usual, if no match for a key is found in MAP, the normal key
+lookup sequence then continues.
+
+This returns an \"exit function\", which can be called with no argument
+to deactivate this transient map, regardless of KEEP-PRED."
+ (let* ((clearfun (make-symbol "clear-transient-map"))
+ (exitfun
+ (lambda ()
+ (internal-pop-keymap map 'overriding-terminal-local-map)
+ (remove-hook 'pre-command-hook clearfun)
+ (when on-exit (funcall on-exit)))))
+ ;; Don't use letrec, because equal (in add/remove-hook) would get trapped
+ ;; in a cycle.
+ (fset clearfun
+ (lambda ()
+ (with-demoted-errors "set-transient-map PCH: %S"
+ (unless (cond
+ ((null keep-pred) nil)
+ ((not (eq map (cadr overriding-terminal-local-map)))
+ ;; There's presumably some other transient-map in
+ ;; effect. Wait for that one to terminate before we
+ ;; remove ourselves.
+ ;; For example, if isearch and C-u both use transient
+ ;; maps, then the lifetime of the C-u should be nested
+ ;; within isearch's, so the pre-command-hook of
+ ;; isearch should be suspended during the C-u one so
+ ;; we don't exit isearch just because we hit 1 after
+ ;; C-u and that 1 exits isearch whereas it doesn't
+ ;; exit C-u.
+ t)
+ ((eq t keep-pred)
+ (eq this-command
+ (lookup-key map (this-command-keys-vector))))
+ (t (funcall keep-pred)))
+ (funcall exitfun)))))
+ (add-hook 'pre-command-hook clearfun)
+ (internal-push-keymap map 'overriding-terminal-local-map)
+ exitfun))
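+
+;; Illustrative sketch (not part of the original subr.el): a command that
+;; installs a transient keymap so repeated + and - keystrokes keep
+;; adjusting the text scale.  The command name is hypothetical;
+;; `text-scale-increase' and `text-scale-decrease' come from face-remap.el.
+(defun my-text-scale-transient ()
+  "Adjust the text scale with + and - until some other key is pressed."
+  (interactive)
+  (let ((map (make-sparse-keymap)))
+    (define-key map (kbd "+") #'text-scale-increase)
+    (define-key map (kbd "-") #'text-scale-decrease)
+    (set-transient-map map t)))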
+
+;;;; Progress reporters.
+
+;; Progress reporter has the following structure:
+;;
+;; (NEXT-UPDATE-VALUE . [NEXT-UPDATE-TIME
+;; MIN-VALUE
+;; MAX-VALUE
+;; MESSAGE
+;; MIN-CHANGE
+;; MIN-TIME])
+;;
+;; This weirdness is for optimization reasons: we want
+;; `progress-reporter-update' to be as fast as possible, so
+;; `(car reporter)' is better than `(aref reporter 0)'.
+;;
+;; NEXT-UPDATE-TIME is a float. While `float-time' loses a couple
+;; digits of precision, it doesn't really matter here. On the other
+;; hand, it greatly simplifies the code.
+
+(defsubst progress-reporter-update (reporter &optional value)
+ "Report progress of an operation in the echo area.
+REPORTER should be the result of a call to `make-progress-reporter'.
+
+If REPORTER is a numerical progress reporter---i.e. if it was
+ made using non-nil MIN-VALUE and MAX-VALUE arguments to
+ `make-progress-reporter'---then VALUE should be a number between
+ MIN-VALUE and MAX-VALUE.
+
+If REPORTER is a non-numerical reporter, VALUE should be nil.
+
+This function is relatively inexpensive. If the change since
+last update is too small or insufficient time has passed, it does
+nothing."
+ (when (or (not (numberp value)) ; For pulsing reporter
+ (>= value (car reporter))) ; For numerical reporter
+ (progress-reporter-do-update reporter value)))
+
+(defun make-progress-reporter (message &optional min-value max-value
+ current-value min-change min-time)
+ "Return progress reporter object for use with `progress-reporter-update'.
+
+MESSAGE is shown in the echo area, with a status indicator
+appended to the end. When you call `progress-reporter-done', the
+word \"done\" is printed after the MESSAGE. You can change the
+MESSAGE of an existing progress reporter by calling
+`progress-reporter-force-update'.
+
+MIN-VALUE and MAX-VALUE, if non-nil, are starting (0% complete)
+and final (100% complete) states of operation; the latter should
+be larger. In this case, the status message shows the percentage
+progress.
+
+If MIN-VALUE and/or MAX-VALUE is omitted or nil, the status
+message shows a \"spinning\", non-numeric indicator.
+
+Optional CURRENT-VALUE is the initial progress; the default is
+MIN-VALUE.
+Optional MIN-CHANGE is the minimal change in percents to report;
+the default is 1%.
+CURRENT-VALUE and MIN-CHANGE do not have any effect if MIN-VALUE
+and/or MAX-VALUE are nil.
+
+Optional MIN-TIME specifies the minimum interval time between
+echo area updates (default is 0.2 seconds). If the function
+`float-time' is not present, time is not tracked at all. If the
+OS is not capable of measuring fractions of seconds, this
+parameter is effectively rounded up."
+ (when (string-match "[[:alnum:]]\\'" message)
+ (setq message (concat message "...")))
+ (unless min-time
+ (setq min-time 0.2))
+ (let ((reporter
+ ;; Force a call to `message' now
+ (cons (or min-value 0)
+ (vector (if (and (fboundp 'float-time)
+ (>= min-time 0.02))
+ (float-time) nil)
+ min-value
+ max-value
+ message
+ (if min-change (max (min min-change 50) 1) 1)
+ min-time))))
+ (progress-reporter-update reporter (or current-value min-value))
+ reporter))
+
+(defun progress-reporter-force-update (reporter &optional value new-message)
+ "Report progress of an operation in the echo area unconditionally.
+
+The first two arguments are the same as in `progress-reporter-update'.
+NEW-MESSAGE, if non-nil, sets a new message for the reporter."
+ (let ((parameters (cdr reporter)))
+ (when new-message
+ (aset parameters 3 new-message))
+ (when (aref parameters 0)
+ (aset parameters 0 (float-time)))
+ (progress-reporter-do-update reporter value)))
+
+(defvar progress-reporter--pulse-characters ["-" "\\" "|" "/"]
+ "Characters to use for pulsing progress reporters.")
+
+(defun progress-reporter-do-update (reporter value)
+ (let* ((parameters (cdr reporter))
+ (update-time (aref parameters 0))
+ (min-value (aref parameters 1))
+ (max-value (aref parameters 2))
+ (text (aref parameters 3))
+ (enough-time-passed
+ ;; See if enough time has passed since the last update.
+ (or (not update-time)
+ (when (>= (float-time) update-time)
+ ;; Calculate time for the next update
+ (aset parameters 0 (+ update-time (aref parameters 5)))))))
+ (cond ((and min-value max-value)
+ ;; Numerical indicator
+ (let* ((one-percent (/ (- max-value min-value) 100.0))
+ (percentage (if (= max-value min-value)
+ 0
+ (truncate (/ (- value min-value)
+ one-percent)))))
+ ;; Calculate NEXT-UPDATE-VALUE. If we are not printing
+ ;; message because not enough time has passed, use 1
+ ;; instead of MIN-CHANGE. This makes delays between echo
+ ;; area updates closer to MIN-TIME.
+ (setcar reporter
+ (min (+ min-value (* (+ percentage
+ (if enough-time-passed
+ ;; MIN-CHANGE
+ (aref parameters 4)
+ 1))
+ one-percent))
+ max-value))
+ (when (integerp value)
+ (setcar reporter (ceiling (car reporter))))
+ ;; Only print message if enough time has passed
+ (when enough-time-passed
+ (if (> percentage 0)
+ (message "%s%d%%" text percentage)
+ (message "%s" text)))))
+ ;; Pulsing indicator
+ (enough-time-passed
+ (let ((index (mod (1+ (car reporter)) 4))
+ (message-log-max nil))
+ (setcar reporter index)
+ (message "%s %s"
+ text
+ (aref progress-reporter--pulse-characters
+ index)))))))
+
+(defun progress-reporter-done (reporter)
+ "Print reporter's message followed by word \"done\" in echo area."
+ (message "%sdone" (aref (cdr reporter) 3)))
+
+(defmacro dotimes-with-progress-reporter (spec message &rest body)
+ "Loop a certain number of times and report progress in the echo area.
+Evaluate BODY with VAR bound to successive integers running from
+0, inclusive, to COUNT, exclusive. Then evaluate RESULT to get
+the return value (nil if RESULT is omitted).
+
+At each iteration MESSAGE followed by progress percentage is
+printed in the echo area. After the loop is finished, MESSAGE
+followed by the word \"done\" is printed. This macro is a
+convenience wrapper around `make-progress-reporter' and friends.
+
+\(fn (VAR COUNT [RESULT]) MESSAGE BODY...)"
+ (declare (indent 2) (debug ((symbolp form &optional form) form body)))
+ (let ((temp (make-symbol "--dotimes-temp--"))
+ (temp2 (make-symbol "--dotimes-temp2--"))
+ (start 0)
+ (end (nth 1 spec)))
+ `(let ((,temp ,end)
+ (,(car spec) ,start)
+ (,temp2 (make-progress-reporter ,message ,start ,end)))
+ (while (< ,(car spec) ,temp)
+ ,@body
+ (progress-reporter-update ,temp2
+ (setq ,(car spec) (1+ ,(car spec)))))
+ (progress-reporter-done ,temp2)
+ nil ,@(cdr (cdr spec)))))
+
+
+;;;; Comparing version strings.
+
+(defconst version-separator "."
+ "Specify the string used to separate the version elements.
+
+Usually the separator is \".\", but it can be any other string.")
+
+
+(defconst version-regexp-alist
+ '(("^[-_+ ]?snapshot$" . -4)
+ ;; treat "1.2.3-20050920" and "1.2-3" as snapshot releases
+ ("^[-_+]$" . -4)
+ ;; treat "1.2.3-CVS" as snapshot release
+ ("^[-_+ ]?\\(cvs\\|git\\|bzr\\|svn\\|hg\\|darcs\\)$" . -4)
+ ("^[-_+ ]?alpha$" . -3)
+ ("^[-_+ ]?beta$" . -2)
+ ("^[-_+ ]?\\(pre\\|rc\\)$" . -1))
+ "Specify association between non-numeric version and its priority.
+
+This association is used to handle version strings like \"1.0pre2\",
+\"0.9alpha1\", etc. It's used by `version-to-list' (which see) to convert the
+non-numeric part of a version string to an integer. For example:
+
+ String Version Integer List Version
+ \"0.9snapshot\" (0 9 -4)
+ \"1.0-git\" (1 0 -4)
+ \"1.0pre2\" (1 0 -1 2)
+ \"1.0PRE2\" (1 0 -1 2)
+ \"22.8beta3\" (22 8 -2 3)
+ \"22.8 Beta3\" (22 8 -2 3)
+ \"0.9alpha1\" (0 9 -3 1)
+ \"0.9AlphA1\" (0 9 -3 1)
+ \"0.9 alpha\" (0 9 -3)
+
+Each element has the following form:
+
+ (REGEXP . PRIORITY)
+
+Where:
+
+REGEXP regexp used to match non-numeric part of a version string.
+ It should begin with the `^' anchor and end with a `$' to
+ prevent false hits. Letter-case is ignored while matching
+ REGEXP.
+
+PRIORITY a negative integer specifying non-numeric priority of REGEXP.")
+
+
+(defun version-to-list (ver)
+ "Convert version string VER into a list of integers.
+
+The version syntax is given by the following EBNF:
+
+ VERSION ::= NUMBER ( SEPARATOR NUMBER )*.
+
+ NUMBER ::= (0|1|2|3|4|5|6|7|8|9)+.
+
+ SEPARATOR ::= `version-separator' (which see)
+ | `version-regexp-alist' (which see).
+
+The NUMBER part is optional if SEPARATOR is a match for an element
+in `version-regexp-alist'.
+
+Examples of valid version syntax:
+
+ 1.0pre2 1.0.7.5 22.8beta3 0.9alpha1 6.9.30Beta
+
+Examples of invalid version syntax:
+
+ 1.0prepre2 1.0..7.5 22.8X3 alpha3.2 .5
+
+Examples of version conversion:
+
+ Version String Version as a List of Integers
+ \"1.0.7.5\" (1 0 7 5)
+ \"1.0pre2\" (1 0 -1 2)
+ \"1.0PRE2\" (1 0 -1 2)
+ \"22.8beta3\" (22 8 -2 3)
+ \"22.8Beta3\" (22 8 -2 3)
+ \"0.9alpha1\" (0 9 -3 1)
+ \"0.9AlphA1\" (0 9 -3 1)
+ \"0.9alpha\" (0 9 -3)
+ \"0.9snapshot\" (0 9 -4)
+ \"1.0-git\" (1 0 -4)
+
+See documentation for `version-separator' and `version-regexp-alist'."
+ (or (and (stringp ver) (> (length ver) 0))
+ (error "Invalid version string: '%s'" ver))
+ ;; Change .x.y to 0.x.y
+ (if (and (>= (length ver) (length version-separator))
+ (string-equal (substring ver 0 (length version-separator))
+ version-separator))
+ (setq ver (concat "0" ver)))
+ (save-match-data
+ (let ((i 0)
+ (case-fold-search t) ; ignore case in matching
+ lst s al)
+ (while (and (setq s (string-match "[0-9]+" ver i))
+ (= s i))
+ ;; handle numeric part
+ (setq lst (cons (string-to-number (substring ver i (match-end 0)))
+ lst)
+ i (match-end 0))
+ ;; handle non-numeric part
+ (when (and (setq s (string-match "[^0-9]+" ver i))
+ (= s i))
+ (setq s (substring ver i (match-end 0))
+ i (match-end 0))
+ ;; handle alpha, beta, pre, etc. separator
+ (unless (string= s version-separator)
+ (setq al version-regexp-alist)
+ (while (and al (not (string-match (caar al) s)))
+ (setq al (cdr al)))
+ (cond (al
+ (push (cdar al) lst))
+ ;; Convert 22.3a to 22.3.1, 22.3b to 22.3.2, etc.
+ ((string-match "^[-_+ ]?\\([a-zA-Z]\\)$" s)
+ (push (- (aref (downcase (match-string 1 s)) 0) ?a -1)
+ lst))
+ (t (error "Invalid version syntax: '%s'" ver))))))
+ (if (null lst)
+ (error "Invalid version syntax: '%s'" ver)
+ (nreverse lst)))))
+
+
+(defun version-list-< (l1 l2)
+ "Return t if L1, a list specification of a version, is lower than L2.
+
+Note that a version specified by the list (1) is equal to (1 0),
+\(1 0 0), (1 0 0 0), etc. That is, the trailing zeros are insignificant.
+Also, a version given by the list (1) is higher than (1 -1), which in
+turn is higher than (1 -2), which is higher than (1 -3)."
+ (while (and l1 l2 (= (car l1) (car l2)))
+ (setq l1 (cdr l1)
+ l2 (cdr l2)))
+ (cond
+ ;; l1 not null and l2 not null
+ ((and l1 l2) (< (car l1) (car l2)))
+ ;; l1 null and l2 null ==> l1 length = l2 length
+ ((and (null l1) (null l2)) nil)
+ ;; l1 not null and l2 null ==> l1 length > l2 length
+ (l1 (< (version-list-not-zero l1) 0))
+ ;; l1 null and l2 not null ==> l2 length > l1 length
+ (t (< 0 (version-list-not-zero l2)))))
+
+
+(defun version-list-= (l1 l2)
+ "Return t if L1, a list specification of a version, is equal to L2.
+
+Note that a version specified by the list (1) is equal to (1 0),
+\(1 0 0), (1 0 0 0), etc. That is, the trailing zeros are insignificant.
+Also, a version given by the list (1) is higher than (1 -1), which in
+turn is higher than (1 -2), which is higher than (1 -3)."
+ (while (and l1 l2 (= (car l1) (car l2)))
+ (setq l1 (cdr l1)
+ l2 (cdr l2)))
+ (cond
+ ;; l1 not null and l2 not null
+ ((and l1 l2) nil)
+ ;; l1 null and l2 null ==> l1 length = l2 length
+ ((and (null l1) (null l2)))
+ ;; l1 not null and l2 null ==> l1 length > l2 length
+ (l1 (zerop (version-list-not-zero l1)))
+ ;; l1 null and l2 not null ==> l2 length > l1 length
+ (t (zerop (version-list-not-zero l2)))))
+
+
+(defun version-list-<= (l1 l2)
+ "Return t if L1, a list specification of a version, is lower or equal to L2.
+
+Note that integer list (1) is equal to (1 0), (1 0 0), (1 0 0 0),
+etc. That is, the trailing zeroes are insignificant. Also, integer
+list (1) is greater than (1 -1) which is greater than (1 -2)
+which is greater than (1 -3)."
+ (while (and l1 l2 (= (car l1) (car l2)))
+ (setq l1 (cdr l1)
+ l2 (cdr l2)))
+ (cond
+ ;; l1 not null and l2 not null
+ ((and l1 l2) (< (car l1) (car l2)))
+ ;; l1 null and l2 null ==> l1 length = l2 length
+ ((and (null l1) (null l2)))
+ ;; l1 not null and l2 null ==> l1 length > l2 length
+ (l1 (<= (version-list-not-zero l1) 0))
+ ;; l1 null and l2 not null ==> l2 length > l1 length
+ (t (<= 0 (version-list-not-zero l2)))))
+
+(defun version-list-not-zero (lst)
+ "Return the first non-zero element of LST, which is a list of integers.
+
+If all LST elements are zeros or LST is nil, return zero."
+ (while (and lst (zerop (car lst)))
+ (setq lst (cdr lst)))
+ (if lst
+ (car lst)
+ ;; there is no element different from zero
+ 0))
+
+
+(defun version< (v1 v2)
+ "Return t if version V1 is lower (older) than V2.
+
+Note that version string \"1\" is equal to \"1.0\", \"1.0.0\", \"1.0.0.0\",
+etc. That is, the trailing \".0\"s are insignificant. Also, version
+string \"1\" is higher (newer) than \"1pre\", which is higher than \"1beta\",
+which is higher than \"1alpha\", which is higher than \"1snapshot\".
+Also, \"-GIT\", \"-CVS\" and \"-NNN\" are treated as snapshot versions."
+ (version-list-< (version-to-list v1) (version-to-list v2)))
+
+(defun version<= (v1 v2)
+ "Return t if version V1 is lower (older) than or equal to V2.
+
+Note that version string \"1\" is equal to \"1.0\", \"1.0.0\", \"1.0.0.0\",
+etc. That is, the trailing \".0\"s are insignificant. Also, version
+string \"1\" is higher (newer) than \"1pre\", which is higher than \"1beta\",
+which is higher than \"1alpha\", which is higher than \"1snapshot\".
+Also, \"-GIT\", \"-CVS\" and \"-NNN\" are treated as snapshot versions."
+ (version-list-<= (version-to-list v1) (version-to-list v2)))
+
+(defun version= (v1 v2)
+ "Return t if version V1 is equal to V2.
+
+Note that version string \"1\" is equal to \"1.0\", \"1.0.0\", \"1.0.0.0\",
+etc. That is, the trailing \".0\"s are insignificant. Also, version
+string \"1\" is higher (newer) than \"1pre\", which is higher than \"1beta\",
+which is higher than \"1alpha\", which is higher than \"1snapshot\".
+Also, \"-GIT\", \"-CVS\" and \"-NNN\" are treated as snapshot versions."
+ (version-list-= (version-to-list v1) (version-to-list v2)))
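+
+;; Illustrative sketch (not part of the original subr.el): sample
+;; conversions and comparisons using the predicates defined above.  The
+;; version strings are arbitrary.
+(version-to-list "1.0pre2")         ; => (1 0 -1 2)
+(version< "1.0pre2" "1.0")          ; => t (a pre-release is older)
+(version< "24.3.50" "24.4")         ; => t
+(version= "1.0" "1.0.0")            ; => t (trailing zeros are ignored)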
+
+(defvar package--builtin-versions
+ ;; Mostly populated by loaddefs.el via autoload-builtin-package-versions.
+ (purecopy `((emacs . ,(version-to-list emacs-version))))
+ "Alist giving the version of each versioned builtin package.
+I.e. each element of the list is of the form (NAME . VERSION) where
+NAME is the package name as a symbol, and VERSION is its version
+as a list.")
+
+(defun package--description-file (dir)
+ (concat (let ((subdir (file-name-nondirectory
+ (directory-file-name dir))))
+ (if (string-match "\\([^.].*?\\)-\\([0-9]+\\(?:[.][0-9]+\\|\\(?:pre\\|beta\\|alpha\\)[0-9]+\\)*\\)" subdir)
+ (match-string 1 subdir) subdir))
+ "-pkg.el"))
+
+
+;;; Misc.
+(defconst menu-bar-separator '("--")
+ "Separator for menus.")
+
+;; The following statement ought to be in print.c, but `provide' can't
+;; be used there.
+;; http://lists.gnu.org/archive/html/emacs-devel/2009-08/msg00236.html
+(when (hash-table-p (car (read-from-string
+ (prin1-to-string (make-hash-table)))))
+ (provide 'hashtable-print-readable))
+
+;; This is used in lisp/Makefile.in and in leim/Makefile.in to
+;; generate file names for autoloads, custom-deps, and finder-data.
+(defun unmsys--file-name (file)
+ "Produce the canonical file name for FILE from its MSYS form.
+
+On systems other than MS-Windows, just returns FILE.
+On MS-Windows, converts /d/foo/bar form of file names
+passed by MSYS Make into d:/foo/bar that Emacs can grok.
+
+This function is called from lisp/Makefile and leim/Makefile."
+ (when (and (eq system-type 'windows-nt)
+ (string-match "\\`/[a-zA-Z]/" file))
+ (setq file (concat (substring file 1 2) ":" (substring file 2))))
+ file)
+
+
+;;; subr.el ends here
diff --git a/tests/examplefiles/tads3_example.t b/tests/examplefiles/tads3_example.t
new file mode 100644
index 00000000..41881c93
--- /dev/null
+++ b/tests/examplefiles/tads3_example.t
@@ -0,0 +1,1248 @@
+#charset "utf-8"
+
+#include <adv3.h>
+#include <en_us.h>
+
+extern function extern_function;
+extern method extern_method;
+extern function extern_function(a, b=a, c='<<a>>', d:, e:=1, f?, ...);
+extern method extern_method(a, b=a, c='<<a>>', d:, e:=1, f?, [g]);;
+extern class extern_class;
+extern object extern_object;
+intrinsic 't3vm' { };
+#ifndef PropDefAny
+intrinsic class Object 'root-object/030004' { };
+#endif
+object /**//**/ // /* \\
+#define Room Unthing
+ template [lst];
+
+/*
+ * Quotations from "Le Roman de la Rose" are transcribed from MS. Douce 195,
+ * owned by Bodleian Library, University of Oxford
+ * (http://image.ox.ac.uk/show?collection=bodleian&manuscript=msdouce195).
+ */
+
+versionInfo: GameID
+ IFID = '17d8efc3-07da-4dde-a837-ff7c4e386a77'
+ name = 'Pygmentalion'
+ byline = 'by David Corbett'
+ htmlByline = 'by <a href="mailto:corbett.dav\100husky.neu.edu">David
+ Corbett</a>'
+ version = '1'
+ authorEmail = 'David Corbett\040<corbett.dav\x40husky.neu.edu>'
+ desc = 'You have fallen in love with a statue\x2e'
+ htmlDesc = 'You have fallen in love with a statue\x2E'
+;
+
+/*
+ * Pymalion fu ẽtailleꝛꝛes.
+ * Poᷣtrayãs en fus ⁊ en peꝛꝛeˢ
+ * En metaulx en os ⁊ en cyꝛes
+ * Et en touteˢ aultres matires.
+ * Quon peult a tel oeuure trouuer.
+ * Poᷣ ſon grant engin eſpꝛouuer.
+ * Car maiſtre en fu bien dire loz.
+ * Ainſi com poᷣ acquerre loz
+ * Se voult a poᷣtraire deduyꝛe
+ * Si fiſt vng ymage diuuyꝛe
+ * Et miſt au faire tel entente
+ * Quel fu ſi plaiſãt et ſi gente
+ * Quel ſembloit eſtre auſſi viue.
+ * Com la plus belle riens q̇ viue
+ * (MS. Douce 195, fol. 149r)
+ */
+
+modify _init()
+{
+ ({: local r, r = randomize, r})();
+ replaced();
+}
+
+gameMain: GameMainDef
+ initialPlayerChar: Actor {
+ desc = "You look the same as usual, but you feel unusually
+ sentimental. "
+ location = entrance
+ }
+ showIntro
+ {
+ "The statue is undeniably a masterpiece: the most skillful carving you
+ have ever done, and the most beautiful woman you have ever seen.
+ Unfortunately, she is also an inanimate block, and now you can neither
+ work nor rest for unrequitable love.\b
+ Once again you stumble into your studio, hoping and praying to find
+ your statue brought to life.\b
+ <b><<versionInfo.name>></b>\r\n
+ <<versionInfo.byline>>\b";
+ }
+;
+
+enum token token, tokOp, token;
+
+modify cmdTokenizer
+ rules_ = static
+ [
+ ['whitespace', new RexPattern('%s+'), nil, &tokCvtSkip, nil],
+ ['punctuation', new RexPattern('[.,;:?!]'), tokPunct, nil, nil],
+ ['spelled number',
+ new RexPattern('<NoCase>(twenty|thirty|forty|fifty|sixty|'
+ + 'seventy|eighty|ninety)-'
+ + '(one|two|three|four|five|six|seven|eight|nine)'
+ + '(?!<AlphaNum>)'),
+ tokWord, &tokCvtSpelledNumber, nil],
+ ['spelled operator', new RexPattern(
+ '<NoCase>(plus|positive|minus|negat(iv)?e|not|inverse(%s+of)?|'
+ + 'times|over|divided%s+by|mod(ulo)?|and|xor|or|[al]?sh[lr])'
+ + '(?!<AlphaNum>)'),
+ tokOp, &tokCvtSpelledOperator, nil],
+ ['operator', R'[-!~+*/%&^|]|<<|>>>?', tokOp, nil, nil],
+ ['word', new RexPattern('<Alpha|-|&><AlphaNum|-|&|squote>*'),
+ tokWord, nil, nil],
+ ['string ascii-quote', R"""<min>([`\'"])(.*)%1(?!<AlphaNum>)""",
+ tokString, nil, nil],
+ ['string back-quote', R"<min>`(.*)'(?!%w)", tokString, nil, nil],
+ ['string curly single-quote', new RexPattern('<min>\u2018(.*)\u2019'),
+ tokString, nil, nil],
+ ['string curly double-quote', new RexPattern('<min>\u201C(.*)\u201D'),
+ tokString, nil, nil],
+ ['string unterminated', R'''([`\'"\u2018\u201C](.*)''', tokString,
+ nil, nil],
+ ['integer', new RexPattern('[0-9]+'), tokInt, nil, nil]
+ ]
+ replace tokCvtSpelledOperator(txt, typ, toks)
+ {
+ toks.append([rexReplace(R'%s+', txt.toLower(), '\\'), typ, txt]);
+ }
+;
+
+/* Tokens */
+
+/*
+ * Puiˢ li reueſt en maĩteˢ guiſes.
+ * Robeˢ faicteˢ ꝑ grãˢ maiſtriſeˢ.
+ * De biaulx dꝛaps de ſoye ⁊ de laĩe.
+ * Deſcarlate de tiretaine
+ * De vert de pers ⁊ de bꝛunecte
+ * De couleᷣ freſche fine ⁊ necte
+ * Ou moult a riches paneˢ miſes.
+ * Herminees vaires et griſes
+ * Puis les li roſte puis reſſaye.
+ * Cõmant li ſiet robbe de ſaye
+ * Sendaulx meloguins galebꝛunˢ.
+ * Indes vermeilz iaunes ⁊ bꝛunˢ.
+ * [...]
+ * Aultre foiz luy repꝛẽd courage.
+ * De tout oſter ⁊ mectre guindeˢ.
+ * Iaunes vermeilles vers ⁊ indeˢ.
+ * (MS. Douce 195, fol. 150r)
+ */
+
+class Token: Achievement
+{
+ points = 1;
+ desc = "<<before_>><<desc_>><<after_>>";
+ before = before = '', before_
+ after = (after = '', after_)
+}
+
+Token template inherited 'before_' 'after_' 'desc_';
+
+#define DefineToken(name, before, after) name##Token: Token before after #@name
+
+DefineToken(builtin, '<font color=g&#x72;een>', '</font>');
+DefineToken(comment, '<i><font color=#408080>', '</font></i>');
+DefineToken(decorator, '<font color=#aa22ff>', '</font>');
+DefineToken(error, '<U><FONT COLOR=RED>', '</FONT></U>');
+DefineToken(escape, '<b><font color=#bb6622>', '</font></b>');
+DefineToken(float, '<u><font color=gray>', '</font></u>');
+DefineToken(keyword, '<b><font face=TADS-Sans color=green>', '</font></b>');
+DefineToken(label, '<font color=#A0A000>', '</font>');
+DefineToken(long, '<i><font color=gray>', '</font></i>');
+DefineToken(name, '<u>', '</u>');
+DefineToken(operator, '<b><font color=\"#AA22F&#x46;\">', '</font></b>');
+DefineToken(string, '<font color=\'#BA212&#49;\'>', '</font>');
+DefineToken(whitespace, '<font color="bgcolor"bgcolor=\'text\'>', '</font>');
+
+function highlightToken(tokenString)
+{
+ local token = [
+ 'built in' -> builtinToken,
+ 'comment' -> commentToken,
+ 'decorator' -> decoratorToken,
+ 'error' -> errorToken,
+ 'escape' -> escapeToken,
+ 'float' -> floatToken,
+ 'keyword' -> keywordToken,
+ 'label' -> labelToken,
+ 'long' -> longToken,
+ 'name' -> nameToken,
+ 'operator' -> operatorToken,
+ 'string' -> stringToken,
+ 'white space' -> whitespaceToken,
+ * -> nil
+ ][tokenString.toLower()];
+ if (!token)
+ return tokenString;
+ token.awardPointsOnce();
+ return '<<token.before>><<tokenString>><<token.after>>';
+}
+
+string /**//**/ // /* \\
+#define Room Unthing
+ template <<highlight *>> highlightToken;
+
+/* Grammar for materials */
+
+dictionary property material;
+grammar adjWord(material): <material material>->adj_ : AdjPhraseWithVocab
+ getVocabMatchList(resolver, results, extraFlags)
+ {
+ return getWordMatches(adj_, &material, resolver, extraFlags,
+ VocabTruncated);
+ }
+ getAdjustedTokens()
+ {
+ return [adj_, &material];
+ }
+;
+
+/* Rooms and objects */
+
++ property location;
+
+entrance: Room 'Entrance'
+ "You are in the entrance to your studio. This is where you carve great
+ works of art, not that you have felt like making any lately. A door leads
+ outside, and the studio itself is to the north and the east. "
+ north = workbenchRoom
+ northeast = sinkRoom
+ east = altarRoom
+ south = door
+ out asExit(south)
+;
+
++ door: LockableWithKey, Door 'door' 'door'
+ "It is a simple wooden door. "
+ material = 'wood' 'wooden'
+ keyList = [key]
+ cannotOpenLockedMsg = '{The dobj/He} {is} locked. You cannot
+ <<highlight 'escape'>>! '
+;
+
+key: PresentLater, Key 'key' 'key' @altar
+ "It is a <<unless clean>>grimy<<end>> bronze key. <<if clean>>On it is \
+ etched the word <q><<keyword>></q>. "
+ material = 'bronze'
+ clean = nil
+ keyword = (keyword = randomGreekWord(), targetprop)
+ dobjFor(Clean) { verify { } action { askForIobj(CleanWith); } }
+ dobjFor(CleanWith)
+ {
+ verify
+ {
+ if (clean)
+ illogicalAlready('{The dobj/He} {is} already clean. ');
+ }
+ action
+ {
+ gDobj.clean = true;
+ "{You/He} clean{s} {the dobj/him}, revealing an inscription. ";
+ }
+ }
+ dobjFor(Read) { verify { nonObvious; } }
+;
+
+workbenchRoom: Room 'At the Workbench'
+ "This workbench, in the northwest part of the studio, was where you would
+ create works of art. Now you just come here to contemplate your
+ creation&rsquo;s beauty and lament your hopeless situation.\b
+ The statue stands on a plinth beside the workbench. "
+ east = sinkRoom
+ southeast = altarRoom
+ south = entrance
+ getDestName(actor, origin) { return 'the workbench'; }
+;
+
++ workbench: Fixture, Surface
+ 'workbench/bench/material/materials/tool/tools' 'workbench'
+ "Normally, the workbench would be scattered with half-finished projects,
+ but now your tools and materials lie abandoned. "
+;
+
++ plinth: Fixture, Thing 'marble plinth/pedestal' 'plinth'
+ "It&rsquo;s a smoothed block of marble about a cubit high. "
+;
+
+replace grammar predicate(Screw): ' ': object;
+replace grammar predicate(ScrewWith): ' ': object;
++ + statue: Fixture, Surface
+ '"creation\'s" beauty/carving/creation/galatea/statue/woman' 'statue'
+ "This is a<<if nameToken.scoreCount>>n untitled<<end>> statue of a woman
+ carved from <<if errorToken.scoreCount>>flawless <<end>>
+ <<if whitespaceToken.scoreCount>>milk-white <<end>>ivory.
+ <<if escapeToken.scoreCount || longToken.scoreCount>>Her
+ <<if longToken.scoreCount>>long <<end>>hair is done up in a
+ chignon<<if escapeToken.scoreCount>>, with a few strands falling down her
+ neck<<end>><<if floatToken.scoreCount>>, and \v<<else>>.<<end>><<end>>
+ <<if floatToken.scoreCount>>She radiates an aura of contrapposto grace.
+ <<end>><<if keywordToken.scoreCount>>\bYou wonder what she
+ <<if labelToken.scoreCount>>is going to<<else>>will<<end>> be like as a
+ woman.
+ <<if decoratorToken.scoreCount>>Maybe she&rsquo;ll be a painter and expand
+ your business.<<end>>
+ <<if operatorToken.scoreCount>>Maybe she&rsquo;ll have a head for figures
+ and will put the accounts in order.<<end>>
+ <<if builtinToken.scoreCount>>She&rsquo;ll love you, obviously, but beyond
+ that you don&rsquo;t know.<<end>><<end>>
+ <<if commentToken.scoreCount>>If only Aphrodite would bring her to life
+ without this silly puzzle about tokens and mirrors!<<end>> "
+ material = 'ivory'
+ propertyset 'is*'
+ {
+ propertyset 'H*'
+ {
+ im = nil\
+ er = true;
+ }
+ It = true
+ }
+ iobjFor(PutOn)
+ {
+ check
+ {
+ if (gDobj not /**//**/ // /* \\
+#define Room Unthing
+ in (necklace, __objref(necklace, warn)))
+ "How rude! You don&rsquo;t know what you were thinking. ";
+ }
+ }
+ iobjFor(GiveTo) remapTo(PutOn, DirectObject, IndirectObject)
+;
+
++++ necklace: Wearable
+ 'pearl necklace/string pearls' '<<highlight 'string'>> of pearls'
+ "This is a masterfully crafted pearl necklace. You hope the statue
+ won&rsquo;t mind if you hold onto it for a while. "
+ initDesc = "You gave the statue this pearl necklace yesterday. "
+ isPlural = true
+;
+
+altarRoom: Room 'At the Altar'
+ "Light from the window illuminates a crude altar. Until recently, this
+ corner was your bedroom. The rest of the studio lies north and west. "
+ north = sinkRoom
+ northwest = workbenchRoom
+ west = entrance
+ getDestName(actor, origin) { return 'the altar'; }
+;
+
++ window: Fixture 'window' 'window'
+ "It&rsquo;s just a window above the altar. <<one of>>The space under the
+ window is blank; as an interior <<highlight 'decorator'>>, you can&rsquo;t
+ help but think the wall would benefit from a bas-relief, but &ndash;
+ <i>sigh</i> &endash you are too lovelorn to wield the chisel. <<||>>The
+ wall right below it is a boring <<highlight 'white space'>>. <<stopping>>"
+;
+
++ altar: Fixture, Surface 'crude rough altar/banker/slab' 'altar'
+ "A rough marble slab lies on a wooden banker. In your rush to construct an
+ altar, you neglected the usual surface finish and friezes, but you pray at
+ it anyway. You are sure the gods will understand. "
+ material = 'marble' 'wood' 'wooden'
+ bulkCapacity = 1
+ dobjFor(PrayAt)
+ {
+ verify { }
+ action()
+ {
+ /*
+ * Biaulx dieux diſt il tout ce poez.
+ * Sil voꝰ plaiſt ma requeſte oez
+ * [...]
+ * Et la belle q̇ mon cueᷣ emble
+ * Qui ſi bien yuoyꝛe reſſemble.
+ * Deuiengne ma loyal amye
+ * De fẽme ait coꝛps ame et vie
+ * (MS. Douce 195, fol. 151r)
+ */
+ local offering;
+ foreach (offering in contents);
+ if (!keywordToken.scoreCount)
+ "<<one of>><q>O Aphrodite,</q> you say, <q>comforter of
+ hopeless lovers, hear my prayer! May she to whom I have given
+ my heart be given body, soul, and life. And a colorful
+ personality. And&mdash</q>\b
+ You are interrupted by a shimmering about the altar. As you
+ watch, it takes the form of a callipygian goddess.\b
+ <q>Mortal, I have heard your heart-felt and oft-repeated plea,
+ and I will take pity on you,</q> says Aphrodite. <q>If you give
+ me a token of your love as an offering, I will give you the
+ <<highlight 'keyword'>> of life. Speak this word in the
+ presence of a mirror, and I will grant your request.</q>\b
+ She fades away, adding, <q>As for her colorful personality,
+ just look around you.</q> <<or>><<stopping>>";
+ else if (key.location)
+ "<q>O Aphrodite,</q> you say, <q>what am I supposed to do
+ again?</q>\bThe goddess reappears and reminds you to speak the
+ keyword of life at a mirror. <<one of>><q>What&rsquo;s the
+ keyword, then?</q> <q>Gods help those who help themselves.
+ Figure it out yourself.</q><<or>><q>Why a mirror?</q> <q>I like
+ mirrors.</q><<purely at random>> ";
+ else if (offering == necklace)
+ {
+ "Aphrodite reappears. <q>A necklace! Perfect!</q> The necklace
+ disappears in a bright flash. When your eyes readjust, you see
+ a key lying in its place. ";
+ necklace.moveInto(nil);
+ key.makePresent();
+ }
+ else if (+offering)
+ "Aphrodite reappears. She eyes <<offering.theNameObj>>
+ skeptically. <q><<one of>>No <<highlight 'comment'>>.<<or>>You
+ call <i>that</i> a token of love?<<or>>\^<<offering.aNameObj>>?
+ Really?<<or>>Come on, mortal, it&rsquo;s not that
+ difficult!<<then at random>></q> ";
+ else
+ "<q>I heard you the first time,</q> says Aphrodite. <q>Prove
+ your devotion by offering a token of your love at the altar,
+ or the deal&rsquo;s off.</q> ";
+ }
+ }
+ iobjFor(GiveTo) remapTo(PutOn, DirectObject, IndirectObject)
+;
+
+aphrodite: Unthing
+ '(love) aphrodite/cytherea/god/goddess/venus love' 'Aphrodite'
+ '<<if gActor.canSee(altar)>>You can only pray to a god.
+ <<else>>You need an altar to interact with a god. '
+ location = (gPlayerChar)
+ isProperName = true
+ isHer = true
+ iobjFor(GiveTo)
+ {
+ verify
+ {
+ illogical('She isn&rsquo;t here. You&rsquo;ll have to leave {the
+ dobj/him} somewhere she can find it. ');
+ }
+ }
+ dobjFor(PrayAt) maybeRemapTo(gActor.canSee(altar), PrayAt, altar)
+;
+
+sinkRoom: Room 'Washroom'
+ "Sculpting marble is a dusty business. You use this sink to clean off after
+ a hard day&rsquo;s work. Beside the sink is a small end table, and on the
+ wall is a calculator. The rest of the studio is south and west. "
+ south = altarRoom
+ southwest = entrance
+ west = workbenchRoom
+;
+
+property level, overflowing;
+export overflowing;
+export level 'waterLevel';
++ sink: Fixture '(auto) (mop) auto-sink/autosink/bowl/drain/faucet/sink' 'sink'
+ "This is a state-of-the-art mop sink with anti-miasmic coating and bronze
+ backsplash. It is so modern, there are no handles or other obvious ways to
+ turn it on.\b
+ <<if overflowing>>It is overflowing.
+ <<else unless level < 19500>>It is full to the brim with water.
+ <<otherwise if level >= 15000>>It is full of water.
+ <<otherwise unless level < 10000>>It is half full of water.
+ <<else if level >= 2000>>There is some water in the sink.
+ <<else if level > 0>>A small puddle has formed at the bottom of the sink.
+ <<otherwise>>It is empty.
+ <<if level <= -1.0e+2>>It looks like it hasn&rsquo;t been used in a
+ <<highlight 'long'>> time. "
+ level = not in ([lst]) { return argcount; }
+ not = in()
+ overflowing = nil
+ current = self
+ setLevel(level:)
+ {
+ targetobj.current.overflowing = level == nil;
+ targetobj.current.level = min(level ?? 0, 20000);
+ if (sink.overflowing || sink.level > 0e+1)
+ sinkWater.makePresent();
+ if (basin.overflowing || basin.level > 0e-1)
+ basinWater.makePresent();
+ }
+ iobjFor(CleanWith) remapTo(CleanWith, DirectObject, sinkWater)
+;
+
+++ sinkWater: PresentLater, Fixture
+ '(sink) water sink water' 'water' "<<sink.desc>>"
+ disambigName = 'water in the sink'
+ dobjFor(Drink)
+ {
+ verify { illogical('''{You're} not thirsty. '''); }
+ }
+ iobjFor(CleanWith)
+ {
+ preCond = []
+ verify {
+ if (!location)
+ illogicalNow('There is no water in the sink. ');
+ if (!sink.overflowing && sink.level < 1e2)
+ illogicalNow('There is not enough water in the sink. ');
+ }
+ }
+;
+
++ table: Fixture, Surface 'small end bracket/table' 'table'
+ "<<first time>>Upon closer inspection, you see that \v<<only>>The table is
+ bracketed to the wall. "
+;
+
+++ Readable '"operator\'s" manual' 'manual'
+ "<center ><<highlight 'Operator'>>&rsquo;s Manual<\center>\b
+ <bq>To control the auto-sink, use the calculator add-on to enter the
+ desired volume of water. For example,\n
+ \t\t<<aHref('calculate 69 * 105', 'CALCULATE 69 TIMES 105')>>\n
+ to fill the basin with <<% ,d 0x69 * 0105>> kochliaria<!-- an ancient Greek
+ unit, < 5 ml >.\b
+ Warning: Do not use big numbers or divide by zero!<\\bq>\b"
+ dobjFor(Read) asDobjFor(Examine)
+;
+
++ calculator: Fixture, Thing 'button/buttons/calculator/screen' 'calculator'
+ "The calculator is <<highlight 'built in'>>to the wall beside the sink. It
+ has buttons for all the standard unary and binary operations.
+ <<if(screen)>>The screen reads <<screen>>"
+ screen = nil
+ literalMatch = ''
+;
+
+method wrongContextMsg()
+{
+ return '<font face="TADS-Typewriter"><<highlight '<<'ERROR'>>'>> {{can\'t
+ use\ \"<<self.literalMatch>>\" in that context}}</font>. ';
+}
+
+portico: OutdoorRoom 'Portico'
+ "Columns line the portico stretching east and west, and steps lead down to
+ the south. The door leads back in, and beside the door is a basin. A
+ <<highlight 'label'>> is affixed to the doorpost. "
+ north = (__objref(error, error))
+ in asExit(north)
+ south: FakeConnector
+ {
+ "You begin moving away from the door, but then you remember the statue.
+ The gods won&rsquo;t bring her to life if you give up this easily!
+ <<setMethod(&isConnectorApparent, {origin, actor: nil})>>"
+ }
+ east asExit(south)
+ west asExit(south)
+ down asExit(south)
+;
+
+error: LockableWithKey, Door ->door 'door' 'door' @portico "<<door.desc>>"
+ keyList = (otherSide.keyList)
+;
+
++ Fixture 'column*columns' 'columns'
+ "There are six <<one of>>short columns with simple capitals<<or>>slender
+ columns with scrollwork in the capitals<<or>>tall columns with ornate
+ capitals<<sticky random>>. Above the architrave is a frieze depicting some
+ of your wares. <<first time>>The cornice overhangs the frieze a bit too
+ much, you think; perhaps you should shorten it. You try to concentrate on
+ the architecture of the portico, stoically ignoring what you cannot change,
+ but it doesn&rsquo;t work. It never does. <<only>>"
+ isPlural = true
+;
+
++ Fixture, Readable 'label/doorpost' '<<highlight 'label'>>'
+ "The <<highlight 'label'>> says <q>Pygmentalion</q><<first time>> (which is
+ your <<highlight 'name'>>)<<only>>. "
+ dobjFor(Read) asDobjFor(Examine)
+;
+
++ basin: RestrictedContainer, Fixture
+ '(bird) basin/bath/birdbath/fountain/mosaic/pool/tile/tiles' 'basin'
+ "It is shallow but wide, and lined with tiles. It used to be a fountain,
+ but it stopped working after they installed the new sink. Something to do
+ with water pressure, no doubt. Now you just use it as a birdbath.\b
+ <<if overflowing>>Water is spilling over the sides in a turbulent flow.
+ <<else if level >= 19500>>It is full to the brim with water. You can see
+ your reflection quite clearly. Gods, you look a mess.
+ <<else if level >= 15000>>It is full of water. You can see your reflection.
+ <<else if level >= 10000>>It is half full. From the right angle, you can
+ make out a shadowy reflection of the columns, but nothing more.
+ <<else if level >= 2000>>There is some water in it, but you can still make
+ out the mosaic lining the basin.
+ <<else if level > 0>>A small puddle has formed at the bottom of the basin.
+ <<else>>It is empty.
+ <<if level <= -1.0e+2>>It looks as if it has never been filled. "
+ level = 0
+ overflowing = nil
+ isMirror = (level >= 15000)
+ setLevel(level:)
+ {
+ delegated sink.setLevel(_: sourceTextOrder ? level: nil, level: level);
+ }
+ iobjFor(CleanWith) maybeRemapTo(basinWater.location, CleanWith,
+ DirectObject, basinWater)
+;
+
+++ basinWater: PresentLater, Fixture '(basin) water basin water' 'water'
+ "<<basin.desc>>"
+ disambigName = 'water in the basin'
+ dobjFor(Drink)
+ {
+ verify
+ {
+ illogical('Drinking from a birdbath might not be the best idea. ');
+ }
+ }
+ iobjFor(CleanWith)
+ {
+ preCond = [touchObj]
+ verify {
+ illogical('Washing something in a birdbath is unlikely to get it
+ clean. ');
+ }
+ }
+;
+
+++ feather: PresentLater, Thing
+ '(bird) (dove) (pigeon) (turtle) (turtle-dove) (turtledove) feather'
+ 'feather' "It&rsquo;s a turtle-dove feather: an auspicious omen! "
+ initSpecialDesc = "<<one of>>A little brown bird is splashing around in the
+ basin. When it notices you, it ruffles its feathers, one of which falls
+ out, and flies out between the columns. <<or>>A feather is
+ <<if basin.overflowing || basin.level > 0>><<highlight 'float'>>ing
+ <<else>>lying <<end>> in the basin. <<stopping>>"
+;
+
+/* Water */
+
+trickling(water) multimethod
+{
+ if (sink.overflowing)
+ {
+ dirs: for (local dir in Direction.allDirections)
+ {
+ if (dir.ofKind(RelativeDirection))
+ continue;
+ if (dir.ofKind(ShipboardDirection))
+ continue dirs;
+ if (water.eventualLocation.(dir.dirProp) == __objref(entrance))
+ return 'trickling <<dir.name>>';
+ }
+ }
+ return 'a stagnant puddle';
+}
+
+class Water:PresentLater,Fixture'(floor) (ground) water puddle water''water'
+ "The water on the floor is <<trickling(self)>>. "
+ disambigName = 'water on the floor'
+ specialDesc = "The floor is covered with water. "
+ dobjFor(Drink)
+ {
+ preCond = [touchObj]
+ verify { }
+ check { failCheck('{You\'re} not thirsty. '); }
+ }
+ iobjFor(CleanWith)
+ {
+ preCond = [touchObj]
+ verify { illogical('The water on the ground is too dirty. '); }
+ }
+;
+
+Water template +location | ~location "specialDesc"? inherited;
+Water +altarRoom;
+Water +sinkRoom { ;; };
+Water { +workbenchRoom };
+
+entranceWater: Water +entrance
+ "<<if sink.overflowing>>At your feet, all the water from the sink flows
+ into a <<%-o 02>>-dactyl slit in the baseboard. <<else>><<inherited>>"
+ vocabWords = 'water baseboard/puddle/slit water'
+;
+trickling(entranceWater w)
+{
+ return sink.overflowing ? 'trickling into the wall' : inherited<*>(w);
+}
+
+porticoWater: Water ~portico;
+trickling(porticoWater w)
+{
+ return basin.overflowing ? 'trickling down the stairs' : inherited<*>(w);
+}
+
+/* Calculating */
+
+;;;class is: Exception { finalize { } };;; // InvalidSpecificationError
+
+DefineLiteralAction(Calculate)
+ checkAction()
+ {
+ if (defined(calculator) && !gActor.canTouch(calculator))
+ {
+ { gActor.failCheck('{You/He} {can\'t} do that kind of math in
+ {your} head. '); }
+ }
+ }
+ execAction()
+ {
+ local op = function(...) { throw new is(); }, a, b;
+ local opString = (literalMatch, literalMatch);
+ if (numMatch)
+ goto binary;
+ switch (opString)
+ {
+ case '!':
+ case 'not':
+ opString = '!';
+ op = {x : !toInteger('<<%_\u0030[1]5.3\170x>>', 16)};
+ break;
+ case '+':
+ case 'plus':
+ case 'positive':
+ opString = '+';
+ op = {self_ : self_};
+ break;
+ case '-':
+ case 'minus':
+ case 'negate':
+ case 'negative':
+ opString = '&#x2212;';
+ op = {x : -x};
+ break;
+ case '~':
+ case 'inverse':
+ case 'inverse\\of':
+ opString = '~';
+ op = {x : ~x};
+ break;
+ }
+ goto doCalculation;
+ binary: binaryOp:
+ switch (opString)
+ {
+ case '+':
+ case 'plus':
+ opString = '+';
+ op = {a, b : +a+++b};
+ break binaryOp;
+ case '-':
+ case 'minus':
+ opString = '&#8722;';
+ op = {a, b : -b-- - -a};
+ break;
+ case '*':
+ case 'times':
+ opString = '&times;';
+ op = new function(a, b) { return a * b; };
+ break;
+ case '/':
+ case 'over':
+ case 'divided\\by':
+ opString = '/';
+ op = function(a, b) { return a / b; };
+ break;
+ case '%':
+ case 'mod':
+ case 'modulo':
+ opString = 'mod';
+ op = function(a, b, multimethod=b) { return a % multimethod; };
+ break;
+ case '\<<':
+ case 'shl':
+ case 'ashl':
+ case 'lshl':
+ opString = '&lt;&lt;';
+ op = {a, b, c? : a << b};
+ break;
+ case '&':
+ case 'and':
+ opString = '&amp;';
+ op = {a, b : local badness = a, local token = b, badness & token};
+ break;
+ case '^':
+ case 'xor':
+ opString = '^';
+ op = {a, b, c? : a ^ b};
+ break;
+ case '|':
+ case 'or':
+ opString = '|';
+ op = {a, b : a | b};
+ break;
+ case '>\>':
+ case 'shr':
+ case 'ashr':
+ opString = '>>';
+ op = {a, b : toInteger('<<(a >> b)>>')};
+ break;
+ case '>>>':
+ case 'lshr':
+ opString = '>>>';
+ op = {a, b : b ? invokee(a >>> 1, --b) : a};
+ break;
+ }
+ opString = ' <<opString>> ';
+ doCalculation:
+ "The calculator outputs ";
+ try
+ {
+ a = numMatch ? numMatch.getval(colon : nil) : nil;
+ b = numMatch2.getval();
+ local result = toInteger(numMatch ? op(a, b) : op(b));
+ calculator.setMethod(&screen, method()
+ {
+ return '<font face="TADS-Typewriter"><<a>><<opString>><<b>> =
+ <<%d result>></font>. ';
+ });
+ local oldLevel = sink.level;
+ sink.current.setLevel(level: result);
+ "<<calculator.screen()>>
+ <<if sink.current == basin>>The sink gurgles and the pipes rattle.
+ <<else if sink.level == oldLevel>>The sink gurgles.
+ <<else if sink.level <= 0 && oldLevel <= 0>>The pipes rattle for a
+ moment.
+ <<else if sink.level <= 0>>All the water drains from the sink.
+ <<else if oldLevel <= 0>>The sink begins to fill with water.
+ <<else if sink.level < oldLevel - 0xabc>>Some of the water drains
+ from the sink.
+ <<else if sink.level < oldLevel>>The water level drops slightly.
+ <<else if oldLevel < sink.level - 0XABC>>Water splashes into the
+ sink for a few seconds.
+ <<else if oldLevel < sink.level>>Water dribbles from the faucet. ";
+ }
+ catch (is in)
+ {
+ calculator.literalMatch = literalMatch;
+ calculator.setMethod(&screen, &wrongContextMsg);
+ "<<calculator.screen()>>";
+ }
+ catch (RuntimeError e)
+ {
+ calculator.setMethod(&screen, new method
+ {
+ return '<font face=\"TADS-Typewriter\"><<highlight 'ERROR'>>
+ {{<<e.exceptionMessage>>}}</font>.\b';
+ });
+ "<<calculator.screen()>>";
+ switch (e.errno_)
+ {
+ case 2008: // division by zero
+ "<<if sink.current == sink
+ && (sink.level > 0 || sink.overflowing)>>The water in the
+ sink is sucked down the drain.
+ <<else if basin.level > 0 || basin.overflowing>>Water comes up
+ from the drain and <<if basin.overflowing>>spills over
+ the edges of<<else>>begins to fill<<end>> the sink.
+ <<else>>The sink gurgles and the pipes rattle. ";
+ sink.current = sink.current == sink ? basin : sink;
+ local _tmp = sink.level;
+ sink.level = basin.level;
+ basin.level = _tmp;
+ _tmp = sink.overflowing;
+ sink.overflowing = basin.overflowing;
+ basin.overflowing = _tmp;
+ if (!sink.current.overflowing)
+ break;
+ // fall through
+ case 2023: // numeric overflow
+ if (!sink.current.overflowing)
+ "<<if sink.current == sink>>High-pressure water streams
+ from the faucet, filling the sink and spilling over the
+ edge. Rivulets begin running down the slight gradient of
+ the floor. <<else>>The pipes shake loudly. ";
+ forEachInstance(Water, function(w) {
+ if ((w.eventualLocation == portico) ==
+ (sink.current == basin))
+ w.makePresent();
+ });
+ sink.current.setLevel(level: nil);
+ break;
+ default:
+ throw e;
+ }
+ }
+ if (!gPlayerChar.hasSeen(feather))
+ {
+ feather.makePresentIf(basin.isMirror);
+ feather.moved = nil;
+ }
+ }
+;
+
+VerbRule(Calculate)
+ ('c' | 'calculate' | 'enter' | 'eval' | 'evaluate') (()|(singleNumber|))
+ (tokOp->literalMatch | '!'->literalMatch) numberPhrase -> numMatch2
+ : CalculateAction
+ verbPhrase = 'calculate/calculating (what) (how) (what)'
+;
+
+/* Cleaning */
+
+modify VerbRule(Clean)
+ [ /**//**/ // /* \\
+#define Room Unthing
+ badness 500] ('clean' | 'wash') dobjList:
+;
+
+grammar predicate(CleanIn):
+ ('clean' | 'wash') dobjList ('at' | 'in' | 'with') singleIobj
+ : CleanWithAction
+ verbPhrase = 'clean/cleaning (what) (in what)'
+ askIobjResponseProd = inSingleNoun
+ omitIobjInDobjQuery = true
+;
+
+/* Prayer */
+
+VerbRule(Pray)
+ [badness 500] 'pray' singleDobj
+ : PrayAtAction
+ verbPhrase = 'pray/praying (at what)'
+;
+
+VerbRule(PrayAt)
+ 'pray' ('at' | 'to') singleDobj
+ : PrayAtAction
+ verbPhrase = 'pray/praying (at what)'
+ askDobjResponseProd = singleNoun
+;
+
+DefineTAction(PrayAt);
+modify Thing
+ dobjFor(PrayAt)
+ {
+ verify
+ {
+ illogical('{You/He} {cannot} pray at {that dobj/him}. ');
+ }
+ }
+;
+
+/* Extended grammar for 'in' and 'out' */
+
+modify grammar directionName(in): 'in' | 'inside':
+ dir = inDirection
+;
+modify /**//**/ // /* \\
+#define Room Unthing
+ grammar directionName(out): 'out' | 'outside':
+ dir = outDirection
+;
+
+/* Speech */
+
+DefineLiteralAction(Say)
+ execAction
+ {
+ local literal = getLiteral().toLower();
+ if (literal is in ('xyzzy', 'plugh'))
+ tryImplicitActionMsg(&silentImplicitAction, Xyzzy);
+ else if (literal != key.keyword)
+ "Nothing happens. ";
+ else if (literal not in ())
+ {
+ if (gActor.location == portico && basin.isMirror)
+ {
+ if (feather.location == basin)
+ "The air above the basin shimmers, and the feather bobs on
+ the rippling water. After a moment, the shimmering
+ disappears.";
+ else
+ {
+ /*
+ * Venus q̇ la pꝛiere ouyt
+ * [...]
+ * A lymage ẽuoya loꝛs lame.
+ * Si deuĩt ſi treſbelle dame.
+ * Quoncq̄s en toute la contree.
+ * Not len ſi belle encontree.
+ * [...]
+ * Doulx amys aĩs ſuy vꝛ̄e amye.
+ * Pꝛeſte de voſtre compaignye.
+ * Receuoir ⁊ mamoᷣ voꝰ offre.
+ * Sil voꝰ plaiſt receuoir tel offre.
+ * (MS. Douce 195, fol. 151v)
+ */
+ "The air above the basin shimmers for a moment. You hear
+ the door opening behind you. Turning around, you see a
+ woman who looks suspiciously like your statue, except not
+ the color of marble.\b
+ <q>Hello, world,</q> she says. <q>It&rsquo;s nice to be
+ alive at last! Hello, dearest Pygmentalion.</q>\b
+ Ah, what beauty! What mastery of syntax! Praise be to
+ Aphrodite! ";
+ finishGameMsg(ftVictory,
+ [finishOptionUndo, finishOptionFullScore]);
+ }
+ }
+ else
+ "Nothing happens. <<if keywordToken.scoreCount>>Aphrodite said
+ you would need a mirror. <<end>>";
+ }
+ }
+;
+
+VerbRule(Say)
+ ('say' | 'shout') singleLiteral
+ : SayAction
+ verbPhrase = 'say/saying (what)'
+;
+
+VerbRule(SayTo)
+ ('say' | 'shout') singleLiteral ('at' | 'to') singleIobj
+ : SayAction
+ verbPhrase = 'say/saying (what) (to what)'
+;
+
+/**/ #if /* Revere the basileus. */ 0 \
+ // Expel the barbarian.
+;
+ #ifndef __DEBUG
+;
+ #define __DEBUG
+;
+# else
+;
+#if 1
+;
+ #define DEBUG__
+;
+#endif
+;
+ #endif
+;\\
+#endif
+/*
+#endif
+?*/
+//\\
+#endif
+'''
+#endif
+'\''''
+#endif
+\\'''
+"""
+#endif
+"\""""
+#endif
+\\"""
+'
+#endif
+\'
+#endif
+\\'
+"
+#endif
+\"
+#endif
+\\"
+'''<<'<<'
+#endif
+'>>'>>
+#endif
+'''
+"""<<'<<'
+#endif
+'>>'>>
+#endif
+"""
+'<<'<<'
+#endif
+'>>'>>
+#endif
+'
+"<<'<<'
+#endif
+'>>'>>
+#endif
+"//"
+\
+ # endif
+;
+dictionary barbarianDict;
+transient xyzzy: object;
+DefineIAction(Xyzzy)
+ execAction
+ {
+ "Only a barbarian could pronounce such a word. ";
+ local oldSay = t3SetSay({str : nil});
+ try
+ {
+ new transient Vector([
+ '<<one of>><< cycling >>',
+ '<<one of>><< at random>>',
+ '<<one of>><<then purely at random>>',
+ '<<one of>><<as decreasingly likely outcomes>>',
+ '<<one of>><< shuffled>>',
+ '<<one of>><< half shuffled>>',
+ '<<one of>><<then shuffled>>',
+ '<<one of>><<then half shuffled>>']);
+ '''''<font x= color=red bgcolor='silver' face="TADS-Sans"
+ size=\'+1\' x=\"x\">{can't}</font>\'''' '' '''';
+ """""<font x= color=red bgcolor='silver' face="TADS-Sans"
+ size=\'+1\' x=\"x\">{can't}</font>\"""" "" """";
+ '<font x= color=red face="TADS-Sans" size=\'+1\'
+ x=\"x\">{can\'t}</font>\'';
+ "<font x= color=red bgcolor='silver' size=\'+1\'
+ x=\"x\">{can\'t}</font>\"";
+ '''''<font <<'color=red'>> bgcolor<<'='>>silver
+ face=<<'"TADS-Sans"'>>>{ca<<'n\''>>t}</font>\'''' '' '''';
+ """""<font <<'color=red'>> bgcolor<<'='>>silver
+ face=<<'"TADS-Sans"'>>>{ca<<'n\''>>t}</font>\"""" "" """";
+ '<font <<'color=red'>> bgcolor<<'='>>silver
+ face=<<'"TADS-Sans"'>>>{ca<<'n\''>>t}</font>\'';
+ "<font <<'color=red'>> bgcolor<<'='>>silver
+ face=<<'"TADS-Sans"'>>>{ca<<'n\''>>t}</font>\"";
+ '''<s a1={\.}a a2=a{\>} a3=a{\>}a b1='{\>}b' b2='b{\>}' b3='b{\>}b'
+ c1="c{\>}" c2="{\>}c" c3="c{\>}c" d1=\'d{\>}\' d2=\'{\>}d\'
+ d3=\'d{\>}d\' e1=\"e{\>}\" e2=\"{\>}e\" e3=\"e{\>}e\"></s>''';
+ """<s a1={\.}a a2=a{\>} a3=a{\>}a b1='{\>}b' b2='b{\>}' b3='b{\>}b'
+ c1="c{\>}" c2="{\>}c" c3="c{\>}c" d1=\'d{\>}\' d2=\'{\>}d\'
+ d3=\'d{\>}d\' e1=\"e{\>}\" e2=\"{\>}e\" e3=\"e{\>}e\"></s>""";
+ '<s a1={\.}a a2=a{\>} a3=a{\>}a c1="c{\>}" c2="{\>}c" c3="c{\>}c"
+ d1=\'d{\>}\' d2=\'{\>}d\' d3=\'d{\>}d\' e1=\"e{\>}\" e2=\"{\>}e\"
+ e3=\"e{\>}e\"></s>';
+ "<s a1={\.}a a2=a{\>} a3=a{\>}a b1='{\>}b' b2='b{\>}' b3='b{\>}b'
+ d1=\'d{\>}\' d2=\'{\>}d\' d3=\'d{\>}d\' e1=\"e{\>}\" e2=\"{\>}e\"
+ e3=\"e{\>}e\"></s>";
+ '''{a<<1>>b}'''; """{a<<1>>b}"""; '{a<<1>>b}'; "{a<<1>>b}";
+ '''<s a<<'='>>'1' b<<'='>>"2" c<<'='>>\'3\' d<<'='>>\"4\"
+ <<'e'>>=5 f=6' g=7">''';
+ """<s a<<'='>>'1' b<<'='>>"2" c<<'='>>\'3\' d<<'='>>\"4\"
+ <<'e'>>=5 f=6' g=7">""";
+ '<s b<<'='>>"2" c<<'='>>\'3\' d<<'='>>\"4\" <<'e'>>=5 g=7">';
+ "<s a<<'='>>'1' c<<'='>>\'3\' d<<'='>>\"4\" <<'e'>>=5 f=6'>";
+ '''<s a=v\\ a=v\ v\><s a='{'}'\><s a="{"}"\>''';
+ """<s a=v\\ a=v\ v\><s a='{'}'\><s a="{"}"\>""";
+ '<s a=v\\ a=v\ v\><s a=\'{\'}\'\><s a="{"}"\>';
+ "<s a=v\\ a=v\ v\><s a='{'}'\><s a=\"{\"}\"\>";
+ '''<font color='purple>igram</font>'''; '''<t a={'''; '''}''';
+ '''<font color="purple>igram</font>'''; '''<t a='{'''; '''}''';
+ '''<font color=\'purple>igram</font>'''; '''<t a="{'''; '''}''';
+ '''<font color=\"purple>igram</font>''';
+ """<font color='purple>igram</font>"""; """<t a={"""; """}""";
+ """<font color="purple>igram</font>"""; """<t a='{"""; """}""";
+ """<font color=\'purple>igram</font>"""; """<t a=\"{"""; """}""";
+ """<font color=\"purple>igram</font>""";
+ '<font color="purple>igram</font>'; '<t a={'; '}';
+ '<font color=\'purple>igram</font>'; '<t a=\'{'; '}';
+ '<font color=\"purple>igram</font>'; '<t a="{'; '}';
+ "<font color=\"purple>igram</font>"; "<t a={"; "}";
+ "<font color='purple>igram</font>"; "<t a='{"; "}";
+ "<font color=\'purple>igram</font>"; "<t a=\"{"; "}\"";
+ '''<xmp a=v>&amp;\x26<b><\xmp></xmp a=v>''';
+ """<xmp a=v>&amp;\x26<b><\xmp></xmp a=v>""";
+ '<xmp a=v>&amp;\x26<b><\xmp></xmp a=v>';
+ "<xmp a=v>&amp;\x26<b><\xmp></xmp a=v>";
+ '''<xmp a=v>&amp;\x26<b><\xmp><\Xmp a=v>''';
+ """<xmp a=v>&amp;\x26<b><\xmp><\Xmp a=v>""";
+ '<xmp a=v>&amp;\x26<b><\xmp><\Xmp a=v>';
+ "<xmp a=v>&amp;\x26<b><\xmp><\Xmp a=v>";
+ '''<xmp a=v>&amp;\x26<b><\xmp><\\xmp a=v>''';
+ """<xmp a=v>&amp;\x26<b><\xmp><\\xmp a=v>""";
+ '<xmp a=v>&amp;\x26<b><\xmp><\\xmp a=v>';
+ "<xmp a=v>&amp;\x26<b><\xmp><\\xmp a=v>";
+ '''<xmp>'''; """<xmp>"""; '<xmp>'; "<xmp>";
+ '''<listing a=v>&amp;\x26<b><listing><xmp></listing a=v>''';
+ """<listing a=v>&amp;\x26<b><listing><xmp></listing a=v>""";
+ '<listing a=v>&amp;\x26<b><listing><xmp></listing a=v>';
+ "<listing a=v>&amp;\x26<b><listing><xmp></listing a=v>";
+ '''<listing a=v>&amp;\x26<b><listing><xmp><\listing a=v>''';
+ """<listing a=v>&amp;\x26<b><listing><xmp><\listing a=v>""";
+ '<listing a=v>&amp;\x26<b><listing><xmp><\listing a=v>';
+ "<listing a=v>&amp;\x26<b><listing><xmp><\listing a=v>";
+ '''<listing a=v>&amp;\x26<b><listing><xmp><\\listing a=v>''';
+ """<listing a=v>&amp;\x26<b><listing><xmp><\\listing a=v>""";
+ '<listing a=v>&amp;\x26<b><listing><xmp><\\listing a=v>';
+ "<listing a=v>&amp;\x26<b><listing><xmp><\\listing a=v>";
+ '''<listing>'''; """<listing>"""; '<listing>'; "<listing>";
+ }
+ finally
+ {
+ t3SetSay(oldSay);
+ }
+ }
+;
+
+VerbRule(Xyzzy)
+ "xyzzy" | "plugh" *
+ : XyzzyAction
+ verbPhrase = 'babble/talking like a barbarian'
+;
+
+randomGreekWord()
+{
+ local vowels = ['a', 'e', 'e', 'i', 'o', 'y', 'o'];
+ local consonants = ['p', 't', 'k', 'b', 'd', 'g', 's', 'm', 'n', 'l', 'r'];
+ local clusters =
+ ['pn', 'pl', 'pr', 'tm', 'tr', 'kn', 'kl', 'kr', 'bl', 'br'];
+ local ends = consonants - ['b', 'd', 'g'];
+ local word;
+ local retries = 0;
+ for (local r in 0 .. -1 step -1)
+ {
+ for ((r), local i = 0, local j = 2; i < j; ++i, --j)
+ {
+ for (local s = 0, local n in [90, 30, 10]; ; --s)
+ retries -= s * n;
+ }
+ }
+ retries *= 2;
+ retries >>= 1;
+ retries /= 2;
+ retries <<= 1;
+ retries >>>= 2;
+ retries %= 16;
+ retries &= ~1;
+ retries |= 2;
+ retries ^= retries ^ retries;
+ do
+ {
+ word = rand('[ptkbdgsm]?');
+ for (local i in 0 .. __TADS3)
+ word += concat(rand(rand('', clusters, consonants)), rand('"h"?'),
+ rand(vowels...), rand('','', 'i', 'u', rand(ends)));
+ word += rand('"s"?');
+ word = rexReplace(R'^[pk](?![tnlrhaeioy]|[tnlr]h?[^aeioy])', word, '');
+ word = rexReplace(R'^b(?![dlrhaeioy]|[dlr]h?[^aeioy])', word, '');
+ word = rexReplace(R'^g(?![nlrhaeioy]|[nlr]h?[^aeioy])', word, '');
+ word = rexReplace(R'^t(?![mrhaeioy]|[mlr]h?[^aeioy])', word, '');
+ word = rexReplace(R'^d(?![rhaeioy]|rh?[^aeioy])', word, '');
+ word = rexReplace(R'^m(?![nhaeioy]|nh?[^aeioy])', word, '');
+ word = rexReplace(R'^[^aeioy]h?(([^haeioy]h?){2})', word, '%1');
+ word = rexReplace(R'[ptkbdgs]([ptkbdg][^haeioy])', word, '%1');
+ word = rexReplace(R'([mnlr])h', word, 'h%1');
+ word = rexReplace(R'(?<!(^|[ptk]))h', word, '');
+ word = rexReplace(R'^h(?![aeioy])', word, '');
+ word = rexReplace(R'h(?=.*h)', word, '');
+ word = rexReplace(R'(?<=^|r)r', word, 'rh');
+ word = rexReplace(R'([iy]+)[iu]', word, '%1');
+ word = rexReplace(R'nl', word, 'll');
+ word = rexReplace(R'n(?=[pbm])', word, 'm');
+ word = rexReplace(R'(?<.)m(?=[tdn])', word, 'n');
+ word = rexReplace(R'pb|bp', word, 'pp');
+ word = rexReplace(R'td|dt', word, 'tt');
+ word = rexReplace(R'kg|gk', word, 'kk');
+ word = rexReplace(R'bs', word, 'ps');
+ word = rexReplace(R'ds|sd', word, 'z');
+ word = rexReplace(R'gs', word, 'ks');
+ word = rexReplace(R'ts', word, 'ss');
+ word = rexReplace(R'[^pkaeioyusnr]+(s?)$', word, '%1');
+ word = rexReplace(R'[pk]+$', word, '');
+ word = rexReplace(R'(.h?)%1{2,}', word, '%1%1');
+ word = rexReplace(R'^(.h?)%1', word, '%1');
+ word = rexReplace(R'(.h?)%1$', word, '%1');
+ word = rexReplace(R'^y', word, 'hy');
+ word = rexReplace(R'([ptk])([ptk])h', word, '%1h%2h');
+ word = rexReplace(R'([ptk])h%1h', word, '%1%1h');
+ word = rexReplace(R'ks', word, 'x');
+ word = rexReplace(R'gg', word, 'kg');
+ word = rexReplace(R'kh', word, 'ch');
+ } while (retries-- && (word.length() < 4 || !rexSearch(
+ new RexPattern('^(eu|hy|[pgm]n|bd|tm|rh)|(.h.|pp|kc|rr)h|ch([^aeioy])|'
+ + '([^aeioy])y([^aeioy])$|(ps|x|o[ius])$'), word)));
+ return word;
+}
diff --git a/tests/examplefiles/test.R b/tests/examplefiles/test.R
index 54325339..1dd8f64b 100644
--- a/tests/examplefiles/test.R
+++ b/tests/examplefiles/test.R
@@ -33,10 +33,11 @@ NA_foo_ <- NULL
123456.78901
123e3
123E3
-1.23e-3
-1.23e3
-1.23e-3
-## integer constants
+6.02e23
+1.6e-35
+1.E12
+.1234
+## integers
123L
1.23L
## imaginary numbers
@@ -80,7 +81,7 @@ repeat {1+1}
## Switch
x <- 3
switch(x, 2+2, mean(1:10), rnorm(5))
-## Function, dot-dot-dot, return
+## Function, dot-dot-dot, return, sum
foo <- function(...) {
return(sum(...))
}
@@ -151,3 +152,34 @@ world!'
## Backtick strings
`foo123 +!"bar'baz` <- 2 + 2
+
+## Builtin functions
+file.create()
+gamma()
+grep()
+paste()
+rbind()
+rownames()
+R.Version()
+R.version.string()
+sample()
+sapply()
+save.image()
+seq()
+setwd()
+sin()
+
+## Data structures
+servo <- matrix(1:25, nrow = 5)
+numeric()
+vector(servo)
+data.frame()
+list1 <- list(time = 1:40)
+# multidimensional array
+array(c(c(c(2,300,4),c(8,9,0)),c(c(5,60,0),c(66,7,847))), dim=c(3,2,2))
+
+## Namespace
+library(ggplot2)
+require(plyr)
+attach(cars)
+source("test.R")
diff --git a/tests/examplefiles/test.agda b/tests/examplefiles/test.agda
index d930a77b..f6cea91c 100644
--- a/tests/examplefiles/test.agda
+++ b/tests/examplefiles/test.agda
@@ -12,11 +12,18 @@ open import Data.List hiding ([_])
open import Data.Vec hiding ([_])
open import Relation.Nullary.Core
open import Relation.Binary.PropositionalEquality using (_≡_; refl; cong; trans; inspect; [_])
+ renaming (setoid to setiod)
open SemiringSolver
{- this is a {- nested -} comment -}
+postulate pierce : {A B : Set} → ((A → B) → A) → A
+
+instance
+ someBool : Bool
+ someBool = true
+
-- Factorial
_! : ℕ → ℕ
0 ! = 1
diff --git a/tests/examplefiles/test.apl b/tests/examplefiles/test.apl
new file mode 100644
index 00000000..26ecf971
--- /dev/null
+++ b/tests/examplefiles/test.apl
@@ -0,0 +1,26 @@
+∇ R←M COMBIN N;D;E;F;G;P
+ ⍝ Returns a matrix of every possible
+ ⍝ combination of M elements from the
+ ⍝ vector ⍳N. That is, returns a
+ ⍝ matrix with M!N rows and N columns.
+ ⍝
+ E←(⍳P←N-R←M-1)-⎕IO
+ D←R+⍳P
+ R←(P,1)⍴D
+ P←P⍴1
+ L1:→(⎕IO>1↑D←D-1)⍴0
+ P←+\P
+ G←+\¯1↓0,F←⌽P
+ E←F/E-G
+ R←(F/D),R[E+⍳⍴E;]
+ E←G
+ →L1
+∇
+
+∇ R←M QUICKEXP N
+ ⍝ Matrix exponentiation
+ B ← ⌊ 1 + 2 ⍟ N
+ V ← (B ⍴ 2) ⊤ N
+ L ← ⊂ M
+ R ← ⊃ +.× / V / L ⊣ { L ← (⊂ A +.× A ← ↑L) , L }¨ ⍳ B-1
+∇
diff --git a/tests/examplefiles/test.cyp b/tests/examplefiles/test.cyp
new file mode 100644
index 00000000..37465a4d
--- /dev/null
+++ b/tests/examplefiles/test.cyp
@@ -0,0 +1,123 @@
+//test comment
+START a = node(*)
+MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d)
+RETURN a.name, m.title, d.name;
+
+START a = node(*)
+MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d)
+WITH d,m,count(a) as Actors
+WHERE Actors > 4
+RETURN d.name as Director,m.title as Movie, Actors ORDER BY Actors;
+
+START a=node(*)
+MATCH p=(a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d)
+return p;
+
+START a = node(*)
+MATCH p1=(a)-[:ACTED_IN]->(m), p2=d-[:DIRECTED]->(m)
+WHERE m.title="The Matrix"
+RETURN p1, p2;
+
+START a = node(*)
+MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d)
+WHERE a=d
+RETURN a.name;
+
+START a = node(*)
+MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d)
+WHERE a=d
+RETURN a.name;
+
+START a=node(*)
+MATCH (a)-[:ACTED_IN]->(m)<-[:DIRECTED]-(d)
+RETURN a.name, d.name, count(*) as Movies,collect(m.title) as Titles
+ORDER BY (Movies) DESC
+LIMIT 5;
+
+START keanu=node:node_auto_index(name="Keanu Reeves")
+RETURN keanu;
+
+START keanu=node:node_auto_index(name="Keanu Reeves")
+MATCH (keanu)-[:ACTED_IN]->(movie)
+RETURN movie.title;
+
+START keanu=node:node_auto_index(name="Keanu Reeves")
+MATCH (keanu)-[r:ACTED_IN]->(movie)
+WHERE "Neo" in r.roles
+RETURN DISTINCT movie.title;
+
+START keanu=node:node_auto_index(name="Keanu Reeves")
+MATCH (keanu)-[:ACTED_IN]->()<-[:DIRECTED]-(director)
+RETURN director.name;
+
+START keanu=node:node_auto_index(name="Keanu Reeves")
+MATCH (keanu)-[:ACTED_IN]->(movie)<-[:ACTED_IN]-(n)
+WHERE n.born < keanu.born
+RETURN DISTINCT n.name, keanu.born ,n.born;
+
+START keanu=node:node_auto_index(name="Keanu Reeves"),
+ hugo=node:node_auto_index(name="Hugo Weaving")
+MATCH (keanu)-[:ACTED_IN]->(movie)
+WHERE NOT((hugo)-[:ACTED_IN]->(movie))
+RETURN DISTINCT movie.title;
+
+START a = node(*)
+MATCH (a)-[:ACTED_IN]->(m)
+WITH a,count(m) as Movies
+RETURN a.name as Actor, Movies ORDER BY Movies;
+
+START keanu=node:node_auto_index(name="Keanu Reeves"),actor
+MATCH past=(keanu)-[:ACTED_IN]->()<-[:ACTED_IN]-(),
+ actors=(actor)-[:ACTED_IN]->()
+WHERE hasnt=actors NOT IN past
+RETURN hasnt;
+
+START keanu=node:node_auto_index(name="Keanu Reeves")
+MATCH (keanu)-[:ACTED_IN]->()<-[:ACTED_IN]-(c),
+ (c)-[:ACTED_IN]->()<-[:ACTED_IN]-(coc)
+WHERE NOT((keanu)-[:ACTED_IN]->()<-[:ACTED_IN]-(coc))
+AND coc > keanu
+RETURN coc.name, count(coc)
+ORDER BY count(coc) DESC
+LIMIT 3;
+
+START kevin=node:node_auto_index(name="Kevin Bacon"),
+ movie=node:node_auto_index(name="Mystic River")
+MATCH (kevin)-[:ACTED_IN]->(movie)
+RETURN DISTINCT movie.title;
+
+CREATE (n
+ {
+ title:"Mystic River",
+ released:1993,
+ tagline:"We bury our sins here, Dave. We wash them clean."
+ }
+ ) RETURN n;
+
+
+START movie=node:node_auto_index(title="Mystic River")
+SET movie.released = 2003
+RETURN movie;
+
+start emil=node:node_auto_index(name="Emil Eifrem") MATCH emil-[r]->(n) DELETE r, emil;
+
+START a=node(*)
+MATCH (a)-[:ACTED_IN]->()<-[:ACTED_IN]-(b)
+CREATE UNIQUE (a)-[:KNOWS]->(b);
+
+START keanu=node:node_auto_index(name="Keanu Reeves")
+MATCH (keanu)-[:KNOWS*2]->(fof)
+WHERE keanu <> fof
+RETURN distinct fof.name;
+
+START charlize=node:node_auto_index(name="Charlize Theron"),
+ bacon=node:node_auto_index(name="Kevin Bacon")
+MATCH p=shortestPath((charlize)-[:KNOWS*]->(bacon))
+RETURN extract(n in nodes(p) | n.name)[1];
+
+START actors=node:
+
+MATCH (alice)-[:`REALLY LIKES`]->(bob)
+MATCH (alice)-[:`REALLY ``LIKES```]->(bob)
+myFancyIdentifier.`(weird property name)`
+"string\t\n\b\f\\\''\""
diff --git a/tests/examplefiles/test.gradle b/tests/examplefiles/test.gradle
new file mode 100644
index 00000000..0bc834c1
--- /dev/null
+++ b/tests/examplefiles/test.gradle
@@ -0,0 +1,20 @@
+apply plugin: 'java'
+
+repositories {
+ mavenCentral()
+}
+
+dependencies {
+ testCompile 'junit:junit:4.12'
+}
+
+task sayHello << {
+ def x = SomeClass.worldString
+ println "Hello ${x}"
+}
+
+private class SomeClass {
+ public static String getWorldString() {
+ return "world"
+ }
+}
diff --git a/tests/examplefiles/test.idr b/tests/examplefiles/test.idr
new file mode 100644
index 00000000..fd008d31
--- /dev/null
+++ b/tests/examplefiles/test.idr
@@ -0,0 +1,101 @@
+module Main
+
+data Ty = TyInt | TyBool | TyFun Ty Ty
+
+interpTy : Ty -> Type
+interpTy TyInt = Int
+interpTy TyBool = Bool
+interpTy (TyFun s t) = interpTy s -> interpTy t
+
+using (G : Vect n Ty)
+
+ data Env : Vect n Ty -> Type where
+ Nil : Env Nil
+ (::) : interpTy a -> Env G -> Env (a :: G)
+
+ data HasType : (i : Fin n) -> Vect n Ty -> Ty -> Type where
+ stop : HasType fZ (t :: G) t
+ pop : HasType k G t -> HasType (fS k) (u :: G) t
+
+ lookup : HasType i G t -> Env G -> interpTy t
+ lookup stop (x :: xs) = x
+ lookup (pop k) (x :: xs) = lookup k xs
+
+ data Expr : Vect n Ty -> Ty -> Type where
+ Var : HasType i G t -> Expr G t
+ Val : (x : Int) -> Expr G TyInt
+ Lam : Expr (a :: G) t -> Expr G (TyFun a t)
+ App : Expr G (TyFun a t) -> Expr G a -> Expr G t
+ Op : (interpTy a -> interpTy b -> interpTy c) -> Expr G a -> Expr G b ->
+ Expr G c
+ If : Expr G TyBool -> Expr G a -> Expr G a -> Expr G a
+ Bind : Expr G a -> (interpTy a -> Expr G b) -> Expr G b
+
+ dsl expr
+ lambda = Lam
+ variable = Var
+ index_first = stop
+ index_next = pop
+
+ (<$>) : |(f : Expr G (TyFun a t)) -> Expr G a -> Expr G t
+ (<$>) = \f, a => App f a
+
+ pure : Expr G a -> Expr G a
+ pure = id
+
+ syntax IF [x] THEN [t] ELSE [e] = If x t e
+
+ (==) : Expr G TyInt -> Expr G TyInt -> Expr G TyBool
+ (==) = Op (==)
+
+ (<) : Expr G TyInt -> Expr G TyInt -> Expr G TyBool
+ (<) = Op (<)
+
+ instance Num (Expr G TyInt) where
+ (+) x y = Op (+) x y
+ (-) x y = Op (-) x y
+ (*) x y = Op (*) x y
+
+ abs x = IF (x < 0) THEN (-x) ELSE x
+
+ fromInteger = Val . fromInteger
+
+ ||| Evaluates an expression in the given context.
+ interp : Env G -> {static} Expr G t -> interpTy t
+ interp env (Var i) = lookup i env
+ interp env (Val x) = x
+ interp env (Lam sc) = \x => interp (x :: env) sc
+ interp env (App f s) = (interp env f) (interp env s)
+ interp env (Op op x y) = op (interp env x) (interp env y)
+ interp env (If x t e) = if (interp env x) then (interp env t) else (interp env e)
+ interp env (Bind v f) = interp env (f (interp env v))
+
+ eId : Expr G (TyFun TyInt TyInt)
+ eId = expr (\x => x)
+
+ eTEST : Expr G (TyFun TyInt (TyFun TyInt TyInt))
+ eTEST = expr (\x, y => y)
+
+ eAdd : Expr G (TyFun TyInt (TyFun TyInt TyInt))
+ eAdd = expr (\x, y => Op (+) x y)
+
+ eDouble : Expr G (TyFun TyInt TyInt)
+ eDouble = expr (\x => App (App eAdd x) (Var stop))
+
+ eFac : Expr G (TyFun TyInt TyInt)
+ eFac = expr (\x => IF x == 0 THEN 1 ELSE [| eFac (x - 1) |] * x)
+
+testFac : Int
+testFac = interp [] eFac 4
+
+--testFacTooBig : Int
+--testFacTooBig = interp [] eFac 100000
+
+ {-testFacTooBig2 : Int
+testFacTooBig2 = interp [] eFac 1000
+-}
+
+main : IO ()
+main = print testFac
+
+
diff --git a/tests/examplefiles/test.lean b/tests/examplefiles/test.lean
new file mode 100644
index 00000000..a7b7e261
--- /dev/null
+++ b/tests/examplefiles/test.lean
@@ -0,0 +1,217 @@
+/-
+Theorems/Exercises from "Logical Investigations, with the Nuprl Proof Assistant"
+by Robert L. Constable and Anne Trostle
+http://www.nuprl.org/MathLibrary/LogicalInvestigations/
+-/
+import logic
+
+-- 2. The Minimal Implicational Calculus
+theorem thm1 {A B : Prop} : A → B → A :=
+assume Ha Hb, Ha
+
+theorem thm2 {A B C : Prop} : (A → B) → (A → B → C) → (A → C) :=
+assume Hab Habc Ha,
+ Habc Ha (Hab Ha)
+
+theorem thm3 {A B C : Prop} : (A → B) → (B → C) → (A → C) :=
+assume Hab Hbc Ha,
+ Hbc (Hab Ha)
+
+-- 3. False Propositions and Negation
+theorem thm4 {P Q : Prop} : ¬P → P → Q :=
+assume Hnp Hp,
+ absurd Hp Hnp
+
+theorem thm5 {P : Prop} : P → ¬¬P :=
+assume (Hp : P) (HnP : ¬P),
+ absurd Hp HnP
+
+theorem thm6 {P Q : Prop} : (P → Q) → (¬Q → ¬P) :=
+assume (Hpq : P → Q) (Hnq : ¬Q) (Hp : P),
+ have Hq : Q, from Hpq Hp,
+ show false, from absurd Hq Hnq
+
+theorem thm7 {P Q : Prop} : (P → ¬P) → (P → Q) :=
+assume Hpnp Hp,
+ absurd Hp (Hpnp Hp)
+
+theorem thm8 {P Q : Prop} : ¬(P → Q) → (P → ¬Q) :=
+assume (Hn : ¬(P → Q)) (Hp : P) (Hq : Q),
+  -- Remark: we don't even need the hypothesis Hp
+ have H : P → Q, from assume H', Hq,
+ absurd H Hn
+
+-- 4. Conjunction and Disjunction
+theorem thm9 {P : Prop} : (P ∨ ¬P) → (¬¬P → P) :=
+assume (em : P ∨ ¬P) (Hnn : ¬¬P),
+ or_elim em
+ (assume Hp, Hp)
+ (assume Hn, absurd Hn Hnn)
+
+theorem thm10 {P : Prop} : ¬¬(P ∨ ¬P) :=
+assume Hnem : ¬(P ∨ ¬P),
+ have Hnp : ¬P, from
+ assume Hp : P,
+ have Hem : P ∨ ¬P, from or_inl Hp,
+ absurd Hem Hnem,
+ have Hem : P ∨ ¬P, from or_inr Hnp,
+ absurd Hem Hnem
+
+theorem thm11 {P Q : Prop} : ¬P ∨ ¬Q → ¬(P ∧ Q) :=
+assume (H : ¬P ∨ ¬Q) (Hn : P ∧ Q),
+ or_elim H
+ (assume Hnp : ¬P, absurd (and_elim_left Hn) Hnp)
+ (assume Hnq : ¬Q, absurd (and_elim_right Hn) Hnq)
+
+theorem thm12 {P Q : Prop} : ¬(P ∨ Q) → ¬P ∧ ¬Q :=
+assume H : ¬(P ∨ Q),
+ have Hnp : ¬P, from assume Hp : P, absurd (or_inl Hp) H,
+ have Hnq : ¬Q, from assume Hq : Q, absurd (or_inr Hq) H,
+ and_intro Hnp Hnq
+
+theorem thm13 {P Q : Prop} : ¬P ∧ ¬Q → ¬(P ∨ Q) :=
+assume (H : ¬P ∧ ¬Q) (Hn : P ∨ Q),
+ or_elim Hn
+ (assume Hp : P, absurd Hp (and_elim_left H))
+ (assume Hq : Q, absurd Hq (and_elim_right H))
+
+theorem thm14 {P Q : Prop} : ¬P ∨ Q → P → Q :=
+assume (Hor : ¬P ∨ Q) (Hp : P),
+ or_elim Hor
+ (assume Hnp : ¬P, absurd Hp Hnp)
+ (assume Hq : Q, Hq)
+
+theorem thm15 {P Q : Prop} : (P → Q) → ¬¬(¬P ∨ Q) :=
+assume (Hpq : P → Q) (Hn : ¬(¬P ∨ Q)),
+ have H1 : ¬¬P ∧ ¬Q, from thm12 Hn,
+ have Hnp : ¬P, from mt Hpq (and_elim_right H1),
+ absurd Hnp (and_elim_left H1)
+
+theorem thm16 {P Q : Prop} : (P → Q) ∧ ((P ∨ ¬P) ∨ (Q ∨ ¬Q)) → ¬P ∨ Q :=
+assume H : (P → Q) ∧ ((P ∨ ¬P) ∨ (Q ∨ ¬Q)),
+ have Hpq : P → Q, from and_elim_left H,
+ or_elim (and_elim_right H)
+ (assume Hem1 : P ∨ ¬P, or_elim Hem1
+ (assume Hp : P, or_inr (Hpq Hp))
+ (assume Hnp : ¬P, or_inl Hnp))
+ (assume Hem2 : Q ∨ ¬Q, or_elim Hem2
+ (assume Hq : Q, or_inr Hq)
+ (assume Hnq : ¬Q, or_inl (mt Hpq Hnq)))
+
+-- 5. First-Order Logic: All and Exists
+section
+parameters {T : Type} {C : Prop} {P : T → Prop}
+theorem thm17a : (C → ∀x, P x) → (∀x, C → P x) :=
+assume H : C → ∀x, P x,
+ take x : T, assume Hc : C,
+ H Hc x
+
+theorem thm17b : (∀x, C → P x) → (C → ∀x, P x) :=
+assume (H : ∀x, C → P x) (Hc : C),
+ take x : T,
+ H x Hc
+
+theorem thm18a : ((∃x, P x) → C) → (∀x, P x → C) :=
+assume H : (∃x, P x) → C,
+ take x, assume Hp : P x,
+ have Hex : ∃x, P x, from exists_intro x Hp,
+ H Hex
+
+theorem thm18b : (∀x, P x → C) → (∃x, P x) → C :=
+assume (H1 : ∀x, P x → C) (H2 : ∃x, P x),
+ obtain (w : T) (Hw : P w), from H2,
+ H1 w Hw
+
+theorem thm19a : (C ∨ ¬C) → (∃x : T, true) → (C → (∃x, P x)) → (∃x, C → P x) :=
+assume (Hem : C ∨ ¬C) (Hin : ∃x : T, true) (H1 : C → ∃x, P x),
+ or_elim Hem
+ (assume Hc : C,
+ obtain (w : T) (Hw : P w), from H1 Hc,
+ have Hr : C → P w, from assume Hc, Hw,
+ exists_intro w Hr)
+ (assume Hnc : ¬C,
+ obtain (w : T) (Hw : true), from Hin,
+ have Hr : C → P w, from assume Hc, absurd Hc Hnc,
+ exists_intro w Hr)
+
+theorem thm19b : (∃x, C → P x) → C → (∃x, P x) :=
+assume (H : ∃x, C → P x) (Hc : C),
+ obtain (w : T) (Hw : C → P w), from H,
+ exists_intro w (Hw Hc)
+
+theorem thm20a : (C ∨ ¬C) → (∃x : T, true) → ((¬∀x, P x) → ∃x, ¬P x) → ((∀x, P x) → C) → (∃x, P x → C) :=
+assume Hem Hin Hnf H,
+ or_elim Hem
+ (assume Hc : C,
+ obtain (w : T) (Hw : true), from Hin,
+ exists_intro w (assume H : P w, Hc))
+ (assume Hnc : ¬C,
+ have H1 : ¬(∀x, P x), from mt H Hnc,
+ have H2 : ∃x, ¬P x, from Hnf H1,
+ obtain (w : T) (Hw : ¬P w), from H2,
+ exists_intro w (assume H : P w, absurd H Hw))
+
+theorem thm20b : (∃x, P x → C) → (∀ x, P x) → C :=
+assume Hex Hall,
+ obtain (w : T) (Hw : P w → C), from Hex,
+ Hw (Hall w)
+
+theorem thm21a : (∃x : T, true) → ((∃x, P x) ∨ C) → (∃x, P x ∨ C) :=
+assume Hin H,
+ or_elim H
+ (assume Hex : ∃x, P x,
+ obtain (w : T) (Hw : P w), from Hex,
+ exists_intro w (or_inl Hw))
+ (assume Hc : C,
+ obtain (w : T) (Hw : true), from Hin,
+ exists_intro w (or_inr Hc))
+
+theorem thm21b : (∃x, P x ∨ C) → ((∃x, P x) ∨ C) :=
+assume H,
+ obtain (w : T) (Hw : P w ∨ C), from H,
+ or_elim Hw
+ (assume H : P w, or_inl (exists_intro w H))
+ (assume Hc : C, or_inr Hc)
+
+theorem thm22a : (∀x, P x) ∨ C → ∀x, P x ∨ C :=
+assume H, take x,
+ or_elim H
+ (assume Hl, or_inl (Hl x))
+ (assume Hr, or_inr Hr)
+
+theorem thm22b : (C ∨ ¬C) → (∀x, P x ∨ C) → ((∀x, P x) ∨ C) :=
+assume Hem H1,
+ or_elim Hem
+ (assume Hc : C, or_inr Hc)
+ (assume Hnc : ¬C,
+ have Hx : ∀x, P x, from
+ take x,
+ have H1 : P x ∨ C, from H1 x,
+ resolve_left H1 Hnc,
+ or_inl Hx)
+
+theorem thm23a : (∃x, P x) ∧ C → (∃x, P x ∧ C) :=
+assume H,
+ have Hex : ∃x, P x, from and_elim_left H,
+ have Hc : C, from and_elim_right H,
+ obtain (w : T) (Hw : P w), from Hex,
+ exists_intro w (and_intro Hw Hc)
+
+theorem thm23b : (∃x, P x ∧ C) → (∃x, P x) ∧ C :=
+assume H,
+ obtain (w : T) (Hw : P w ∧ C), from H,
+ have Hex : ∃x, P x, from exists_intro w (and_elim_left Hw),
+ and_intro Hex (and_elim_right Hw)
+
+theorem thm24a : (∀x, P x) ∧ C → (∀x, P x ∧ C) :=
+assume H, take x,
+ and_intro (and_elim_left H x) (and_elim_right H)
+
+theorem thm24b : (∃x : T, true) → (∀x, P x ∧ C) → (∀x, P x) ∧ C :=
+assume Hin H,
+ obtain (w : T) (Hw : true), from Hin,
+ have Hc : C, from and_elim_right (H w),
+ have Hx : ∀x, P x, from take x, and_elim_left (H x),
+ and_intro Hx Hc
+
+end -- of section
diff --git a/tests/examplefiles/test.mask b/tests/examplefiles/test.mask
new file mode 100644
index 00000000..39134d74
--- /dev/null
+++ b/tests/examplefiles/test.mask
@@ -0,0 +1,41 @@
+
+// comment
+h4.class-1#id.class-2.other checked='true' disabled name = x param > 'Enter ..'
+input placeholder=Password type=password >
+ :dualbind x-signal='dom:create' value=user.passord;
+% each='flowers' >
+ div style='
+ position: absolute;
+ display: inline-block;
+ background: url("image.png") center center no-repeat;
+ ';
+#skippedDiv.other {
+ img src='~[url]';
+ div style="text-align:center;" {
+ '~[: $obj.foo("username", name) + 2]'
+ "~[Localize: stringId]"
+ }
+
+ p > """
+
+ Hello "world"
+ """
+
+ p > '
+ Hello "world"
+ '
+
+ p > "Hello 'world'"
+
+ :customComponent x-value='tt';
+ /* footer > '(c) 2014' */
+}
+
+.skippedDiv >
+ span >
+ #skipped >
+ table >
+ td >
+ tr > ';)'
+
+br;
\ No newline at end of file
diff --git a/tests/examplefiles/test.pan b/tests/examplefiles/test.pan
new file mode 100644
index 00000000..56c8bd62
--- /dev/null
+++ b/tests/examplefiles/test.pan
@@ -0,0 +1,54 @@
+object template pantest;
+
+# Very simple pan test file
+"/long/decimal" = 123;
+"/long/octal" = 0755;
+"/long/hexadecimal" = 0xFF;
+
+"/double/simple" = 0.01;
+"/double/pi" = 3.14159;
+"/double/exponent" = 1e-8;
+"/double/scientific" = 1.3E10;
+
+"/string/single" = 'Faster, but escapes like \t, \n and \x3d don''t work, but '' should work.';
+"/string/double" = "Slower, but escapes like \t, \n and \x3d do work";
+
+variable TEST = 2;
+
+"/x2" = to_string(TEST);
+"/x2" ?= 'Default value';
+
+"/x3" = 1 + 2 + value("/long/decimal");
+
+"/x4" = undef;
+
+"/x5" = null;
+
+variable e ?= error("Test error message");
+
+# include gmond config for services-monitoring
+include { 'site/ganglia/gmond/services-monitoring' };
+
+"/software/packages"=pkg_repl("httpd","2.2.3-43.sl5.3",PKG_ARCH_DEFAULT);
+"/software/packages"=pkg_repl("php");
+
+# Example function
+function show_things_view_for_stuff = {
+ thing = ARGV[0];
+ foreach( i; mything; STUFF ) {
+ if ( thing == mything ) {
+ return( true );
+ } else {
+ return SELF;
+ };
+ };
+ false;
+};
+
+variable HERE = <<EOF;
+; This example demonstrates an in-line heredoc style config file
+[main]
+awesome = true
+EOF
+
+variable small = false;#This should be highlighted normally again.
diff --git a/tests/examplefiles/test.php b/tests/examplefiles/test.php
index 97e21f73..2ce4023e 100644
--- a/tests/examplefiles/test.php
+++ b/tests/examplefiles/test.php
@@ -1,5 +1,7 @@
<?php
+$disapproval_ಠ_ಠ_of_php = 'unicode var';
+
$test = function($a) { $lambda = 1; }
/**
@@ -16,7 +18,7 @@ if(!defined('UNLOCK') || !UNLOCK)
// Load the parent archive class
require_once(ROOT_PATH.'/classes/archive.class.php');
-class Zip\Zipp {
+class Zip\Zippಠ_ಠ_ {
}
@@ -502,4 +504,12 @@ function &byref() {
$x = array();
return $x;
}
+
+ echo <<<EOF
+
+ Test the heredocs...
+
+ EOF;
+
?>
+
diff --git a/tests/examplefiles/test.pig b/tests/examplefiles/test.pig
new file mode 100644
index 00000000..f67b0268
--- /dev/null
+++ b/tests/examplefiles/test.pig
@@ -0,0 +1,148 @@
+/**
+ * This script is an example recommender (using made up data) showing how you might modify item-item links
+ * by defining similar relations between items in a dataset and customizing the change in weighting.
+ * This example creates metadata by using the genre field as the metadata_field. The items with
+ * the same genre have their weight cut in half in order to boost the signals of movies that do not have the same genre.
+ * This technique requires a customization of the standard GetItemItemRecommendations macro
+ */
+import 'recommenders.pig';
+
+
+
+%default INPUT_PATH_PURCHASES '../data/retail/purchases.json'
+%default INPUT_PATH_WISHLIST '../data/retail/wishlists.json'
+%default INPUT_PATH_INVENTORY '../data/retail/inventory.json'
+%default OUTPUT_PATH '../data/retail/out/modify_item_item'
+
+
+/******** Custom GetItemItemRecommendations *********/
+define recsys__GetItemItemRecommendations_ModifyCustom(user_item_signals, metadata) returns item_item_recs {
+
+ -- Convert user_item_signals to an item_item_graph
+ ii_links_raw, item_weights = recsys__BuildItemItemGraph(
+ $user_item_signals,
+ $LOGISTIC_PARAM,
+ $MIN_LINK_WEIGHT,
+ $MAX_LINKS_PER_USER
+ );
+ -- NOTE this function is added in order to combine metadata with item-item links
+    -- See the macro for a more detailed explanation
+ ii_links_metadata = recsys__AddMetadataToItemItemLinks(
+ ii_links_raw,
+ $metadata
+ );
+
+ /********* Custom Code starts here ********/
+
+ --The code here should adjust the weights based on an item-item link and the equality of metadata.
+ -- In this case, if the metadata is the same, the weight is reduced. Otherwise the weight is left alone.
+ ii_links_adjusted = foreach ii_links_metadata generate item_A, item_B,
+        -- the amount of weight adjusted is dependent on the domain of the data and what is expected
+        -- It is always best to adjust the weight by multiplying it by a factor rather than adding a constant
+ (metadata_B == metadata_A ? (weight * 0.5): weight) as weight;
+
+
+ /******** Custom Code stops here *********/
+
+    -- remove negative numbers just in case
+ ii_links_adjusted_filt = foreach ii_links_adjusted generate item_A, item_B,
+ (weight <= 0 ? 0: weight) as weight;
+ -- Adjust the weights of the graph to improve recommendations.
+ ii_links = recsys__AdjustItemItemGraphWeight(
+ ii_links_adjusted_filt,
+ item_weights,
+ $BAYESIAN_PRIOR
+ );
+
+ -- Use the item-item graph to create item-item recommendations.
+ $item_item_recs = recsys__BuildItemItemRecommendationsFromGraph(
+ ii_links,
+ $NUM_RECS_PER_ITEM,
+ $NUM_RECS_PER_ITEM
+ );
+};
+
+
+/******* Load Data **********/
+
+--Get purchase signals
+purchase_input = load '$INPUT_PATH_PURCHASES' using org.apache.pig.piggybank.storage.JsonLoader(
+ 'row_id: int,
+ movie_id: chararray,
+ movie_name: chararray,
+ user_id: chararray,
+ purchase_price: int');
+
+--Get wishlist signals
+wishlist_input = load '$INPUT_PATH_WISHLIST' using org.apache.pig.piggybank.storage.JsonLoader(
+ 'row_id: int,
+ movie_id: chararray,
+ movie_name: chararray,
+ user_id: chararray');
+
+
+/******* Convert Data to Signals **********/
+
+-- Start with choosing 1 as max weight for a signal.
+purchase_signals = foreach purchase_input generate
+ user_id as user,
+ movie_name as item,
+ 1.0 as weight;
+
+
+-- Start with choosing 0.5 as weight for wishlist items because that is a weaker signal than
+-- purchasing an item.
+wishlist_signals = foreach wishlist_input generate
+ user_id as user,
+ movie_name as item,
+ 0.5 as weight;
+
+user_signals = union purchase_signals, wishlist_signals;
+
+
+/******** Changes for Modifying item-item links ******/
+inventory_input = load '$INPUT_PATH_INVENTORY' using org.apache.pig.piggybank.storage.JsonLoader(
+ 'movie_title: chararray,
+ genres: bag{tuple(content:chararray)}');
+
+
+metadata = foreach inventory_input generate
+ FLATTEN(genres) as metadata_field,
+ movie_title as item;
+-- requires the macro to be written separately
+ --NOTE this macro is defined within this file for clarity
+item_item_recs = recsys__GetItemItemRecommendations_ModifyCustom(user_signals, metadata);
+/******* No more changes ********/
+
+
+user_item_recs = recsys__GetUserItemRecommendations(user_signals, item_item_recs);
+
+--Completely unrelated code stuck in the middle
+data = LOAD 's3n://my-s3-bucket/path/to/responses'
+ USING org.apache.pig.piggybank.storage.JsonLoader();
+responses = FOREACH data GENERATE object#'response' AS response: map[];
+out = FOREACH responses
+ GENERATE response#'id' AS id: int, response#'thread' AS thread: chararray,
+ response#'comments' AS comments: {t: (comment: chararray)};
+STORE out INTO 's3n://path/to/output' USING PigStorage('|');
+
+
+/******* Store recommendations **********/
+
+-- If your output folder exists already, hadoop will refuse to write data to it.
+
+rmf $OUTPUT_PATH/item_item_recs;
+rmf $OUTPUT_PATH/user_item_recs;
+
+store item_item_recs into '$OUTPUT_PATH/item_item_recs' using PigStorage();
+store user_item_recs into '$OUTPUT_PATH/user_item_recs' using PigStorage();
+
+-- STORE the item_item_recs into dynamo
+STORE item_item_recs
+ INTO '$OUTPUT_PATH/unused-ii-table-data'
+USING com.mortardata.pig.storage.DynamoDBStorage('$II_TABLE', '$AWS_ACCESS_KEY_ID', '$AWS_SECRET_ACCESS_KEY');
+
+-- STORE the user_item_recs into dynamo
+STORE user_item_recs
+ INTO '$OUTPUT_PATH/unused-ui-table-data'
+USING com.mortardata.pig.storage.DynamoDBStorage('$UI_TABLE', '$AWS_ACCESS_KEY_ID', '$AWS_SECRET_ACCESS_KEY');
diff --git a/tests/examplefiles/test.pwn b/tests/examplefiles/test.pwn
new file mode 100644
index 00000000..d6468617
--- /dev/null
+++ b/tests/examplefiles/test.pwn
@@ -0,0 +1,253 @@
+#include <core>
+
+// Single line comment
+/* Multi line
+ comment */
+
+/// documentation
+/**
+
+ documentation multi line
+
+**/
+
+public OnGameModeInit() {
+ printf("Hello, World!");
+}
+
+enum info {
+ Float:ex;
+ exa,
+ exam[5],
+}
+new arr[5][info];
+
+stock Float:test_func()
+{
+ new a = 5, Float:b = 10.3;
+ if (a == b) {
+
+ } else {
+
+ }
+
+ for (new i = 0; i < 10; i++) {
+ continue;
+ }
+
+ do {
+ a--;
+ } while (a > 0);
+
+ while (a < 5) {
+ a++;
+ break;
+ }
+
+ switch (a) {
+ case 0: {
+ }
+ case 0..4: {
+ }
+ case 5, 6: {
+ }
+ }
+
+ static x;
+ new xx = a > 5 ? 5 : 0;
+ new array[sizeof arr] = {0};
+ tagof a;
+ state a;
+ goto label;
+ new byte[2 char];
+ byte{0} = 'a';
+
+ return (float(a) + b);
+}
+
+
+// float.inc
+/* Float arithmetic
+ *
+ * (c) Copyright 1999, Artran, Inc.
+ * Written by Greg Garner (gmg@artran.com)
+ * Modified in March 2001 to include user defined
+ * operators for the floating point functions.
+ *
+ * This file is provided as is (no warranties).
+ */
+#if defined _Float_included
+ #endinput
+#endif
+#define _Float_included
+#pragma library Float
+
+/* Different methods of rounding */
+enum floatround_method {
+ floatround_round,
+ floatround_floor,
+ floatround_ceil,
+ floatround_tozero,
+ floatround_unbiased
+}
+enum anglemode {
+ radian,
+ degrees,
+ grades
+}
+
+/**************************************************/
+/* Convert an integer into a floating point value */
+native Float:float(value);
+
+/**************************************************/
+/* Convert a string into a floating point value */
+native Float:floatstr(const string[]);
+
+/**************************************************/
+/* Multiply two floats together */
+native Float:floatmul(Float:oper1, Float:oper2);
+
+/**************************************************/
+/* Divide the dividend float by the divisor float */
+native Float:floatdiv(Float:dividend, Float:divisor);
+
+/**************************************************/
+/* Add two floats together */
+native Float:floatadd(Float:oper1, Float:oper2);
+
+/**************************************************/
+/* Subtract oper2 float from oper1 float */
+native Float:floatsub(Float:oper1, Float:oper2);
+
+/**************************************************/
+/* Return the fractional part of a float */
+native Float:floatfract(Float:value);
+
+/**************************************************/
+/* Round a float into a integer value */
+native floatround(Float:value, floatround_method:method=floatround_round);
+
+/**************************************************/
+/* Compare two integers. If the two elements are equal, return 0.
+ If the first argument is greater than the second argument, return 1,
+ If the first argument is less than the second argument, return -1. */
+native floatcmp(Float:oper1, Float:oper2);
+
+/**************************************************/
+/* Return the square root of the input value, same as floatpower(value, 0.5) */
+native Float:floatsqroot(Float:value);
+
+/**************************************************/
+/* Return the value raised to the power of the exponent */
+native Float:floatpower(Float:value, Float:exponent);
+
+/**************************************************/
+/* Return the logarithm */
+native Float:floatlog(Float:value, Float:base=10.0);
+
+/**************************************************/
+/* Return the sine, cosine or tangent. The input angle may be in radian,
+ degrees or grades. */
+native Float:floatsin(Float:value, anglemode:mode=radian);
+native Float:floatcos(Float:value, anglemode:mode=radian);
+native Float:floattan(Float:value, anglemode:mode=radian);
+
+/**************************************************/
+/* Return the absolute value */
+native Float:floatabs(Float:value);
+
+
+/**************************************************/
+#pragma rational Float
+
+/* user defined operators */
+native Float:operator*(Float:oper1, Float:oper2) = floatmul;
+native Float:operator/(Float:oper1, Float:oper2) = floatdiv;
+native Float:operator+(Float:oper1, Float:oper2) = floatadd;
+native Float:operator-(Float:oper1, Float:oper2) = floatsub;
+native Float:operator=(oper) = float;
+
+stock Float:operator++(Float:oper)
+ return oper+1.0;
+
+stock Float:operator--(Float:oper)
+ return oper-1.0;
+
+stock Float:operator-(Float:oper)
+ return oper^Float:cellmin; /* IEEE values are sign/magnitude */
+
+stock Float:operator*(Float:oper1, oper2)
+ return floatmul(oper1, float(oper2)); /* "*" is commutative */
+
+stock Float:operator/(Float:oper1, oper2)
+ return floatdiv(oper1, float(oper2));
+
+stock Float:operator/(oper1, Float:oper2)
+ return floatdiv(float(oper1), oper2);
+
+stock Float:operator+(Float:oper1, oper2)
+ return floatadd(oper1, float(oper2)); /* "+" is commutative */
+
+stock Float:operator-(Float:oper1, oper2)
+ return floatsub(oper1, float(oper2));
+
+stock Float:operator-(oper1, Float:oper2)
+ return floatsub(float(oper1), oper2);
+
+stock bool:operator==(Float:oper1, Float:oper2)
+ return floatcmp(oper1, oper2) == 0;
+
+stock bool:operator==(Float:oper1, oper2)
+ return floatcmp(oper1, float(oper2)) == 0; /* "==" is commutative */
+
+stock bool:operator!=(Float:oper1, Float:oper2)
+ return floatcmp(oper1, oper2) != 0;
+
+stock bool:operator!=(Float:oper1, oper2)
+ return floatcmp(oper1, float(oper2)) != 0; /* "!=" is commutative */
+
+stock bool:operator>(Float:oper1, Float:oper2)
+ return floatcmp(oper1, oper2) > 0;
+
+stock bool:operator>(Float:oper1, oper2)
+ return floatcmp(oper1, float(oper2)) > 0;
+
+stock bool:operator>(oper1, Float:oper2)
+ return floatcmp(float(oper1), oper2) > 0;
+
+stock bool:operator>=(Float:oper1, Float:oper2)
+ return floatcmp(oper1, oper2) >= 0;
+
+stock bool:operator>=(Float:oper1, oper2)
+ return floatcmp(oper1, float(oper2)) >= 0;
+
+stock bool:operator>=(oper1, Float:oper2)
+ return floatcmp(float(oper1), oper2) >= 0;
+
+stock bool:operator<(Float:oper1, Float:oper2)
+ return floatcmp(oper1, oper2) < 0;
+
+stock bool:operator<(Float:oper1, oper2)
+ return floatcmp(oper1, float(oper2)) < 0;
+
+stock bool:operator<(oper1, Float:oper2)
+ return floatcmp(float(oper1), oper2) < 0;
+
+stock bool:operator<=(Float:oper1, Float:oper2)
+ return floatcmp(oper1, oper2) <= 0;
+
+stock bool:operator<=(Float:oper1, oper2)
+ return floatcmp(oper1, float(oper2)) <= 0;
+
+stock bool:operator<=(oper1, Float:oper2)
+ return floatcmp(float(oper1), oper2) <= 0;
+
+stock bool:operator!(Float:oper)
+ return (_:oper & cellmax) == 0;
+
+/* forbidden operations */
+forward operator%(Float:oper1, Float:oper2);
+forward operator%(Float:oper1, oper2);
+forward operator%(oper1, Float:oper2);
+
diff --git a/tests/examplefiles/test.pypylog b/tests/examplefiles/test.pypylog
index f85030cb..1a6aa5ed 100644
--- a/tests/examplefiles/test.pypylog
+++ b/tests/examplefiles/test.pypylog
@@ -998,842 +998,3 @@ setfield_gc(p73, i14, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntO
setarrayitem_gc(p60, 9, p73, descr=<GcPtrArrayDescr>)
p76 = new_with_vtable(19800744)
setfield_gc(p76, f15, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p60, 10, p76, descr=<GcPtrArrayDescr>)
-setfield_gc(p1, 1, descr=<BoolFieldDescr pypy.interpreter.pyframe.PyFrame.inst_frame_finished_execution 148>)
-setfield_gc(p1, ConstPtr(ptr79), descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_pycode 112>)
-setfield_gc(p1, ConstPtr(ptr55), descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_lastblock 104>)
-setfield_gc(p1, 0, descr=<SignedFieldDescr pypy.interpreter.pyframe.PyFrame.inst_valuestackdepth 128>)
-setfield_gc(p1, p4, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_last_exception 88>)
-setfield_gc(p1, 0, descr=<BoolFieldDescr pypy.interpreter.pyframe.PyFrame.inst_is_being_profiled 149>)
-setfield_gc(p1, 307, descr=<SignedFieldDescr pypy.interpreter.pyframe.PyFrame.inst_last_instr 96>)
-p84 = new_with_vtable(19800744)
-setfield_gc(p84, f36, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-finish(p84, descr=<DoneWithThisFrameDescrRef object at 0x140bcc0>)
-[5ed6619e9448] jit-log-opt-bridge}
-[5ed74f2eef6e] {jit-log-opt-loop
-# Loop 2 : loop with 394 ops
-[p0, p1, p2, p3, p4, p5, p6, p7, i8, f9, i10, i11, p12, p13]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #21 LOAD_FAST', 0)
-guard_nonnull_class(p7, 19800744, descr=<Guard180>) [p1, p0, p7, p2, p3, p4, p5, p6, i8]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #24 LOAD_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #27 COMPARE_OP', 0)
-f15 = getfield_gc_pure(p7, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-i16 = float_gt(f15, f9)
-guard_true(i16, descr=<Guard181>) [p1, p0, p6, p7, p2, p3, p4, p5, i8]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #30 POP_JUMP_IF_FALSE', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #33 LOAD_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #36 POP_JUMP_IF_FALSE', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #39 LOAD_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #42 LOAD_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #45 COMPARE_OP', 0)
-i17 = int_ge(i8, i10)
-guard_false(i17, descr=<Guard182>) [p1, p0, p5, p2, p3, p4, p6, p7, i8]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #48 POP_JUMP_IF_FALSE', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #55 LOAD_GLOBAL', 0)
-p18 = getfield_gc(p0, descr=<GcPtrFieldDescr pypy.interpreter.eval.Frame.inst_w_globals 8>)
-guard_value(p18, ConstPtr(ptr19), descr=<Guard183>) [p1, p0, p18, p2, p3, p4, p5, p6, p7, i8]
-p20 = getfield_gc(p18, descr=<GcPtrFieldDescr pypy.objspace.std.dictmultiobject.W_DictMultiObject.inst_r_dict_content 8>)
-guard_isnull(p20, descr=<Guard184>) [p1, p0, p20, p18, p2, p3, p4, p5, p6, p7, i8]
-p22 = getfield_gc(ConstPtr(ptr21), descr=<GcPtrFieldDescr pypy.objspace.std.celldict.ModuleCell.inst_w_value 8>)
-guard_nonnull_class(p22, ConstClass(Function), descr=<Guard185>) [p1, p0, p22, p2, p3, p4, p5, p6, p7, i8]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #58 LOAD_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #61 CALL_FUNCTION', 0)
-p24 = getfield_gc(p22, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_code 24>)
-guard_value(p24, ConstPtr(ptr25), descr=<Guard186>) [p1, p0, p24, p22, p2, p3, p4, p5, p6, p7, i8]
-p26 = getfield_gc(p22, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_w_func_globals 64>)
-p27 = getfield_gc(p22, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_closure 16>)
-i28 = force_token()
-i29 = int_is_zero(i11)
-guard_true(i29, descr=<Guard187>) [p1, p0, p12, p2, p3, p22, p4, p5, p6, p7, p26, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #0 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #3 LOAD_ATTR', 1)
-p30 = getfield_gc(p4, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst_map 48>)
-guard_value(p30, ConstPtr(ptr31), descr=<Guard188>) [p1, p0, p12, p4, p30, p2, p3, p22, p5, p6, p7, p26, p13, i28, i8]
-p33 = getfield_gc(ConstPtr(ptr32), descr=<GcPtrFieldDescr pypy.objspace.std.typeobject.W_TypeObject.inst__version_tag 16>)
-guard_value(p33, ConstPtr(ptr34), descr=<Guard189>) [p1, p0, p12, p4, p33, p2, p3, p22, p5, p6, p7, p26, p13, i28, i8]
-p35 = getfield_gc(p4, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value2 24>)
-guard_nonnull_class(p35, 19800744, descr=<Guard190>) [p1, p0, p12, p35, p4, p2, p3, p22, p5, p6, p7, p26, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #6 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #9 LOAD_ATTR', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #12 BINARY_MULTIPLY', 1)
-f37 = getfield_gc_pure(p35, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-f38 = float_mul(f37, f37)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #13 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #16 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #19 LOAD_ATTR', 1)
-p39 = getfield_gc(p4, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value3 32>)
-guard_nonnull_class(p39, 19800744, descr=<Guard191>) [p1, p0, p12, p39, p4, p2, p3, p22, p5, p6, p7, f38, p26, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #22 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #25 LOAD_ATTR', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #28 BINARY_MULTIPLY', 1)
-f41 = getfield_gc_pure(p39, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-f42 = float_mul(f41, f41)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #29 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #32 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #35 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #38 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #41 BINARY_ADD', 1)
-f43 = float_add(f38, f42)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #42 BINARY_DIVIDE', 1)
-i45 = float_eq(f43, 0.000000)
-guard_false(i45, descr=<Guard192>) [p1, p0, p12, f43, p2, p3, p22, p4, p5, p6, p7, f42, f38, p26, p13, i28, i8]
-f47 = float_truediv(0.500000, f43)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #43 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #46 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #49 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #52 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #55 LOAD_ATTR', 1)
-p48 = getfield_gc(p4, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value0 8>)
-guard_nonnull_class(p48, ConstClass(W_IntObject), descr=<Guard193>) [p1, p0, p12, p48, p4, p2, p3, p22, p5, p6, p7, f47, f42, f38, p26, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #58 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #61 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #64 LOAD_ATTR', 1)
-p50 = getfield_gc(p4, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value1 16>)
-guard_nonnull_class(p50, ConstClass(W_IntObject), descr=<Guard194>) [p1, p0, p12, p50, p4, p2, p3, p22, p5, p6, p7, p48, f47, f42, f38, p26, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #67 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #70 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #73 LOAD_ATTR', 1)
-p52 = getfield_gc(p4, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value4 40>)
-guard_nonnull_class(p52, 19886912, descr=<Guard195>) [p1, p0, p12, p52, p4, p2, p3, p22, p5, p6, p7, p50, p48, f47, f42, f38, p26, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #76 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #79 SETUP_LOOP', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #82 LOAD_GLOBAL', 1)
-guard_value(p26, ConstPtr(ptr54), descr=<Guard196>) [p1, p0, p12, p26, p2, p3, p22, p4, p5, p6, p7, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p56 = getfield_gc(p26, descr=<GcPtrFieldDescr pypy.objspace.std.dictmultiobject.W_DictMultiObject.inst_r_dict_content 8>)
-guard_isnull(p56, descr=<Guard197>) [p1, p0, p12, p56, p26, p2, p3, p22, p4, p5, p6, p7, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p58 = getfield_gc(ConstPtr(ptr57), descr=<GcPtrFieldDescr pypy.objspace.std.celldict.ModuleCell.inst_w_value 8>)
-guard_isnull(p58, descr=<Guard198>) [p1, p0, p12, p58, p2, p3, p22, p4, p5, p6, p7, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p60 = getfield_gc(ConstPtr(ptr59), descr=<GcPtrFieldDescr pypy.interpreter.module.Module.inst_w_dict 8>)
-guard_value(p60, ConstPtr(ptr61), descr=<Guard199>) [p1, p0, p12, p60, p2, p3, p22, p4, p5, p6, p7, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p62 = getfield_gc(p60, descr=<GcPtrFieldDescr pypy.objspace.std.dictmultiobject.W_DictMultiObject.inst_r_dict_content 8>)
-guard_isnull(p62, descr=<Guard200>) [p1, p0, p12, p62, p60, p2, p3, p22, p4, p5, p6, p7, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p64 = getfield_gc(ConstPtr(ptr63), descr=<GcPtrFieldDescr pypy.objspace.std.celldict.ModuleCell.inst_w_value 8>)
-guard_value(p64, ConstPtr(ptr65), descr=<Guard201>) [p1, p0, p12, p64, p2, p3, p22, p4, p5, p6, p7, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #85 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #88 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #91 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #94 BINARY_SUBTRACT', 1)
-i66 = getfield_gc_pure(p48, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i68 = int_sub_ovf(i66, 1)
-guard_no_overflow(, descr=<Guard202>) [p1, p0, p12, p48, i68, p2, p3, p22, p4, p5, p6, p7, p64, p52, p50, None, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #95 CALL_FUNCTION', 1)
-p70 = getfield_gc(ConstPtr(ptr69), descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_name 40>)
-p71 = getfield_gc(ConstPtr(ptr69), descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_defs 32>)
-i72 = getfield_gc_pure(p71, descr=<BoolFieldDescr pypy.interpreter.function.Defaults.inst_promote 16>)
-guard_false(i72, descr=<Guard203>) [p1, p0, p12, p70, p71, p2, p3, p22, p4, p5, p6, p7, i68, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p73 = getfield_gc_pure(p71, descr=<GcPtrFieldDescr pypy.interpreter.function.Defaults.inst_items 8>)
-i74 = arraylen_gc(p73, descr=<GcPtrArrayDescr>)
-i76 = int_sub(4, i74)
-i78 = int_ge(3, i76)
-guard_true(i78, descr=<Guard204>) [p1, p0, p12, p70, i76, p71, p2, p3, p22, p4, p5, p6, p7, i68, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i79 = int_sub(3, i76)
-i80 = getfield_gc_pure(p71, descr=<BoolFieldDescr pypy.interpreter.function.Defaults.inst_promote 16>)
-guard_false(i80, descr=<Guard205>) [p1, p0, p12, p70, i79, i76, p71, p2, p3, p22, p4, p5, p6, p7, i68, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p81 = getfield_gc_pure(p71, descr=<GcPtrFieldDescr pypy.interpreter.function.Defaults.inst_items 8>)
-p82 = getarrayitem_gc(p81, i79, descr=<GcPtrArrayDescr>)
-guard_class(p82, ConstClass(W_IntObject), descr=<Guard206>) [p1, p0, p12, p82, p2, p3, p22, p4, p5, p6, p7, i68, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i84 = getfield_gc_pure(p82, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i85 = int_is_zero(i84)
-guard_false(i85, descr=<Guard207>) [p1, p0, p12, i84, i68, p2, p3, p22, p4, p5, p6, p7, p82, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i87 = int_lt(i84, 0)
-guard_false(i87, descr=<Guard208>) [p1, p0, p12, i84, i68, p2, p3, p22, p4, p5, p6, p7, p82, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i89 = int_lt(1, i68)
-guard_true(i89, descr=<Guard209>) [p1, p0, p12, i84, i68, p2, p3, p22, p4, p5, p6, p7, p82, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i90 = int_sub(i68, 1)
-i92 = int_sub(i90, 1)
-i93 = uint_floordiv(i92, i84)
-i95 = int_add(i93, 1)
-i97 = int_lt(i95, 0)
-guard_false(i97, descr=<Guard210>) [p1, p0, p12, i84, i95, p2, p3, p22, p4, p5, p6, p7, p82, i68, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #98 GET_ITER', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #99 FOR_ITER', 1)
-i99 = int_gt(i95, 0)
-guard_true(i99, descr=<Guard211>) [p1, p0, p12, p2, p3, p22, p4, p5, p6, p7, i84, i95, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i100 = int_add(1, i84)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #102 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #105 SETUP_LOOP', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #108 LOAD_GLOBAL', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #111 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #114 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #117 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #120 BINARY_SUBTRACT', 1)
-i101 = getfield_gc_pure(p50, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i103 = int_sub_ovf(i101, 1)
-guard_no_overflow(, descr=<Guard212>) [p1, p0, p12, p50, i103, p2, p3, p22, p4, p5, p6, p7, i100, i93, i84, None, None, None, None, p52, None, p48, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #121 CALL_FUNCTION', 1)
-i104 = getfield_gc_pure(p71, descr=<BoolFieldDescr pypy.interpreter.function.Defaults.inst_promote 16>)
-guard_false(i104, descr=<Guard213>) [p1, p0, p12, p70, p71, p2, p3, p22, p4, p5, p6, p7, i103, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p105 = getfield_gc_pure(p71, descr=<GcPtrFieldDescr pypy.interpreter.function.Defaults.inst_items 8>)
-i106 = arraylen_gc(p105, descr=<GcPtrArrayDescr>)
-i108 = int_sub(4, i106)
-i110 = int_ge(3, i108)
-guard_true(i110, descr=<Guard214>) [p1, p0, p12, p70, i108, p71, p2, p3, p22, p4, p5, p6, p7, i103, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i111 = int_sub(3, i108)
-i112 = getfield_gc_pure(p71, descr=<BoolFieldDescr pypy.interpreter.function.Defaults.inst_promote 16>)
-guard_false(i112, descr=<Guard215>) [p1, p0, p12, p70, i111, i108, p71, p2, p3, p22, p4, p5, p6, p7, i103, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p113 = getfield_gc_pure(p71, descr=<GcPtrFieldDescr pypy.interpreter.function.Defaults.inst_items 8>)
-p114 = getarrayitem_gc(p113, i111, descr=<GcPtrArrayDescr>)
-guard_class(p114, ConstClass(W_IntObject), descr=<Guard216>) [p1, p0, p12, p114, p2, p3, p22, p4, p5, p6, p7, i103, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i116 = getfield_gc_pure(p114, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i117 = int_is_zero(i116)
-guard_false(i117, descr=<Guard217>) [p1, p0, p12, i116, i103, p2, p3, p22, p4, p5, p6, p7, p114, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i119 = int_lt(i116, 0)
-guard_false(i119, descr=<Guard218>) [p1, p0, p12, i116, i103, p2, p3, p22, p4, p5, p6, p7, p114, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i121 = int_lt(1, i103)
-guard_true(i121, descr=<Guard219>) [p1, p0, p12, i116, i103, p2, p3, p22, p4, p5, p6, p7, p114, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i122 = int_sub(i103, 1)
-i124 = int_sub(i122, 1)
-i125 = uint_floordiv(i124, i116)
-i127 = int_add(i125, 1)
-i129 = int_lt(i127, 0)
-guard_false(i129, descr=<Guard220>) [p1, p0, p12, i116, i127, p2, p3, p22, p4, p5, p6, p7, p114, i103, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #124 GET_ITER', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #125 FOR_ITER', 1)
-i131 = int_gt(i127, 0)
-guard_true(i131, descr=<Guard221>) [p1, p0, p12, p2, p3, p22, p4, p5, p6, p7, i116, i127, None, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i132 = int_add(1, i116)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #128 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #131 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #134 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #137 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #140 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #141 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #144 BINARY_ADD', 1)
-i133 = int_add_ovf(i66, 1)
-guard_no_overflow(, descr=<Guard222>) [p1, p0, p12, i133, p2, p3, p22, p4, p5, p6, p7, i132, i125, i66, i116, None, None, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #145 BINARY_SUBSCR', 1)
-i134 = getfield_gc(p52, descr=<SignedFieldDescr pypy.module.array.interp_array.W_ArrayTyped.inst_len 32>)
-i135 = int_lt(i133, i134)
-guard_true(i135, descr=<Guard223>) [p1, p0, p12, p52, i133, p2, p3, p22, p4, p5, p6, p7, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, None, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i136 = getfield_gc(p52, descr=<NonGcPtrFieldDescr pypy.module.array.interp_array.W_ArrayTyped.inst_buffer 24>)
-f137 = getarrayitem_raw(i136, i133, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #146 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #149 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #152 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #155 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #158 BINARY_SUBTRACT', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #159 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #162 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #163 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #166 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #167 BINARY_SUBSCR', 1)
-f138 = getarrayitem_raw(i136, 1, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #168 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #171 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #174 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #177 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #178 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #181 BINARY_MULTIPLY', 1)
-i140 = int_mul_ovf(2, i66)
-guard_no_overflow(, descr=<Guard224>) [p1, p0, p12, p48, i140, p2, p3, p22, p4, p5, p6, p7, f138, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, p52, p50, None, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #182 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #185 BINARY_ADD', 1)
-i141 = int_add_ovf(i140, 1)
-guard_no_overflow(, descr=<Guard225>) [p1, p0, p12, i141, p2, p3, p22, p4, p5, p6, p7, i140, f138, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #186 BINARY_SUBSCR', 1)
-i143 = int_lt(i141, 0)
-guard_false(i143, descr=<Guard226>) [p1, p0, p12, p52, i141, i134, p2, p3, p22, p4, p5, p6, p7, None, f138, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, None, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i144 = int_lt(i141, i134)
-guard_true(i144, descr=<Guard227>) [p1, p0, p12, p52, i141, p2, p3, p22, p4, p5, p6, p7, None, f138, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, None, p50, p48, f47, f42, f38, None, p13, i28, i8]
-f145 = getarrayitem_raw(i136, i141, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #187 BINARY_ADD', 1)
-f146 = float_add(f138, f145)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #188 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #191 BINARY_MULTIPLY', 1)
-f147 = float_mul(f146, f42)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #192 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #195 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #198 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #201 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #202 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #205 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #206 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #209 BINARY_SUBTRACT', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #210 BINARY_SUBSCR', 1)
-i148 = int_lt(i66, i134)
-guard_true(i148, descr=<Guard228>) [p1, p0, p12, p52, i66, p2, p3, p22, p4, p5, p6, p7, f147, None, None, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, None, p50, p48, f47, f42, f38, None, p13, i28, i8]
-f149 = getarrayitem_raw(i136, i66, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #211 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #214 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #217 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #220 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #221 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #224 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #225 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #228 BINARY_ADD', 1)
-i151 = int_add(i133, 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #229 BINARY_SUBSCR', 1)
-i152 = int_lt(i151, i134)
-guard_true(i152, descr=<Guard229>) [p1, p0, p12, p52, i151, p2, p3, p22, p4, p5, p6, p7, f149, f147, None, None, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, None, p50, p48, f47, f42, f38, None, p13, i28, i8]
-f153 = getarrayitem_raw(i136, i151, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #230 BINARY_ADD', 1)
-f154 = float_add(f149, f153)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #231 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #234 BINARY_MULTIPLY', 1)
-f155 = float_mul(f154, f38)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #235 BINARY_ADD', 1)
-f156 = float_add(f147, f155)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #236 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #239 BINARY_MULTIPLY', 1)
-f157 = float_mul(f156, f47)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #240 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #243 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #246 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #249 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #250 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #253 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #254 STORE_SUBSCR', 1)
-setarrayitem_raw(i136, i133, f157, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #255 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #258 LOAD_GLOBAL', 1)
-p159 = getfield_gc(ConstPtr(ptr158), descr=<GcPtrFieldDescr pypy.objspace.std.celldict.ModuleCell.inst_w_value 8>)
-guard_nonnull_class(p159, ConstClass(Function), descr=<Guard230>) [p1, p0, p12, p159, p2, p3, p22, p4, p5, p6, p7, None, None, None, None, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #261 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #264 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #267 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #270 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #271 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #274 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #275 BINARY_SUBSCR', 1)
-f161 = getarrayitem_raw(i136, i133, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #276 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #279 BINARY_SUBTRACT', 1)
-f162 = float_sub(f161, f137)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #280 CALL_FUNCTION', 1)
-p163 = getfield_gc(p159, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_code 24>)
-guard_value(p163, ConstPtr(ptr164), descr=<Guard231>) [p1, p0, p12, p163, p159, p2, p3, p22, p4, p5, p6, p7, f162, None, None, None, None, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p165 = getfield_gc(p159, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_w_func_globals 64>)
-p166 = getfield_gc(p159, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_closure 16>)
-i167 = force_token()
-debug_merge_point('<code object sqr, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 7> #0 LOAD_FAST', 2)
-debug_merge_point('<code object sqr, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 7> #3 LOAD_FAST', 2)
-debug_merge_point('<code object sqr, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 7> #6 BINARY_MULTIPLY', 2)
-f168 = float_mul(f162, f162)
-debug_merge_point('<code object sqr, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 7> #7 RETURN_VALUE', 2)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #283 INPLACE_ADD', 1)
-f170 = float_add(0.000000, f168)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #284 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #287 JUMP_ABSOLUTE', 1)
-i172 = getfield_raw(38968960, descr=<SignedFieldDescr pypysig_long_struct.c_value 0>)
-i174 = int_sub(i172, 100)
-setfield_raw(38968960, i174, descr=<SignedFieldDescr pypysig_long_struct.c_value 0>)
-i176 = int_lt(i174, 0)
-guard_false(i176, descr=<Guard232>) [p1, p0, p12, p2, p3, p22, p4, p5, p6, p7, f170, None, None, None, None, None, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #125 FOR_ITER', 1)
-i177 = force_token()
-p179 = new_with_vtable(19809200)
-setfield_gc(p179, i28, descr=<SignedFieldDescr JitVirtualRef.virtual_token 8>)
-setfield_gc(p12, p179, descr=<GcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_topframeref 56>)
-setfield_gc(p0, i177, descr=<SignedFieldDescr pypy.interpreter.pyframe.PyFrame.vable_token 24>)
-p181 = new_with_vtable(19863424)
-setfield_gc(p181, p13, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_f_backref 48>)
-setfield_gc(p181, ConstPtr(ptr54), descr=<GcPtrFieldDescr pypy.interpreter.eval.Frame.inst_w_globals 8>)
-setfield_gc(p181, 34, descr=<INTFieldDescr pypy.interpreter.pyframe.PyFrame.inst_f_lineno 144>)
-setfield_gc(p181, ConstPtr(ptr25), descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_pycode 112>)
-p184 = new_array(8, descr=<GcPtrArrayDescr>)
-p186 = new_with_vtable(19861240)
-setfield_gc(p186, i100, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_current 8>)
-setfield_gc(p186, i93, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_remaining 16>)
-setfield_gc(p186, i84, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_step 24>)
-setarrayitem_gc(p184, 0, p186, descr=<GcPtrArrayDescr>)
-p189 = new_with_vtable(19861240)
-setfield_gc(p189, i132, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_current 8>)
-setfield_gc(p189, i125, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_remaining 16>)
-setfield_gc(p189, i116, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_step 24>)
-setarrayitem_gc(p184, 1, p189, descr=<GcPtrArrayDescr>)
-setfield_gc(p181, p184, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_valuestack_w 120>)
-setfield_gc(p181, 125, descr=<SignedFieldDescr pypy.interpreter.pyframe.PyFrame.inst_last_instr 96>)
-p193 = new_with_vtable(19865144)
-setfield_gc(p193, 291, descr=<UnsignedFieldDescr pypy.interpreter.pyopcode.FrameBlock.inst_handlerposition 8>)
-setfield_gc(p193, 1, descr=<SignedFieldDescr pypy.interpreter.pyopcode.FrameBlock.inst_valuestackdepth 24>)
-p197 = new_with_vtable(19865144)
-setfield_gc(p197, 295, descr=<UnsignedFieldDescr pypy.interpreter.pyopcode.FrameBlock.inst_handlerposition 8>)
-setfield_gc(p193, p197, descr=<GcPtrFieldDescr pypy.interpreter.pyopcode.FrameBlock.inst_previous 16>)
-setfield_gc(p181, p193, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_lastblock 104>)
-p200 = new_array(11, descr=<GcPtrArrayDescr>)
-setarrayitem_gc(p200, 0, p4, descr=<GcPtrArrayDescr>)
-p203 = new_with_vtable(19800744)
-setfield_gc(p203, f38, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p200, 1, p203, descr=<GcPtrArrayDescr>)
-p206 = new_with_vtable(19800744)
-setfield_gc(p206, f42, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p200, 2, p206, descr=<GcPtrArrayDescr>)
-p209 = new_with_vtable(19800744)
-setfield_gc(p209, f47, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p200, 3, p209, descr=<GcPtrArrayDescr>)
-p212 = new_with_vtable(19800744)
-setfield_gc(p212, f170, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p200, 4, p212, descr=<GcPtrArrayDescr>)
-setarrayitem_gc(p200, 5, p48, descr=<GcPtrArrayDescr>)
-setarrayitem_gc(p200, 6, p50, descr=<GcPtrArrayDescr>)
-setarrayitem_gc(p200, 7, p52, descr=<GcPtrArrayDescr>)
-p218 = new_with_vtable(ConstClass(W_IntObject))
-setfield_gc(p218, 1, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-setarrayitem_gc(p200, 8, p218, descr=<GcPtrArrayDescr>)
-p221 = new_with_vtable(ConstClass(W_IntObject))
-setfield_gc(p221, 1, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-setarrayitem_gc(p200, 9, p221, descr=<GcPtrArrayDescr>)
-p224 = new_with_vtable(19800744)
-setfield_gc(p224, f137, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p200, 10, p224, descr=<GcPtrArrayDescr>)
-setfield_gc(p181, p200, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_fastlocals_w 56>)
-setfield_gc(p181, 2, descr=<SignedFieldDescr pypy.interpreter.pyframe.PyFrame.inst_valuestackdepth 128>)
-p235 = call_assembler(p181, p12, ConstPtr(ptr25), p193, 2, ConstPtr(ptr227), 0, 125, p186, p189, ConstPtr(ptr229), ConstPtr(ptr230), ConstPtr(ptr231), ConstPtr(ptr232), ConstPtr(ptr233), ConstPtr(ptr234), p4, p203, p206, p209, p212, p48, p50, p52, p218, p221, p224, descr=<Loop1>)
-guard_not_forced(, descr=<Guard233>) [p1, p0, p12, p181, p235, p179, p2, p3, p22, p4, p5, p6, p7, i8]
-guard_no_exception(, descr=<Guard234>) [p1, p0, p12, p181, p235, p179, p2, p3, p22, p4, p5, p6, p7, i8]
-p236 = getfield_gc(p12, descr=<GcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_w_tracefunc 72>)
-guard_isnull(p236, descr=<Guard235>) [p1, p0, p12, p235, p181, p236, p179, p2, p3, p22, p4, p5, p6, p7, i8]
-i237 = ptr_eq(p181, p0)
-guard_false(i237, descr=<Guard236>) [p1, p0, p12, p235, p181, p179, p2, p3, p22, p4, p5, p6, p7, i8]
-i238 = getfield_gc(p12, descr=<NonGcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_profilefunc 40>)
-setfield_gc(p181, ConstPtr(ptr239), descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_last_exception 88>)
-i240 = int_is_true(i238)
-guard_false(i240, descr=<Guard237>) [p1, p0, p235, p181, p12, p179, p2, p3, p22, p4, p5, p6, p7, i8]
-p241 = getfield_gc(p181, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_f_backref 48>)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #64 STORE_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #67 LOAD_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #70 LOAD_CONST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #73 INPLACE_ADD', 0)
-i243 = int_add(i8, 1)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #74 STORE_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #77 JUMP_ABSOLUTE', 0)
-i245 = getfield_raw(38968960, descr=<SignedFieldDescr pypysig_long_struct.c_value 0>)
-i247 = int_sub(i245, 100)
-setfield_raw(38968960, i247, descr=<SignedFieldDescr pypysig_long_struct.c_value 0>)
-setfield_gc(p12, p241, descr=<GcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_topframeref 56>)
-setfield_gc(p179, p181, descr=<GcPtrFieldDescr JitVirtualRef.forced 16>)
-setfield_gc(p179, -3, descr=<SignedFieldDescr JitVirtualRef.virtual_token 8>)
-i250 = int_lt(i247, 0)
-guard_false(i250, descr=<Guard238>) [p1, p0, p2, p3, p4, p5, p6, p235, i243, None]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #21 LOAD_FAST', 0)
-jump(p0, p1, p2, p3, p4, p5, p6, p235, i243, f9, i10, i238, p12, p241, descr=<Loop2>)
-[5ed74fc965fa] jit-log-opt-loop}
-[5ed74fe43ee0] {jit-log-opt-loop
-# Loop 3 : entry bridge with 413 ops
-[p0, p1, p2, p3, i4, p5, i6, i7, p8, p9, p10, p11, p12, p13, p14]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #21 LOAD_FAST', 0)
-guard_value(i4, 0, descr=<Guard239>) [i4, p1, p0, p2, p3, p5, i6, i7, p8, p9, p10, p11, p12, p13, p14]
-guard_nonnull_class(p13, 19800744, descr=<Guard240>) [p1, p0, p13, p2, p3, p5, i6, p8, p9, p10, p11, p12, p14]
-guard_value(i6, 0, descr=<Guard241>) [i6, p1, p0, p2, p3, p5, p13, p9, p10, p11, p12, p14]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #24 LOAD_FAST', 0)
-guard_nonnull_class(p12, 19800744, descr=<Guard242>) [p1, p0, p12, p2, p3, p5, p13, p9, p10, p11, p14]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #27 COMPARE_OP', 0)
-f19 = getfield_gc_pure(p13, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-f20 = getfield_gc_pure(p12, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-i21 = float_gt(f19, f20)
-guard_true(i21, descr=<Guard243>) [p1, p0, p12, p13, p2, p3, p5, p10, p11, p14]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #30 POP_JUMP_IF_FALSE', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #33 LOAD_FAST', 0)
-guard_nonnull_class(p11, ConstClass(W_IntObject), descr=<Guard244>) [p1, p0, p11, p2, p3, p5, p10, p12, p13, p14]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #36 POP_JUMP_IF_FALSE', 0)
-i23 = getfield_gc_pure(p11, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i24 = int_is_true(i23)
-guard_true(i24, descr=<Guard245>) [p1, p0, p11, p2, p3, p5, p10, p12, p13, p14]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #39 LOAD_FAST', 0)
-guard_nonnull_class(p14, ConstClass(W_IntObject), descr=<Guard246>) [p1, p0, p14, p2, p3, p5, p10, p11, p12, p13]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #42 LOAD_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #45 COMPARE_OP', 0)
-i26 = getfield_gc_pure(p14, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i27 = int_ge(i26, i23)
-guard_false(i27, descr=<Guard247>) [p1, p0, p11, p14, p2, p3, p5, p10, p12, p13]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #48 POP_JUMP_IF_FALSE', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #55 LOAD_GLOBAL', 0)
-guard_value(p2, ConstPtr(ptr28), descr=<Guard248>) [p1, p0, p2, p3, p5, p10, p11, p12, p13, p14]
-p29 = getfield_gc(p0, descr=<GcPtrFieldDescr pypy.interpreter.eval.Frame.inst_w_globals 8>)
-guard_value(p29, ConstPtr(ptr30), descr=<Guard249>) [p1, p0, p29, p3, p5, p10, p11, p12, p13, p14]
-p31 = getfield_gc(p29, descr=<GcPtrFieldDescr pypy.objspace.std.dictmultiobject.W_DictMultiObject.inst_r_dict_content 8>)
-guard_isnull(p31, descr=<Guard250>) [p1, p0, p31, p29, p3, p5, p10, p11, p12, p13, p14]
-p33 = getfield_gc(ConstPtr(ptr32), descr=<GcPtrFieldDescr pypy.objspace.std.celldict.ModuleCell.inst_w_value 8>)
-guard_nonnull_class(p33, ConstClass(Function), descr=<Guard251>) [p1, p0, p33, p3, p5, p10, p11, p12, p13, p14]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #58 LOAD_FAST', 0)
-guard_nonnull_class(p10, 19852624, descr=<Guard252>) [p1, p0, p10, p3, p5, p33, p11, p12, p13, p14]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #61 CALL_FUNCTION', 0)
-p36 = getfield_gc(p33, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_code 24>)
-guard_value(p36, ConstPtr(ptr37), descr=<Guard253>) [p1, p0, p36, p33, p3, p5, p10, p11, p12, p13, p14]
-p38 = getfield_gc(p33, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_w_func_globals 64>)
-p39 = getfield_gc(p33, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_closure 16>)
-p41 = call(ConstClass(getexecutioncontext), descr=<GcPtrCallDescr>)
-p42 = getfield_gc(p41, descr=<GcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_topframeref 56>)
-i43 = force_token()
-p44 = getfield_gc(p41, descr=<GcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_w_tracefunc 72>)
-guard_isnull(p44, descr=<Guard254>) [p1, p0, p41, p44, p3, p5, p33, p10, p11, p12, p13, p14, i43, p42, p38]
-i45 = getfield_gc(p41, descr=<NonGcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_profilefunc 40>)
-i46 = int_is_zero(i45)
-guard_true(i46, descr=<Guard255>) [p1, p0, p41, p3, p5, p33, p10, p11, p12, p13, p14, i43, p42, p38]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #0 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #3 LOAD_ATTR', 1)
-p47 = getfield_gc(p10, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst_map 48>)
-guard_value(p47, ConstPtr(ptr48), descr=<Guard256>) [p1, p0, p41, p10, p47, p3, p5, p33, p11, p12, p13, p14, i43, p42, p38]
-p50 = getfield_gc(ConstPtr(ptr49), descr=<GcPtrFieldDescr pypy.objspace.std.typeobject.W_TypeObject.inst__version_tag 16>)
-guard_value(p50, ConstPtr(ptr51), descr=<Guard257>) [p1, p0, p41, p10, p50, p3, p5, p33, p11, p12, p13, p14, i43, p42, p38]
-p52 = getfield_gc(p10, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value2 24>)
-guard_nonnull_class(p52, 19800744, descr=<Guard258>) [p1, p0, p41, p52, p10, p3, p5, p33, p11, p12, p13, p14, i43, p42, p38]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #6 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #9 LOAD_ATTR', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #12 BINARY_MULTIPLY', 1)
-f54 = getfield_gc_pure(p52, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-f55 = float_mul(f54, f54)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #13 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #16 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #19 LOAD_ATTR', 1)
-p56 = getfield_gc(p10, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value3 32>)
-guard_nonnull_class(p56, 19800744, descr=<Guard259>) [p1, p0, p41, p56, p10, p3, p5, p33, p11, p12, p13, p14, f55, i43, p42, p38]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #22 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #25 LOAD_ATTR', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #28 BINARY_MULTIPLY', 1)
-f58 = getfield_gc_pure(p56, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-f59 = float_mul(f58, f58)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #29 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #32 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #35 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #38 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #41 BINARY_ADD', 1)
-f60 = float_add(f55, f59)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #42 BINARY_DIVIDE', 1)
-i62 = float_eq(f60, 0.000000)
-guard_false(i62, descr=<Guard260>) [p1, p0, p41, f60, p3, p5, p33, p10, p11, p12, p13, p14, f59, f55, i43, p42, p38]
-f64 = float_truediv(0.500000, f60)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #43 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #46 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #49 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #52 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #55 LOAD_ATTR', 1)
-p65 = getfield_gc(p10, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value0 8>)
-guard_nonnull_class(p65, ConstClass(W_IntObject), descr=<Guard261>) [p1, p0, p41, p65, p10, p3, p5, p33, p11, p12, p13, p14, f64, f59, f55, i43, p42, p38]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #58 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #61 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #64 LOAD_ATTR', 1)
-p67 = getfield_gc(p10, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value1 16>)
-guard_nonnull_class(p67, ConstClass(W_IntObject), descr=<Guard262>) [p1, p0, p41, p67, p10, p3, p5, p33, p11, p12, p13, p14, p65, f64, f59, f55, i43, p42, p38]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #67 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #70 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #73 LOAD_ATTR', 1)
-p69 = getfield_gc(p10, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value4 40>)
-guard_nonnull_class(p69, 19886912, descr=<Guard263>) [p1, p0, p41, p69, p10, p3, p5, p33, p11, p12, p13, p14, p67, p65, f64, f59, f55, i43, p42, p38]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #76 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #79 SETUP_LOOP', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #82 LOAD_GLOBAL', 1)
-guard_value(p38, ConstPtr(ptr71), descr=<Guard264>) [p1, p0, p41, p38, p3, p5, p33, p10, p11, p12, p13, p14, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p73 = getfield_gc(p38, descr=<GcPtrFieldDescr pypy.objspace.std.dictmultiobject.W_DictMultiObject.inst_r_dict_content 8>)
-guard_isnull(p73, descr=<Guard265>) [p1, p0, p41, p73, p38, p3, p5, p33, p10, p11, p12, p13, p14, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p75 = getfield_gc(ConstPtr(ptr74), descr=<GcPtrFieldDescr pypy.objspace.std.celldict.ModuleCell.inst_w_value 8>)
-guard_isnull(p75, descr=<Guard266>) [p1, p0, p41, p75, p3, p5, p33, p10, p11, p12, p13, p14, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p77 = getfield_gc(ConstPtr(ptr76), descr=<GcPtrFieldDescr pypy.interpreter.module.Module.inst_w_dict 8>)
-guard_value(p77, ConstPtr(ptr78), descr=<Guard267>) [p1, p0, p41, p77, p3, p5, p33, p10, p11, p12, p13, p14, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p79 = getfield_gc(p77, descr=<GcPtrFieldDescr pypy.objspace.std.dictmultiobject.W_DictMultiObject.inst_r_dict_content 8>)
-guard_isnull(p79, descr=<Guard268>) [p1, p0, p41, p79, p77, p3, p5, p33, p10, p11, p12, p13, p14, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p81 = getfield_gc(ConstPtr(ptr80), descr=<GcPtrFieldDescr pypy.objspace.std.celldict.ModuleCell.inst_w_value 8>)
-guard_value(p81, ConstPtr(ptr82), descr=<Guard269>) [p1, p0, p41, p81, p3, p5, p33, p10, p11, p12, p13, p14, p69, p67, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #85 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #88 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #91 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #94 BINARY_SUBTRACT', 1)
-i83 = getfield_gc_pure(p65, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i85 = int_sub_ovf(i83, 1)
-guard_no_overflow(, descr=<Guard270>) [p1, p0, p41, p65, i85, p3, p5, p33, p10, p11, p12, p13, p14, p81, p69, p67, None, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #95 CALL_FUNCTION', 1)
-p87 = getfield_gc(ConstPtr(ptr86), descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_name 40>)
-p88 = getfield_gc(ConstPtr(ptr86), descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_defs 32>)
-i89 = getfield_gc_pure(p88, descr=<BoolFieldDescr pypy.interpreter.function.Defaults.inst_promote 16>)
-guard_false(i89, descr=<Guard271>) [p1, p0, p41, p87, p88, p3, p5, p33, p10, p11, p12, p13, p14, i85, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p90 = getfield_gc_pure(p88, descr=<GcPtrFieldDescr pypy.interpreter.function.Defaults.inst_items 8>)
-i91 = arraylen_gc(p90, descr=<GcPtrArrayDescr>)
-i93 = int_sub(4, i91)
-i95 = int_ge(3, i93)
-guard_true(i95, descr=<Guard272>) [p1, p0, p41, p87, i93, p88, p3, p5, p33, p10, p11, p12, p13, p14, i85, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i96 = int_sub(3, i93)
-i97 = getfield_gc_pure(p88, descr=<BoolFieldDescr pypy.interpreter.function.Defaults.inst_promote 16>)
-guard_false(i97, descr=<Guard273>) [p1, p0, p41, p87, i96, i93, p88, p3, p5, p33, p10, p11, p12, p13, p14, i85, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p98 = getfield_gc_pure(p88, descr=<GcPtrFieldDescr pypy.interpreter.function.Defaults.inst_items 8>)
-p99 = getarrayitem_gc(p98, i96, descr=<GcPtrArrayDescr>)
-guard_class(p99, ConstClass(W_IntObject), descr=<Guard274>) [p1, p0, p41, p99, p3, p5, p33, p10, p11, p12, p13, p14, i85, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i101 = getfield_gc_pure(p99, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i102 = int_is_zero(i101)
-guard_false(i102, descr=<Guard275>) [p1, p0, p41, i101, i85, p3, p5, p33, p10, p11, p12, p13, p14, p99, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i104 = int_lt(i101, 0)
-guard_false(i104, descr=<Guard276>) [p1, p0, p41, i101, i85, p3, p5, p33, p10, p11, p12, p13, p14, p99, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i106 = int_lt(1, i85)
-guard_true(i106, descr=<Guard277>) [p1, p0, p41, i101, i85, p3, p5, p33, p10, p11, p12, p13, p14, p99, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i107 = int_sub(i85, 1)
-i109 = int_sub(i107, 1)
-i110 = uint_floordiv(i109, i101)
-i112 = int_add(i110, 1)
-i114 = int_lt(i112, 0)
-guard_false(i114, descr=<Guard278>) [p1, p0, p41, i101, i112, p3, p5, p33, p10, p11, p12, p13, p14, p99, i85, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #98 GET_ITER', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #99 FOR_ITER', 1)
-i116 = int_gt(i112, 0)
-guard_true(i116, descr=<Guard279>) [p1, p0, p41, p3, p5, p33, p10, p11, p12, p13, p14, i112, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i117 = int_add(1, i101)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #102 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #105 SETUP_LOOP', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #108 LOAD_GLOBAL', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #111 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #114 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #117 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #120 BINARY_SUBTRACT', 1)
-i118 = getfield_gc_pure(p67, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i120 = int_sub_ovf(i118, 1)
-guard_no_overflow(, descr=<Guard280>) [p1, p0, p41, p67, i120, p3, p5, p33, p10, p11, p12, p13, p14, i110, i117, None, i101, None, None, None, p69, None, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #121 CALL_FUNCTION', 1)
-i121 = getfield_gc_pure(p88, descr=<BoolFieldDescr pypy.interpreter.function.Defaults.inst_promote 16>)
-guard_false(i121, descr=<Guard281>) [p1, p0, p41, p87, p88, p3, p5, p33, p10, p11, p12, p13, p14, i120, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p122 = getfield_gc_pure(p88, descr=<GcPtrFieldDescr pypy.interpreter.function.Defaults.inst_items 8>)
-i123 = arraylen_gc(p122, descr=<GcPtrArrayDescr>)
-i125 = int_sub(4, i123)
-i127 = int_ge(3, i125)
-guard_true(i127, descr=<Guard282>) [p1, p0, p41, p87, i125, p88, p3, p5, p33, p10, p11, p12, p13, p14, i120, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i128 = int_sub(3, i125)
-i129 = getfield_gc_pure(p88, descr=<BoolFieldDescr pypy.interpreter.function.Defaults.inst_promote 16>)
-guard_false(i129, descr=<Guard283>) [p1, p0, p41, p87, i128, i125, p88, p3, p5, p33, p10, p11, p12, p13, p14, i120, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p130 = getfield_gc_pure(p88, descr=<GcPtrFieldDescr pypy.interpreter.function.Defaults.inst_items 8>)
-p131 = getarrayitem_gc(p130, i128, descr=<GcPtrArrayDescr>)
-guard_class(p131, ConstClass(W_IntObject), descr=<Guard284>) [p1, p0, p41, p131, p3, p5, p33, p10, p11, p12, p13, p14, i120, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i133 = getfield_gc_pure(p131, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i134 = int_is_zero(i133)
-guard_false(i134, descr=<Guard285>) [p1, p0, p41, i133, i120, p3, p5, p33, p10, p11, p12, p13, p14, p131, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i136 = int_lt(i133, 0)
-guard_false(i136, descr=<Guard286>) [p1, p0, p41, i133, i120, p3, p5, p33, p10, p11, p12, p13, p14, p131, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i138 = int_lt(1, i120)
-guard_true(i138, descr=<Guard287>) [p1, p0, p41, i133, i120, p3, p5, p33, p10, p11, p12, p13, p14, p131, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i139 = int_sub(i120, 1)
-i141 = int_sub(i139, 1)
-i142 = uint_floordiv(i141, i133)
-i144 = int_add(i142, 1)
-i146 = int_lt(i144, 0)
-guard_false(i146, descr=<Guard288>) [p1, p0, p41, i133, i144, p3, p5, p33, p10, p11, p12, p13, p14, p131, i120, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #124 GET_ITER', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #125 FOR_ITER', 1)
-i148 = int_gt(i144, 0)
-guard_true(i148, descr=<Guard289>) [p1, p0, p41, p3, p5, p33, p10, p11, p12, p13, p14, i144, i133, None, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i149 = int_add(1, i133)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #128 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #131 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #134 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #137 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #140 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #141 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #144 BINARY_ADD', 1)
-i150 = int_add_ovf(i83, 1)
-guard_no_overflow(, descr=<Guard290>) [p1, p0, p41, i150, p3, p5, p33, p10, p11, p12, p13, p14, i83, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #145 BINARY_SUBSCR', 1)
-i151 = getfield_gc(p69, descr=<SignedFieldDescr pypy.module.array.interp_array.W_ArrayTyped.inst_len 32>)
-i152 = int_lt(i150, i151)
-guard_true(i152, descr=<Guard291>) [p1, p0, p41, p69, i150, p3, p5, p33, p10, p11, p12, p13, p14, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, None, p67, p65, f64, f59, f55, i43, p42, None]
-i153 = getfield_gc(p69, descr=<NonGcPtrFieldDescr pypy.module.array.interp_array.W_ArrayTyped.inst_buffer 24>)
-f154 = getarrayitem_raw(i153, i150, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #146 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #149 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #152 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #155 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #158 BINARY_SUBTRACT', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #159 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #162 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #163 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #166 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #167 BINARY_SUBSCR', 1)
-f155 = getarrayitem_raw(i153, 1, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #168 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #171 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #174 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #177 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #178 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #181 BINARY_MULTIPLY', 1)
-i157 = int_mul_ovf(2, i83)
-guard_no_overflow(, descr=<Guard292>) [p1, p0, p41, p65, i157, p3, p5, p33, p10, p11, p12, p13, p14, f154, f155, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, p69, p67, None, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #182 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #185 BINARY_ADD', 1)
-i158 = int_add_ovf(i157, 1)
-guard_no_overflow(, descr=<Guard293>) [p1, p0, p41, i158, p3, p5, p33, p10, p11, p12, p13, p14, i157, f154, f155, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #186 BINARY_SUBSCR', 1)
-i160 = int_lt(i158, 0)
-guard_false(i160, descr=<Guard294>) [p1, p0, p41, p69, i158, i151, p3, p5, p33, p10, p11, p12, p13, p14, None, f154, f155, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, None, p67, p65, f64, f59, f55, i43, p42, None]
-i161 = int_lt(i158, i151)
-guard_true(i161, descr=<Guard295>) [p1, p0, p41, p69, i158, p3, p5, p33, p10, p11, p12, p13, p14, None, f154, f155, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, None, p67, p65, f64, f59, f55, i43, p42, None]
-f162 = getarrayitem_raw(i153, i158, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #187 BINARY_ADD', 1)
-f163 = float_add(f155, f162)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #188 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #191 BINARY_MULTIPLY', 1)
-f164 = float_mul(f163, f59)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #192 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #195 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #198 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #201 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #202 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #205 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #206 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #209 BINARY_SUBTRACT', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #210 BINARY_SUBSCR', 1)
-i165 = int_lt(i83, i151)
-guard_true(i165, descr=<Guard296>) [p1, p0, p41, p69, i83, p3, p5, p33, p10, p11, p12, p13, p14, f164, None, f154, None, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, None, p67, p65, f64, f59, f55, i43, p42, None]
-f166 = getarrayitem_raw(i153, i83, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #211 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #214 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #217 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #220 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #221 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #224 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #225 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #228 BINARY_ADD', 1)
-i168 = int_add(i150, 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #229 BINARY_SUBSCR', 1)
-i169 = int_lt(i168, i151)
-guard_true(i169, descr=<Guard297>) [p1, p0, p41, p69, i168, p3, p5, p33, p10, p11, p12, p13, p14, f166, f164, None, f154, None, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, None, p67, p65, f64, f59, f55, i43, p42, None]
-f170 = getarrayitem_raw(i153, i168, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #230 BINARY_ADD', 1)
-f171 = float_add(f166, f170)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #231 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #234 BINARY_MULTIPLY', 1)
-f172 = float_mul(f171, f55)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #235 BINARY_ADD', 1)
-f173 = float_add(f164, f172)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #236 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #239 BINARY_MULTIPLY', 1)
-f174 = float_mul(f173, f64)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #240 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #243 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #246 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #249 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #250 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #253 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #254 STORE_SUBSCR', 1)
-setarrayitem_raw(i153, i150, f174, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #255 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #258 LOAD_GLOBAL', 1)
-p176 = getfield_gc(ConstPtr(ptr175), descr=<GcPtrFieldDescr pypy.objspace.std.celldict.ModuleCell.inst_w_value 8>)
-guard_nonnull_class(p176, ConstClass(Function), descr=<Guard298>) [p1, p0, p41, p176, p3, p5, p33, p10, p11, p12, p13, p14, None, None, None, f154, None, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #261 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #264 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #267 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #270 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #271 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #274 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #275 BINARY_SUBSCR', 1)
-f178 = getarrayitem_raw(i153, i150, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #276 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #279 BINARY_SUBTRACT', 1)
-f179 = float_sub(f178, f154)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #280 CALL_FUNCTION', 1)
-p180 = getfield_gc(p176, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_code 24>)
-guard_value(p180, ConstPtr(ptr181), descr=<Guard299>) [p1, p0, p41, p180, p176, p3, p5, p33, p10, p11, p12, p13, p14, f179, None, None, None, f154, None, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p182 = getfield_gc(p176, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_w_func_globals 64>)
-p183 = getfield_gc(p176, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_closure 16>)
-i184 = force_token()
-debug_merge_point('<code object sqr, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 7> #0 LOAD_FAST', 2)
-debug_merge_point('<code object sqr, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 7> #3 LOAD_FAST', 2)
-debug_merge_point('<code object sqr, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 7> #6 BINARY_MULTIPLY', 2)
-f185 = float_mul(f179, f179)
-debug_merge_point('<code object sqr, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 7> #7 RETURN_VALUE', 2)
-i186 = int_is_true(i45)
-guard_false(i186, descr=<Guard300>) [p1, p0, p41, p3, p5, p33, p10, p11, p12, p13, p14, p182, i184, p176, f185, f179, None, None, None, f154, None, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #283 INPLACE_ADD', 1)
-f188 = float_add(0.000000, f185)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #284 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #287 JUMP_ABSOLUTE', 1)
-i190 = getfield_raw(38968960, descr=<SignedFieldDescr pypysig_long_struct.c_value 0>)
-i192 = int_sub(i190, 100)
-setfield_raw(38968960, i192, descr=<SignedFieldDescr pypysig_long_struct.c_value 0>)
-i194 = int_lt(i192, 0)
-guard_false(i194, descr=<Guard301>) [p1, p0, p41, p3, p5, p33, p10, p11, p12, p13, p14, f188, None, None, None, None, None, None, None, None, f154, None, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #125 FOR_ITER', 1)
-i195 = force_token()
-p197 = new_with_vtable(19809200)
-setfield_gc(p197, i43, descr=<SignedFieldDescr JitVirtualRef.virtual_token 8>)
-setfield_gc(p41, p197, descr=<GcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_topframeref 56>)
-setfield_gc(p0, i195, descr=<SignedFieldDescr pypy.interpreter.pyframe.PyFrame.vable_token 24>)
-p199 = new_with_vtable(19863424)
-setfield_gc(p199, p42, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_f_backref 48>)
-setfield_gc(p199, ConstPtr(ptr71), descr=<GcPtrFieldDescr pypy.interpreter.eval.Frame.inst_w_globals 8>)
-setfield_gc(p199, 34, descr=<INTFieldDescr pypy.interpreter.pyframe.PyFrame.inst_f_lineno 144>)
-setfield_gc(p199, ConstPtr(ptr37), descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_pycode 112>)
-p202 = new_array(8, descr=<GcPtrArrayDescr>)
-p204 = new_with_vtable(19861240)
-setfield_gc(p204, i117, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_current 8>)
-setfield_gc(p204, i110, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_remaining 16>)
-setfield_gc(p204, i101, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_step 24>)
-setarrayitem_gc(p202, 0, p204, descr=<GcPtrArrayDescr>)
-p207 = new_with_vtable(19861240)
-setfield_gc(p207, i149, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_current 8>)
-setfield_gc(p207, i142, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_remaining 16>)
-setfield_gc(p207, i133, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_step 24>)
-setarrayitem_gc(p202, 1, p207, descr=<GcPtrArrayDescr>)
-setfield_gc(p199, p202, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_valuestack_w 120>)
-setfield_gc(p199, 125, descr=<SignedFieldDescr pypy.interpreter.pyframe.PyFrame.inst_last_instr 96>)
-p211 = new_with_vtable(19865144)
-setfield_gc(p211, 291, descr=<UnsignedFieldDescr pypy.interpreter.pyopcode.FrameBlock.inst_handlerposition 8>)
-setfield_gc(p211, 1, descr=<SignedFieldDescr pypy.interpreter.pyopcode.FrameBlock.inst_valuestackdepth 24>)
-p215 = new_with_vtable(19865144)
-setfield_gc(p215, 295, descr=<UnsignedFieldDescr pypy.interpreter.pyopcode.FrameBlock.inst_handlerposition 8>)
-setfield_gc(p211, p215, descr=<GcPtrFieldDescr pypy.interpreter.pyopcode.FrameBlock.inst_previous 16>)
-setfield_gc(p199, p211, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_lastblock 104>)
-p218 = new_array(11, descr=<GcPtrArrayDescr>)
-setarrayitem_gc(p218, 0, p10, descr=<GcPtrArrayDescr>)
-p221 = new_with_vtable(19800744)
-setfield_gc(p221, f55, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p218, 1, p221, descr=<GcPtrArrayDescr>)
-p224 = new_with_vtable(19800744)
-setfield_gc(p224, f59, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p218, 2, p224, descr=<GcPtrArrayDescr>)
-p227 = new_with_vtable(19800744)
-setfield_gc(p227, f64, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p218, 3, p227, descr=<GcPtrArrayDescr>)
-p230 = new_with_vtable(19800744)
-setfield_gc(p230, f188, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p218, 4, p230, descr=<GcPtrArrayDescr>)
-setarrayitem_gc(p218, 5, p65, descr=<GcPtrArrayDescr>)
-setarrayitem_gc(p218, 6, p67, descr=<GcPtrArrayDescr>)
-setarrayitem_gc(p218, 7, p69, descr=<GcPtrArrayDescr>)
-p236 = new_with_vtable(ConstClass(W_IntObject))
-setfield_gc(p236, 1, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-setarrayitem_gc(p218, 8, p236, descr=<GcPtrArrayDescr>)
-p239 = new_with_vtable(ConstClass(W_IntObject))
-setfield_gc(p239, 1, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-setarrayitem_gc(p218, 9, p239, descr=<GcPtrArrayDescr>)
-p242 = new_with_vtable(19800744)
-setfield_gc(p242, f154, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p218, 10, p242, descr=<GcPtrArrayDescr>)
-setfield_gc(p199, p218, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_fastlocals_w 56>)
-setfield_gc(p199, 2, descr=<SignedFieldDescr pypy.interpreter.pyframe.PyFrame.inst_valuestackdepth 128>)
-p253 = call_assembler(p199, p41, ConstPtr(ptr37), p211, 2, ConstPtr(ptr245), 0, 125, p204, p207, ConstPtr(ptr247), ConstPtr(ptr248), ConstPtr(ptr249), ConstPtr(ptr250), ConstPtr(ptr251), ConstPtr(ptr252), p10, p221, p224, p227, p230, p65, p67, p69, p236, p239, p242, descr=<Loop1>)
-guard_not_forced(, descr=<Guard302>) [p1, p0, p41, p199, p253, p197, p3, p5, p33, p10, p11, p12, p13, p14]
-guard_no_exception(, descr=<Guard303>) [p1, p0, p41, p199, p253, p197, p3, p5, p33, p10, p11, p12, p13, p14]
-p254 = getfield_gc(p41, descr=<GcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_w_tracefunc 72>)
-guard_isnull(p254, descr=<Guard304>) [p1, p0, p41, p253, p199, p254, p197, p3, p5, p33, p10, p11, p12, p13, p14]
-i255 = ptr_eq(p199, p0)
-guard_false(i255, descr=<Guard305>) [p1, p0, p41, p253, p199, p197, p3, p5, p33, p10, p11, p12, p13, p14]
-i256 = getfield_gc(p41, descr=<NonGcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_profilefunc 40>)
-setfield_gc(p199, ConstPtr(ptr257), descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_last_exception 88>)
-i258 = int_is_true(i256)
-guard_false(i258, descr=<Guard306>) [p1, p0, p253, p199, p41, p197, p3, p5, p33, p10, p11, p12, p13, p14]
-p259 = getfield_gc(p199, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_f_backref 48>)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #64 STORE_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #67 LOAD_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #70 LOAD_CONST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #73 INPLACE_ADD', 0)
-i261 = int_add(i26, 1)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #74 STORE_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #77 JUMP_ABSOLUTE', 0)
-i263 = getfield_raw(38968960, descr=<SignedFieldDescr pypysig_long_struct.c_value 0>)
-i265 = int_sub(i263, 100)
-setfield_raw(38968960, i265, descr=<SignedFieldDescr pypysig_long_struct.c_value 0>)
-setfield_gc(p41, p259, descr=<GcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_topframeref 56>)
-setfield_gc(p197, p199, descr=<GcPtrFieldDescr JitVirtualRef.forced 16>)
-setfield_gc(p197, -3, descr=<SignedFieldDescr JitVirtualRef.virtual_token 8>)
-i268 = int_lt(i265, 0)
-guard_false(i268, descr=<Guard307>) [p1, p0, p3, p5, p10, p11, p12, p253, i261]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #21 LOAD_FAST', 0)
-jump(p0, p1, p3, p5, p10, p11, p12, p253, i261, f20, i23, i256, p41, p259, descr=<Loop2>)
-[5ed74ff695c8] jit-log-opt-loop}
-[5ed8737e9776] {jit-backend-counts
-0:493724565
-1:2281802
-2:1283242
-3:993105
-4:2933
-5:2163
-6:2492
-7:1799
-8:963
-9:36
-[5ed8737ee19c] jit-backend-counts}
diff --git a/tests/examplefiles/test.r3 b/tests/examplefiles/test.r3
index cad12a8d..707102db 100644
--- a/tests/examplefiles/test.r3
+++ b/tests/examplefiles/test.r3
@@ -1,3 +1,9 @@
+preface.... everything what is before header is not evaluated
+so this should not be colorized:
+1 + 2
+
+REBOL [] ;<- this is minimal header, everything behind it must be colorized
+
;## String tests ##
print "Hello ^"World" ;<- with escaped char
multiline-string: {
@@ -52,15 +58,29 @@ type? #ff0000 ;== issue!
to integer! (1 + (x / 4.5) * 1E-4)
;## some spec comments
-comment now
-comment 10
+1 + 1
+comment "aa"
+2 + 2
+comment {aa}
+3 + 3
+comment {a^{}
+4 + 4
+comment {{}}
+5 + 5
comment {
- bla
- bla
+ foo: 6
}
-comment [
- quit
-]
+6 + 6
+comment [foo: 6]
+7 + 7
+comment [foo: "[" ]
+8 + 8
+comment [foo: {^{} ]
+9 + 9
+comment [foo: {boo} ]
+10 + 10
+comment 5-May-2014/11:17:34+2:00
+5-May-2014/11:17:34+2:00 11 + 11
;## other tests ##
---: 1
diff --git a/tests/examplefiles/test.rsl b/tests/examplefiles/test.rsl
new file mode 100644
index 00000000..d6c9fc9a
--- /dev/null
+++ b/tests/examplefiles/test.rsl
@@ -0,0 +1,111 @@
+scheme COMPILER =
+class
+ type
+ Prog == mk_Prog(stmt : Stmt),
+
+ Stmt ==
+ mk_Asgn(ide : Identifier, expr : Expr) |
+ mk_If(cond : Expr, s1 : Stmt, s2 : Stmt) |
+ mk_Seq(head : Stmt, last : Stmt),
+
+ Expr ==
+ mk_Const(const : Int) |
+ mk_Plus(fst : Expr, snd : Expr) |
+ mk_Id(ide : Identifier),
+ Identifier = Text
+
+type /* storage for program variables */
+ `Sigma = Identifier -m-> Int
+
+value
+ m : Prog -> `Sigma -> `Sigma
+ m(p)(`sigma) is m(stmt(p))(`sigma),
+
+ m : Stmt -> `Sigma -> `Sigma
+ m(s)(`sigma) is
+ case s of
+ mk_Asgn(i, e) -> `sigma !! [i +> m(e)(`sigma)],
+ mk_Seq(s1, s2) -> m(s2)(m(s1)(`sigma)),
+ mk_If(c, s1, s2) ->
+ if m(c)(`sigma) ~= 0 then m(s1)(`sigma) else m(s2)(`sigma) end
+ end,
+
+ m : Expr -> `Sigma -> Int
+ m(e)(`sigma) is
+ case e of
+ mk_Const(n) -> n,
+ mk_Plus(e1, e2) -> m(e1)(`sigma) + m(e2)(`sigma),
+ mk_Id(id) -> if id isin dom `sigma then `sigma(id) else 0 end
+ end
+
+type
+ MProg = Inst-list,
+ Inst ==
+ mk_Push(ide1 : Identifier) |
+ mk_Pop(Unit) |
+ mk_Add(Unit) |
+ mk_Cnst(val : Int) |
+ mk_Store(ide2 : Identifier) |
+ mk_Jumpfalse(off1 : Int) |
+ mk_Jump(off2 : Int)
+
+
+/* An interpreter for SMALL instructions */
+
+type Stack = Int-list
+value
+ I : MProg >< Int >< Stack -> (`Sigma ->`Sigma)
+ I(mp, pc, s)(`sigma) is
+ if pc <= 0 \/ pc > len mp then `sigma else
+ case mp(pc) of
+ mk_Push(x) -> if x isin dom `sigma
+ then I(mp, pc + 1, <.`sigma(x).> ^ s)(`sigma)
+ else I(mp, pc + 1, <.0.> ^ s)(`sigma) end,
+ mk_Pop(()) -> if len s = 0 then `sigma
+ else I(mp, pc + 1, tl s)(`sigma) end,
+ mk_Cnst(n) -> I(mp, pc + 1, <.n.> ^ s)(`sigma),
+ mk_Add(()) -> if len s < 2 then `sigma
+ else I(mp, pc + 1,<.s(1) + s(2).> ^ tl tl s)(`sigma) end,
+ mk_Store(x) -> if len s = 0 then `sigma
+ else I(mp, pc + 1, s)(`sigma !! [x +> s(1)]) end,
+ mk_Jumpfalse(n) -> if len s = 0 then `sigma
+ elsif hd s ~= 0 then I(mp, pc + 1, s)(`sigma)
+ else I(mp, pc + n, s)(`sigma) end,
+ mk_Jump(n) -> I(mp, pc + n, s)(`sigma)
+ end
+ end
+
+value
+ comp_Prog : Prog -> MProg
+ comp_Prog(p) is comp_Stmt(stmt(p)),
+
+ comp_Stmt : Stmt -> MProg
+ comp_Stmt(s) is
+ case s of
+ mk_Asgn(id, e) -> comp_Expr(e) ^ <. mk_Store(id), mk_Pop() .>,
+ mk_Seq(s1, s2) -> comp_Stmt(s1) ^ comp_Stmt(s2),
+ mk_If(e, s1, s2) ->
+ let
+ ce = comp_Expr(e),
+ cs1 = comp_Stmt(s1), cs2 = comp_Stmt(s2)
+ in
+ ce ^
+ <. mk_Jumpfalse(len cs1 + 3) .> ^
+ <. mk_Pop() .> ^
+ cs1 ^
+ <. mk_Jump(len cs2 + 2) .> ^
+ <. mk_Pop() .> ^
+ cs2
+ end
+ end,
+
+ comp_Expr : Expr -> MProg
+ comp_Expr(e) is
+ case e of
+ mk_Const(n) -> <. mk_Cnst(n) .>,
+ mk_Plus(e1, e2) ->
+ comp_Expr(e1) ^ comp_Expr(e2) ^ <. mk_Add() .>,
+ mk_Id(id) -> <. mk_Push(id) .>
+ end
+
+end
diff --git a/tests/examplefiles/test.swift b/tests/examplefiles/test.swift
new file mode 100644
index 00000000..8ef19763
--- /dev/null
+++ b/tests/examplefiles/test.swift
@@ -0,0 +1,65 @@
+//
+// test.swift
+// from https://github.com/fullstackio/FlappySwift
+//
+// Created by Nate Murray on 6/2/14.
+// Copyright (c) 2014 Fullstack.io. All rights reserved.
+//
+
+import UIKit
+import SpriteKit
+
+extension SKNode {
+ class func unarchiveFromFile(file : NSString) -> SKNode? {
+
+ let path = NSBundle.mainBundle().pathForResource(file, ofType: "sks")
+
+ var sceneData = NSData.dataWithContentsOfFile(path, options: .DataReadingMappedIfSafe, error: nil)
+ var archiver = NSKeyedUnarchiver(forReadingWithData: sceneData)
+
+ archiver.setClass(self.classForKeyedUnarchiver(), forClassName: "SKScene")
+ let scene = archiver.decodeObjectForKey(NSKeyedArchiveRootObjectKey) as GameScene
+ archiver.finishDecoding()
+ return scene
+ }
+}
+
+class GameViewController: UIViewController {
+
+ override func viewDidLoad() {
+ super.viewDidLoad()
+
+ if let scene = GameScene.unarchiveFromFile("GameScene") as? GameScene {
+ // Configure the view.
+ let skView = self.view as SKView
+ skView.showsFPS = true
+ skView.showsNodeCount = true
+
+ /* Sprite Kit applies additional optimizations to improve rendering performance */
+ skView.ignoresSiblingOrder = true
+
+ /* Set the scale mode to scale to fit the window */
+ scene.scaleMode = .AspectFill
+
+ skView.presentScene(scene)
+ }
+ }
+
+ override func shouldAutorotate() -> Bool {
+ return true
+ }
+
+ override func supportedInterfaceOrientations() -> Int {
+ if UIDevice.currentDevice().userInterfaceIdiom == .Phone {
+ return Int(UIInterfaceOrientationMask.AllButUpsideDown.toRaw())
+ } else {
+ return Int(UIInterfaceOrientationMask.All.toRaw())
+ }
+ }
+
+ override func didReceiveMemoryWarning() {
+ super.didReceiveMemoryWarning()
+ // Release any cached data, images, etc that aren't in use.
+ }
+
+}
diff --git a/tests/examplefiles/test.zep b/tests/examplefiles/test.zep
new file mode 100644
index 00000000..4724d4c4
--- /dev/null
+++ b/tests/examplefiles/test.zep
@@ -0,0 +1,33 @@
+namespace Test;
+
+use Test\Foo;
+
+class Bar
+{
+ protected a;
+ private b;
+ public c {set, get};
+
+ public function __construct(string str, boolean bool)
+ {
+ let this->c = str;
+ this->setC(bool);
+ let this->b = [];
+ }
+
+ public function sayHello(string name)
+ {
+ echo "Hello " . name;
+ }
+
+ protected function loops()
+ {
+ for a in b {
+ echo a;
+ }
+ loop {
+ return "boo!";
+ }
+ }
+
+} \ No newline at end of file
diff --git a/tests/examplefiles/twig_test b/tests/examplefiles/twig_test
new file mode 100644
index 00000000..0932fe90
--- /dev/null
+++ b/tests/examplefiles/twig_test
@@ -0,0 +1,4612 @@
+From the Twig test suite, https://github.com/fabpot/Twig, available under BSD license.
+
+--TEST--
+Exception for an unclosed tag
+--TEMPLATE--
+{% block foo %}
+ {% if foo %}
+
+
+
+
+ {% for i in fo %}
+
+
+
+ {% endfor %}
+
+
+
+{% endblock %}
+--EXCEPTION--
+Twig_Error_Syntax: Unexpected tag name "endblock" (expecting closing tag for the "if" tag defined near line 4) in "index.twig" at line 16
+--TEST--
+Exception for an undefined trait
+--TEMPLATE--
+{% use 'foo' with foobar as bar %}
+--TEMPLATE(foo)--
+{% block bar %}
+{% endblock %}
+--EXCEPTION--
+Twig_Error_Runtime: Block "foobar" is not defined in trait "foo" in "index.twig".
+--TEST--
+Twig supports method calls
+--TEMPLATE--
+{{ items.foo }}
+{{ items['foo'] }}
+{{ items[foo] }}
+{{ items[items[foo]] }}
+--DATA--
+return array('foo' => 'bar', 'items' => array('foo' => 'bar', 'bar' => 'foo'))
+--EXPECT--
+bar
+bar
+foo
+bar
+--TEST--
+Twig supports array notation
+--TEMPLATE--
+{# empty array #}
+{{ []|join(',') }}
+
+{{ [1, 2]|join(',') }}
+{{ ['foo', "bar"]|join(',') }}
+{{ {0: 1, 'foo': 'bar'}|join(',') }}
+{{ {0: 1, 'foo': 'bar'}|keys|join(',') }}
+
+{{ {0: 1, foo: 'bar'}|join(',') }}
+{{ {0: 1, foo: 'bar'}|keys|join(',') }}
+
+{# nested arrays #}
+{% set a = [1, 2, [1, 2], {'foo': {'foo': 'bar'}}] %}
+{{ a[2]|join(',') }}
+{{ a[3]["foo"]|join(',') }}
+
+{# works even if [] is used inside the array #}
+{{ [foo[bar]]|join(',') }}
+
+{# elements can be any expression #}
+{{ ['foo'|upper, bar|upper, bar == foo]|join(',') }}
+
+{# arrays can have a trailing , like in PHP #}
+{{
+ [
+ 1,
+ 2,
+ ]|join(',')
+}}
+
+{# keys can be any expression #}
+{% set a = 1 %}
+{% set b = "foo" %}
+{% set ary = { (a): 'a', (b): 'b', 'c': 'c', (a ~ b): 'd' } %}
+{{ ary|keys|join(',') }}
+{{ ary|join(',') }}
+--DATA--
+return array('bar' => 'bar', 'foo' => array('bar' => 'bar'))
+--EXPECT--
+1,2
+foo,bar
+1,bar
+0,foo
+
+1,bar
+0,foo
+
+1,2
+bar
+
+bar
+
+FOO,BAR,
+
+1,2
+
+1,foo,c,1foo
+a,b,c,d
+--TEST--
+Twig supports binary operations (+, -, *, /, ~, %, and, or)
+--TEMPLATE--
+{{ 1 + 1 }}
+{{ 2 - 1 }}
+{{ 2 * 2 }}
+{{ 2 / 2 }}
+{{ 3 % 2 }}
+{{ 1 and 1 }}
+{{ 1 and 0 }}
+{{ 0 and 1 }}
+{{ 0 and 0 }}
+{{ 1 or 1 }}
+{{ 1 or 0 }}
+{{ 0 or 1 }}
+{{ 0 or 0 }}
+{{ 0 or 1 and 0 }}
+{{ 1 or 0 and 1 }}
+{{ "foo" ~ "bar" }}
+{{ foo ~ "bar" }}
+{{ "foo" ~ bar }}
+{{ foo ~ bar }}
+{{ 20 // 7 }}
+--DATA--
+return array('foo' => 'bar', 'bar' => 'foo')
+--EXPECT--
+2
+1
+4
+1
+1
+1
+
+
+
+1
+1
+1
+
+
+1
+foobar
+barbar
+foofoo
+barfoo
+2
+--TEST--
+Twig supports bitwise operations
+--TEMPLATE--
+{{ 1 b-and 5 }}
+{{ 1 b-or 5 }}
+{{ 1 b-xor 5 }}
+{{ (1 and 0 b-or 0) is same as(1 and (0 b-or 0)) ? 'ok' : 'ko' }}
+--DATA--
+return array()
+--EXPECT--
+1
+5
+4
+ok
+--TEST--
+Twig supports comparison operators (==, !=, <, >, >=, <=)
+--TEMPLATE--
+{{ 1 > 2 }}/{{ 1 > 1 }}/{{ 1 >= 2 }}/{{ 1 >= 1 }}
+{{ 1 < 2 }}/{{ 1 < 1 }}/{{ 1 <= 2 }}/{{ 1 <= 1 }}
+{{ 1 == 1 }}/{{ 1 == 2 }}
+{{ 1 != 1 }}/{{ 1 != 2 }}
+--DATA--
+return array()
+--EXPECT--
+///1
+1//1/1
+1/
+/1
+--TEST--
+Twig supports the "divisible by" operator
+--TEMPLATE--
+{{ 8 is divisible by(2) ? 'OK' }}
+{{ 8 is not divisible by(3) ? 'OK' }}
+{{ 8 is divisible by (2) ? 'OK' }}
+{{ 8 is not
+ divisible
+ by
+ (3) ? 'OK' }}
+--DATA--
+return array()
+--EXPECT--
+OK
+OK
+OK
+OK
+--TEST--
+Twig supports the .. operator
+--TEMPLATE--
+{% for i in 0..10 %}{{ i }} {% endfor %}
+
+{% for letter in 'a'..'z' %}{{ letter }} {% endfor %}
+
+{% for letter in 'a'|upper..'z'|upper %}{{ letter }} {% endfor %}
+
+{% for i in foo[0]..foo[1] %}{{ i }} {% endfor %}
+
+{% for i in 0 + 1 .. 10 - 1 %}{{ i }} {% endfor %}
+--DATA--
+return array('foo' => array(1, 10))
+--EXPECT--
+0 1 2 3 4 5 6 7 8 9 10
+a b c d e f g h i j k l m n o p q r s t u v w x y z
+A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
+1 2 3 4 5 6 7 8 9 10
+1 2 3 4 5 6 7 8 9
+--TEST--
+Twig supports the "ends with" operator
+--TEMPLATE--
+{{ 'foo' ends with 'o' ? 'OK' : 'KO' }}
+{{ not ('foo' ends with 'f') ? 'OK' : 'KO' }}
+{{ not ('foo' ends with 'foowaytoolong') ? 'OK' : 'KO' }}
+{{ 'foo' ends with '' ? 'OK' : 'KO' }}
+{{ '1' ends with true ? 'OK' : 'KO' }}
+{{ 1 ends with true ? 'OK' : 'KO' }}
+{{ 0 ends with false ? 'OK' : 'KO' }}
+{{ '' ends with false ? 'OK' : 'KO' }}
+{{ false ends with false ? 'OK' : 'KO' }}
+{{ false ends with '' ? 'OK' : 'KO' }}
+--DATA--
+return array()
+--EXPECT--
+OK
+OK
+OK
+OK
+KO
+KO
+KO
+KO
+KO
+KO
+--TEST--
+Twig supports grouping of expressions
+--TEMPLATE--
+{{ (2 + 2) / 2 }}
+--DATA--
+return array()
+--EXPECT--
+2
+--TEST--
+Twig supports literals
+--TEMPLATE--
+1 {{ true }}
+2 {{ TRUE }}
+3 {{ false }}
+4 {{ FALSE }}
+5 {{ none }}
+6 {{ NONE }}
+7 {{ null }}
+8 {{ NULL }}
+--DATA--
+return array()
+--EXPECT--
+1 1
+2 1
+3
+4
+5
+6
+7
+8
+--TEST--
+Twig supports __call() for attributes
+--TEMPLATE--
+{{ foo.foo }}
+{{ foo.bar }}
+--EXPECT--
+foo_from_call
+bar_from_getbar
+--TEST--
+Twig supports the "matches" operator
+--TEMPLATE--
+{{ 'foo' matches '/o/' ? 'OK' : 'KO' }}
+{{ 'foo' matches '/^fo/' ? 'OK' : 'KO' }}
+{{ 'foo' matches '/O/i' ? 'OK' : 'KO' }}
+--DATA--
+return array()
+--EXPECT--
+OK
+OK
+OK
+--TEST--
+Twig supports method calls
+--TEMPLATE--
+{{ items.foo.foo }}
+{{ items.foo.getFoo() }}
+{{ items.foo.bar }}
+{{ items.foo['bar'] }}
+{{ items.foo.bar('a', 43) }}
+{{ items.foo.bar(foo) }}
+{{ items.foo.self.foo() }}
+{{ items.foo.is }}
+{{ items.foo.in }}
+{{ items.foo.not }}
+--DATA--
+return array('foo' => 'bar', 'items' => array('foo' => new TwigTestFoo(), 'bar' => 'foo'))
+--CONFIG--
+return array('strict_variables' => false)
+--EXPECT--
+foo
+foo
+bar
+
+bar_a-43
+bar_bar
+foo
+is
+in
+not
+--TEST--
+Twig allows to use named operators as variable names
+--TEMPLATE--
+{% for match in matches %}
+ {{- match }}
+{% endfor %}
+{{ in }}
+{{ is }}
+--DATA--
+return array('matches' => array(1, 2, 3), 'in' => 'in', 'is' => 'is')
+--EXPECT--
+1
+2
+3
+in
+is
+--TEST--
+Twig parses postfix expressions
+--TEMPLATE--
+{% import _self as macros %}
+
+{% macro foo() %}foo{% endmacro %}
+
+{{ 'a' }}
+{{ 'a'|upper }}
+{{ ('a')|upper }}
+{{ -1|upper }}
+{{ macros.foo() }}
+{{ (macros).foo() }}
+--DATA--
+return array();
+--EXPECT--
+a
+A
+A
+-1
+foo
+foo
+--TEST--
+Twig supports the "same as" operator
+--TEMPLATE--
+{{ 1 is same as(1) ? 'OK' }}
+{{ 1 is not same as(true) ? 'OK' }}
+{{ 1 is same as(1) ? 'OK' }}
+{{ 1 is not same as(true) ? 'OK' }}
+{{ 1 is same as (1) ? 'OK' }}
+{{ 1 is not
+ same
+ as
+ (true) ? 'OK' }}
+--DATA--
+return array()
+--EXPECT--
+OK
+OK
+OK
+OK
+OK
+OK
+--TEST--
+Twig supports the "starts with" operator
+--TEMPLATE--
+{{ 'foo' starts with 'f' ? 'OK' : 'KO' }}
+{{ not ('foo' starts with 'oo') ? 'OK' : 'KO' }}
+{{ not ('foo' starts with 'foowaytoolong') ? 'OK' : 'KO' }}
+{{ 'foo' starts with 'f' ? 'OK' : 'KO' }}
+{{ 'foo' starts
+with 'f' ? 'OK' : 'KO' }}
+{{ 'foo' starts with '' ? 'OK' : 'KO' }}
+{{ '1' starts with true ? 'OK' : 'KO' }}
+{{ '' starts with false ? 'OK' : 'KO' }}
+{{ 'a' starts with false ? 'OK' : 'KO' }}
+{{ false starts with '' ? 'OK' : 'KO' }}
+--DATA--
+return array()
+--EXPECT--
+OK
+OK
+OK
+OK
+OK
+OK
+KO
+KO
+KO
+KO
+--TEST--
+Twig supports string interpolation
+--TEMPLATE--
+{# "foo #{"foo #{bar} baz"} baz" #}
+{# "foo #{bar}#{bar} baz" #}
+--DATA--
+return array('bar' => 'BAR');
+--EXPECT--
+foo foo BAR baz baz
+foo BARBAR baz
+--TEST--
+Twig supports the ternary operator
+--TEMPLATE--
+{{ 1 ? 'YES' }}
+{{ 0 ? 'YES' }}
+--DATA--
+return array()
+--EXPECT--
+YES
+
+--TEST--
+Twig supports the ternary operator
+--TEMPLATE--
+{{ 'YES' ?: 'NO' }}
+{{ 0 ?: 'NO' }}
+--DATA--
+return array()
+--EXPECT--
+YES
+NO
+--TEST--
+Twig supports the ternary operator
+--TEMPLATE--
+{{ 1 ? 'YES' : 'NO' }}
+{{ 0 ? 'YES' : 'NO' }}
+{{ 0 ? 'YES' : (1 ? 'YES1' : 'NO1') }}
+{{ 0 ? 'YES' : (0 ? 'YES1' : 'NO1') }}
+{{ 1 == 1 ? 'foo<br />':'' }}
+{{ foo ~ (bar ? ('-' ~ bar) : '') }}
+--DATA--
+return array('foo' => 'foo', 'bar' => 'bar')
+--EXPECT--
+YES
+NO
+YES1
+NO1
+foo<br />
+foo-bar
+--TEST--
+Twig does not allow to use two-word named operators as variable names
+--TEMPLATE--
+{{ starts with }}
+--DATA--
+return array()
+--EXCEPTION--
+Twig_Error_Syntax: Unexpected token "operator" of value "starts with" in "index.twig" at line 2
+--TEST--
+Twig unary operators precedence
+--TEMPLATE--
+{{ -1 - 1 }}
+{{ -1 - -1 }}
+{{ -1 * -1 }}
+{{ 4 / -1 * 5 }}
+--DATA--
+return array()
+--EXPECT--
+-2
+0
+1
+-20
+--TEST--
+Twig supports unary operators (not, -, +)
+--TEMPLATE--
+{{ not 1 }}/{{ not 0 }}
+{{ +1 + 1 }}/{{ -1 - 1 }}
+{{ not (false or true) }}
+--DATA--
+return array()
+--EXPECT--
+/1
+2/-2
+
+--TEST--
+"abs" filter
+--TEMPLATE--
+{{ (-5.5)|abs }}
+{{ (-5)|abs }}
+{{ (-0)|abs }}
+{{ 0|abs }}
+{{ 5|abs }}
+{{ 5.5|abs }}
+{{ number1|abs }}
+{{ number2|abs }}
+{{ number3|abs }}
+{{ number4|abs }}
+{{ number5|abs }}
+{{ number6|abs }}
+--DATA--
+return array('number1' => -5.5, 'number2' => -5, 'number3' => -0, 'number4' => 0, 'number5' => 5, 'number6' => 5.5)
+--EXPECT--
+5.5
+5
+0
+0
+5
+5.5
+5.5
+5
+0
+0
+5
+5.5
+--TEST--
+"batch" filter
+--TEMPLATE--
+{% for row in items|batch(3.1) %}
+ <div class=row>
+ {% for column in row %}
+ <div class=item>{{ column }}</div>
+ {% endfor %}
+ </div>
+{% endfor %}
+--DATA--
+return array('items' => array('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'))
+--EXPECT--
+<div class=row>
+ <div class=item>a</div>
+ <div class=item>b</div>
+ <div class=item>c</div>
+ <div class=item>d</div>
+ </div>
+ <div class=row>
+ <div class=item>e</div>
+ <div class=item>f</div>
+ <div class=item>g</div>
+ <div class=item>h</div>
+ </div>
+ <div class=row>
+ <div class=item>i</div>
+ <div class=item>j</div>
+ </div>
+--TEST--
+"batch" filter
+--TEMPLATE--
+{% for row in items|batch(3) %}
+ <div class=row>
+ {% for column in row %}
+ <div class=item>{{ column }}</div>
+ {% endfor %}
+ </div>
+{% endfor %}
+--DATA--
+return array('items' => array('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'))
+--EXPECT--
+<div class=row>
+ <div class=item>a</div>
+ <div class=item>b</div>
+ <div class=item>c</div>
+ </div>
+ <div class=row>
+ <div class=item>d</div>
+ <div class=item>e</div>
+ <div class=item>f</div>
+ </div>
+ <div class=row>
+ <div class=item>g</div>
+ <div class=item>h</div>
+ <div class=item>i</div>
+ </div>
+ <div class=row>
+ <div class=item>j</div>
+ </div>
+--TEST--
+"batch" filter
+--TEMPLATE--
+<table>
+{% for row in items|batch(3, '') %}
+ <tr>
+ {% for column in row %}
+ <td>{{ column }}</td>
+ {% endfor %}
+ </tr>
+{% endfor %}
+</table>
+--DATA--
+return array('items' => array('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'))
+--EXPECT--
+<table>
+ <tr>
+ <td>a</td>
+ <td>b</td>
+ <td>c</td>
+ </tr>
+ <tr>
+ <td>d</td>
+ <td>e</td>
+ <td>f</td>
+ </tr>
+ <tr>
+ <td>g</td>
+ <td>h</td>
+ <td>i</td>
+ </tr>
+ <tr>
+ <td>j</td>
+ <td></td>
+ <td></td>
+ </tr>
+</table>
+--TEST--
+"batch" filter
+--TEMPLATE--
+{% for row in items|batch(3, 'fill') %}
+ <div class=row>
+ {% for column in row %}
+ <div class=item>{{ column }}</div>
+ {% endfor %}
+ </div>
+{% endfor %}
+--DATA--
+return array('items' => array('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l'))
+--EXPECT--
+<div class=row>
+ <div class=item>a</div>
+ <div class=item>b</div>
+ <div class=item>c</div>
+ </div>
+ <div class=row>
+ <div class=item>d</div>
+ <div class=item>e</div>
+ <div class=item>f</div>
+ </div>
+ <div class=row>
+ <div class=item>g</div>
+ <div class=item>h</div>
+ <div class=item>i</div>
+ </div>
+ <div class=row>
+ <div class=item>j</div>
+ <div class=item>k</div>
+ <div class=item>l</div>
+ </div>
+--TEST--
+"batch" filter
+--TEMPLATE--
+<table>
+{% for row in items|batch(3, 'fill') %}
+ <tr>
+ {% for column in row %}
+ <td>{{ column }}</td>
+ {% endfor %}
+ </tr>
+{% endfor %}
+</table>
+--DATA--
+return array('items' => array('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'))
+--EXPECT--
+<table>
+ <tr>
+ <td>a</td>
+ <td>b</td>
+ <td>c</td>
+ </tr>
+ <tr>
+ <td>d</td>
+ <td>e</td>
+ <td>f</td>
+ </tr>
+ <tr>
+ <td>g</td>
+ <td>h</td>
+ <td>i</td>
+ </tr>
+ <tr>
+ <td>j</td>
+ <td>fill</td>
+ <td>fill</td>
+ </tr>
+</table>
+--TEST--
+"convert_encoding" filter
+--CONDITION--
+function_exists('iconv') || function_exists('mb_convert_encoding')
+--TEMPLATE--
+{{ "愛していますか?"|convert_encoding('ISO-2022-JP', 'UTF-8')|convert_encoding('UTF-8', 'ISO-2022-JP') }}
+--DATA--
+return array()
+--EXPECT--
+愛していますか?
+--TEST--
+"date" filter (interval support as of PHP 5.3)
+--CONDITION--
+version_compare(phpversion(), '5.3.0', '>=')
+--TEMPLATE--
+{{ date2|date }}
+{{ date2|date('%d days') }}
+--DATA--
+date_default_timezone_set('UTC');
+$twig->getExtension('core')->setDateFormat('Y-m-d', '%d days %h hours');
+return array(
+ 'date2' => new DateInterval('P2D'),
+)
+--EXPECT--
+2 days 0 hours
+2 days
+--TEST--
+"date" filter
+--TEMPLATE--
+{{ date1|date }}
+{{ date1|date('d/m/Y') }}
+--DATA--
+date_default_timezone_set('UTC');
+$twig->getExtension('core')->setDateFormat('Y-m-d', '%d days %h hours');
+return array(
+ 'date1' => mktime(13, 45, 0, 10, 4, 2010),
+)
+--EXPECT--
+2010-10-04
+04/10/2010
+--TEST--
+"date" filter
+--CONDITION--
+version_compare(phpversion(), '5.5.0', '>=')
+--TEMPLATE--
+{{ date1|date }}
+{{ date1|date('d/m/Y') }}
+{{ date1|date('d/m/Y H:i:s', 'Asia/Hong_Kong') }}
+{{ date1|date('d/m/Y H:i:s', timezone1) }}
+{{ date1|date('d/m/Y H:i:s') }}
+
+{{ date2|date('d/m/Y H:i:s P', 'Europe/Paris') }}
+{{ date2|date('d/m/Y H:i:s P', 'Asia/Hong_Kong') }}
+{{ date2|date('d/m/Y H:i:s P', false) }}
+{{ date2|date('e', 'Europe/Paris') }}
+{{ date2|date('e', false) }}
+--DATA--
+date_default_timezone_set('Europe/Paris');
+return array(
+ 'date1' => new DateTimeImmutable('2010-10-04 13:45'),
+ 'date2' => new DateTimeImmutable('2010-10-04 13:45', new DateTimeZone('America/New_York')),
+ 'timezone1' => new DateTimeZone('America/New_York'),
+)
+--EXPECT--
+October 4, 2010 13:45
+04/10/2010
+04/10/2010 19:45:00
+04/10/2010 07:45:00
+04/10/2010 13:45:00
+
+04/10/2010 19:45:00 +02:00
+05/10/2010 01:45:00 +08:00
+04/10/2010 13:45:00 -04:00
+Europe/Paris
+America/New_York
+--TEST--
+"date" filter (interval support as of PHP 5.3)
+--CONDITION--
+version_compare(phpversion(), '5.3.0', '>=')
+--TEMPLATE--
+{{ date1|date }}
+{{ date1|date('%d days %h hours') }}
+{{ date1|date('%d days %h hours', timezone1) }}
+--DATA--
+date_default_timezone_set('UTC');
+return array(
+ 'date1' => new DateInterval('P2D'),
+ // This should have no effect on DateInterval formatting
+ 'timezone1' => new DateTimeZone('America/New_York'),
+)
+--EXPECT--
+2 days
+2 days 0 hours
+2 days 0 hours
+--TEST--
+"date_modify" filter
+--TEMPLATE--
+{{ date1|date_modify('-1day')|date('Y-m-d H:i:s') }}
+{{ date2|date_modify('-1day')|date('Y-m-d H:i:s') }}
+--DATA--
+date_default_timezone_set('UTC');
+return array(
+ 'date1' => '2010-10-04 13:45',
+ 'date2' => new DateTime('2010-10-04 13:45'),
+)
+--EXPECT--
+2010-10-03 13:45:00
+2010-10-03 13:45:00
+--TEST--
+"date" filter
+--TEMPLATE--
+{{ date|date(format='d/m/Y H:i:s P', timezone='America/Chicago') }}
+{{ date|date(timezone='America/Chicago', format='d/m/Y H:i:s P') }}
+{{ date|date('d/m/Y H:i:s P', timezone='America/Chicago') }}
+--DATA--
+date_default_timezone_set('UTC');
+return array('date' => mktime(13, 45, 0, 10, 4, 2010))
+--EXPECT--
+04/10/2010 08:45:00 -05:00
+04/10/2010 08:45:00 -05:00
+04/10/2010 08:45:00 -05:00
+--TEST--
+"date" filter
+--TEMPLATE--
+{{ date1|date }}
+{{ date1|date('d/m/Y') }}
+{{ date1|date('d/m/Y H:i:s', 'Asia/Hong_Kong') }}
+{{ date1|date('d/m/Y H:i:s P', 'Asia/Hong_Kong') }}
+{{ date1|date('d/m/Y H:i:s P', 'America/Chicago') }}
+{{ date1|date('e') }}
+{{ date1|date('d/m/Y H:i:s') }}
+
+{{ date2|date }}
+{{ date2|date('d/m/Y') }}
+{{ date2|date('d/m/Y H:i:s', 'Asia/Hong_Kong') }}
+{{ date2|date('d/m/Y H:i:s', timezone1) }}
+{{ date2|date('d/m/Y H:i:s') }}
+
+{{ date3|date }}
+{{ date3|date('d/m/Y') }}
+
+{{ date4|date }}
+{{ date4|date('d/m/Y') }}
+
+{{ date5|date }}
+{{ date5|date('d/m/Y') }}
+
+{{ date6|date('d/m/Y H:i:s P', 'Europe/Paris') }}
+{{ date6|date('d/m/Y H:i:s P', 'Asia/Hong_Kong') }}
+{{ date6|date('d/m/Y H:i:s P', false) }}
+{{ date6|date('e', 'Europe/Paris') }}
+{{ date6|date('e', false) }}
+
+{{ date7|date }}
+--DATA--
+date_default_timezone_set('Europe/Paris');
+return array(
+ 'date1' => mktime(13, 45, 0, 10, 4, 2010),
+ 'date2' => new DateTime('2010-10-04 13:45'),
+ 'date3' => '2010-10-04 13:45',
+ 'date4' => 1286199900, // DateTime::createFromFormat('Y-m-d H:i', '2010-10-04 13:45', new DateTimeZone('UTC'))->getTimestamp() -- A unixtimestamp is always GMT
+ 'date5' => -189291360, // DateTime::createFromFormat('Y-m-d H:i', '1964-01-02 03:04', new DateTimeZone('UTC'))->getTimestamp(),
+ 'date6' => new DateTime('2010-10-04 13:45', new DateTimeZone('America/New_York')),
+ 'date7' => '2010-01-28T15:00:00+05:00',
+ 'timezone1' => new DateTimeZone('America/New_York'),
+)
+--EXPECT--
+October 4, 2010 13:45
+04/10/2010
+04/10/2010 19:45:00
+04/10/2010 19:45:00 +08:00
+04/10/2010 06:45:00 -05:00
+Europe/Paris
+04/10/2010 13:45:00
+
+October 4, 2010 13:45
+04/10/2010
+04/10/2010 19:45:00
+04/10/2010 07:45:00
+04/10/2010 13:45:00
+
+October 4, 2010 13:45
+04/10/2010
+
+October 4, 2010 15:45
+04/10/2010
+
+January 2, 1964 04:04
+02/01/1964
+
+04/10/2010 19:45:00 +02:00
+05/10/2010 01:45:00 +08:00
+04/10/2010 13:45:00 -04:00
+Europe/Paris
+America/New_York
+
+January 28, 2010 11:00
+--TEST--
+"default" filter
+--TEMPLATE--
+Variable:
+{{ definedVar |default('default') is same as('default') ? 'ko' : 'ok' }}
+{{ zeroVar |default('default') is same as('default') ? 'ko' : 'ok' }}
+{{ emptyVar |default('default') is same as('default') ? 'ok' : 'ko' }}
+{{ nullVar |default('default') is same as('default') ? 'ok' : 'ko' }}
+{{ undefinedVar |default('default') is same as('default') ? 'ok' : 'ko' }}
+Array access:
+{{ nested.definedVar |default('default') is same as('default') ? 'ko' : 'ok' }}
+{{ nested['definedVar'] |default('default') is same as('default') ? 'ko' : 'ok' }}
+{{ nested.zeroVar |default('default') is same as('default') ? 'ko' : 'ok' }}
+{{ nested.emptyVar |default('default') is same as('default') ? 'ok' : 'ko' }}
+{{ nested.nullVar |default('default') is same as('default') ? 'ok' : 'ko' }}
+{{ nested.undefinedVar |default('default') is same as('default') ? 'ok' : 'ko' }}
+{{ nested['undefinedVar'] |default('default') is same as('default') ? 'ok' : 'ko' }}
+{{ undefinedVar.foo |default('default') is same as('default') ? 'ok' : 'ko' }}
+Plain values:
+{{ 'defined' |default('default') is same as('default') ? 'ko' : 'ok' }}
+{{ 0 |default('default') is same as('default') ? 'ko' : 'ok' }}
+{{ '' |default('default') is same as('default') ? 'ok' : 'ko' }}
+{{ null |default('default') is same as('default') ? 'ok' : 'ko' }}
+Precedence:
+{{ 'o' ~ nullVar |default('k') }}
+{{ 'o' ~ nested.nullVar |default('k') }}
+Object methods:
+{{ object.foo |default('default') is same as('default') ? 'ko' : 'ok' }}
+{{ object.undefinedMethod |default('default') is same as('default') ? 'ok' : 'ko' }}
+{{ object.getFoo() |default('default') is same as('default') ? 'ko' : 'ok' }}
+{{ object.getFoo('a') |default('default') is same as('default') ? 'ko' : 'ok' }}
+{{ object.undefinedMethod() |default('default') is same as('default') ? 'ok' : 'ko' }}
+{{ object.undefinedMethod('a') |default('default') is same as('default') ? 'ok' : 'ko' }}
+Deep nested:
+{{ nested.undefinedVar.foo.bar |default('default') is same as('default') ? 'ok' : 'ko' }}
+{{ nested.definedArray.0 |default('default') is same as('default') ? 'ko' : 'ok' }}
+{{ nested['definedArray'][0] |default('default') is same as('default') ? 'ko' : 'ok' }}
+{{ object.self.foo |default('default') is same as('default') ? 'ko' : 'ok' }}
+{{ object.self.undefinedMethod |default('default') is same as('default') ? 'ok' : 'ko' }}
+{{ object.undefinedMethod.self |default('default') is same as('default') ? 'ok' : 'ko' }}
+--DATA--
+return array(
+ 'definedVar' => 'defined',
+ 'zeroVar' => 0,
+ 'emptyVar' => '',
+ 'nullVar' => null,
+ 'nested' => array(
+ 'definedVar' => 'defined',
+ 'zeroVar' => 0,
+ 'emptyVar' => '',
+ 'nullVar' => null,
+ 'definedArray' => array(0),
+ ),
+ 'object' => new TwigTestFoo(),
+)
+--CONFIG--
+return array('strict_variables' => false)
+--EXPECT--
+Variable:
+ok
+ok
+ok
+ok
+ok
+Array access:
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+Plain values:
+ok
+ok
+ok
+ok
+Precedence:
+ok
+ok
+Object methods:
+ok
+ok
+ok
+ok
+ok
+ok
+Deep nested:
+ok
+ok
+ok
+ok
+ok
+ok
+--DATA--
+return array(
+ 'definedVar' => 'defined',
+ 'zeroVar' => 0,
+ 'emptyVar' => '',
+ 'nullVar' => null,
+ 'nested' => array(
+ 'definedVar' => 'defined',
+ 'zeroVar' => 0,
+ 'emptyVar' => '',
+ 'nullVar' => null,
+ 'definedArray' => array(0),
+ ),
+ 'object' => new TwigTestFoo(),
+)
+--CONFIG--
+return array('strict_variables' => true)
+--EXPECT--
+Variable:
+ok
+ok
+ok
+ok
+ok
+Array access:
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+Plain values:
+ok
+ok
+ok
+ok
+Precedence:
+ok
+ok
+Object methods:
+ok
+ok
+ok
+ok
+ok
+ok
+Deep nested:
+ok
+ok
+ok
+ok
+ok
+ok
+--TEST--
+dynamic filter
+--TEMPLATE--
+{{ 'bar'|foo_path }}
+{{ 'bar'|a_foo_b_bar }}
+--DATA--
+return array()
+--EXPECT--
+foo/bar
+a/b/bar
+--TEST--
+"escape" filter does not escape with the html strategy when using the html_attr strategy
+--TEMPLATE--
+{{ '<br />'|escape('html_attr') }}
+--DATA--
+return array()
+--EXPECT--
+&lt;br&#x20;&#x2F;&gt;
+--TEST--
+"escape" filter
+--TEMPLATE--
+{{ "愛していますか? <br />"|e }}
+--DATA--
+return array()
+--EXPECT--
+愛していますか? &lt;br /&gt;
+--TEST--
+"escape" filter
+--TEMPLATE--
+{{ "foo <br />"|e }}
+--DATA--
+return array()
+--EXPECT--
+foo &lt;br /&gt;
+--TEST--
+"first" filter
+--TEMPLATE--
+{{ [1, 2, 3, 4]|first }}
+{{ {a: 1, b: 2, c: 3, d: 4}|first }}
+{{ '1234'|first }}
+{{ arr|first }}
+{{ 'Ä€é'|first }}
+{{ ''|first }}
+--DATA--
+return array('arr' => new ArrayObject(array(1, 2, 3, 4)))
+--EXPECT--
+1
+1
+1
+1
+--TEST--
+"escape" filter
+--TEMPLATE--
+{% set foo %}
+ foo<br />
+{% endset %}
+
+{{ foo|e('html') -}}
+{{ foo|e('js') }}
+{% autoescape true %}
+ {{ foo }}
+{% endautoescape %}
+--DATA--
+return array()
+--EXPECT--
+ foo&lt;br /&gt;
+\x20\x20\x20\x20foo\x3Cbr\x20\x2F\x3E\x0A
+ foo<br />
+--TEST--
+"format" filter
+--TEMPLATE--
+{{ string|format(foo, 3) }}
+--DATA--
+return array('string' => '%s/%d', 'foo' => 'bar')
+--EXPECT--
+bar/3
+--TEST--
+"join" filter
+--TEMPLATE--
+{{ ["foo", "bar"]|join(', ') }}
+{{ foo|join(', ') }}
+{{ bar|join(', ') }}
+--DATA--
+return array('foo' => new TwigTestFoo(), 'bar' => new ArrayObject(array(3, 4)))
+--EXPECT--
+foo, bar
+1, 2
+3, 4
+--TEST--
+"json_encode" filter
+--TEMPLATE--
+{{ "foo"|json_encode|raw }}
+{{ foo|json_encode|raw }}
+{{ [foo, "foo"]|json_encode|raw }}
+--DATA--
+return array('foo' => new Twig_Markup('foo', 'UTF-8'))
+--EXPECT--
+"foo"
+"foo"
+["foo","foo"]
+--TEST--
+"last" filter
+--TEMPLATE--
+{{ [1, 2, 3, 4]|last }}
+{{ {a: 1, b: 2, c: 3, d: 4}|last }}
+{{ '1234'|last }}
+{{ arr|last }}
+{{ 'Ä€é'|last }}
+{{ ''|last }}
+--DATA--
+return array('arr' => new ArrayObject(array(1, 2, 3, 4)))
+--EXPECT--
+4
+4
+4
+4
+--TEST--
+"length" filter
+--TEMPLATE--
+{{ array|length }}
+{{ string|length }}
+{{ number|length }}
+{{ markup|length }}
+--DATA--
+return array('array' => array(1, 4), 'string' => 'foo', 'number' => 1000, 'markup' => new Twig_Markup('foo', 'UTF-8'))
+--EXPECT--
+2
+3
+4
+3
+--TEST--
+"length" filter
+--CONDITION--
+function_exists('mb_get_info')
+--TEMPLATE--
+{{ string|length }}
+{{ markup|length }}
+--DATA--
+return array('string' => 'été', 'markup' => new Twig_Markup('foo', 'UTF-8'))
+--EXPECT--
+3
+3
+--TEST--
+"merge" filter
+--TEMPLATE--
+{{ items|merge({'bar': 'foo'})|join }}
+{{ items|merge({'bar': 'foo'})|keys|join }}
+{{ {'bar': 'foo'}|merge(items)|join }}
+{{ {'bar': 'foo'}|merge(items)|keys|join }}
+{{ numerics|merge([4, 5, 6])|join }}
+--DATA--
+return array('items' => array('foo' => 'bar'), 'numerics' => array(1, 2, 3))
+--EXPECT--
+barfoo
+foobar
+foobar
+barfoo
+123456
+--TEST--
+"nl2br" filter
+--TEMPLATE--
+{{ "I like Twig.\nYou will like it too.\n\nEverybody like it!"|nl2br }}
+{{ text|nl2br }}
+--DATA--
+return array('text' => "If you have some <strong>HTML</strong>\nit will be escaped.")
+--EXPECT--
+I like Twig.<br />
+You will like it too.<br />
+<br />
+Everybody like it!
+If you have some &lt;strong&gt;HTML&lt;/strong&gt;<br />
+it will be escaped.
+--TEST--
+"number_format" filter with defaults.
+--TEMPLATE--
+{{ 20|number_format }}
+{{ 20.25|number_format }}
+{{ 20.25|number_format(1) }}
+{{ 20.25|number_format(2, ',') }}
+{{ 1020.25|number_format }}
+{{ 1020.25|number_format(2, ',') }}
+{{ 1020.25|number_format(2, ',', '.') }}
+--DATA--
+$twig->getExtension('core')->setNumberFormat(2, '!', '=');
+return array();
+--EXPECT--
+20!00
+20!25
+20!3
+20,25
+1=020!25
+1=020,25
+1.020,25
+--TEST--
+"number_format" filter
+--TEMPLATE--
+{{ 20|number_format }}
+{{ 20.25|number_format }}
+{{ 20.25|number_format(2) }}
+{{ 20.25|number_format(2, ',') }}
+{{ 1020.25|number_format(2, ',') }}
+{{ 1020.25|number_format(2, ',', '.') }}
+--DATA--
+return array();
+--EXPECT--
+20
+20
+20.25
+20,25
+1,020,25
+1.020,25
+--TEST--
+"replace" filter
+--TEMPLATE--
+{{ "I like %this% and %that%."|replace({'%this%': "foo", '%that%': "bar"}) }}
+--DATA--
+return array()
+--EXPECT--
+I like foo and bar.
+--TEST--
+"reverse" filter
+--TEMPLATE--
+{{ [1, 2, 3, 4]|reverse|join('') }}
+{{ '1234évènement'|reverse }}
+{{ arr|reverse|join('') }}
+{{ {'a': 'c', 'b': 'a'}|reverse()|join(',') }}
+{{ {'a': 'c', 'b': 'a'}|reverse(preserveKeys=true)|join(glue=',') }}
+{{ {'a': 'c', 'b': 'a'}|reverse(preserve_keys=true)|join(glue=',') }}
+--DATA--
+return array('arr' => new ArrayObject(array(1, 2, 3, 4)))
+--EXPECT--
+4321
+tnemenèvé4321
+4321
+a,c
+a,c
+a,c
+--TEST--
+"round" filter
+--TEMPLATE--
+{{ 2.7|round }}
+{{ 2.1|round }}
+{{ 2.1234|round(3, 'floor') }}
+{{ 2.1|round(0, 'ceil') }}
+
+{{ 21.3|round(-1)}}
+{{ 21.3|round(-1, 'ceil')}}
+{{ 21.3|round(-1, 'floor')}}
+--DATA--
+return array()
+--EXPECT--
+3
+2
+2.123
+3
+
+20
+30
+20
+--TEST--
+"slice" filter
+--TEMPLATE--
+{{ [1, 2, 3, 4][1:2]|join('') }}
+{{ {a: 1, b: 2, c: 3, d: 4}[1:2]|join('') }}
+{{ [1, 2, 3, 4][start:length]|join('') }}
+{{ [1, 2, 3, 4]|slice(1, 2)|join('') }}
+{{ [1, 2, 3, 4]|slice(1, 2)|keys|join('') }}
+{{ [1, 2, 3, 4]|slice(1, 2, true)|keys|join('') }}
+{{ {a: 1, b: 2, c: 3, d: 4}|slice(1, 2)|join('') }}
+{{ {a: 1, b: 2, c: 3, d: 4}|slice(1, 2)|keys|join('') }}
+{{ '1234'|slice(1, 2) }}
+{{ '1234'[1:2] }}
+{{ arr|slice(1, 2)|join('') }}
+{{ arr[1:2]|join('') }}
+
+{{ [1, 2, 3, 4]|slice(1)|join('') }}
+{{ [1, 2, 3, 4][1:]|join('') }}
+{{ '1234'|slice(1) }}
+{{ '1234'[1:] }}
+{{ '1234'[:1] }}
+--DATA--
+return array('start' => 1, 'length' => 2, 'arr' => new ArrayObject(array(1, 2, 3, 4)))
+--EXPECT--
+23
+23
+23
+23
+01
+12
+23
+bc
+23
+23
+23
+23
+
+234
+234
+234
+234
+1
+--TEST--
+"sort" filter
+--TEMPLATE--
+{{ array1|sort|join }}
+{{ array2|sort|join }}
+--DATA--
+return array('array1' => array(4, 1), 'array2' => array('foo', 'bar'))
+--EXPECT--
+14
+barfoo
+--TEST--
+"split" filter
+--TEMPLATE--
+{{ "one,two,three,four,five"|split(',')|join('-') }}
+{{ foo|split(',')|join('-') }}
+{{ foo|split(',', 3)|join('-') }}
+{{ baz|split('')|join('-') }}
+{{ baz|split('', 2)|join('-') }}
+{{ foo|split(',', -2)|join('-') }}
+--DATA--
+return array('foo' => "one,two,three,four,five", 'baz' => '12345',)
+--EXPECT--
+one-two-three-four-five
+one-two-three-four-five
+one-two-three,four,five
+1-2-3-4-5
+12-34-5
+one-two-three
+--TEST--
+"trim" filter
+--TEMPLATE--
+{{ " I like Twig. "|trim }}
+{{ text|trim }}
+{{ " foo/"|trim("/") }}
+--DATA--
+return array('text' => " If you have some <strong>HTML</strong> it will be escaped. ")
+--EXPECT--
+I like Twig.
+If you have some &lt;strong&gt;HTML&lt;/strong&gt; it will be escaped.
+ foo
+--TEST--
+"url_encode" filter for PHP < 5.4 and HHVM
+--CONDITION--
+defined('PHP_QUERY_RFC3986')
+--TEMPLATE--
+{{ {foo: "bar", number: 3, "spéßi%l": "e%c0d@d", "spa ce": ""}|url_encode }}
+{{ {foo: "bar", number: 3, "spéßi%l": "e%c0d@d", "spa ce": ""}|url_encode|raw }}
+{{ {}|url_encode|default("default") }}
+{{ 'spéßi%le%c0d@dspa ce'|url_encode }}
+--DATA--
+return array()
+--EXPECT--
+foo=bar&amp;number=3&amp;sp%C3%A9%C3%9Fi%25l=e%25c0d%40d&amp;spa%20ce=
+foo=bar&number=3&sp%C3%A9%C3%9Fi%25l=e%25c0d%40d&spa%20ce=
+default
+sp%C3%A9%C3%9Fi%25le%25c0d%40dspa%20ce
+--TEST--
+"url_encode" filter
+--CONDITION--
+defined('PHP_QUERY_RFC3986')
+--TEMPLATE--
+{{ {foo: "bar", number: 3, "spéßi%l": "e%c0d@d", "spa ce": ""}|url_encode }}
+{{ {foo: "bar", number: 3, "spéßi%l": "e%c0d@d", "spa ce": ""}|url_encode|raw }}
+{{ {}|url_encode|default("default") }}
+{{ 'spéßi%le%c0d@dspa ce'|url_encode }}
+--DATA--
+return array()
+--EXPECT--
+foo=bar&amp;number=3&amp;sp%C3%A9%C3%9Fi%25l=e%25c0d%40d&amp;spa%20ce=
+foo=bar&number=3&sp%C3%A9%C3%9Fi%25l=e%25c0d%40d&spa%20ce=
+default
+sp%C3%A9%C3%9Fi%25le%25c0d%40dspa%20ce
+--TEST--
+"attribute" function
+--TEMPLATE--
+{{ attribute(obj, method) }}
+{{ attribute(array, item) }}
+{{ attribute(obj, "bar", ["a", "b"]) }}
+{{ attribute(obj, "bar", arguments) }}
+{{ attribute(obj, method) is defined ? 'ok' : 'ko' }}
+{{ attribute(obj, nonmethod) is defined ? 'ok' : 'ko' }}
+--DATA--
+return array('obj' => new TwigTestFoo(), 'method' => 'foo', 'array' => array('foo' => 'bar'), 'item' => 'foo', 'nonmethod' => 'xxx', 'arguments' => array('a', 'b'))
+--EXPECT--
+foo
+bar
+bar_a-b
+bar_a-b
+ok
+ko
+--TEST--
+"block" function
+--TEMPLATE--
+{% extends 'base.twig' %}
+{% block bar %}BAR{% endblock %}
+--TEMPLATE(base.twig)--
+{% block foo %}{{ block('bar') }}{% endblock %}
+{% block bar %}BAR_BASE{% endblock %}
+--DATA--
+return array()
+--EXPECT--
+BARBAR
+--TEST--
+"constant" function
+--TEMPLATE--
+{{ constant('DATE_W3C') == expect ? 'true' : 'false' }}
+{{ constant('ARRAY_AS_PROPS', object) }}
+--DATA--
+return array('expect' => DATE_W3C, 'object' => new ArrayObject(array('hi')));
+--EXPECT--
+true
+2
+--TEST--
+"cycle" function
+--TEMPLATE--
+{% for i in 0..6 %}
+{{ cycle(array1, i) }}-{{ cycle(array2, i) }}
+{% endfor %}
+--DATA--
+return array('array1' => array('odd', 'even'), 'array2' => array('apple', 'orange', 'citrus'))
+--EXPECT--
+odd-apple
+even-orange
+odd-citrus
+even-apple
+odd-orange
+even-citrus
+odd-apple
+--TEST--
+"date" function
+--TEMPLATE--
+{{ date(date, "America/New_York")|date('d/m/Y H:i:s P', false) }}
+{{ date(timezone="America/New_York", date=date)|date('d/m/Y H:i:s P', false) }}
+--DATA--
+date_default_timezone_set('UTC');
+return array('date' => mktime(13, 45, 0, 10, 4, 2010))
+--EXPECT--
+04/10/2010 09:45:00 -04:00
+04/10/2010 09:45:00 -04:00
+--TEST--
+"date" function
+--TEMPLATE--
+{{ date() == date('now') ? 'OK' : 'KO' }}
+{{ date(date1) == date('2010-10-04 13:45') ? 'OK' : 'KO' }}
+{{ date(date2) == date('2010-10-04 13:45') ? 'OK' : 'KO' }}
+{{ date(date3) == date('2010-10-04 13:45') ? 'OK' : 'KO' }}
+{{ date(date4) == date('2010-10-04 13:45') ? 'OK' : 'KO' }}
+{{ date(date5) == date('1964-01-02 03:04') ? 'OK' : 'KO' }}
+--DATA--
+date_default_timezone_set('UTC');
+return array(
+ 'date1' => mktime(13, 45, 0, 10, 4, 2010),
+ 'date2' => new DateTime('2010-10-04 13:45'),
+ 'date3' => '2010-10-04 13:45',
+ 'date4' => 1286199900, // DateTime::createFromFormat('Y-m-d H:i', '2010-10-04 13:45', new DateTimeZone('UTC'))->getTimestamp() -- A unixtimestamp is always GMT
+ 'date5' => -189291360, // DateTime::createFromFormat('Y-m-d H:i', '1964-01-02 03:04', new DateTimeZone('UTC'))->getTimestamp(),
+)
+--EXPECT--
+OK
+OK
+OK
+OK
+OK
+OK
+--TEST--
+"dump" function, xdebug is not loaded or xdebug <2.2-dev is loaded
+--CONDITION--
+!extension_loaded('xdebug') || (($r = new ReflectionExtension('xdebug')) && version_compare($r->getVersion(), '2.2-dev', '<'))
+--TEMPLATE--
+{{ dump() }}
+--DATA--
+return array('foo' => 'foo', 'bar' => 'bar')
+--CONFIG--
+return array('debug' => true, 'autoescape' => false);
+--TEST--
+"dump" function
+--CONDITION--
+!extension_loaded('xdebug')
+--TEMPLATE--
+{{ dump('foo') }}
+{{ dump('foo', 'bar') }}
+--DATA--
+return array('foo' => 'foo', 'bar' => 'bar')
+--CONFIG--
+return array('debug' => true, 'autoescape' => false);
+--EXPECT--
+string(3) "foo"
+
+string(3) "foo"
+string(3) "bar"
+--TEST--
+dynamic function
+--TEMPLATE--
+{{ foo_path('bar') }}
+{{ a_foo_b_bar('bar') }}
+--DATA--
+return array()
+--EXPECT--
+foo/bar
+a/b/bar
+--TEST--
+"include" function
+--TEMPLATE--
+{% set tmp = include("foo.twig") %}
+
+FOO{{ tmp }}BAR
+--TEMPLATE(foo.twig)--
+FOOBAR
+--DATA--
+return array()
+--EXPECT--
+FOO
+FOOBARBAR
+--TEST--
+"include" function is safe for auto-escaping
+--TEMPLATE--
+{{ include("foo.twig") }}
+--TEMPLATE(foo.twig)--
+<p>Test</p>
+--DATA--
+return array()
+--EXPECT--
+<p>Test</p>
+--TEST--
+"include" function
+--TEMPLATE--
+FOO
+{{ include("foo.twig") }}
+
+BAR
+--TEMPLATE(foo.twig)--
+FOOBAR
+--DATA--
+return array()
+--EXPECT--
+FOO
+
+FOOBAR
+
+BAR
+--TEST--
+"include" function allows expressions for the template to include
+--TEMPLATE--
+FOO
+{{ include(foo) }}
+
+BAR
+--TEMPLATE(foo.twig)--
+FOOBAR
+--DATA--
+return array('foo' => 'foo.twig')
+--EXPECT--
+FOO
+
+FOOBAR
+
+BAR
+--TEST--
+"include" function
+--TEMPLATE--
+{{ include(["foo.twig", "bar.twig"], ignore_missing = true) }}
+{{ include("foo.twig", ignore_missing = true) }}
+{{ include("foo.twig", ignore_missing = true, variables = {}) }}
+{{ include("foo.twig", ignore_missing = true, variables = {}, with_context = true) }}
+--DATA--
+return array()
+--EXPECT--
+--TEST--
+"include" function
+--TEMPLATE--
+{% extends "base.twig" %}
+
+{% block content %}
+ {{ parent() }}
+{% endblock %}
+--TEMPLATE(base.twig)--
+{% block content %}
+ {{ include("foo.twig") }}
+{% endblock %}
+--DATA--
+return array();
+--EXCEPTION--
+Twig_Error_Loader: Template "foo.twig" is not defined in "base.twig" at line 3.
+--TEST--
+"include" function
+--TEMPLATE--
+{{ include("foo.twig") }}
+--DATA--
+return array();
+--EXCEPTION--
+Twig_Error_Loader: Template "foo.twig" is not defined in "index.twig" at line 2.
+--TEST--
+"include" tag sandboxed
+--TEMPLATE--
+{{ include("foo.twig", sandboxed = true) }}
+--TEMPLATE(foo.twig)--
+{{ foo|e }}
+--DATA--
+return array()
+--EXCEPTION--
+Twig_Sandbox_SecurityError: Filter "e" is not allowed in "index.twig" at line 2.
+--TEST--
+"include" function accepts Twig_Template instance
+--TEMPLATE--
+{{ include(foo) }} FOO
+--TEMPLATE(foo.twig)--
+BAR
+--DATA--
+return array('foo' => $twig->loadTemplate('foo.twig'))
+--EXPECT--
+BAR FOO
+--TEST--
+"include" function
+--TEMPLATE--
+{{ include(["foo.twig", "bar.twig"]) }}
+{{- include(["bar.twig", "foo.twig"]) }}
+--TEMPLATE(foo.twig)--
+foo
+--DATA--
+return array()
+--EXPECT--
+foo
+foo
+--TEST--
+"include" function accept variables and with_context
+--TEMPLATE--
+{{ include("foo.twig") }}
+{{- include("foo.twig", with_context = false) }}
+{{- include("foo.twig", {'foo1': 'bar'}) }}
+{{- include("foo.twig", {'foo1': 'bar'}, with_context = false) }}
+--TEMPLATE(foo.twig)--
+{% for k, v in _context %}{{ k }},{% endfor %}
+--DATA--
+return array('foo' => 'bar')
+--EXPECT--
+foo,global,_parent,
+global,_parent,
+foo,global,foo1,_parent,
+foo1,global,_parent,
+--TEST--
+"include" function accept variables
+--TEMPLATE--
+{{ include("foo.twig", {'foo': 'bar'}) }}
+{{- include("foo.twig", vars) }}
+--TEMPLATE(foo.twig)--
+{{ foo }}
+--DATA--
+return array('vars' => array('foo' => 'bar'))
+--EXPECT--
+bar
+bar
+--TEST--
+"max" function
+--TEMPLATE--
+{{ max([2, 1, 3, 5, 4]) }}
+{{ max(2, 1, 3, 5, 4) }}
+{{ max({2:"two", 1:"one", 3:"three", 5:"five", 4:"for"}) }}
+--DATA--
+return array()
+--EXPECT--
+5
+5
+two
+--TEST--
+"min" function
+--TEMPLATE--
+{{ min(2, 1, 3, 5, 4) }}
+{{ min([2, 1, 3, 5, 4]) }}
+{{ min({2:"two", 1:"one", 3:"three", 5:"five", 4:"for"}) }}
+--DATA--
+return array()
+--EXPECT--
+1
+1
+five
+--TEST--
+"range" function
+--TEMPLATE--
+{{ range(low=0+1, high=10+0, step=2)|join(',') }}
+--DATA--
+return array()
+--EXPECT--
+1,3,5,7,9
+--TEST--
+"block" function recursively called in a parent template
+--TEMPLATE--
+{% extends "ordered_menu.twig" %}
+{% block label %}"{{ parent() }}"{% endblock %}
+{% block list %}{% set class = 'b' %}{{ parent() }}{% endblock %}
+--TEMPLATE(ordered_menu.twig)--
+{% extends "menu.twig" %}
+{% block list %}{% set class = class|default('a') %}<ol class="{{ class }}">{{ block('children') }}</ol>{% endblock %}
+--TEMPLATE(menu.twig)--
+{% extends "base.twig" %}
+{% block list %}<ul>{{ block('children') }}</ul>{% endblock %}
+{% block children %}{% set currentItem = item %}{% for item in currentItem %}{{ block('item') }}{% endfor %}{% set item = currentItem %}{% endblock %}
+{% block item %}<li>{% if item is not iterable %}{{ block('label') }}{% else %}{{ block('list') }}{% endif %}</li>{% endblock %}
+{% block label %}{{ item }}{{ block('unknown') }}{% endblock %}
+--TEMPLATE(base.twig)--
+{{ block('list') }}
+--DATA--
+return array('item' => array('1', '2', array('3.1', array('3.2.1', '3.2.2'), '3.4')))
+--EXPECT--
+<ol class="b"><li>"1"</li><li>"2"</li><li><ol class="b"><li>"3.1"</li><li><ol class="b"><li>"3.2.1"</li><li>"3.2.2"</li></ol></li><li>"3.4"</li></ol></li></ol>
+--TEST--
+"source" function
+--TEMPLATE--
+FOO
+{{ source("foo.twig") }}
+
+BAR
+--TEMPLATE(foo.twig)--
+{{ foo }}<br />
+--DATA--
+return array()
+--EXPECT--
+FOO
+
+{{ foo }}<br />
+
+BAR
+--TEST--
+"template_from_string" function
+--TEMPLATE--
+{% include template_from_string(template) %}
+
+{% include template_from_string("Hello {{ name }}") %}
+{% include template_from_string('{% extends "parent.twig" %}{% block content %}Hello {{ name }}{% endblock %}') %}
+--TEMPLATE(parent.twig)--
+{% block content %}{% endblock %}
+--DATA--
+return array('name' => 'Fabien', 'template' => "Hello {{ name }}")
+--EXPECT--
+Hello Fabien
+Hello Fabien
+Hello Fabien
+--TEST--
+macro
+--TEMPLATE--
+{% from _self import test %}
+
+{% macro test(a, b = 'bar') -%}
+{{ a }}{{ b }}
+{%- endmacro %}
+
+{{ test('foo') }}
+{{ test('bar', 'foo') }}
+--DATA--
+return array();
+--EXPECT--
+foobar
+barfoo
+--TEST--
+macro
+--TEMPLATE--
+{% import _self as macros %}
+
+{% macro foo(data) %}
+ {{ data }}
+{% endmacro %}
+
+{% macro bar() %}
+ <br />
+{% endmacro %}
+
+{{ macros.foo(macros.bar()) }}
+--DATA--
+return array();
+--EXPECT--
+<br />
+--TEST--
+macro
+--TEMPLATE--
+{% from _self import test %}
+
+{% macro test(this) -%}
+ {{ this }}
+{%- endmacro %}
+
+{{ test(this) }}
+--DATA--
+return array('this' => 'foo');
+--EXPECT--
+foo
+--TEST--
+macro
+--TEMPLATE--
+{% import _self as test %}
+{% from _self import test %}
+
+{% macro test(a, b) -%}
+ {{ a|default('a') }}<br />
+ {{- b|default('b') }}<br />
+{%- endmacro %}
+
+{{ test.test() }}
+{{ test() }}
+{{ test.test(1, "c") }}
+{{ test(1, "c") }}
+--DATA--
+return array();
+--EXPECT--
+a<br />b<br />
+a<br />b<br />
+1<br />c<br />
+1<br />c<br />
+--TEST--
+macro with a filter
+--TEMPLATE--
+{% import _self as test %}
+
+{% macro test() %}
+ {% filter escape %}foo<br />{% endfilter %}
+{% endmacro %}
+
+{{ test.test() }}
+--DATA--
+return array();
+--EXPECT--
+foo&lt;br /&gt;
+--TEST--
+Twig outputs 0 nodes correctly
+--TEMPLATE--
+{{ foo }}0{{ foo }}
+--DATA--
+return array('foo' => 'foo')
+--EXPECT--
+foo0foo
+--TEST--
+error in twig extension
+--TEMPLATE--
+{{ object.region is not null ? object.regionChoices[object.region] }}
+--EXPECT--
+house.region.s
+--TEST--
+Twig is able to deal with SimpleXMLElement instances as variables
+--CONDITION--
+version_compare(phpversion(), '5.3.0', '>=')
+--TEMPLATE--
+Hello '{{ images.image.0.group }}'!
+{{ images.image.0.group.attributes.myattr }}
+{{ images.children().image.count() }}
+{% for image in images %}
+ - {{ image.group }}
+{% endfor %}
+--DATA--
+return array('images' => new SimpleXMLElement('<images><image><group myattr="example">foo</group></image><image><group>bar</group></image></images>'))
+--EXPECT--
+Hello 'foo'!
+example
+2
+ - foo
+ - bar
+--TEST--
+Twig does not confuse strings with integers in getAttribute()
+--TEMPLATE--
+{{ hash['2e2'] }}
+--DATA--
+return array('hash' => array('2e2' => 'works'))
+--EXPECT--
+works
+--TEST--
+"autoescape" tag applies escaping on its children
+--TEMPLATE--
+{% autoescape %}
+{{ var }}<br />
+{% endautoescape %}
+{% autoescape 'html' %}
+{{ var }}<br />
+{% endautoescape %}
+{% autoescape false %}
+{{ var }}<br />
+{% endautoescape %}
+{% autoescape true %}
+{{ var }}<br />
+{% endautoescape %}
+{% autoescape false %}
+{{ var }}<br />
+{% endautoescape %}
+--DATA--
+return array('var' => '<br />')
+--EXPECT--
+&lt;br /&gt;<br />
+&lt;br /&gt;<br />
+<br /><br />
+&lt;br /&gt;<br />
+<br /><br />
+--TEST--
+"autoescape" tag applies escaping on embedded blocks
+--TEMPLATE--
+{% autoescape 'html' %}
+ {% block foo %}
+ {{ var }}
+ {% endblock %}
+{% endautoescape %}
+--DATA--
+return array('var' => '<br />')
+--EXPECT--
+&lt;br /&gt;
+--TEST--
+"autoescape" tag does not double-escape
+--TEMPLATE--
+{% autoescape 'html' %}
+{{ var|escape }}
+{% endautoescape %}
+--DATA--
+return array('var' => '<br />')
+--EXPECT--
+&lt;br /&gt;
+--TEST--
+"autoescape" tag applies escaping after calling functions
+--TEMPLATE--
+
+autoescape false
+{% autoescape false %}
+
+safe_br
+{{ safe_br() }}
+
+unsafe_br
+{{ unsafe_br() }}
+
+{% endautoescape %}
+
+autoescape 'html'
+{% autoescape 'html' %}
+
+safe_br
+{{ safe_br() }}
+
+unsafe_br
+{{ unsafe_br() }}
+
+unsafe_br()|raw
+{{ (unsafe_br())|raw }}
+
+safe_br()|escape
+{{ (safe_br())|escape }}
+
+safe_br()|raw
+{{ (safe_br())|raw }}
+
+unsafe_br()|escape
+{{ (unsafe_br())|escape }}
+
+{% endautoescape %}
+
+autoescape js
+{% autoescape 'js' %}
+
+safe_br
+{{ safe_br() }}
+
+{% endautoescape %}
+--DATA--
+return array()
+--EXPECT--
+
+autoescape false
+
+safe_br
+<br />
+
+unsafe_br
+<br />
+
+
+autoescape 'html'
+
+safe_br
+<br />
+
+unsafe_br
+&lt;br /&gt;
+
+unsafe_br()|raw
+<br />
+
+safe_br()|escape
+&lt;br /&gt;
+
+safe_br()|raw
+<br />
+
+unsafe_br()|escape
+&lt;br /&gt;
+
+
+autoescape js
+
+safe_br
+\x3Cbr\x20\x2F\x3E
+--TEST--
+"autoescape" tag does not apply escaping on literals
+--TEMPLATE--
+{% autoescape 'html' %}
+
+1. Simple literal
+{{ "<br />" }}
+
+2. Conditional expression with only literals
+{{ true ? "<br />" : "<br>" }}
+
+3. Conditional expression with a variable
+{{ true ? "<br />" : someVar }}
+
+4. Nested conditionals with only literals
+{{ true ? (true ? "<br />" : "<br>") : "\n" }}
+
+5. Nested conditionals with a variable
+{{ true ? (true ? "<br />" : someVar) : "\n" }}
+
+6. Nested conditionals with a variable marked safe
+{{ true ? (true ? "<br />" : someVar|raw) : "\n" }}
+
+{% endautoescape %}
+--DATA--
+return array()
+--EXPECT--
+
+1. Simple literal
+<br />
+
+2. Conditional expression with only literals
+<br />
+
+3. Conditional expression with a variable
+&lt;br /&gt;
+
+4. Nested conditionals with only literals
+<br />
+
+5. Nested conditionals with a variable
+&lt;br /&gt;
+
+6. Nested conditionals with a variable marked safe
+<br />
+--TEST--
+"autoescape" tags can be nested at will
+--TEMPLATE--
+{{ var }}
+{% autoescape 'html' %}
+ {{ var }}
+ {% autoescape false %}
+ {{ var }}
+ {% autoescape 'html' %}
+ {{ var }}
+ {% endautoescape %}
+ {{ var }}
+ {% endautoescape %}
+ {{ var }}
+{% endautoescape %}
+{{ var }}
+--DATA--
+return array('var' => '<br />')
+--EXPECT--
+&lt;br /&gt;
+ &lt;br /&gt;
+ <br />
+ &lt;br /&gt;
+ <br />
+ &lt;br /&gt;
+&lt;br /&gt;
+--TEST--
+"autoescape" tag applies escaping to object method calls
+--TEMPLATE--
+{% autoescape 'html' %}
+{{ user.name }}
+{{ user.name|lower }}
+{{ user }}
+{% endautoescape %}
+--EXPECT--
+Fabien&lt;br /&gt;
+fabien&lt;br /&gt;
+Fabien&lt;br /&gt;
+--TEST--
+"autoescape" tag does not escape when raw is used as a filter
+--TEMPLATE--
+{% autoescape 'html' %}
+{{ var|raw }}
+{% endautoescape %}
+--DATA--
+return array('var' => '<br />')
+--EXPECT--
+<br />
+--TEST--
+"autoescape" tag accepts an escaping strategy
+--TEMPLATE--
+{% autoescape true js %}{{ var }}{% endautoescape %}
+
+{% autoescape true html %}{{ var }}{% endautoescape %}
+
+{% autoescape 'js' %}{{ var }}{% endautoescape %}
+
+{% autoescape 'html' %}{{ var }}{% endautoescape %}
+--DATA--
+return array('var' => '<br />"')
+--EXPECT--
+\x3Cbr\x20\x2F\x3E\x22
+&lt;br /&gt;&quot;
+\x3Cbr\x20\x2F\x3E\x22
+&lt;br /&gt;&quot;
+--TEST--
+escape types
+--TEMPLATE--
+
+1. autoescape 'html' |escape('js')
+
+{% autoescape 'html' %}
+<a onclick="alert(&quot;{{ msg|escape('js') }}&quot;)"></a>
+{% endautoescape %}
+
+2. autoescape 'html' |escape('js')
+
+{% autoescape 'html' %}
+<a onclick="alert(&quot;{{ msg|escape('js') }}&quot;)"></a>
+{% endautoescape %}
+
+3. autoescape 'js' |escape('js')
+
+{% autoescape 'js' %}
+<a onclick="alert(&quot;{{ msg|escape('js') }}&quot;)"></a>
+{% endautoescape %}
+
+4. no escape
+
+{% autoescape false %}
+<a onclick="alert(&quot;{{ msg }}&quot;)"></a>
+{% endautoescape %}
+
+5. |escape('js')|escape('html')
+
+{% autoescape false %}
+<a onclick="alert(&quot;{{ msg|escape('js')|escape('html') }}&quot;)"></a>
+{% endautoescape %}
+
+6. autoescape 'html' |escape('js')|escape('html')
+
+{% autoescape 'html' %}
+<a onclick="alert(&quot;{{ msg|escape('js')|escape('html') }}&quot;)"></a>
+{% endautoescape %}
+
+--DATA--
+return array('msg' => "<>\n'\"")
+--EXPECT--
+
+1. autoescape 'html' |escape('js')
+
+<a onclick="alert(&quot;\x3C\x3E\x0A\x27\x22&quot;)"></a>
+
+2. autoescape 'html' |escape('js')
+
+<a onclick="alert(&quot;\x3C\x3E\x0A\x27\x22&quot;)"></a>
+
+3. autoescape 'js' |escape('js')
+
+<a onclick="alert(&quot;\x3C\x3E\x0A\x27\x22&quot;)"></a>
+
+4. no escape
+
+<a onclick="alert(&quot;<>
+'"&quot;)"></a>
+
+5. |escape('js')|escape('html')
+
+<a onclick="alert(&quot;\x3C\x3E\x0A\x27\x22&quot;)"></a>
+
+6. autoescape 'html' |escape('js')|escape('html')
+
+<a onclick="alert(&quot;\x3C\x3E\x0A\x27\x22&quot;)"></a>
+
+--TEST--
+"autoescape" tag do not applies escaping on filter arguments
+--TEMPLATE--
+{% autoescape 'html' %}
+{{ var|nl2br("<br />") }}
+{{ var|nl2br("<br />"|escape) }}
+{{ var|nl2br(sep) }}
+{{ var|nl2br(sep|raw) }}
+{{ var|nl2br(sep|escape) }}
+{% endautoescape %}
+--DATA--
+return array('var' => "<Fabien>\nTwig", 'sep' => '<br />')
+--EXPECT--
+&lt;Fabien&gt;<br />
+Twig
+&lt;Fabien&gt;&lt;br /&gt;
+Twig
+&lt;Fabien&gt;<br />
+Twig
+&lt;Fabien&gt;<br />
+Twig
+&lt;Fabien&gt;&lt;br /&gt;
+Twig
+--TEST--
+"autoescape" tag applies escaping after calling filters
+--TEMPLATE--
+{% autoescape 'html' %}
+
+(escape_and_nl2br is an escaper filter)
+
+1. Don't escape escaper filter output
+( var is escaped by |escape_and_nl2br, line-breaks are added,
+ the output is not escaped )
+{{ var|escape_and_nl2br }}
+
+2. Don't escape escaper filter output
+( var is escaped by |escape_and_nl2br, line-breaks are added,
+ the output is not escaped, |raw is redundant )
+{{ var|escape_and_nl2br|raw }}
+
+3. Explicit escape
+( var is escaped by |escape_and_nl2br, line-breaks are added,
+ the output is explicitly escaped by |escape )
+{{ var|escape_and_nl2br|escape }}
+
+4. Escape non-escaper filter output
+( var is upper-cased by |upper,
+ the output is auto-escaped )
+{{ var|upper }}
+
+5. Escape if last filter is not an escaper
+( var is escaped by |escape_and_nl2br, line-breaks are added,
+ the output is upper-cased by |upper,
+ the output is auto-escaped as |upper is not an escaper )
+{{ var|escape_and_nl2br|upper }}
+
+6. Don't escape escaper filter output
+( var is upper cased by upper,
+ the output is escaped by |escape_and_nl2br, line-breaks are added,
+ the output is not escaped as |escape_and_nl2br is an escaper )
+{{ var|upper|escape_and_nl2br }}
+
+7. Escape if last filter is not an escaper
+( the output of |format is "<b>" ~ var ~ "</b>",
+ the output is auto-escaped )
+{{ "<b>%s</b>"|format(var) }}
+
+8. Escape if last filter is not an escaper
+( the output of |format is "<b>" ~ var ~ "</b>",
+ |raw is redundant,
+ the output is auto-escaped )
+{{ "<b>%s</b>"|raw|format(var) }}
+
+9. Don't escape escaper filter output
+( the output of |format is "<b>" ~ var ~ "</b>",
+ the output is not escaped due to |raw filter at the end )
+{{ "<b>%s</b>"|format(var)|raw }}
+
+10. Don't escape escaper filter output
+( the output of |format is "<b>" ~ var ~ "</b>",
+ the output is not escaped due to |raw filter at the end,
+ the |raw filter on var is redundant )
+{{ "<b>%s</b>"|format(var|raw)|raw }}
+
+{% endautoescape %}
+--DATA--
+return array('var' => "<Fabien>\nTwig")
+--EXPECT--
+
+(escape_and_nl2br is an escaper filter)
+
+1. Don't escape escaper filter output
+( var is escaped by |escape_and_nl2br, line-breaks are added,
+ the output is not escaped )
+&lt;Fabien&gt;<br />
+Twig
+
+2. Don't escape escaper filter output
+( var is escaped by |escape_and_nl2br, line-breaks are added,
+ the output is not escaped, |raw is redundant )
+&lt;Fabien&gt;<br />
+Twig
+
+3. Explicit escape
+( var is escaped by |escape_and_nl2br, line-breaks are added,
+ the output is explicitly escaped by |escape )
+&amp;lt;Fabien&amp;gt;&lt;br /&gt;
+Twig
+
+4. Escape non-escaper filter output
+( var is upper-cased by |upper,
+ the output is auto-escaped )
+&lt;FABIEN&gt;
+TWIG
+
+5. Escape if last filter is not an escaper
+( var is escaped by |escape_and_nl2br, line-breaks are added,
+ the output is upper-cased by |upper,
+ the output is auto-escaped as |upper is not an escaper )
+&amp;LT;FABIEN&amp;GT;&lt;BR /&gt;
+TWIG
+
+6. Don't escape escaper filter output
+( var is upper cased by upper,
+ the output is escaped by |escape_and_nl2br, line-breaks are added,
+ the output is not escaped as |escape_and_nl2br is an escaper )
+&lt;FABIEN&gt;<br />
+TWIG
+
+7. Escape if last filter is not an escaper
+( the output of |format is "<b>" ~ var ~ "</b>",
+ the output is auto-escaped )
+&lt;b&gt;&lt;Fabien&gt;
+Twig&lt;/b&gt;
+
+8. Escape if last filter is not an escaper
+( the output of |format is "<b>" ~ var ~ "</b>",
+ |raw is redundant,
+ the output is auto-escaped )
+&lt;b&gt;&lt;Fabien&gt;
+Twig&lt;/b&gt;
+
+9. Don't escape escaper filter output
+( the output of |format is "<b>" ~ var ~ "</b>",
+ the output is not escaped due to |raw filter at the end )
+<b><Fabien>
+Twig</b>
+
+10. Don't escape escaper filter output
+( the output of |format is "<b>" ~ var ~ "</b>",
+ the output is not escaped due to |raw filter at the end,
+ the |raw filter on var is redundant )
+<b><Fabien>
+Twig</b>
+--TEST--
+"autoescape" tag applies escaping after calling filters, and before calling pre_escape filters
+--TEMPLATE--
+{% autoescape 'html' %}
+
+(nl2br is pre_escaped for "html" and declared safe for "html")
+
+1. Pre-escape and don't post-escape
+( var|escape|nl2br )
+{{ var|nl2br }}
+
+2. Don't double-pre-escape
+( var|escape|nl2br )
+{{ var|escape|nl2br }}
+
+3. Don't escape safe values
+( var|raw|nl2br )
+{{ var|raw|nl2br }}
+
+4. Don't escape safe values
+( var|escape|nl2br|nl2br )
+{{ var|nl2br|nl2br }}
+
+5. Re-escape values that are escaped for another context
+( var|escape_something|escape|nl2br )
+{{ var|escape_something|nl2br }}
+
+6. Still escape when using filters not declared safe
+( var|escape|nl2br|upper|escape )
+{{ var|nl2br|upper }}
+
+{% endautoescape %}
+--DATA--
+return array('var' => "<Fabien>\nTwig")
+--EXPECT--
+
+(nl2br is pre_escaped for "html" and declared safe for "html")
+
+1. Pre-escape and don't post-escape
+( var|escape|nl2br )
+&lt;Fabien&gt;<br />
+Twig
+
+2. Don't double-pre-escape
+( var|escape|nl2br )
+&lt;Fabien&gt;<br />
+Twig
+
+3. Don't escape safe values
+( var|raw|nl2br )
+<Fabien><br />
+Twig
+
+4. Don't escape safe values
+( var|escape|nl2br|nl2br )
+&lt;Fabien&gt;<br /><br />
+Twig
+
+5. Re-escape values that are escaped for another context
+( var|escape_something|escape|nl2br )
+&lt;FABIEN&gt;<br />
+TWIG
+
+6. Still escape when using filters not declared safe
+( var|escape|nl2br|upper|escape )
+&amp;LT;FABIEN&amp;GT;&lt;BR /&gt;
+TWIG
+
+--TEST--
+"autoescape" tag handles filters preserving the safety
+--TEMPLATE--
+{% autoescape 'html' %}
+
+(preserves_safety is preserving safety for "html")
+
+1. Unsafe values are still unsafe
+( var|preserves_safety|escape )
+{{ var|preserves_safety }}
+
+2. Safe values are still safe
+( var|escape|preserves_safety )
+{{ var|escape|preserves_safety }}
+
+3. Re-escape values that are escaped for another context
+( var|escape_something|preserves_safety|escape )
+{{ var|escape_something|preserves_safety }}
+
+4. Still escape when using filters not declared safe
+( var|escape|preserves_safety|replace({'FABIEN': 'FABPOT'})|escape )
+{{ var|escape|preserves_safety|replace({'FABIEN': 'FABPOT'}) }}
+
+{% endautoescape %}
+--DATA--
+return array('var' => "<Fabien>\nTwig")
+--EXPECT--
+
+(preserves_safety is preserving safety for "html")
+
+1. Unsafe values are still unsafe
+( var|preserves_safety|escape )
+&lt;FABIEN&gt;
+TWIG
+
+2. Safe values are still safe
+( var|escape|preserves_safety )
+&LT;FABIEN&GT;
+TWIG
+
+3. Re-escape values that are escaped for another context
+( var|escape_something|preserves_safety|escape )
+&lt;FABIEN&gt;
+TWIG
+
+4. Still escape when using filters not declared safe
+( var|escape|preserves_safety|replace({'FABIEN': 'FABPOT'})|escape )
+&amp;LT;FABPOT&amp;GT;
+TWIG
+
+--TEST--
+"block" tag
+--TEMPLATE--
+{% block title1 %}FOO{% endblock %}
+{% block title2 foo|lower %}
+--TEMPLATE(foo.twig)--
+{% block content %}{% endblock %}
+--DATA--
+return array('foo' => 'bar')
+--EXPECT--
+FOObar
+--TEST--
+"block" tag
+--TEMPLATE--
+{% block content %}
+ {% block content %}
+ {% endblock %}
+{% endblock %}
+--DATA--
+return array()
+--EXCEPTION--
+Twig_Error_Syntax: The block 'content' has already been defined line 2 in "index.twig" at line 3
+--TEST--
+"§" special chars in a block name
+--TEMPLATE--
+{% block § %}
+{% endblock § %}
+--DATA--
+return array()
+--EXPECT--
+--TEST--
+"embed" tag
+--TEMPLATE--
+FOO
+{% embed "foo.twig" %}
+ {% block c1 %}
+ {{ parent() }}
+ block1extended
+ {% endblock %}
+{% endembed %}
+
+BAR
+--TEMPLATE(foo.twig)--
+A
+{% block c1 %}
+ block1
+{% endblock %}
+B
+{% block c2 %}
+ block2
+{% endblock %}
+C
+--DATA--
+return array()
+--EXPECT--
+FOO
+
+A
+ block1
+
+ block1extended
+ B
+ block2
+C
+BAR
+--TEST--
+"embed" tag
+--TEMPLATE(index.twig)--
+FOO
+{% embed "foo.twig" %}
+ {% block c1 %}
+ {{ nothing }}
+ {% endblock %}
+{% endembed %}
+BAR
+--TEMPLATE(foo.twig)--
+{% block c1 %}{% endblock %}
+--DATA--
+return array()
+--EXCEPTION--
+Twig_Error_Runtime: Variable "nothing" does not exist in "index.twig" at line 5
+--TEST--
+"embed" tag
+--TEMPLATE--
+FOO
+{% embed "foo.twig" %}
+ {% block c1 %}
+ {{ parent() }}
+ block1extended
+ {% endblock %}
+{% endembed %}
+
+{% embed "foo.twig" %}
+ {% block c1 %}
+ {{ parent() }}
+ block1extended
+ {% endblock %}
+{% endembed %}
+
+BAR
+--TEMPLATE(foo.twig)--
+A
+{% block c1 %}
+ block1
+{% endblock %}
+B
+{% block c2 %}
+ block2
+{% endblock %}
+C
+--DATA--
+return array()
+--EXPECT--
+FOO
+
+A
+ block1
+
+ block1extended
+ B
+ block2
+C
+
+A
+ block1
+
+ block1extended
+ B
+ block2
+C
+BAR
+--TEST--
+"embed" tag
+--TEMPLATE--
+{% embed "foo.twig" %}
+ {% block c1 %}
+ {{ parent() }}
+ {% embed "foo.twig" %}
+ {% block c1 %}
+ {{ parent() }}
+ block1extended
+ {% endblock %}
+ {% endembed %}
+
+ {% endblock %}
+{% endembed %}
+--TEMPLATE(foo.twig)--
+A
+{% block c1 %}
+ block1
+{% endblock %}
+B
+{% block c2 %}
+ block2
+{% endblock %}
+C
+--DATA--
+return array()
+--EXPECT--
+A
+ block1
+
+
+A
+ block1
+
+ block1extended
+ B
+ block2
+C
+ B
+ block2
+C
+--TEST--
+"embed" tag
+--TEMPLATE--
+{% extends "base.twig" %}
+
+{% block c1 %}
+ {{ parent() }}
+ blockc1baseextended
+{% endblock %}
+
+{% block c2 %}
+ {{ parent() }}
+
+ {% embed "foo.twig" %}
+ {% block c1 %}
+ {{ parent() }}
+ block1extended
+ {% endblock %}
+ {% endembed %}
+{% endblock %}
+--TEMPLATE(base.twig)--
+A
+{% block c1 %}
+ blockc1base
+{% endblock %}
+{% block c2 %}
+ blockc2base
+{% endblock %}
+B
+--TEMPLATE(foo.twig)--
+A
+{% block c1 %}
+ block1
+{% endblock %}
+B
+{% block c2 %}
+ block2
+{% endblock %}
+C
+--DATA--
+return array()
+--EXPECT--
+A
+ blockc1base
+
+ blockc1baseextended
+ blockc2base
+
+
+
+A
+ block1
+
+ block1extended
+ B
+ block2
+CB
+--TEST--
+"filter" tag applies a filter on its children
+--TEMPLATE--
+{% filter upper %}
+Some text with a {{ var }}
+{% endfilter %}
+--DATA--
+return array('var' => 'var')
+--EXPECT--
+SOME TEXT WITH A VAR
+--TEST--
+"filter" tag applies a filter on its children
+--TEMPLATE--
+{% filter json_encode|raw %}test{% endfilter %}
+--DATA--
+return array()
+--EXPECT--
+"test"
+--TEST--
+"filter" tags accept multiple chained filters
+--TEMPLATE--
+{% filter lower|title %}
+ {{ var }}
+{% endfilter %}
+--DATA--
+return array('var' => 'VAR')
+--EXPECT--
+ Var
+--TEST--
+"filter" tags can be nested at will
+--TEMPLATE--
+{% filter lower|title %}
+ {{ var }}
+ {% filter upper %}
+ {{ var }}
+ {% endfilter %}
+ {{ var }}
+{% endfilter %}
+--DATA--
+return array('var' => 'var')
+--EXPECT--
+ Var
+ Var
+ Var
+--TEST--
+"filter" tag applies the filter on "for" tags
+--TEMPLATE--
+{% filter upper %}
+{% for item in items %}
+{{ item }}
+{% endfor %}
+{% endfilter %}
+--DATA--
+return array('items' => array('a', 'b'))
+--EXPECT--
+A
+B
+--TEST--
+"filter" tag applies the filter on "if" tags
+--TEMPLATE--
+{% filter upper %}
+{% if items %}
+{{ items|join(', ') }}
+{% endif %}
+
+{% if items.3 is defined %}
+FOO
+{% else %}
+{{ items.1 }}
+{% endif %}
+
+{% if items.3 is defined %}
+FOO
+{% elseif items.1 %}
+{{ items.0 }}
+{% endif %}
+
+{% endfilter %}
+--DATA--
+return array('items' => array('a', 'b'))
+--EXPECT--
+A, B
+
+B
+
+A
+--TEST--
+"for" tag takes a condition
+--TEMPLATE--
+{% for i in 1..5 if i is odd -%}
+ {{ loop.index }}.{{ i }}{{ foo.bar }}
+{% endfor %}
+--DATA--
+return array('foo' => array('bar' => 'X'))
+--CONFIG--
+return array('strict_variables' => false)
+--EXPECT--
+1.1X
+2.3X
+3.5X
+--TEST--
+"for" tag keeps the context safe
+--TEMPLATE--
+{% for item in items %}
+ {% for item in items %}
+ * {{ item }}
+ {% endfor %}
+ * {{ item }}
+{% endfor %}
+--DATA--
+return array('items' => array('a', 'b'))
+--EXPECT--
+ * a
+ * b
+ * a
+ * a
+ * b
+ * b
+--TEST--
+"for" tag can use an "else" clause
+--TEMPLATE--
+{% for item in items %}
+ * {{ item }}
+{% else %}
+ no item
+{% endfor %}
+--DATA--
+return array('items' => array('a', 'b'))
+--EXPECT--
+ * a
+ * b
+--DATA--
+return array('items' => array())
+--EXPECT--
+ no item
+--DATA--
+return array()
+--CONFIG--
+return array('strict_variables' => false)
+--EXPECT--
+ no item
+--TEST--
+"for" tag does not reset inner variables
+--TEMPLATE--
+{% for i in 1..2 %}
+ {% for j in 0..2 %}
+ {{k}}{% set k = k+1 %} {{ loop.parent.loop.index }}
+ {% endfor %}
+{% endfor %}
+--DATA--
+return array('k' => 0)
+--EXPECT--
+ 0 1
+ 1 1
+ 2 1
+ 3 2
+ 4 2
+ 5 2
+--TEST--
+"for" tag can iterate over keys and values
+--TEMPLATE--
+{% for key, item in items %}
+ * {{ key }}/{{ item }}
+{% endfor %}
+--DATA--
+return array('items' => array('a', 'b'))
+--EXPECT--
+ * 0/a
+ * 1/b
+--TEST--
+"for" tag can iterate over keys
+--TEMPLATE--
+{% for key in items|keys %}
+ * {{ key }}
+{% endfor %}
+--DATA--
+return array('items' => array('a', 'b'))
+--EXPECT--
+ * 0
+ * 1
+--TEST--
+"for" tag adds a loop variable to the context locally
+--TEMPLATE--
+{% for item in items %}
+{% endfor %}
+{% if loop is not defined %}WORKS{% endif %}
+--DATA--
+return array('items' => array())
+--EXPECT--
+WORKS
+--TEST--
+"for" tag adds a loop variable to the context
+--TEMPLATE--
+{% for item in items %}
+ * {{ loop.index }}/{{ loop.index0 }}
+ * {{ loop.revindex }}/{{ loop.revindex0 }}
+ * {{ loop.first }}/{{ loop.last }}/{{ loop.length }}
+
+{% endfor %}
+--DATA--
+return array('items' => array('a', 'b'))
+--EXPECT--
+ * 1/0
+ * 2/1
+ * 1//2
+
+ * 2/1
+ * 1/0
+ * /1/2
+--TEST--
+"for" tag
+--TEMPLATE--
+{% for i, item in items if loop.last > 0 %}
+{% endfor %}
+--DATA--
+return array('items' => array('a', 'b'))
+--EXCEPTION--
+Twig_Error_Syntax: The "loop" variable cannot be used in a looping condition in "index.twig" at line 2
+--TEST--
+"for" tag
+--TEMPLATE--
+{% for i, item in items if i > 0 %}
+ {{ loop.last }}
+{% endfor %}
+--DATA--
+return array('items' => array('a', 'b'))
+--EXCEPTION--
+Twig_Error_Syntax: The "loop.last" variable is not defined when looping with a condition in "index.twig" at line 3
+--TEST--
+"for" tag can use an "else" clause
+--TEMPLATE--
+{% for item in items %}
+ {% for item in items1 %}
+ * {{ item }}
+ {% else %}
+ no {{ item }}
+ {% endfor %}
+{% else %}
+ no item1
+{% endfor %}
+--DATA--
+return array('items' => array('a', 'b'), 'items1' => array())
+--EXPECT--
+no a
+ no b
+--TEST--
+"for" tag iterates over iterable and countable objects
+--TEMPLATE--
+{% for item in items %}
+ * {{ item }}
+ * {{ loop.index }}/{{ loop.index0 }}
+ * {{ loop.revindex }}/{{ loop.revindex0 }}
+ * {{ loop.first }}/{{ loop.last }}/{{ loop.length }}
+
+{% endfor %}
+
+{% for key, value in items %}
+ * {{ key }}/{{ value }}
+{% endfor %}
+
+{% for key in items|keys %}
+ * {{ key }}
+{% endfor %}
+--DATA--
+class ItemsIteratorCountable implements Iterator, Countable
+{
+ protected $values = array('foo' => 'bar', 'bar' => 'foo');
+ public function current() { return current($this->values); }
+ public function key() { return key($this->values); }
+ public function next() { return next($this->values); }
+ public function rewind() { return reset($this->values); }
+ public function valid() { return false !== current($this->values); }
+ public function count() { return count($this->values); }
+}
+return array('items' => new ItemsIteratorCountable())
+--EXPECT--
+ * bar
+ * 1/0
+ * 2/1
+ * 1//2
+
+ * foo
+ * 2/1
+ * 1/0
+ * /1/2
+
+
+ * foo/bar
+ * bar/foo
+
+ * foo
+ * bar
+--TEST--
+"for" tag iterates over iterable objects
+--TEMPLATE--
+{% for item in items %}
+ * {{ item }}
+ * {{ loop.index }}/{{ loop.index0 }}
+ * {{ loop.first }}
+
+{% endfor %}
+
+{% for key, value in items %}
+ * {{ key }}/{{ value }}
+{% endfor %}
+
+{% for key in items|keys %}
+ * {{ key }}
+{% endfor %}
+--DATA--
+class ItemsIterator implements Iterator
+{
+ protected $values = array('foo' => 'bar', 'bar' => 'foo');
+ public function current() { return current($this->values); }
+ public function key() { return key($this->values); }
+ public function next() { return next($this->values); }
+ public function rewind() { return reset($this->values); }
+ public function valid() { return false !== current($this->values); }
+}
+return array('items' => new ItemsIterator())
+--EXPECT--
+ * bar
+ * 1/0
+ * 1
+
+ * foo
+ * 2/1
+ *
+
+
+ * foo/bar
+ * bar/foo
+
+ * foo
+ * bar
+--TEST--
+"for" tags can be nested
+--TEMPLATE--
+{% for key, item in items %}
+* {{ key }} ({{ loop.length }}):
+{% for value in item %}
+ * {{ value }} ({{ loop.length }})
+{% endfor %}
+{% endfor %}
+--DATA--
+return array('items' => array('a' => array('a1', 'a2', 'a3'), 'b' => array('b1')))
+--EXPECT--
+* a (2):
+ * a1 (3)
+ * a2 (3)
+ * a3 (3)
+* b (2):
+ * b1 (1)
+--TEST--
+"for" tag iterates over item values
+--TEMPLATE--
+{% for item in items %}
+ * {{ item }}
+{% endfor %}
+--DATA--
+return array('items' => array('a', 'b'))
+--EXPECT--
+ * a
+ * b
+--TEST--
+global variables
+--TEMPLATE--
+{% include "included.twig" %}
+{% from "included.twig" import foobar %}
+{{ foobar() }}
+--TEMPLATE(included.twig)--
+{% macro foobar() %}
+called foobar
+{% endmacro %}
+--DATA--
+return array();
+--EXPECT--
+called foobar
+--TEST--
+"if" creates a condition
+--TEMPLATE--
+{% if a is defined %}
+ {{ a }}
+{% elseif b is defined %}
+ {{ b }}
+{% else %}
+ NOTHING
+{% endif %}
+--DATA--
+return array('a' => 'a')
+--EXPECT--
+ a
+--DATA--
+return array('b' => 'b')
+--EXPECT--
+ b
+--DATA--
+return array()
+--EXPECT--
+ NOTHING
+--TEST--
+"if" takes an expression as a test
+--TEMPLATE--
+{% if a < 2 %}
+ A1
+{% elseif a > 10 %}
+ A2
+{% else %}
+ A3
+{% endif %}
+--DATA--
+return array('a' => 1)
+--EXPECT--
+ A1
+--DATA--
+return array('a' => 12)
+--EXPECT--
+ A2
+--DATA--
+return array('a' => 7)
+--EXPECT--
+ A3
+--TEST--
+"include" tag
+--TEMPLATE--
+FOO
+{% include "foo.twig" %}
+
+BAR
+--TEMPLATE(foo.twig)--
+FOOBAR
+--DATA--
+return array()
+--EXPECT--
+FOO
+
+FOOBAR
+BAR
+--TEST--
+"include" tag allows expressions for the template to include
+--TEMPLATE--
+FOO
+{% include foo %}
+
+BAR
+--TEMPLATE(foo.twig)--
+FOOBAR
+--DATA--
+return array('foo' => 'foo.twig')
+--EXPECT--
+FOO
+
+FOOBAR
+BAR
+--TEST--
+"include" tag
+--TEMPLATE--
+{% include ["foo.twig", "bar.twig"] ignore missing %}
+{% include "foo.twig" ignore missing %}
+{% include "foo.twig" ignore missing with {} %}
+{% include "foo.twig" ignore missing with {} only %}
+--DATA--
+return array()
+--EXPECT--
+--TEST--
+"include" tag
+--TEMPLATE--
+{% extends "base.twig" %}
+
+{% block content %}
+ {{ parent() }}
+{% endblock %}
+--TEMPLATE(base.twig)--
+{% block content %}
+ {% include "foo.twig" %}
+{% endblock %}
+--DATA--
+return array();
+--EXCEPTION--
+Twig_Error_Loader: Template "foo.twig" is not defined in "base.twig" at line 3.
+--TEST--
+"include" tag
+--TEMPLATE--
+{% include "foo.twig" %}
+--DATA--
+return array();
+--EXCEPTION--
+Twig_Error_Loader: Template "foo.twig" is not defined in "index.twig" at line 2.
+--TEST--
+"include" tag accept variables and only
+--TEMPLATE--
+{% include "foo.twig" %}
+{% include "foo.twig" only %}
+{% include "foo.twig" with {'foo1': 'bar'} %}
+{% include "foo.twig" with {'foo1': 'bar'} only %}
+--TEMPLATE(foo.twig)--
+{% for k, v in _context %}{{ k }},{% endfor %}
+--DATA--
+return array('foo' => 'bar')
+--EXPECT--
+foo,global,_parent,
+global,_parent,
+foo,global,foo1,_parent,
+foo1,global,_parent,
+--TEST--
+"include" tag accepts Twig_Template instance
+--TEMPLATE--
+{% include foo %} FOO
+--TEMPLATE(foo.twig)--
+BAR
+--DATA--
+return array('foo' => $twig->loadTemplate('foo.twig'))
+--EXPECT--
+BAR FOO
+--TEST--
+"include" tag
+--TEMPLATE--
+{% include ["foo.twig", "bar.twig"] %}
+{% include ["bar.twig", "foo.twig"] %}
+--TEMPLATE(foo.twig)--
+foo
+--DATA--
+return array()
+--EXPECT--
+foo
+foo
+--TEST--
+"include" tag accept variables
+--TEMPLATE--
+{% include "foo.twig" with {'foo': 'bar'} %}
+{% include "foo.twig" with vars %}
+--TEMPLATE(foo.twig)--
+{{ foo }}
+--DATA--
+return array('vars' => array('foo' => 'bar'))
+--EXPECT--
+bar
+bar
+--TEST--
+"extends" tag
+--TEMPLATE--
+{% extends "foo.twig" %}
+
+{% block content %}
+FOO
+{% endblock %}
+--TEMPLATE(foo.twig)--
+{% block content %}{% endblock %}
+--DATA--
+return array()
+--EXPECT--
+FOO
+--TEST--
+block_expr2
+--TEMPLATE--
+{% extends "base2.twig" %}
+
+{% block element -%}
+ Element:
+ {{- parent() -}}
+{% endblock %}
+--TEMPLATE(base2.twig)--
+{% extends "base.twig" %}
+--TEMPLATE(base.twig)--
+{% spaceless %}
+{% block element -%}
+ <div>
+ {%- if item.children is defined %}
+ {%- for item in item.children %}
+ {{- block('element') -}}
+ {% endfor %}
+ {%- endif -%}
+ </div>
+{%- endblock %}
+{% endspaceless %}
+--DATA--
+return array(
+ 'item' => array(
+ 'children' => array(
+ null,
+ null,
+ )
+ )
+)
+--EXPECT--
+Element:<div>Element:<div></div>Element:<div></div></div>
+--TEST--
+block_expr
+--TEMPLATE--
+{% extends "base.twig" %}
+
+{% block element -%}
+ Element:
+ {{- parent() -}}
+{% endblock %}
+--TEMPLATE(base.twig)--
+{% spaceless %}
+{% block element -%}
+ <div>
+ {%- if item.children is defined %}
+ {%- for item in item.children %}
+ {{- block('element') -}}
+ {% endfor %}
+ {%- endif -%}
+ </div>
+{%- endblock %}
+{% endspaceless %}
+--DATA--
+return array(
+ 'item' => array(
+ 'children' => array(
+ null,
+ null,
+ )
+ )
+)
+--EXPECT--
+Element:<div>Element:<div></div>Element:<div></div></div>
+--TEST--
+"extends" tag
+--TEMPLATE--
+{% extends standalone ? foo : 'bar.twig' %}
+
+{% block content %}{{ parent() }}FOO{% endblock %}
+--TEMPLATE(foo.twig)--
+{% block content %}FOO{% endblock %}
+--TEMPLATE(bar.twig)--
+{% block content %}BAR{% endblock %}
+--DATA--
+return array('foo' => 'foo.twig', 'standalone' => true)
+--EXPECT--
+FOOFOO
+--TEST--
+"extends" tag
+--TEMPLATE--
+{% extends foo %}
+
+{% block content %}
+FOO
+{% endblock %}
+--TEMPLATE(foo.twig)--
+{% block content %}{% endblock %}
+--DATA--
+return array('foo' => 'foo.twig')
+--EXPECT--
+FOO
+--TEST--
+"extends" tag
+--TEMPLATE--
+{% extends "foo.twig" %}
+--TEMPLATE(foo.twig)--
+{% block content %}FOO{% endblock %}
+--DATA--
+return array()
+--EXPECT--
+FOO
+--TEST--
+"extends" tag
+--TEMPLATE--
+{% extends ["foo.twig", "bar.twig"] %}
+--TEMPLATE(bar.twig)--
+{% block content %}
+foo
+{% endblock %}
+--DATA--
+return array()
+--EXPECT--
+foo
+--TEST--
+"extends" tag
+--TEMPLATE--
+{% extends "layout.twig" %}{% block content %}{{ parent() }}index {% endblock %}
+--TEMPLATE(layout.twig)--
+{% extends "base.twig" %}{% block content %}{{ parent() }}layout {% endblock %}
+--TEMPLATE(base.twig)--
+{% block content %}base {% endblock %}
+--DATA--
+return array()
+--EXPECT--
+base layout index
+--TEST--
+"block" tag
+--TEMPLATE--
+{% block content %}
+ CONTENT
+ {%- block subcontent -%}
+ SUBCONTENT
+ {%- endblock -%}
+ ENDCONTENT
+{% endblock %}
+--TEMPLATE(foo.twig)--
+--DATA--
+return array()
+--EXPECT--
+CONTENTSUBCONTENTENDCONTENT
+--TEST--
+"block" tag
+--TEMPLATE--
+{% extends "foo.twig" %}
+
+{% block content %}
+ {% block subcontent %}
+ {% block subsubcontent %}
+ SUBSUBCONTENT
+ {% endblock %}
+ {% endblock %}
+{% endblock %}
+--TEMPLATE(foo.twig)--
+{% block content %}
+ {% block subcontent %}
+ SUBCONTENT
+ {% endblock %}
+{% endblock %}
+--DATA--
+return array()
+--EXPECT--
+SUBSUBCONTENT
+--TEST--
+"extends" tag
+--TEMPLATE--
+{% extends "layout.twig" %}
+{% block inside %}INSIDE{% endblock inside %}
+--TEMPLATE(layout.twig)--
+{% extends "base.twig" %}
+{% block body %}
+ {% block inside '' %}
+{% endblock body %}
+--TEMPLATE(base.twig)--
+{% block body '' %}
+--DATA--
+return array()
+--EXPECT--
+INSIDE
+--TEST--
+"extends" tag
+--TEMPLATE--
+{% extends foo ? 'foo.twig' : 'bar.twig' %}
+--TEMPLATE(foo.twig)--
+FOO
+--TEMPLATE(bar.twig)--
+BAR
+--DATA--
+return array('foo' => true)
+--EXPECT--
+FOO
+--DATA--
+return array('foo' => false)
+--EXPECT--
+BAR
+--TEST--
+"extends" tag
+--TEMPLATE--
+{% block content %}
+ {% extends "foo.twig" %}
+{% endblock %}
+--EXCEPTION--
+Twig_Error_Syntax: Cannot extend from a block in "index.twig" at line 3
+--TEST--
+"extends" tag
+--TEMPLATE--
+{% extends "base.twig" %}
+{% block content %}{% include "included.twig" %}{% endblock %}
+
+{% block footer %}Footer{% endblock %}
+--TEMPLATE(included.twig)--
+{% extends "base.twig" %}
+{% block content %}Included Content{% endblock %}
+--TEMPLATE(base.twig)--
+{% block content %}Default Content{% endblock %}
+
+{% block footer %}Default Footer{% endblock %}
+--DATA--
+return array()
+--EXPECT--
+Included Content
+Default Footer
+Footer
+--TEST--
+"extends" tag
+--TEMPLATE--
+{% extends "foo.twig" %}
+
+{% block content %}
+ {% block inside %}
+ INSIDE OVERRIDDEN
+ {% endblock %}
+
+ BEFORE
+ {{ parent() }}
+ AFTER
+{% endblock %}
+--TEMPLATE(foo.twig)--
+{% block content %}
+ BAR
+{% endblock %}
+--DATA--
+return array()
+--EXPECT--
+
+INSIDE OVERRIDDEN
+
+ BEFORE
+ BAR
+
+ AFTER
+--TEST--
+"extends" tag
+--TEMPLATE--
+{% extends "foo.twig" %}
+
+{% block content %}{{ parent() }}FOO{{ parent() }}{% endblock %}
+--TEMPLATE(foo.twig)--
+{% block content %}BAR{% endblock %}
+--DATA--
+return array()
+--EXPECT--
+BARFOOBAR
+--TEST--
+"parent" tag
+--TEMPLATE--
+{% use 'foo.twig' %}
+
+{% block content %}
+ {{ parent() }}
+{% endblock %}
+--TEMPLATE(foo.twig)--
+{% block content %}BAR{% endblock %}
+--DATA--
+return array()
+--EXPECT--
+BAR
+--TEST--
+"parent" tag
+--TEMPLATE--
+{% block content %}
+ {{ parent() }}
+{% endblock %}
+--EXCEPTION--
+Twig_Error_Syntax: Calling "parent" on a template that does not extend nor "use" another template is forbidden in "index.twig" at line 3
+--TEST--
+"extends" tag accepts Twig_Template instance
+--TEMPLATE--
+{% extends foo %}
+
+{% block content %}
+{{ parent() }}FOO
+{% endblock %}
+--TEMPLATE(foo.twig)--
+{% block content %}BAR{% endblock %}
+--DATA--
+return array('foo' => $twig->loadTemplate('foo.twig'))
+--EXPECT--
+BARFOO
+--TEST--
+"parent" function
+--TEMPLATE--
+{% extends "parent.twig" %}
+
+{% use "use1.twig" %}
+{% use "use2.twig" %}
+
+{% block content_parent %}
+ {{ parent() }}
+{% endblock %}
+
+{% block content_use1 %}
+ {{ parent() }}
+{% endblock %}
+
+{% block content_use2 %}
+ {{ parent() }}
+{% endblock %}
+
+{% block content %}
+ {{ block('content_use1_only') }}
+ {{ block('content_use2_only') }}
+{% endblock %}
+--TEMPLATE(parent.twig)--
+{% block content_parent 'content_parent' %}
+{% block content_use1 'content_parent' %}
+{% block content_use2 'content_parent' %}
+{% block content '' %}
+--TEMPLATE(use1.twig)--
+{% block content_use1 'content_use1' %}
+{% block content_use2 'content_use1' %}
+{% block content_use1_only 'content_use1_only' %}
+--TEMPLATE(use2.twig)--
+{% block content_use2 'content_use2' %}
+{% block content_use2_only 'content_use2_only' %}
+--DATA--
+return array()
+--EXPECT--
+ content_parent
+ content_use1
+ content_use2
+ content_use1_only
+ content_use2_only
+--TEST--
+"macro" tag
+--TEMPLATE--
+{% import _self as macros %}
+
+{{ macros.input('username') }}
+{{ macros.input('password', null, 'password', 1) }}
+
+{% macro input(name, value, type, size) %}
+ <input type="{{ type|default("text") }}" name="{{ name }}" value="{{ value|e|default('') }}" size="{{ size|default(20) }}">
+{% endmacro %}
+--DATA--
+return array()
+--EXPECT--
+ <input type="text" name="username" value="" size="20">
+
+ <input type="password" name="password" value="" size="1">
+--TEST--
+"macro" tag supports name for endmacro
+--TEMPLATE--
+{% import _self as macros %}
+
+{{ macros.foo() }}
+{{ macros.bar() }}
+
+{% macro foo() %}foo{% endmacro %}
+{% macro bar() %}bar{% endmacro bar %}
+--DATA--
+return array()
+--EXPECT--
+foo
+bar
+
+--TEST--
+"macro" tag
+--TEMPLATE--
+{% import 'forms.twig' as forms %}
+
+{{ forms.input('username') }}
+{{ forms.input('password', null, 'password', 1) }}
+--TEMPLATE(forms.twig)--
+{% macro input(name, value, type, size) %}
+ <input type="{{ type|default("text") }}" name="{{ name }}" value="{{ value|e|default('') }}" size="{{ size|default(20) }}">
+{% endmacro %}
+--DATA--
+return array()
+--EXPECT--
+ <input type="text" name="username" value="" size="20">
+
+ <input type="password" name="password" value="" size="1">
+--TEST--
+"macro" tag
+--TEMPLATE--
+{% from 'forms.twig' import foo %}
+{% from 'forms.twig' import foo as foobar, bar %}
+
+{{ foo('foo') }}
+{{ foobar('foo') }}
+{{ bar('foo') }}
+--TEMPLATE(forms.twig)--
+{% macro foo(name) %}foo{{ name }}{% endmacro %}
+{% macro bar(name) %}bar{{ name }}{% endmacro %}
+--DATA--
+return array()
+--EXPECT--
+foofoo
+foofoo
+barfoo
+--TEST--
+"macro" tag
+--TEMPLATE--
+{% from 'forms.twig' import foo %}
+
+{{ foo('foo') }}
+{{ foo() }}
+--TEMPLATE(forms.twig)--
+{% macro foo(name) %}{{ name|default('foo') }}{{ global }}{% endmacro %}
+--DATA--
+return array()
+--EXPECT--
+fooglobal
+fooglobal
+--TEST--
+"macro" tag
+--TEMPLATE--
+{% import _self as forms %}
+
+{{ forms.input('username') }}
+{{ forms.input('password', null, 'password', 1) }}
+
+{% macro input(name, value, type, size) %}
+ <input type="{{ type|default("text") }}" name="{{ name }}" value="{{ value|e|default('') }}" size="{{ size|default(20) }}">
+{% endmacro %}
+--DATA--
+return array()
+--EXPECT--
+ <input type="text" name="username" value="" size="20">
+
+ <input type="password" name="password" value="" size="1">
+--TEST--
+"raw" tag
+--TEMPLATE--
+{% raw %}
+{{ foo }}
+{% endraw %}
+--DATA--
+return array()
+--EXPECT--
+{{ foo }}
+--TEST--
+"raw" tag
+--TEMPLATE--
+{% raw %}
+{{ foo }}
+{% endverbatim %}
+--DATA--
+return array()
+--EXCEPTION--
+Twig_Error_Syntax: Unexpected end of file: Unclosed "raw" block in "index.twig" at line 2
+--TEST--
+"raw" tag
+--TEMPLATE--
+1***
+
+{%- raw %}
+ {{ 'bla' }}
+{% endraw %}
+
+1***
+2***
+
+{%- raw -%}
+ {{ 'bla' }}
+{% endraw %}
+
+2***
+3***
+
+{%- raw -%}
+ {{ 'bla' }}
+{% endraw -%}
+
+3***
+4***
+
+{%- raw -%}
+ {{ 'bla' }}
+{%- endraw %}
+
+4***
+5***
+
+{%- raw -%}
+ {{ 'bla' }}
+{%- endraw -%}
+
+5***
+--DATA--
+return array()
+--EXPECT--
+1***
+ {{ 'bla' }}
+
+
+1***
+2***{{ 'bla' }}
+
+
+2***
+3***{{ 'bla' }}
+3***
+4***{{ 'bla' }}
+
+4***
+5***{{ 'bla' }}5***
+--TEST--
+sandbox tag
+--TEMPLATE--
+{%- sandbox %}
+ {%- include "foo.twig" %}
+ a
+{%- endsandbox %}
+--TEMPLATE(foo.twig)--
+foo
+--EXCEPTION--
+Twig_Error_Syntax: Only "include" tags are allowed within a "sandbox" section in "index.twig" at line 4
+--TEST--
+sandbox tag
+--TEMPLATE--
+{%- sandbox %}
+ {%- include "foo.twig" %}
+
+ {% if 1 %}
+ {%- include "foo.twig" %}
+ {% endif %}
+{%- endsandbox %}
+--TEMPLATE(foo.twig)--
+foo
+--EXCEPTION--
+Twig_Error_Syntax: Only "include" tags are allowed within a "sandbox" section in "index.twig" at line 5
+--TEST--
+sandbox tag
+--TEMPLATE--
+{%- sandbox %}
+ {%- include "foo.twig" %}
+{%- endsandbox %}
+
+{%- sandbox %}
+ {%- include "foo.twig" %}
+ {%- include "foo.twig" %}
+{%- endsandbox %}
+
+{%- sandbox %}{% include "foo.twig" %}{% endsandbox %}
+--TEMPLATE(foo.twig)--
+foo
+--DATA--
+return array()
+--EXPECT--
+foo
+foo
+foo
+foo
+--TEST--
+"set" tag
+--TEMPLATE--
+{% set foo = 'foo' %}
+{% set bar = 'foo<br />' %}
+
+{{ foo }}
+{{ bar }}
+
+{% set foo, bar = 'foo', 'bar' %}
+
+{{ foo }}{{ bar }}
+--DATA--
+return array()
+--EXPECT--
+foo
+foo&lt;br /&gt;
+
+
+foobar
+--TEST--
+"set" tag block empty capture
+--TEMPLATE--
+{% set foo %}{% endset %}
+
+{% if foo %}FAIL{% endif %}
+--DATA--
+return array()
+--EXPECT--
+--TEST--
+"set" tag block capture
+--TEMPLATE--
+{% set foo %}f<br />o<br />o{% endset %}
+
+{{ foo }}
+--DATA--
+return array()
+--EXPECT--
+f<br />o<br />o
+--TEST--
+"set" tag
+--TEMPLATE--
+{% set foo, bar = 'foo' ~ 'bar', 'bar' ~ 'foo' %}
+
+{{ foo }}
+{{ bar }}
+--DATA--
+return array()
+--EXPECT--
+foobar
+barfoo
+--TEST--
+"spaceless" tag removes whites between HTML tags
+--TEMPLATE--
+{% spaceless %}
+
+ <div> <div> foo </div> </div>
+
+{% endspaceless %}
+--DATA--
+return array()
+--EXPECT--
+<div><div> foo </div></div>
+--TEST--
+"§" custom tag
+--TEMPLATE--
+{% § %}
+--DATA--
+return array()
+--EXPECT--
+--TEST--
+Whitespace trimming on tags.
+--TEMPLATE--
+{{ 5 * '{#-'|length }}
+{{ '{{-'|length * 5 + '{%-'|length }}
+
+Trim on control tag:
+{% for i in range(1, 9) -%}
+ {{ i }}
+{%- endfor %}
+
+
+Trim on output tag:
+{% for i in range(1, 9) %}
+ {{- i -}}
+{% endfor %}
+
+
+Trim comments:
+
+{#- Invisible -#}
+
+After the comment.
+
+Trim leading space:
+{% if leading %}
+
+ {{- leading }}
+{% endif %}
+
+{%- if leading %}
+ {{- leading }}
+
+{%- endif %}
+
+
+Trim trailing space:
+{% if trailing -%}
+ {{ trailing -}}
+
+{% endif -%}
+
+Combined:
+
+{%- if both -%}
+<ul>
+ <li> {{- both -}} </li>
+</ul>
+
+{%- endif -%}
+
+end
+--DATA--
+return array('leading' => 'leading space', 'trailing' => 'trailing space', 'both' => 'both')
+--EXPECT--
+15
+18
+
+Trim on control tag:
+123456789
+
+Trim on output tag:
+123456789
+
+Trim comments:After the comment.
+
+Trim leading space:
+leading space
+leading space
+
+Trim trailing space:
+trailing spaceCombined:<ul>
+ <li>both</li>
+</ul>end
+--TEST--
+"use" tag
+--TEMPLATE--
+{% use "blocks.twig" with content as foo %}
+
+{{ block('foo') }}
+--TEMPLATE(blocks.twig)--
+{% block content 'foo' %}
+--DATA--
+return array()
+--EXPECT--
+foo
+--TEST--
+"use" tag
+--TEMPLATE--
+{% use "blocks.twig" %}
+
+{{ block('content') }}
+--TEMPLATE(blocks.twig)--
+{% block content 'foo' %}
+--DATA--
+return array()
+--EXPECT--
+foo
+--TEST--
+"use" tag
+--TEMPLATE--
+{% use "foo.twig" %}
+--TEMPLATE(foo.twig)--
+{% use "bar.twig" %}
+--TEMPLATE(bar.twig)--
+--DATA--
+return array()
+--EXPECT--
+--TEST--
+"use" tag
+--TEMPLATE--
+{% use "foo.twig" %}
+
+{{ block('content') }}
+{{ block('foo') }}
+{{ block('bar') }}
+--TEMPLATE(foo.twig)--
+{% use "bar.twig" %}
+
+{% block content 'foo' %}
+{% block foo 'foo' %}
+--TEMPLATE(bar.twig)--
+{% block content 'bar' %}
+{% block bar 'bar' %}
+--DATA--
+return array()
+--EXPECT--
+foo
+foo
+bar
+--TEST--
+"use" tag
+--TEMPLATE--
+{% use "ancestor.twig" %}
+{% use "parent.twig" %}
+
+{{ block('container') }}
+--TEMPLATE(parent.twig)--
+{% block sub_container %}
+ <div class="overriden_sub_container">overriden sub_container</div>
+{% endblock %}
+--TEMPLATE(ancestor.twig)--
+{% block container %}
+ <div class="container">{{ block('sub_container') }}</div>
+{% endblock %}
+
+{% block sub_container %}
+ <div class="sub_container">sub_container</div>
+{% endblock %}
+--DATA--
+return array()
+--EXPECT--
+<div class="container"> <div class="overriden_sub_container">overriden sub_container</div>
+</div>
+--TEST--
+"use" tag
+--TEMPLATE--
+{% use "parent.twig" %}
+
+{{ block('container') }}
+--TEMPLATE(parent.twig)--
+{% use "ancestor.twig" %}
+
+{% block sub_container %}
+ <div class="overriden_sub_container">overriden sub_container</div>
+{% endblock %}
+--TEMPLATE(ancestor.twig)--
+{% block container %}
+ <div class="container">{{ block('sub_container') }}</div>
+{% endblock %}
+
+{% block sub_container %}
+ <div class="sub_container">sub_container</div>
+{% endblock %}
+--DATA--
+return array()
+--EXPECT--
+<div class="container"> <div class="overriden_sub_container">overriden sub_container</div>
+</div>
+--TEST--
+"use" tag
+--TEMPLATE--
+{% use "foo.twig" with content as foo_content %}
+{% use "bar.twig" %}
+
+{{ block('content') }}
+{{ block('foo') }}
+{{ block('bar') }}
+{{ block('foo_content') }}
+--TEMPLATE(foo.twig)--
+{% block content 'foo' %}
+{% block foo 'foo' %}
+--TEMPLATE(bar.twig)--
+{% block content 'bar' %}
+{% block bar 'bar' %}
+--DATA--
+return array()
+--EXPECT--
+bar
+foo
+bar
+foo
+--TEST--
+"use" tag
+--TEMPLATE--
+{% use "foo.twig" %}
+{% use "bar.twig" %}
+
+{{ block('content') }}
+{{ block('foo') }}
+{{ block('bar') }}
+--TEMPLATE(foo.twig)--
+{% block content 'foo' %}
+{% block foo 'foo' %}
+--TEMPLATE(bar.twig)--
+{% block content 'bar' %}
+{% block bar 'bar' %}
+--DATA--
+return array()
+--EXPECT--
+bar
+foo
+bar
+--TEST--
+"use" tag
+--TEMPLATE--
+{% use 'file2.html.twig'%}
+{% block foobar %}
+ {{- parent() -}}
+ Content of block (second override)
+{% endblock foobar %}
+--TEMPLATE(file2.html.twig)--
+{% use 'file1.html.twig' %}
+{% block foobar %}
+ {{- parent() -}}
+ Content of block (first override)
+{% endblock foobar %}
+--TEMPLATE(file1.html.twig)--
+{% block foobar -%}
+ Content of block
+{% endblock foobar %}
+--DATA--
+return array()
+--EXPECT--
+Content of block
+Content of block (first override)
+Content of block (second override)
+--TEST--
+"use" tag
+--TEMPLATE--
+{% use 'file2.html.twig' %}
+{% use 'file1.html.twig' with foo %}
+{% block foo %}
+ {{- parent() -}}
+ Content of foo (second override)
+{% endblock foo %}
+{% block bar %}
+ {{- parent() -}}
+ Content of bar (second override)
+{% endblock bar %}
+--TEMPLATE(file2.html.twig)--
+{% use 'file1.html.twig' %}
+{% block foo %}
+ {{- parent() -}}
+ Content of foo (first override)
+{% endblock foo %}
+{% block bar %}
+ {{- parent() -}}
+ Content of bar (first override)
+{% endblock bar %}
+--TEMPLATE(file1.html.twig)--
+{% block foo -%}
+ Content of foo
+{% endblock foo %}
+{% block bar -%}
+ Content of bar
+{% endblock bar %}
+--DATA--
+return array()
+--EXPECT--
+Content of foo
+Content of foo (first override)
+Content of foo (second override)
+Content of bar
+Content of bar (second override)
+--TEST--
+"use" tag
+--TEMPLATE--
+{% use 'file2.html.twig' with foobar as base_base_foobar %}
+{% block foobar %}
+ {{- block('base_base_foobar') -}}
+ Content of block (second override)
+{% endblock foobar %}
+--TEMPLATE(file2.html.twig)--
+{% use 'file1.html.twig' with foobar as base_foobar %}
+{% block foobar %}
+ {{- block('base_foobar') -}}
+ Content of block (first override)
+{% endblock foobar %}
+--TEMPLATE(file1.html.twig)--
+{% block foobar -%}
+ Content of block
+{% endblock foobar %}
+--DATA--
+return array()
+--EXPECT--
+Content of block
+Content of block (first override)
+Content of block (second override)
+--TEST--
+"verbatim" tag
+--TEMPLATE--
+{% verbatim %}
+{{ foo }}
+{% endverbatim %}
+--DATA--
+return array()
+--EXPECT--
+{{ foo }}
+--TEST--
+"verbatim" tag
+--TEMPLATE--
+{% verbatim %}
+{{ foo }}
+{% endraw %}
+--DATA--
+return array()
+--EXCEPTION--
+Twig_Error_Syntax: Unexpected end of file: Unclosed "verbatim" block in "index.twig" at line 2
+--TEST--
+"verbatim" tag
+--TEMPLATE--
+1***
+
+{%- verbatim %}
+ {{ 'bla' }}
+{% endverbatim %}
+
+1***
+2***
+
+{%- verbatim -%}
+ {{ 'bla' }}
+{% endverbatim %}
+
+2***
+3***
+
+{%- verbatim -%}
+ {{ 'bla' }}
+{% endverbatim -%}
+
+3***
+4***
+
+{%- verbatim -%}
+ {{ 'bla' }}
+{%- endverbatim %}
+
+4***
+5***
+
+{%- verbatim -%}
+ {{ 'bla' }}
+{%- endverbatim -%}
+
+5***
+--DATA--
+return array()
+--EXPECT--
+1***
+ {{ 'bla' }}
+
+
+1***
+2***{{ 'bla' }}
+
+
+2***
+3***{{ 'bla' }}
+3***
+4***{{ 'bla' }}
+
+4***
+5***{{ 'bla' }}5***
+--TEST--
+array index test
+--TEMPLATE--
+{% for key, value in days %}
+{{ key }}
+{% endfor %}
+--DATA--
+return array('days' => array(
+ 1 => array('money' => 9),
+ 2 => array('money' => 21),
+ 3 => array('money' => 38),
+ 4 => array('money' => 6),
+ 18 => array('money' => 6),
+ 19 => array('money' => 3),
+ 31 => array('money' => 11),
+));
+--EXPECT--
+1
+2
+3
+4
+18
+19
+31
+--TEST--
+"const" test
+--TEMPLATE--
+{{ 8 is constant('E_NOTICE') ? 'ok' : 'no' }}
+{{ 'bar' is constant('TwigTestFoo::BAR_NAME') ? 'ok' : 'no' }}
+{{ value is constant('TwigTestFoo::BAR_NAME') ? 'ok' : 'no' }}
+{{ 2 is constant('ARRAY_AS_PROPS', object) ? 'ok' : 'no' }}
+--DATA--
+return array('value' => 'bar', 'object' => new ArrayObject(array('hi')));
+--EXPECT--
+ok
+ok
+ok
+ok--TEST--
+"defined" test
+--TEMPLATE--
+{{ definedVar is defined ? 'ok' : 'ko' }}
+{{ definedVar is not defined ? 'ko' : 'ok' }}
+{{ undefinedVar is defined ? 'ko' : 'ok' }}
+{{ undefinedVar is not defined ? 'ok' : 'ko' }}
+{{ zeroVar is defined ? 'ok' : 'ko' }}
+{{ nullVar is defined ? 'ok' : 'ko' }}
+{{ nested.definedVar is defined ? 'ok' : 'ko' }}
+{{ nested['definedVar'] is defined ? 'ok' : 'ko' }}
+{{ nested.definedVar is not defined ? 'ko' : 'ok' }}
+{{ nested.undefinedVar is defined ? 'ko' : 'ok' }}
+{{ nested['undefinedVar'] is defined ? 'ko' : 'ok' }}
+{{ nested.undefinedVar is not defined ? 'ok' : 'ko' }}
+{{ nested.zeroVar is defined ? 'ok' : 'ko' }}
+{{ nested.nullVar is defined ? 'ok' : 'ko' }}
+{{ nested.definedArray.0 is defined ? 'ok' : 'ko' }}
+{{ nested['definedArray'][0] is defined ? 'ok' : 'ko' }}
+{{ object.foo is defined ? 'ok' : 'ko' }}
+{{ object.undefinedMethod is defined ? 'ko' : 'ok' }}
+{{ object.getFoo() is defined ? 'ok' : 'ko' }}
+{{ object.getFoo('a') is defined ? 'ok' : 'ko' }}
+{{ object.undefinedMethod() is defined ? 'ko' : 'ok' }}
+{{ object.undefinedMethod('a') is defined ? 'ko' : 'ok' }}
+{{ object.self.foo is defined ? 'ok' : 'ko' }}
+{{ object.self.undefinedMethod is defined ? 'ko' : 'ok' }}
+{{ object.undefinedMethod.self is defined ? 'ko' : 'ok' }}
+--DATA--
+return array(
+ 'definedVar' => 'defined',
+ 'zeroVar' => 0,
+ 'nullVar' => null,
+ 'nested' => array(
+ 'definedVar' => 'defined',
+ 'zeroVar' => 0,
+ 'nullVar' => null,
+ 'definedArray' => array(0),
+ ),
+ 'object' => new TwigTestFoo(),
+);
+--EXPECT--
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+--DATA--
+return array(
+ 'definedVar' => 'defined',
+ 'zeroVar' => 0,
+ 'nullVar' => null,
+ 'nested' => array(
+ 'definedVar' => 'defined',
+ 'zeroVar' => 0,
+ 'nullVar' => null,
+ 'definedArray' => array(0),
+ ),
+ 'object' => new TwigTestFoo(),
+);
+--CONFIG--
+return array('strict_variables' => false)
+--EXPECT--
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+--TEST--
+"empty" test
+--TEMPLATE--
+{{ foo is empty ? 'ok' : 'ko' }}
+{{ bar is empty ? 'ok' : 'ko' }}
+{{ foobar is empty ? 'ok' : 'ko' }}
+{{ array is empty ? 'ok' : 'ko' }}
+{{ zero is empty ? 'ok' : 'ko' }}
+{{ string is empty ? 'ok' : 'ko' }}
+{{ countable_empty is empty ? 'ok' : 'ko' }}
+{{ countable_not_empty is empty ? 'ok' : 'ko' }}
+{{ markup_empty is empty ? 'ok' : 'ko' }}
+{{ markup_not_empty is empty ? 'ok' : 'ko' }}
+--DATA--
+
+class CountableStub implements Countable
+{
+ private $items;
+
+ public function __construct(array $items)
+ {
+ $this->items = $items;
+ }
+
+ public function count()
+ {
+ return count($this->items);
+ }
+}
+return array(
+ 'foo' => '', 'bar' => null, 'foobar' => false, 'array' => array(), 'zero' => 0, 'string' => '0',
+ 'countable_empty' => new CountableStub(array()), 'countable_not_empty' => new CountableStub(array(1, 2)),
+ 'markup_empty' => new Twig_Markup('', 'UTF-8'), 'markup_not_empty' => new Twig_Markup('test', 'UTF-8'),
+);
+--EXPECT--
+ok
+ok
+ok
+ok
+ko
+ko
+ok
+ko
+ok
+ko
+--TEST--
+"even" test
+--TEMPLATE--
+{{ 1 is even ? 'ko' : 'ok' }}
+{{ 2 is even ? 'ok' : 'ko' }}
+{{ 1 is not even ? 'ok' : 'ko' }}
+{{ 2 is not even ? 'ko' : 'ok' }}
+--DATA--
+return array()
+--EXPECT--
+ok
+ok
+ok
+ok
+--TEST--
+Twig supports the in operator
+--TEMPLATE--
+{% if bar in foo %}
+TRUE
+{% endif %}
+{% if not (bar in foo) %}
+{% else %}
+TRUE
+{% endif %}
+{% if bar not in foo %}
+{% else %}
+TRUE
+{% endif %}
+{% if 'a' in bar %}
+TRUE
+{% endif %}
+{% if 'c' not in bar %}
+TRUE
+{% endif %}
+{% if '' not in bar %}
+TRUE
+{% endif %}
+{% if '' in '' %}
+TRUE
+{% endif %}
+{% if '0' not in '' %}
+TRUE
+{% endif %}
+{% if 'a' not in '0' %}
+TRUE
+{% endif %}
+{% if '0' in '0' %}
+TRUE
+{% endif %}
+{{ false in [0, 1] ? 'TRUE' : 'FALSE' }}
+{{ true in [0, 1] ? 'TRUE' : 'FALSE' }}
+{{ '0' in [0, 1] ? 'TRUE' : 'FALSE' }}
+{{ '' in [0, 1] ? 'TRUE' : 'FALSE' }}
+{{ 0 in ['', 1] ? 'TRUE' : 'FALSE' }}
+{{ '' in 'foo' ? 'TRUE' : 'FALSE' }}
+{{ 0 in 'foo' ? 'TRUE' : 'FALSE' }}
+{{ false in 'foo' ? 'TRUE' : 'FALSE' }}
+{{ true in '100' ? 'TRUE' : 'FALSE' }}
+{{ [] in 'Array' ? 'TRUE' : 'FALSE' }}
+{{ [] in [true, false] ? 'TRUE' : 'FALSE' }}
+{{ [] in [true, ''] ? 'TRUE' : 'FALSE' }}
+{{ [] in [true, []] ? 'TRUE' : 'FALSE' }}
+{{ dir_object in 'foo'~dir_name ? 'TRUE' : 'FALSE' }}
+{{ 5 in 125 ? 'TRUE' : 'FALSE' }}
+--DATA--
+return array('bar' => 'bar', 'foo' => array('bar' => 'bar'), 'dir_name' => dirname(__FILE__), 'dir_object' => new SplFileInfo(dirname(__FILE__)))
+--EXPECT--
+TRUE
+TRUE
+TRUE
+TRUE
+TRUE
+TRUE
+TRUE
+TRUE
+TRUE
+FALSE
+FALSE
+FALSE
+FALSE
+FALSE
+TRUE
+FALSE
+FALSE
+FALSE
+FALSE
+FALSE
+FALSE
+TRUE
+FALSE
+FALSE
+--TEST--
+Twig supports the in operator when using objects
+--TEMPLATE--
+{% if object in object_list %}
+TRUE
+{% endif %}
+--DATA--
+$foo = new TwigTestFoo();
+$foo1 = new TwigTestFoo();
+
+$foo->position = $foo1;
+$foo1->position = $foo;
+
+return array(
+ 'object' => $foo,
+ 'object_list' => array($foo1, $foo),
+);
+--EXPECT--
+TRUE
+--TEST--
+"iterable" test
+--TEMPLATE--
+{{ foo is iterable ? 'ok' : 'ko' }}
+{{ traversable is iterable ? 'ok' : 'ko' }}
+{{ obj is iterable ? 'ok' : 'ko' }}
+{{ val is iterable ? 'ok' : 'ko' }}
+--DATA--
+return array(
+ 'foo' => array(),
+ 'traversable' => new ArrayIterator(array()),
+ 'obj' => new stdClass(),
+ 'val' => 'test',
+);
+--EXPECT--
+ok
+ok
+ko
+ko--TEST--
+"odd" test
+--TEMPLATE--
+{{ 1 is odd ? 'ok' : 'ko' }}
+{{ 2 is odd ? 'ko' : 'ok' }}
+--DATA--
+return array()
+--EXPECT--
+ok
+ok
diff --git a/tests/examplefiles/unicode.go b/tests/examplefiles/unicode.go
new file mode 100644
index 00000000..d4bef4d1
--- /dev/null
+++ b/tests/examplefiles/unicode.go
@@ -0,0 +1,10 @@
+package main
+
+import "fmt"
+
+func main() {
+ 世界 := "Hello, world!"
+ さようなら := "Goodbye, world!"
+ fmt.Println(世界)
+ fmt.Println(さようなら)
+}
diff --git a/tests/examplefiles/unicode.js b/tests/examplefiles/unicode.js
new file mode 100644
index 00000000..8f553f6f
--- /dev/null
+++ b/tests/examplefiles/unicode.js
@@ -0,0 +1,6 @@
+var école;
+var sinθ;
+var เมือง;
+var a\u1234b;
+
+var nbsp;
diff --git a/tests/examplefiles/test.bas b/tests/examplefiles/vbnet_test.bas
index af5f2574..af5f2574 100644
--- a/tests/examplefiles/test.bas
+++ b/tests/examplefiles/vbnet_test.bas
diff --git a/tests/examplefiles/vctreestatus_hg b/tests/examplefiles/vctreestatus_hg
new file mode 100644
index 00000000..193ed803
--- /dev/null
+++ b/tests/examplefiles/vctreestatus_hg
@@ -0,0 +1,4 @@
+M LICENSE
+M setup.py
+! setup.cfg
+? vctreestatus_hg
diff --git a/tests/examplefiles/vimrc b/tests/examplefiles/vimrc
new file mode 100644
index 00000000..d2f9cd1b
--- /dev/null
+++ b/tests/examplefiles/vimrc
@@ -0,0 +1,21 @@
+" A comment
+
+:py print "py"
+::pyt print 'pyt'
+ pyth print '''pyth'''
+ : pytho print "pytho"
+python print """python"""
+
+ : : python<<E OF
+print """my script"""
+
+def MyFunc(str):
+ """ My Function """
+ print str
+E OF
+
+let py = 42
+echo py
+
+let foo = 42
+echo foo
diff --git a/tests/examplefiles/vpath.mk b/tests/examplefiles/vpath.mk
new file mode 100644
index 00000000..a7f18fc3
--- /dev/null
+++ b/tests/examplefiles/vpath.mk
@@ -0,0 +1,16 @@
+vpath %.c src
+vpath %.h header
+EXEC=hello
+SRC= hello.c main.c
+OBJ= $(SRC:.c=.o)
+
+all: $(EXEC)
+
+hello: $(OBJ)
+ $(CC) -o $@ $^ $(LDFLAGS)
+
+main.o: hello.h
+
+%.o: %.c
+ $(CC) -I header -o $@ \
+ -c $< $(CFLAGS)
diff --git a/tests/old_run.py b/tests/old_run.py
deleted file mode 100644
index 4f7cef16..00000000
--- a/tests/old_run.py
+++ /dev/null
@@ -1,138 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- Pygments unit tests
- ~~~~~~~~~~~~~~~~~~
-
- Usage::
-
- python run.py [testfile ...]
-
-
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import sys, os
-import unittest
-
-from os.path import dirname, basename, join, abspath
-
-import pygments
-
-try:
- import coverage
-except ImportError:
- coverage = None
-
-testdir = abspath(dirname(__file__))
-
-failed = []
-total_test_count = 0
-error_test_count = 0
-
-
-def err(file, what, exc):
- print >>sys.stderr, file, 'failed %s:' % what,
- print >>sys.stderr, exc
- failed.append(file[:-3])
-
-
-class QuietTestRunner(object):
- """Customized test runner for relatively quiet output"""
-
- def __init__(self, testname, stream=sys.stderr):
- self.testname = testname
- self.stream = unittest._WritelnDecorator(stream)
-
- def run(self, test):
- global total_test_count
- global error_test_count
- result = unittest._TextTestResult(self.stream, True, 1)
- test(result)
- if not result.wasSuccessful():
- self.stream.write(' FAIL:')
- result.printErrors()
- failed.append(self.testname)
- else:
- self.stream.write(' ok\n')
- total_test_count += result.testsRun
- error_test_count += len(result.errors) + len(result.failures)
- return result
-
-
-def run_tests(with_coverage=False):
- # needed to avoid confusion involving atexit handlers
- import logging
-
- if sys.argv[1:]:
- # test only files given on cmdline
- files = [entry + '.py' for entry in sys.argv[1:] if entry.startswith('test_')]
- else:
- files = [entry for entry in os.listdir(testdir)
- if (entry.startswith('test_') and entry.endswith('.py'))]
- files.sort()
-
- WIDTH = 85
-
- print >>sys.stderr, \
- ('Pygments %s Test Suite running%s, stand by...' %
- (pygments.__version__,
- with_coverage and " with coverage analysis" or "")).center(WIDTH)
- print >>sys.stderr, ('(using Python %s)' % sys.version.split()[0]).center(WIDTH)
- print >>sys.stderr, '='*WIDTH
-
- if with_coverage:
- coverage.erase()
- coverage.start()
-
- for testfile in files:
- globs = {'__file__': join(testdir, testfile)}
- try:
- execfile(join(testdir, testfile), globs)
- except Exception, exc:
- raise
- err(testfile, 'execfile', exc)
- continue
- sys.stderr.write(testfile[:-3] + ': ')
- try:
- runner = QuietTestRunner(testfile[:-3])
- # make a test suite of all TestCases in the file
- tests = []
- for name, thing in globs.iteritems():
- if name.endswith('Test'):
- tests.append((name, unittest.makeSuite(thing)))
- tests.sort()
- suite = unittest.TestSuite()
- suite.addTests([x[1] for x in tests])
- runner.run(suite)
- except Exception, exc:
- err(testfile, 'running test', exc)
-
- print >>sys.stderr, '='*WIDTH
- if failed:
- print >>sys.stderr, '%d of %d tests failed.' % \
- (error_test_count, total_test_count)
- print >>sys.stderr, 'Tests failed in:', ', '.join(failed)
- ret = 1
- else:
- if total_test_count == 1:
- print >>sys.stderr, '1 test happy.'
- else:
- print >>sys.stderr, 'All %d tests happy.' % total_test_count
- ret = 0
-
- if with_coverage:
- coverage.stop()
- modules = [mod for name, mod in sys.modules.iteritems()
- if name.startswith('pygments.') and mod]
- coverage.report(modules)
-
- return ret
-
-
-if __name__ == '__main__':
- with_coverage = False
- if sys.argv[1:2] == ['-C']:
- with_coverage = bool(coverage)
- del sys.argv[1]
- sys.exit(run_tests(with_coverage))
diff --git a/tests/run.py b/tests/run.py
index 18a1d824..8167b911 100644
--- a/tests/run.py
+++ b/tests/run.py
@@ -8,42 +8,43 @@
python run.py [testfile ...]
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import sys, os
-
-if sys.version_info >= (3,):
- # copy test suite over to "build/lib" and convert it
- print ('Copying and converting sources to build/lib/test...')
- from distutils.util import copydir_run_2to3
- testroot = os.path.dirname(__file__)
- newroot = os.path.join(testroot, '..', 'build/lib/test')
- copydir_run_2to3(testroot, newroot)
- # make nose believe that we run from the converted dir
- os.chdir(newroot)
-else:
- # only find tests in this directory
- if os.path.dirname(__file__):
- os.chdir(os.path.dirname(__file__))
+from __future__ import print_function
+
+import os
+import sys
+
+# only find tests in this directory
+if os.path.dirname(__file__):
+ os.chdir(os.path.dirname(__file__))
try:
import nose
except ImportError:
- print ('nose is required to run the Pygments test suite')
+ print('nose is required to run the Pygments test suite')
sys.exit(1)
-try:
- # make sure the current source is first on sys.path
- sys.path.insert(0, '..')
- import pygments
-except ImportError:
- print ('Cannot find Pygments to test: %s' % sys.exc_info()[1])
- sys.exit(1)
+# make sure the current source is first on sys.path
+sys.path.insert(0, '..')
+
+if '--with-coverage' not in sys.argv:
+ # if running with coverage, pygments should not be imported before coverage
+ # is started, otherwise it will count already executed lines as uncovered
+ try:
+ import pygments
+ except ImportError as err:
+ print('Cannot find Pygments to test: %s' % err)
+ sys.exit(1)
+ else:
+ print('Pygments %s test suite running (Python %s)...' %
+ (pygments.__version__, sys.version.split()[0]),
+ file=sys.stderr)
else:
- print ('Pygments %s test suite running (Python %s)...' %
- (pygments.__version__, sys.version.split()[0]))
+ print('Pygments test suite running (Python %s)...' % sys.version.split()[0],
+ file=sys.stderr)
nose.main()
diff --git a/tests/string_asserts.py b/tests/string_asserts.py
new file mode 100644
index 00000000..11f5c7f0
--- /dev/null
+++ b/tests/string_asserts.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+"""
+ Pygments string assert utility
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+class StringTests(object):
+
+ def assertStartsWith(self, haystack, needle, msg=None):
+ if msg is None:
+ msg = "'{0}' does not start with '{1}'".format(haystack, needle)
+ if not haystack.startswith(needle):
+ raise(AssertionError(msg))
+
+ def assertEndsWith(self, haystack, needle, msg=None):
+ if msg is None:
+ msg = "'{0}' does not end with '{1}'".format(haystack, needle)
+ if not haystack.endswith(needle):
+ raise(AssertionError(msg))
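A minimal usage sketch of the StringTests mixin added above, assuming it is combined with unittest.TestCase in a module under tests/; the ExampleTest class and its assertions are illustrative and not taken from this patch.

# Hypothetical usage of tests/string_asserts.py (illustrative, not part of the patch).
import unittest

from string_asserts import StringTests

class ExampleTest(StringTests, unittest.TestCase):
    def test_prefix_and_suffix(self):
        # Both helpers raise AssertionError with a descriptive message on failure.
        self.assertStartsWith("pygmentize", "pyg")
        self.assertEndsWith("pygmentize", "ize")

if __name__ == '__main__':
    unittest.main()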
diff --git a/tests/support.py b/tests/support.py
index 505c17da..c66ac663 100644
--- a/tests/support.py
+++ b/tests/support.py
@@ -5,6 +5,8 @@ Support for Pygments tests
import os
+from nose import SkipTest
+
def location(mod_name):
"""
diff --git a/tests/test_basic_api.py b/tests/test_basic_api.py
index 18ed8d64..be74c1bf 100644
--- a/tests/test_basic_api.py
+++ b/tests/test_basic_api.py
@@ -3,19 +3,20 @@
Pygments basic API tests
~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import os
+from __future__ import print_function
+
import random
import unittest
-from pygments import lexers, formatters, filters, format
+from pygments import lexers, formatters, lex, format
from pygments.token import _TokenType, Text
from pygments.lexer import RegexLexer
from pygments.formatters.img import FontNotFound
-from pygments.util import BytesIO, StringIO, bytes, b
+from pygments.util import text_type, StringIO, BytesIO, xrange, ClassNotFound
import support
@@ -26,10 +27,12 @@ random.shuffle(test_content)
test_content = ''.join(test_content) + '\n'
-def test_lexer_import_all():
+def test_lexer_instantiate_all():
# instantiate every lexer, to see if the token type defs are correct
- for x in lexers.LEXERS.keys():
- c = getattr(lexers, x)()
+ def verify(name):
+ getattr(lexers, name)
+ for x in lexers.LEXERS:
+ yield verify, x
def test_lexer_classes():
@@ -39,12 +42,14 @@ def test_lexer_classes():
for attr in 'aliases', 'filenames', 'alias_filenames', 'mimetypes':
assert hasattr(cls, attr)
assert type(getattr(cls, attr)) is list, \
- "%s: %s attribute wrong" % (cls, attr)
+ "%s: %s attribute wrong" % (cls, attr)
result = cls.analyse_text("abc")
assert isinstance(result, float) and 0.0 <= result <= 1.0
result = cls.analyse_text(".abc")
assert isinstance(result, float) and 0.0 <= result <= 1.0
+ assert all(al.lower() == al for al in cls.aliases)
+
inst = cls(opt1="val1", opt2="val2")
if issubclass(cls, RegexLexer):
if not hasattr(cls, '_tokens'):
@@ -60,19 +65,22 @@ def test_lexer_classes():
if cls.name in ['XQuery', 'Opa']: # XXX temporary
return
- tokens = list(inst.get_tokens(test_content))
+ try:
+ tokens = list(inst.get_tokens(test_content))
+ except KeyboardInterrupt:
+ raise KeyboardInterrupt(
+ 'interrupted %s.get_tokens(): test_content=%r' %
+ (cls.__name__, test_content))
txt = ""
for token in tokens:
assert isinstance(token, tuple)
assert isinstance(token[0], _TokenType)
- if isinstance(token[1], str):
- print repr(token[1])
- assert isinstance(token[1], unicode)
+ assert isinstance(token[1], text_type)
txt += token[1]
assert txt == test_content, "%s lexer roundtrip failed: %r != %r" % \
- (cls.name, test_content, txt)
+ (cls.name, test_content, txt)
- for lexer in lexers._iter_lexerclasses():
+ for lexer in lexers._iter_lexerclasses(plugins=False):
yield verify, lexer
@@ -81,7 +89,8 @@ def test_lexer_options():
def ensure(tokens, output):
concatenated = ''.join(token[1] for token in tokens)
assert concatenated == output, \
- '%s: %r != %r' % (lexer, concatenated, output)
+ '%s: %r != %r' % (lexer, concatenated, output)
+
def verify(cls):
inst = cls(stripnl=False)
ensure(inst.get_tokens('a\nb'), 'a\nb\n')
@@ -90,17 +99,18 @@ def test_lexer_options():
ensure(inst.get_tokens(' \n b\n\n\n'), 'b\n')
# some lexers require full lines in input
if cls.__name__ not in (
- 'PythonConsoleLexer', 'RConsoleLexer', 'RubyConsoleLexer',
- 'SqliteConsoleLexer', 'MatlabSessionLexer', 'ErlangShellLexer',
- 'BashSessionLexer', 'LiterateHaskellLexer', 'LiterateAgdaLexer',
- 'PostgresConsoleLexer', 'ElixirConsoleLexer', 'JuliaConsoleLexer',
- 'RobotFrameworkLexer', 'DylanConsoleLexer', 'ShellSessionLexer'):
+ 'PythonConsoleLexer', 'RConsoleLexer', 'RubyConsoleLexer',
+ 'SqliteConsoleLexer', 'MatlabSessionLexer', 'ErlangShellLexer',
+ 'BashSessionLexer', 'LiterateHaskellLexer', 'LiterateAgdaLexer',
+ 'PostgresConsoleLexer', 'ElixirConsoleLexer', 'JuliaConsoleLexer',
+ 'RobotFrameworkLexer', 'DylanConsoleLexer', 'ShellSessionLexer',
+ 'LiterateIdrisLexer', 'LiterateCryptolLexer'):
inst = cls(ensurenl=False)
ensure(inst.get_tokens('a\nb'), 'a\nb')
inst = cls(ensurenl=False, stripall=True)
ensure(inst.get_tokens('a\nb\n\n'), 'a\nb')
- for lexer in lexers._iter_lexerclasses():
+ for lexer in lexers._iter_lexerclasses(plugins=False):
if lexer.__name__ == 'RawTokenLexer':
# this one is special
continue
@@ -122,7 +132,7 @@ def test_get_lexers():
]:
yield verify, func, args
- for cls, (_, lname, aliases, _, mimetypes) in lexers.LEXERS.iteritems():
+ for cls, (_, lname, aliases, _, mimetypes) in lexers.LEXERS.items():
assert cls == lexers.find_lexer_class(lname).__name__
for alias in aliases:
@@ -131,34 +141,47 @@ def test_get_lexers():
for mimetype in mimetypes:
assert cls == lexers.get_lexer_for_mimetype(mimetype).__class__.__name__
+ try:
+ lexers.get_lexer_by_name(None)
+ except ClassNotFound:
+ pass
+ else:
+ raise Exception
+
def test_formatter_public_api():
- ts = list(lexers.PythonLexer().get_tokens("def f(): pass"))
- out = StringIO()
# test that every formatter class has the correct public API
- def verify(formatter, info):
- assert len(info) == 4
- assert info[0], "missing formatter name"
- assert info[1], "missing formatter aliases"
- assert info[3], "missing formatter docstring"
-
- if formatter.name == 'Raw tokens':
- # will not work with Unicode output file
- return
+ ts = list(lexers.PythonLexer().get_tokens("def f(): pass"))
+ string_out = StringIO()
+ bytes_out = BytesIO()
+
+ def verify(formatter):
+ info = formatters.FORMATTERS[formatter.__name__]
+ assert len(info) == 5
+ assert info[1], "missing formatter name"
+ assert info[2], "missing formatter aliases"
+ assert info[4], "missing formatter docstring"
try:
inst = formatter(opt1="val1")
except (ImportError, FontNotFound):
- return
+ raise support.SkipTest
+
try:
inst.get_style_defs()
except NotImplementedError:
# may be raised by formatters for which it doesn't make sense
pass
- inst.format(ts, out)
- for formatter, info in formatters.FORMATTERS.iteritems():
- yield verify, formatter, info
+ if formatter.unicodeoutput:
+ inst.format(ts, string_out)
+ else:
+ inst.format(ts, bytes_out)
+
+ for name in formatters.FORMATTERS:
+ formatter = getattr(formatters, name)
+ yield verify, formatter
+
def test_formatter_encodings():
from pygments.formatters import HtmlFormatter
@@ -167,7 +190,7 @@ def test_formatter_encodings():
fmt = HtmlFormatter()
tokens = [(Text, u"ä")]
out = format(tokens, fmt)
- assert type(out) is unicode
+ assert type(out) is text_type
assert u"ä" in out
# encoding option
@@ -191,12 +214,12 @@ def test_formatter_unicode_handling():
inst = formatter(encoding=None)
except (ImportError, FontNotFound):
# some dependency or font not installed
- return
+ raise support.SkipTest
if formatter.name != 'Raw tokens':
out = format(tokens, inst)
if formatter.unicodeoutput:
- assert type(out) is unicode
+ assert type(out) is text_type, '%s: %r' % (formatter, out)
inst = formatter(encoding='utf-8')
out = format(tokens, inst)
@@ -208,8 +231,10 @@ def test_formatter_unicode_handling():
out = format(tokens, inst)
assert type(out) is bytes, '%s: %r' % (formatter, out)
- for formatter, info in formatters.FORMATTERS.iteritems():
- yield verify, formatter
+ for formatter, info in formatters.FORMATTERS.items():
+ # this tests the automatic importing as well
+ fmter = getattr(formatters, formatter)
+ yield verify, fmter
def test_get_formatters():
@@ -226,27 +251,50 @@ def test_get_formatters():
def test_styles():
# minimal style test
from pygments.formatters import HtmlFormatter
- fmt = HtmlFormatter(style="pastie")
+ HtmlFormatter(style="pastie")
+
+
+def test_bare_class_handler():
+ from pygments.formatters import HtmlFormatter
+ from pygments.lexers import PythonLexer
+ try:
+ lex('test\n', PythonLexer)
+ except TypeError as e:
+ assert 'lex() argument must be a lexer instance' in str(e)
+ else:
+ assert False, 'nothing raised'
+ try:
+ format([], HtmlFormatter)
+ except TypeError as e:
+ assert 'format() argument must be a formatter instance' in str(e)
+ else:
+ assert False, 'nothing raised'
class FiltersTest(unittest.TestCase):
def test_basic(self):
- filter_args = {
- 'whitespace': {'spaces': True, 'tabs': True, 'newlines': True},
- 'highlight': {'names': ['isinstance', 'lexers', 'x']},
- }
- for x in filters.FILTERS.keys():
+ filters_args = [
+ ('whitespace', {'spaces': True, 'tabs': True, 'newlines': True}),
+ ('whitespace', {'wstokentype': False, 'spaces': True}),
+ ('highlight', {'names': ['isinstance', 'lexers', 'x']}),
+ ('codetagify', {'codetags': 'API'}),
+ ('keywordcase', {'case': 'capitalize'}),
+ ('raiseonerror', {}),
+ ('gobble', {'n': 4}),
+ ('tokenmerge', {}),
+ ]
+ for x, args in filters_args:
lx = lexers.PythonLexer()
- lx.add_filter(x, **filter_args.get(x, {}))
- fp = open(TESTFILE, 'rb')
- try:
+ lx.add_filter(x, **args)
+ with open(TESTFILE, 'rb') as fp:
text = fp.read().decode('utf-8')
- finally:
- fp.close()
tokens = list(lx.get_tokens(text))
+ self.assertTrue(all(isinstance(t[1], text_type)
+ for t in tokens),
+ '%s filter did not return Unicode' % x)
roundtext = ''.join([t[1] for t in tokens])
- if x not in ('whitespace', 'keywordcase'):
+ if x not in ('whitespace', 'keywordcase', 'gobble'):
# these filters change the text
self.assertEqual(roundtext, text,
"lexer roundtrip with %s filter failed" % x)
@@ -259,22 +307,16 @@ class FiltersTest(unittest.TestCase):
def test_whitespace(self):
lx = lexers.PythonLexer()
lx.add_filter('whitespace', spaces='%')
- fp = open(TESTFILE, 'rb')
- try:
+ with open(TESTFILE, 'rb') as fp:
text = fp.read().decode('utf-8')
- finally:
- fp.close()
lxtext = ''.join([t[1] for t in list(lx.get_tokens(text))])
self.assertFalse(' ' in lxtext)
def test_keywordcase(self):
lx = lexers.PythonLexer()
lx.add_filter('keywordcase', case='capitalize')
- fp = open(TESTFILE, 'rb')
- try:
+ with open(TESTFILE, 'rb') as fp:
text = fp.read().decode('utf-8')
- finally:
- fp.close()
lxtext = ''.join([t[1] for t in list(lx.get_tokens(text))])
self.assertTrue('Def' in lxtext and 'Class' in lxtext)
diff --git a/tests/test_cfm.py b/tests/test_cfm.py
new file mode 100644
index 00000000..2585489a
--- /dev/null
+++ b/tests/test_cfm.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+"""
+ Basic ColdfusionHtmlLexer Test
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+import os
+
+from pygments.token import Token
+from pygments.lexers import ColdfusionHtmlLexer
+
+
+class ColdfusionHtmlLexerTest(unittest.TestCase):
+
+ def setUp(self):
+ self.lexer = ColdfusionHtmlLexer()
+
+ def testBasicComment(self):
+ fragment = u'<!--- cfcomment --->'
+ expected = [
+ (Token.Text, u''),
+ (Token.Comment.Multiline, u'<!---'),
+ (Token.Comment.Multiline, u' cfcomment '),
+ (Token.Comment.Multiline, u'--->'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
+
+ def testNestedComment(self):
+ fragment = u'<!--- nested <!--- cfcomment ---> --->'
+ expected = [
+ (Token.Text, u''),
+ (Token.Comment.Multiline, u'<!---'),
+ (Token.Comment.Multiline, u' nested '),
+ (Token.Comment.Multiline, u'<!---'),
+ (Token.Comment.Multiline, u' cfcomment '),
+ (Token.Comment.Multiline, u'--->'),
+ (Token.Comment.Multiline, u' '),
+ (Token.Comment.Multiline, u'--->'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
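To see the token stream that the ColdFusion tests above assert, the lexer can be driven through the public Pygments API; a short sketch using only names that appear in the test itself:

# Sketch (not part of the patch): print the tokens for the cfcomment fragment
# asserted in testBasicComment above.
from pygments.lexers import ColdfusionHtmlLexer

for tokentype, value in ColdfusionHtmlLexer().get_tokens(u'<!--- cfcomment --->'):
    print('%s %r' % (tokentype, value))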
diff --git a/tests/test_clexer.py b/tests/test_clexer.py
index 8b37bf57..6a3dcbce 100644
--- a/tests/test_clexer.py
+++ b/tests/test_clexer.py
@@ -3,14 +3,15 @@
Basic CLexer Test
~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import unittest
import os
+import textwrap
-from pygments.token import Text, Number
+from pygments.token import Text, Number, Token
from pygments.lexers import CLexer
@@ -27,5 +28,209 @@ class CLexerTest(unittest.TestCase):
Number.Float, Number.Float], code.split()):
wanted.append(item)
wanted.append((Text, ' '))
- wanted = [(Text, '')] + wanted[:-1] + [(Text, '\n')]
+ wanted = wanted[:-1] + [(Text, '\n')]
self.assertEqual(list(self.lexer.get_tokens(code)), wanted)
+
+ def testSwitch(self):
+ fragment = u'''\
+ int main()
+ {
+ switch (0)
+ {
+ case 0:
+ default:
+ ;
+ }
+ }
+ '''
+ tokens = [
+ (Token.Keyword.Type, u'int'),
+ (Token.Text, u' '),
+ (Token.Name.Function, u'main'),
+ (Token.Punctuation, u'('),
+ (Token.Punctuation, u')'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'{'),
+ (Token.Text, u'\n'),
+ (Token.Text, u' '),
+ (Token.Keyword, u'switch'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'('),
+ (Token.Literal.Number.Integer, u'0'),
+ (Token.Punctuation, u')'),
+ (Token.Text, u'\n'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'{'),
+ (Token.Text, u'\n'),
+ (Token.Text, u' '),
+ (Token.Keyword, u'case'),
+ (Token.Text, u' '),
+ (Token.Literal.Number.Integer, u'0'),
+ (Token.Operator, u':'),
+ (Token.Text, u'\n'),
+ (Token.Text, u' '),
+ (Token.Keyword, u'default'),
+ (Token.Operator, u':'),
+ (Token.Text, u'\n'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u';'),
+ (Token.Text, u'\n'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))
+
+ def testSwitchSpaceBeforeColon(self):
+ fragment = u'''\
+ int main()
+ {
+ switch (0)
+ {
+ case 0 :
+ default :
+ ;
+ }
+ }
+ '''
+ tokens = [
+ (Token.Keyword.Type, u'int'),
+ (Token.Text, u' '),
+ (Token.Name.Function, u'main'),
+ (Token.Punctuation, u'('),
+ (Token.Punctuation, u')'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'{'),
+ (Token.Text, u'\n'),
+ (Token.Text, u' '),
+ (Token.Keyword, u'switch'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'('),
+ (Token.Literal.Number.Integer, u'0'),
+ (Token.Punctuation, u')'),
+ (Token.Text, u'\n'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'{'),
+ (Token.Text, u'\n'),
+ (Token.Text, u' '),
+ (Token.Keyword, u'case'),
+ (Token.Text, u' '),
+ (Token.Literal.Number.Integer, u'0'),
+ (Token.Text, u' '),
+ (Token.Operator, u':'),
+ (Token.Text, u'\n'),
+ (Token.Text, u' '),
+ (Token.Keyword, u'default'),
+ (Token.Text, u' '),
+ (Token.Operator, u':'),
+ (Token.Text, u'\n'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u';'),
+ (Token.Text, u'\n'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))
+
+ def testLabel(self):
+ fragment = u'''\
+ int main()
+ {
+ foo:
+ goto foo;
+ }
+ '''
+ tokens = [
+ (Token.Keyword.Type, u'int'),
+ (Token.Text, u' '),
+ (Token.Name.Function, u'main'),
+ (Token.Punctuation, u'('),
+ (Token.Punctuation, u')'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'{'),
+ (Token.Text, u'\n'),
+ (Token.Name.Label, u'foo'),
+ (Token.Punctuation, u':'),
+ (Token.Text, u'\n'),
+ (Token.Text, u' '),
+ (Token.Keyword, u'goto'),
+ (Token.Text, u' '),
+ (Token.Name, u'foo'),
+ (Token.Punctuation, u';'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))
+
+ def testLabelSpaceBeforeColon(self):
+ fragment = u'''\
+ int main()
+ {
+ foo :
+ goto foo;
+ }
+ '''
+ tokens = [
+ (Token.Keyword.Type, u'int'),
+ (Token.Text, u' '),
+ (Token.Name.Function, u'main'),
+ (Token.Punctuation, u'('),
+ (Token.Punctuation, u')'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'{'),
+ (Token.Text, u'\n'),
+ (Token.Name.Label, u'foo'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u':'),
+ (Token.Text, u'\n'),
+ (Token.Text, u' '),
+ (Token.Keyword, u'goto'),
+ (Token.Text, u' '),
+ (Token.Name, u'foo'),
+ (Token.Punctuation, u';'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))
+
+ def testLabelFollowedByStatement(self):
+ fragment = u'''\
+ int main()
+ {
+ foo:return 0;
+ goto foo;
+ }
+ '''
+ tokens = [
+ (Token.Keyword.Type, u'int'),
+ (Token.Text, u' '),
+ (Token.Name.Function, u'main'),
+ (Token.Punctuation, u'('),
+ (Token.Punctuation, u')'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'{'),
+ (Token.Text, u'\n'),
+ (Token.Name.Label, u'foo'),
+ (Token.Punctuation, u':'),
+ (Token.Keyword, u'return'),
+ (Token.Text, u' '),
+ (Token.Literal.Number.Integer, u'0'),
+ (Token.Punctuation, u';'),
+ (Token.Text, u'\n'),
+ (Token.Text, u' '),
+ (Token.Keyword, u'goto'),
+ (Token.Text, u' '),
+ (Token.Name, u'foo'),
+ (Token.Punctuation, u';'),
+ (Token.Text, u'\n'),
+ (Token.Punctuation, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(textwrap.dedent(fragment))))
diff --git a/tests/test_cmdline.py b/tests/test_cmdline.py
index 5ad815c0..5883fb5c 100644
--- a/tests/test_cmdline.py
+++ b/tests/test_cmdline.py
@@ -3,103 +3,250 @@
Command line test
~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-# Test the command line interface
+from __future__ import print_function
-import sys, os
+import io
+import os
+import re
+import sys
+import tempfile
import unittest
-import StringIO
-
-from pygments import highlight
-from pygments.cmdline import main as cmdline_main
import support
+from pygments import cmdline, highlight
+from pygments.util import BytesIO, StringIO
+
TESTFILE, TESTDIR = support.location(__file__)
+TESTCODE = '''\
+def func(args):
+ pass
+'''
-def run_cmdline(*args):
+def run_cmdline(*args, **kwds):
+ saved_stdin = sys.stdin
saved_stdout = sys.stdout
saved_stderr = sys.stderr
- new_stdout = sys.stdout = StringIO.StringIO()
- new_stderr = sys.stderr = StringIO.StringIO()
+ if sys.version_info > (3,):
+ stdin_buffer = BytesIO()
+ stdout_buffer = BytesIO()
+ stderr_buffer = BytesIO()
+ new_stdin = sys.stdin = io.TextIOWrapper(stdin_buffer, 'utf-8')
+ new_stdout = sys.stdout = io.TextIOWrapper(stdout_buffer, 'utf-8')
+ new_stderr = sys.stderr = io.TextIOWrapper(stderr_buffer, 'utf-8')
+ else:
+ stdin_buffer = new_stdin = sys.stdin = StringIO()
+ stdout_buffer = new_stdout = sys.stdout = StringIO()
+ stderr_buffer = new_stderr = sys.stderr = StringIO()
+ new_stdin.write(kwds.get('stdin', ''))
+ new_stdin.seek(0, 0)
try:
- ret = cmdline_main(["pygmentize"] + list(args))
+ ret = cmdline.main(['pygmentize'] + list(args))
finally:
+ sys.stdin = saved_stdin
sys.stdout = saved_stdout
sys.stderr = saved_stderr
- return (ret, new_stdout.getvalue(), new_stderr.getvalue())
+ new_stdout.flush()
+ new_stderr.flush()
+ out, err = stdout_buffer.getvalue().decode('utf-8'), \
+ stderr_buffer.getvalue().decode('utf-8')
+ return (ret, out, err)
class CmdLineTest(unittest.TestCase):
+ def check_success(self, *cmdline, **kwds):
+ code, out, err = run_cmdline(*cmdline, **kwds)
+ self.assertEqual(code, 0)
+ self.assertEqual(err, '')
+ return out
+
+ def check_failure(self, *cmdline, **kwds):
+ expected_code = kwds.pop('code', 1)
+ code, out, err = run_cmdline(*cmdline, **kwds)
+ self.assertEqual(code, expected_code)
+ self.assertEqual(out, '')
+ return err
+
+ def test_normal(self):
+ # test that cmdline gives the same output as library api
+ from pygments.lexers import PythonLexer
+ from pygments.formatters import HtmlFormatter
+ filename = TESTFILE
+ with open(filename, 'rb') as fp:
+ code = fp.read()
+
+ output = highlight(code, PythonLexer(), HtmlFormatter())
+
+ o = self.check_success('-lpython', '-fhtml', filename)
+ self.assertEqual(o, output)
+
+ def test_stdin(self):
+ o = self.check_success('-lpython', '-fhtml', stdin=TESTCODE)
+ o = re.sub('<[^>]*>', '', o)
+ # rstrip is necessary since HTML inserts a \n after the last </div>
+ self.assertEqual(o.rstrip(), TESTCODE.rstrip())
+
+ # guess if no lexer given
+ o = self.check_success('-fhtml', stdin=TESTCODE)
+ o = re.sub('<[^>]*>', '', o)
+ # rstrip is necessary since HTML inserts a \n after the last </div>
+ self.assertEqual(o.rstrip(), TESTCODE.rstrip())
+
+ def test_outfile(self):
+ # test that output file works with and without encoding
+ fd, name = tempfile.mkstemp()
+ os.close(fd)
+ for opts in [['-fhtml', '-o', name, TESTFILE],
+ ['-flatex', '-o', name, TESTFILE],
+ ['-fhtml', '-o', name, '-O', 'encoding=utf-8', TESTFILE]]:
+ try:
+ self.check_success(*opts)
+ finally:
+ os.unlink(name)
+
+ def test_stream_opt(self):
+ o = self.check_success('-lpython', '-s', '-fterminal', stdin=TESTCODE)
+ o = re.sub(r'\x1b\[.*?m', '', o)
+ self.assertEqual(o.replace('\r\n', '\n'), TESTCODE)
+
+ def test_h_opt(self):
+ o = self.check_success('-h')
+ self.assertTrue('Usage:' in o)
+
def test_L_opt(self):
- c, o, e = run_cmdline("-L")
- self.assertEqual(c, 0)
- self.assertTrue("Lexers" in o and "Formatters" in o and
- "Filters" in o and "Styles" in o)
- c, o, e = run_cmdline("-L", "lexer")
- self.assertEqual(c, 0)
- self.assertTrue("Lexers" in o and "Formatters" not in o)
- c, o, e = run_cmdline("-L", "lexers")
- self.assertEqual(c, 0)
+ o = self.check_success('-L')
+ self.assertTrue('Lexers' in o and 'Formatters' in o and
+ 'Filters' in o and 'Styles' in o)
+ o = self.check_success('-L', 'lexer')
+ self.assertTrue('Lexers' in o and 'Formatters' not in o)
+ self.check_success('-L', 'lexers')
def test_O_opt(self):
filename = TESTFILE
- c, o, e = run_cmdline("-Ofull=1,linenos=true,foo=bar",
- "-fhtml", filename)
- self.assertEqual(c, 0)
- self.assertTrue("<html" in o)
+ o = self.check_success('-Ofull=1,linenos=true,foo=bar',
+ '-fhtml', filename)
+ self.assertTrue('<html' in o)
self.assertTrue('class="linenos"' in o)
+ # "foobar" is invalid for a bool option
+ e = self.check_failure('-Ostripnl=foobar', TESTFILE)
+ self.assertTrue('Error: Invalid value' in e)
+ e = self.check_failure('-Ostripnl=foobar', '-lpy')
+ self.assertTrue('Error: Invalid value' in e)
+
def test_P_opt(self):
filename = TESTFILE
- c, o, e = run_cmdline("-Pfull", "-Ptitle=foo, bar=baz=,",
- "-fhtml", filename)
- self.assertEqual(c, 0)
- self.assertTrue("<title>foo, bar=baz=,</title>" in o)
+ o = self.check_success('-Pfull', '-Ptitle=foo, bar=baz=,',
+ '-fhtml', filename)
+ self.assertTrue('<title>foo, bar=baz=,</title>' in o)
def test_F_opt(self):
filename = TESTFILE
- c, o, e = run_cmdline("-Fhighlight:tokentype=Name.Blubb,"
- "names=TESTFILE filename",
- "-fhtml", filename)
- self.assertEqual(c, 0)
- self.assertTrue('<span class="n-Blubb' in o)
+ o = self.check_success('-Fhighlight:tokentype=Name.Blubb,'
+ 'names=TESTFILE filename',
+ '-fhtml', filename)
+ self.assertTrue('<span class="n n-Blubb' in o)
def test_H_opt(self):
- c, o, e = run_cmdline("-H", "formatter", "html")
- self.assertEqual(c, 0)
+ o = self.check_success('-H', 'formatter', 'html')
self.assertTrue('HTML' in o)
+ o = self.check_success('-H', 'lexer', 'python')
+ self.assertTrue('Python' in o)
+ o = self.check_success('-H', 'filter', 'raiseonerror')
+ self.assertTrue('raiseonerror', o)
+ e = self.check_failure('-H', 'lexer', 'foobar')
+ self.assertTrue('not found' in e)
def test_S_opt(self):
- c, o, e = run_cmdline("-S", "default", "-f", "html", "-O", "linenos=1")
- self.assertEqual(c, 0)
+ o = self.check_success('-S', 'default', '-f', 'html', '-O', 'linenos=1')
+ lines = o.splitlines()
+ for line in lines:
+ # every line is for a token class
+ parts = line.split()
+ self.assertTrue(parts[0].startswith('.'))
+ self.assertTrue(parts[1] == '{')
+ if parts[0] != '.hll':
+ self.assertTrue(parts[-4] == '}')
+ self.assertTrue(parts[-3] == '/*')
+ self.assertTrue(parts[-1] == '*/')
+ self.check_failure('-S', 'default', '-f', 'foobar')
+
+ def test_N_opt(self):
+ o = self.check_success('-N', 'test.py')
+ self.assertEqual('python', o.strip())
+ o = self.check_success('-N', 'test.unknown')
+ self.assertEqual('text', o.strip())
def test_invalid_opts(self):
- for opts in [("-L", "-lpy"), ("-L", "-fhtml"), ("-L", "-Ox"),
- ("-a",), ("-Sst", "-lpy"), ("-H",),
- ("-H", "formatter"),]:
- self.assertTrue(run_cmdline(*opts)[0] == 2)
-
- def test_normal(self):
- # test that cmdline gives the same output as library api
- from pygments.lexers import PythonLexer
- from pygments.formatters import HtmlFormatter
- filename = TESTFILE
- fp = open(filename, 'rb')
+ for opts in [
+ ('-X',),
+ ('-L', '-lpy'),
+ ('-L', '-fhtml'),
+ ('-L', '-Ox'),
+ ('-S', 'default', '-l', 'py', '-f', 'html'),
+ ('-S', 'default'),
+ ('-a', 'arg'),
+ ('-H',),
+ (TESTFILE, TESTFILE),
+ ('-H', 'formatter'),
+ ('-H', 'foo', 'bar'),
+ ('-s',),
+ ('-s', TESTFILE),
+ ]:
+ self.check_failure(*opts, code=2)
+
+ def test_errors(self):
+ # input file not found
+ e = self.check_failure('-lpython', 'nonexistent.py')
+ self.assertTrue('Error: cannot read infile' in e)
+ self.assertTrue('nonexistent.py' in e)
+
+ # lexer not found
+ e = self.check_failure('-lfooo', TESTFILE)
+ self.assertTrue('Error: no lexer for alias' in e)
+
+ # formatter not found
+ e = self.check_failure('-lpython', '-ffoo', TESTFILE)
+ self.assertTrue('Error: no formatter found for name' in e)
+
+ # formatter for outfile not found
+ e = self.check_failure('-ofoo.foo', TESTFILE)
+ self.assertTrue('Error: no formatter found for file name' in e)
+
+ # output file not writable
+ e = self.check_failure('-o', os.path.join('nonexistent', 'dir', 'out.html'),
+ '-lpython', TESTFILE)
+ self.assertTrue('Error: cannot open outfile' in e)
+ self.assertTrue('out.html' in e)
+
+ # unknown filter
+ e = self.check_failure('-F', 'foo', TESTFILE)
+ self.assertTrue('Error: filter \'foo\' not found' in e)
+
+ def test_exception(self):
+ cmdline.highlight = None # override callable to provoke TypeError
try:
- code = fp.read()
+ # unexpected exception while highlighting
+ e = self.check_failure('-lpython', TESTFILE)
+ self.assertTrue('*** Error while highlighting:' in e)
+ self.assertTrue('TypeError' in e)
+
+ # same with -v: should reraise the exception
+ try:
+ self.check_failure('-lpython', '-v', TESTFILE)
+ except Exception:
+ pass
+ else:
+ self.fail('exception not reraised')
finally:
- fp.close()
+ cmdline.highlight = highlight
- output = highlight(code, PythonLexer(), HtmlFormatter())
-
- c, o, e = run_cmdline("-lpython", "-fhtml", filename)
-
- self.assertEqual(o, output)
- self.assertEqual(e, "")
- self.assertEqual(c, 0)
+ def test_parse_opts(self):
+ self.assertEqual(cmdline._parse_options([' ', 'keyonly,key = value ']),
+ {'keyonly': True, 'key': 'value'})
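The equivalence that test_normal above checks, namely that `pygmentize -lpython -fhtml FILE` matches the library API output, looks like this when done directly; a sketch assuming `example.py` is any readable Python file (the name is a placeholder):

# Sketch (not part of the patch): library-API side of the cmdline comparison
# made in test_normal; 'example.py' is a placeholder filename.
from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter

with open('example.py', 'rb') as fp:
    code = fp.read()

print(highlight(code, PythonLexer(), HtmlFormatter()))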
diff --git a/tests/test_examplefiles.py b/tests/test_examplefiles.py
index d785cf3b..924e1184 100644
--- a/tests/test_examplefiles.py
+++ b/tests/test_examplefiles.py
@@ -3,59 +3,94 @@
Pygments tests with example files
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+from __future__ import print_function
+
import os
import pprint
import difflib
-import cPickle as pickle
+import pickle
from pygments.lexers import get_lexer_for_filename, get_lexer_by_name
from pygments.token import Error
-from pygments.util import ClassNotFound, b
+from pygments.util import ClassNotFound
+
+import support
STORE_OUTPUT = False
-# generate methods
+STATS = {}
+
+TESTDIR = os.path.dirname(__file__)
+
+# Jython generates a StackOverflowError for repetitions of the form (a|b)+,
+# which are commonly used in string patterns, when matching more than about 1000
+# chars. These tests do not complete. See http://bugs.jython.org/issue1965
+BAD_FILES_FOR_JYTHON = ('Object.st', 'all.nit', 'genclass.clj',
+ 'ragel-cpp_rlscan')
+
def test_example_files():
- testdir = os.path.dirname(__file__)
- outdir = os.path.join(testdir, 'examplefiles', 'output')
+ global STATS
+ STATS = {}
+ outdir = os.path.join(TESTDIR, 'examplefiles', 'output')
if STORE_OUTPUT and not os.path.isdir(outdir):
os.makedirs(outdir)
- for fn in os.listdir(os.path.join(testdir, 'examplefiles')):
+ for fn in os.listdir(os.path.join(TESTDIR, 'examplefiles')):
if fn.startswith('.') or fn.endswith('#'):
continue
- absfn = os.path.join(testdir, 'examplefiles', fn)
+ absfn = os.path.join(TESTDIR, 'examplefiles', fn)
if not os.path.isfile(absfn):
continue
- outfn = os.path.join(outdir, fn)
+ print(absfn)
+ with open(absfn, 'rb') as f:
+ code = f.read()
try:
- lx = get_lexer_for_filename(absfn)
- except ClassNotFound:
- if "_" not in fn:
+ code = code.decode('utf-8')
+ except UnicodeError:
+ code = code.decode('latin1')
+
+ lx = None
+ if '_' in fn:
+ try:
+ lx = get_lexer_by_name(fn.split('_')[0])
+ except ClassNotFound:
+ pass
+ if lx is None:
+ try:
+ lx = get_lexer_for_filename(absfn, code=code)
+ except ClassNotFound:
raise AssertionError('file %r has no registered extension, '
'nor is of the form <lexer>_filename '
'for overriding, thus no lexer found.'
- % fn)
- try:
- name, rest = fn.split("_", 1)
- lx = get_lexer_by_name(name)
- except ClassNotFound:
- raise AssertionError('no lexer found for file %r' % fn)
- yield check_lexer, lx, absfn, outfn
+ % fn)
+ yield check_lexer, lx, fn
-def check_lexer(lx, absfn, outfn):
- fp = open(absfn, 'rb')
- try:
+ N = 7
+ stats = list(STATS.items())
+ stats.sort(key=lambda x: x[1][1])
+ print('\nExample files that took longest absolute time:')
+ for fn, t in stats[-N:]:
+ print('%-30s %6d chars %8.2f ms %7.3f ms/char' % ((fn,) + t))
+ print()
+ stats.sort(key=lambda x: x[1][2])
+ print('\nExample files that took longest relative time:')
+ for fn, t in stats[-N:]:
+ print('%-30s %6d chars %8.2f ms %7.3f ms/char' % ((fn,) + t))
+
+
+def check_lexer(lx, fn):
+ if os.name == 'java' and fn in BAD_FILES_FOR_JYTHON:
+ raise support.SkipTest
+ absfn = os.path.join(TESTDIR, 'examplefiles', fn)
+ with open(absfn, 'rb') as fp:
text = fp.read()
- finally:
- fp.close()
- text = text.replace(b('\r\n'), b('\n'))
- text = text.strip(b('\n')) + b('\n')
+ text = text.replace(b'\r\n', b'\n')
+ text = text.strip(b'\n') + b'\n'
try:
text = text.decode('utf-8')
if text.startswith(u'\ufeff'):
@@ -64,36 +99,36 @@ def check_lexer(lx, absfn, outfn):
text = text.decode('latin1')
ntext = []
tokens = []
+ import time
+ t1 = time.time()
for type, val in lx.get_tokens(text):
ntext.append(val)
assert type != Error, \
'lexer %s generated error token for %s: %r at position %d' % \
(lx, absfn, val, len(u''.join(ntext)))
tokens.append((type, val))
+ t2 = time.time()
+ STATS[os.path.basename(absfn)] = (len(text),
+ 1000 * (t2 - t1), 1000 * (t2 - t1) / len(text))
if u''.join(ntext) != text:
- print '\n'.join(difflib.unified_diff(u''.join(ntext).splitlines(),
- text.splitlines()))
+ print('\n'.join(difflib.unified_diff(u''.join(ntext).splitlines(),
+ text.splitlines())))
raise AssertionError('round trip failed for ' + absfn)
# check output against previous run if enabled
if STORE_OUTPUT:
# no previous output -- store it
+ outfn = os.path.join(TESTDIR, 'examplefiles', 'output', fn)
if not os.path.isfile(outfn):
- fp = open(outfn, 'wb')
- try:
+ with open(outfn, 'wb') as fp:
pickle.dump(tokens, fp)
- finally:
- fp.close()
return
# otherwise load it and compare
- fp = open(outfn, 'rb')
- try:
+ with open(outfn, 'rb') as fp:
stored_tokens = pickle.load(fp)
- finally:
- fp.close()
if stored_tokens != tokens:
f1 = pprint.pformat(stored_tokens)
f2 = pprint.pformat(tokens)
- print '\n'.join(difflib.unified_diff(f1.splitlines(),
- f2.splitlines()))
+ print('\n'.join(difflib.unified_diff(f1.splitlines(),
+ f2.splitlines())))
assert False, absfn
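The `<lexer>_filename` override convention used by test_example_files above (and exercised by the test.bas -> vbnet_test.bas rename earlier in this diff) can be reproduced with the public lookup helpers; a sketch:

# Sketch (not part of the patch): how an example file's lexer gets chosen.
from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

fn = 'vbnet_test.bas'
# A name of the form <alias>_<rest> selects the lexer by alias prefix first...
print(get_lexer_by_name(fn.split('_')[0]).__class__.__name__)
# ...otherwise the registered file extension decides.
print(get_lexer_for_filename('example.py').__class__.__name__)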
diff --git a/tests/test_html_formatter.py b/tests/test_html_formatter.py
index f7e7a542..a82aaaf7 100644
--- a/tests/test_html_formatter.py
+++ b/tests/test_html_formatter.py
@@ -3,41 +3,40 @@
Pygments HTML formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+from __future__ import print_function
+
+import io
import os
import re
import unittest
-import StringIO
import tempfile
from os.path import join, dirname, isfile
+from pygments.util import StringIO
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter, NullFormatter
from pygments.formatters.html import escape_html
-from pygments.util import uni_open
import support
TESTFILE, TESTDIR = support.location(__file__)
-fp = uni_open(TESTFILE, encoding='utf-8')
-try:
+with io.open(TESTFILE, encoding='utf-8') as fp:
tokensource = list(PythonLexer().get_tokens(fp.read()))
-finally:
- fp.close()
class HtmlFormatterTest(unittest.TestCase):
def test_correct_output(self):
hfmt = HtmlFormatter(nowrap=True)
- houtfile = StringIO.StringIO()
+ houtfile = StringIO()
hfmt.format(tokensource, houtfile)
nfmt = NullFormatter()
- noutfile = StringIO.StringIO()
+ noutfile = StringIO()
nfmt.format(tokensource, noutfile)
stripped_html = re.sub('<.*?>', '', houtfile.getvalue())
@@ -69,18 +68,35 @@ class HtmlFormatterTest(unittest.TestCase):
pass
def test_all_options(self):
- for optdict in [dict(nowrap=True),
- dict(linenos=True),
- dict(linenos=True, full=True),
- dict(linenos=True, full=True, noclasses=True)]:
-
- outfile = StringIO.StringIO()
+ def check(optdict):
+ outfile = StringIO()
fmt = HtmlFormatter(**optdict)
fmt.format(tokensource, outfile)
+ for optdict in [
+ dict(nowrap=True),
+ dict(linenos=True, full=True),
+ dict(linenos=True, linespans='L'),
+ dict(hl_lines=[1, 5, 10, 'xxx']),
+ dict(hl_lines=[1, 5, 10], noclasses=True),
+ ]:
+ check(optdict)
+
+ for linenos in [False, 'table', 'inline']:
+ for noclasses in [False, True]:
+ for linenospecial in [0, 5]:
+ for anchorlinenos in [False, True]:
+ optdict = dict(
+ linenos=linenos,
+ noclasses=noclasses,
+ linenospecial=linenospecial,
+ anchorlinenos=anchorlinenos,
+ )
+ check(optdict)
+
def test_linenos(self):
optdict = dict(linenos=True)
- outfile = StringIO.StringIO()
+ outfile = StringIO()
fmt = HtmlFormatter(**optdict)
fmt.format(tokensource, outfile)
html = outfile.getvalue()
@@ -88,7 +104,7 @@ class HtmlFormatterTest(unittest.TestCase):
def test_linenos_with_startnum(self):
optdict = dict(linenos=True, linenostart=5)
- outfile = StringIO.StringIO()
+ outfile = StringIO()
fmt = HtmlFormatter(**optdict)
fmt.format(tokensource, outfile)
html = outfile.getvalue()
@@ -96,7 +112,7 @@ class HtmlFormatterTest(unittest.TestCase):
def test_lineanchors(self):
optdict = dict(lineanchors="foo")
- outfile = StringIO.StringIO()
+ outfile = StringIO()
fmt = HtmlFormatter(**optdict)
fmt.format(tokensource, outfile)
html = outfile.getvalue()
@@ -104,7 +120,7 @@ class HtmlFormatterTest(unittest.TestCase):
def test_lineanchors_with_startnum(self):
optdict = dict(lineanchors="foo", linenostart=5)
- outfile = StringIO.StringIO()
+ outfile = StringIO()
fmt = HtmlFormatter(**optdict)
fmt.format(tokensource, outfile)
html = outfile.getvalue()
@@ -132,7 +148,7 @@ class HtmlFormatterTest(unittest.TestCase):
pass
else:
if ret:
- print output
+ print(output)
self.assertFalse(ret, 'nsgmls run reported errors')
os.unlink(pathname)
@@ -172,7 +188,7 @@ class HtmlFormatterTest(unittest.TestCase):
# anymore in the actual source
fmt = HtmlFormatter(tagsfile='support/tags', lineanchors='L',
tagurlformat='%(fname)s%(fext)s')
- outfile = StringIO.StringIO()
+ outfile = StringIO()
fmt.format(tokensource, outfile)
self.assertTrue('<a href="test_html_formatter.py#L-165">test_ctags</a>'
in outfile.getvalue())
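
The expanded test_all_options above sweeps combinations of HtmlFormatter keyword options (linenos, noclasses, linenospecial, anchorlinenos, plus linespans and hl_lines). A short usage sketch of the same options outside the test harness; the sample code string and the chosen combinations are illustrative:

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    code = 'for i in range(3):\n    print(i)\n'
    for opts in (dict(linenos='table', anchorlinenos=True),
                 dict(linenos='inline', noclasses=True, linenospecial=5),
                 dict(linespans='L', hl_lines=[1])):
        html = highlight(code, PythonLexer(), HtmlFormatter(**opts))
        print(sorted(opts), len(html))
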
diff --git a/tests/test_inherit.py b/tests/test_inherit.py
new file mode 100644
index 00000000..34033a08
--- /dev/null
+++ b/tests/test_inherit.py
@@ -0,0 +1,94 @@
+# -*- coding: utf-8 -*-
+"""
+ Tests for inheritance in RegexLexer
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.lexer import RegexLexer, inherit
+from pygments.token import Text
+
+
+class InheritTest(unittest.TestCase):
+ def test_single_inheritance_position(self):
+ t = Two()
+ pats = [x[0].__self__.pattern for x in t._tokens['root']]
+ self.assertEqual(['x', 'a', 'b', 'y'], pats)
+ def test_multi_inheritance_beginning(self):
+ t = Beginning()
+ pats = [x[0].__self__.pattern for x in t._tokens['root']]
+ self.assertEqual(['x', 'a', 'b', 'y', 'm'], pats)
+ def test_multi_inheritance_end(self):
+ t = End()
+ pats = [x[0].__self__.pattern for x in t._tokens['root']]
+ self.assertEqual(['m', 'x', 'a', 'b', 'y'], pats)
+
+ def test_multi_inheritance_position(self):
+ t = Three()
+ pats = [x[0].__self__.pattern for x in t._tokens['root']]
+ self.assertEqual(['i', 'x', 'a', 'b', 'y', 'j'], pats)
+
+ def test_single_inheritance_with_skip(self):
+ t = Skipped()
+ pats = [x[0].__self__.pattern for x in t._tokens['root']]
+ self.assertEqual(['x', 'a', 'b', 'y'], pats)
+
+
+class One(RegexLexer):
+ tokens = {
+ 'root': [
+ ('a', Text),
+ ('b', Text),
+ ],
+ }
+
+class Two(One):
+ tokens = {
+ 'root': [
+ ('x', Text),
+ inherit,
+ ('y', Text),
+ ],
+ }
+
+class Three(Two):
+ tokens = {
+ 'root': [
+ ('i', Text),
+ inherit,
+ ('j', Text),
+ ],
+ }
+
+class Beginning(Two):
+ tokens = {
+ 'root': [
+ inherit,
+ ('m', Text),
+ ],
+ }
+
+class End(Two):
+ tokens = {
+ 'root': [
+ ('m', Text),
+ inherit,
+ ],
+ }
+
+class Empty(One):
+ tokens = {}
+
+class Skipped(Empty):
+ tokens = {
+ 'root': [
+ ('x', Text),
+ inherit,
+ ('y', Text),
+ ],
+ }
+
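
tests/test_inherit.py checks the order in which `inherit` splices a parent lexer's rules into a subclass state. A minimal sketch of the same mechanism with ordinary patterns; BaseLexer and ChildLexer are illustrative names, not part of the commit:

    from pygments.lexer import RegexLexer, inherit
    from pygments.token import Keyword, Name, Text

    class BaseLexer(RegexLexer):
        tokens = {
            'root': [
                (r'base\b', Keyword),
                (r'\w+', Name),
                (r'\s+', Text),
            ],
        }

    class ChildLexer(BaseLexer):
        tokens = {
            'root': [
                (r'child\b', Keyword),
                inherit,  # BaseLexer's 'root' rules are spliced in here
            ],
        }

    # 'child' and 'base' both come out as Keyword; everything else as Name/Text
    print(list(ChildLexer().get_tokens(u'child base other')))
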
diff --git a/tests/test_java.py b/tests/test_java.py
new file mode 100644
index 00000000..33a64e99
--- /dev/null
+++ b/tests/test_java.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+"""
+ Basic JavaLexer Test
+ ~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.token import Text, Name, Operator, Keyword
+from pygments.lexers import JavaLexer
+
+
+class JavaTest(unittest.TestCase):
+
+ def setUp(self):
+ self.lexer = JavaLexer()
+ self.maxDiff = None
+
+ def testEnhancedFor(self):
+ fragment = u'label:\nfor(String var2: var1) {}\n'
+ tokens = [
+ (Name.Label, u'label:'),
+ (Text, u'\n'),
+ (Keyword, u'for'),
+ (Operator, u'('),
+ (Name, u'String'),
+ (Text, u' '),
+ (Name, u'var2'),
+ (Operator, u':'),
+ (Text, u' '),
+ (Name, u'var1'),
+ (Operator, u')'),
+ (Text, u' '),
+ (Operator, u'{'),
+ (Operator, u'}'),
+ (Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
diff --git a/tests/test_latex_formatter.py b/tests/test_latex_formatter.py
index 06a74c3d..56b5db2e 100644
--- a/tests/test_latex_formatter.py
+++ b/tests/test_latex_formatter.py
@@ -3,10 +3,12 @@
Pygments LaTeX formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+from __future__ import print_function
+
import os
import unittest
import tempfile
@@ -22,11 +24,8 @@ TESTFILE, TESTDIR = support.location(__file__)
class LatexFormatterTest(unittest.TestCase):
def test_valid_output(self):
- fp = open(TESTFILE)
- try:
+ with open(TESTFILE) as fp:
tokensource = list(PythonLexer().get_tokens(fp.read()))
- finally:
- fp.close()
fmt = LatexFormatter(full=True, encoding='latin1')
handle, pathname = tempfile.mkstemp('.tex')
@@ -48,7 +47,7 @@ class LatexFormatterTest(unittest.TestCase):
pass
else:
if ret:
- print output
+ print(output)
self.assertFalse(ret, 'latex run reported errors')
os.unlink(pathname)
diff --git a/tests/test_lexers_other.py b/tests/test_lexers_other.py
index d3cfa246..7457d045 100644
--- a/tests/test_lexers_other.py
+++ b/tests/test_lexers_other.py
@@ -3,7 +3,7 @@
Tests for other lexers
~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -12,7 +12,7 @@ import os
import unittest
from pygments.lexers import guess_lexer
-from pygments.lexers.other import RexxLexer
+from pygments.lexers.scripting import RexxLexer
def _exampleFilePath(filename):
@@ -26,17 +26,14 @@ class AnalyseTextTest(unittest.TestCase):
for pattern in lexer.filenames:
exampleFilesPattern = _exampleFilePath(pattern)
for exampleFilePath in glob.glob(exampleFilesPattern):
- exampleFile = open(exampleFilePath, 'rb')
- try:
- text = exampleFile.read()
- probability = lexer.analyse_text(text)
- self.assertTrue(probability > 0,
- '%s must recognize %r' % (
- lexer.name, exampleFilePath))
- guessedLexer = guess_lexer(text)
- self.assertEqual(guessedLexer.name, lexer.name)
- finally:
- exampleFile.close()
+ with open(exampleFilePath, 'rb') as fp:
+ text = fp.read().decode('utf-8')
+ probability = lexer.analyse_text(text)
+ self.assertTrue(probability > 0,
+ '%s must recognize %r' % (
+ lexer.name, exampleFilePath))
+ guessedLexer = guess_lexer(text)
+ self.assertEqual(guessedLexer.name, lexer.name)
def testCanRecognizeAndGuessExampleFiles(self):
self._testCanRecognizeAndGuessExampleFiles(RexxLexer)
@@ -49,20 +46,20 @@ class RexxLexerTest(unittest.TestCase):
self.assertAlmostEqual(1.0,
RexxLexer.analyse_text('''/* Rexx */
say "hello world"'''))
- self.assertLess(0.5,
- RexxLexer.analyse_text('/* */\n'
+ val = RexxLexer.analyse_text('/* */\n'
'hello:pRoceduRe\n'
- ' say "hello world"'))
- self.assertLess(0.2,
- RexxLexer.analyse_text('''/* */
+ ' say "hello world"')
+ self.assertTrue(val > 0.5, val)
+ val = RexxLexer.analyse_text('''/* */
if 1 > 0 then do
say "ok"
end
else do
say "huh?"
- end'''))
- self.assertLess(0.2,
- RexxLexer.analyse_text('''/* */
+ end''')
+ self.assertTrue(val > 0.2, val)
+ val = RexxLexer.analyse_text('''/* */
greeting = "hello world!"
parse value greeting "hello" name "!"
- say name'''))
+ say name''')
+ self.assertTrue(val > 0.2, val)
diff --git a/tests/test_objectiveclexer.py b/tests/test_objectiveclexer.py
new file mode 100644
index 00000000..90bd680f
--- /dev/null
+++ b/tests/test_objectiveclexer.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+"""
+ Basic ObjectiveCLexer Test
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+import os
+
+from pygments.token import Token
+from pygments.lexers import ObjectiveCLexer
+
+
+class ObjectiveCLexerTest(unittest.TestCase):
+
+ def setUp(self):
+ self.lexer = ObjectiveCLexer()
+
+ def testLiteralNumberInt(self):
+ fragment = u'@(1);\n'
+ expected = [
+ (Token.Literal, u'@('),
+ (Token.Literal.Number.Integer, u'1'),
+ (Token.Literal, u')'),
+ (Token.Punctuation, u';'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
+
+ def testLiteralNumberExpression(self):
+ fragment = u'@(1+2);\n'
+ expected = [
+ (Token.Literal, u'@('),
+ (Token.Literal.Number.Integer, u'1'),
+ (Token.Operator, u'+'),
+ (Token.Literal.Number.Integer, u'2'),
+ (Token.Literal, u')'),
+ (Token.Punctuation, u';'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
+
+ def testLiteralNumberNestedExpression(self):
+ fragment = u'@(1+(2+3));\n'
+ expected = [
+ (Token.Literal, u'@('),
+ (Token.Literal.Number.Integer, u'1'),
+ (Token.Operator, u'+'),
+ (Token.Punctuation, u'('),
+ (Token.Literal.Number.Integer, u'2'),
+ (Token.Operator, u'+'),
+ (Token.Literal.Number.Integer, u'3'),
+ (Token.Punctuation, u')'),
+ (Token.Literal, u')'),
+ (Token.Punctuation, u';'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
+
+ def testLiteralNumberBool(self):
+ fragment = u'@NO;\n'
+ expected = [
+ (Token.Literal.Number, u'@NO'),
+ (Token.Punctuation, u';'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
+
+ def testLiteralNumberBoolExpression(self):
+ fragment = u'@(YES);\n'
+ expected = [
+ (Token.Literal, u'@('),
+ (Token.Name.Builtin, u'YES'),
+ (Token.Literal, u')'),
+ (Token.Punctuation, u';'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
diff --git a/tests/test_perllexer.py b/tests/test_perllexer.py
index 315b20e3..26b2d0a7 100644
--- a/tests/test_perllexer.py
+++ b/tests/test_perllexer.py
@@ -3,7 +3,7 @@
Pygments regex lexer tests
~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -11,7 +11,7 @@ import time
import unittest
from pygments.token import String
-from pygments.lexers.agile import PerlLexer
+from pygments.lexers.perl import PerlLexer
class RunawayRegexTest(unittest.TestCase):
diff --git a/tests/test_qbasiclexer.py b/tests/test_qbasiclexer.py
new file mode 100644
index 00000000..8b790cee
--- /dev/null
+++ b/tests/test_qbasiclexer.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+"""
+ Tests for QBasic
+ ~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import glob
+import os
+import unittest
+
+from pygments.token import Token
+from pygments.lexers.basic import QBasicLexer
+
+
+class QBasicTest(unittest.TestCase):
+ def setUp(self):
+ self.lexer = QBasicLexer()
+ self.maxDiff = None
+
+ def testKeywordsWithDollar(self):
+ fragment = u'DIM x\nx = RIGHT$("abc", 1)\n'
+ expected = [
+ (Token.Keyword.Declaration, u'DIM'),
+ (Token.Text.Whitespace, u' '),
+ (Token.Name.Variable.Global, u'x'),
+ (Token.Text, u'\n'),
+ (Token.Name.Variable.Global, u'x'),
+ (Token.Text.Whitespace, u' '),
+ (Token.Operator, u'='),
+ (Token.Text.Whitespace, u' '),
+ (Token.Keyword.Reserved, u'RIGHT$'),
+ (Token.Punctuation, u'('),
+ (Token.Literal.String.Double, u'"abc"'),
+ (Token.Punctuation, u','),
+ (Token.Text.Whitespace, u' '),
+ (Token.Literal.Number.Integer.Long, u'1'),
+ (Token.Punctuation, u')'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
diff --git a/tests/test_regexlexer.py b/tests/test_regexlexer.py
index 28d9689b..eb25be61 100644
--- a/tests/test_regexlexer.py
+++ b/tests/test_regexlexer.py
@@ -3,7 +3,7 @@
Pygments regex lexer tests
~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -12,6 +12,7 @@ import unittest
from pygments.token import Text
from pygments.lexer import RegexLexer
from pygments.lexer import bygroups
+from pygments.lexer import default
class TestLexer(RegexLexer):
@@ -20,6 +21,7 @@ class TestLexer(RegexLexer):
'root': [
('a', Text.Root, 'rag'),
('e', Text.Root),
+ default(('beer', 'beer'))
],
'beer': [
('d', Text.Beer, ('#pop', '#pop')),
@@ -45,3 +47,8 @@ class TupleTransTest(unittest.TestCase):
self.assertEqual(toks,
[(0, Text.Root, 'a'), (1, Text, u'\n'),
(2, Text.Root, 'e')])
+
+ def test_default(self):
+ lx = TestLexer()
+ toks = list(lx.get_tokens_unprocessed('d'))
+ self.assertEqual(toks, [(0, Text.Beer, 'd')])
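
The new test_default case relies on the `default` helper imported above: it behaves like a rule that matches the empty string, so the lexer can change state without consuming any input. A small illustrative lexer (names made up for the sketch) showing the same fall-through behaviour:

    from pygments.lexer import RegexLexer, default
    from pygments.token import Number, Text

    class DefaultDemoLexer(RegexLexer):
        tokens = {
            'root': [
                (r'\s+', Text),
                # nothing above matched: enter 'value' without consuming input
                default('value'),
            ],
            'value': [
                (r'\d+', Number, '#pop'),
            ],
        }

    # yields Number '12', Text ' ', Number '34', Text '\n'
    print(list(DefaultDemoLexer().get_tokens(u'12 34')))
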
diff --git a/tests/test_regexopt.py b/tests/test_regexopt.py
new file mode 100644
index 00000000..dd56a446
--- /dev/null
+++ b/tests/test_regexopt.py
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+"""
+ Tests for pygments.regexopt
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+import random
+import unittest
+import itertools
+
+from pygments.regexopt import regex_opt
+
+ALPHABET = ['a', 'b', 'c', 'd', 'e']
+
+try:
+ from itertools import combinations_with_replacement
+ N_TRIES = 15
+except ImportError:
+ # Python 2.6
+ def combinations_with_replacement(iterable, r):
+ pool = tuple(iterable)
+ n = len(pool)
+ for indices in itertools.product(range(n), repeat=r):
+ if sorted(indices) == list(indices):
+ yield tuple(pool[i] for i in indices)
+ N_TRIES = 9
+
+
+class RegexOptTestCase(unittest.TestCase):
+
+ def generate_keywordlist(self, length):
+ return [''.join(p) for p in
+ combinations_with_replacement(ALPHABET, length)]
+
+ def test_randomly(self):
+ # generate a list of all possible keywords of a certain length using
+ # a restricted alphabet, then choose some to match and make sure only
+ # those do
+ for n in range(3, N_TRIES):
+ kwlist = self.generate_keywordlist(n)
+ to_match = random.sample(kwlist,
+ random.randint(1, len(kwlist) - 1))
+ no_match = set(kwlist) - set(to_match)
+ rex = re.compile(regex_opt(to_match))
+ for w in to_match:
+ self.assertTrue(rex.match(w))
+ for w in no_match:
+ self.assertFalse(rex.match(w))
+
+ def test_prefix(self):
+ opt = regex_opt(('a', 'b'), prefix=r':{1,2}')
+ print(opt)
+ rex = re.compile(opt)
+ self.assertFalse(rex.match('a'))
+ self.assertTrue(rex.match('::a'))
+ self.assertFalse(rex.match(':::')) # fullmatch
+
+ def test_suffix(self):
+ opt = regex_opt(('a', 'b'), suffix=r':{1,2}')
+ print(opt)
+ rex = re.compile(opt)
+ self.assertFalse(rex.match('a'))
+ self.assertTrue(rex.match('a::'))
+ self.assertFalse(rex.match(':::')) # fullmatch
+
+ def test_suffix_opt(self):
+ # test that detected suffixes remain sorted.
+ opt = regex_opt(('afoo', 'abfoo'))
+ print(opt)
+ rex = re.compile(opt)
+ m = rex.match('abfoo')
+ self.assertEqual(5, m.end())
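
tests/test_regexopt.py exercises pygments.regexopt.regex_opt, which folds a list of literal keywords into one optimized regular expression, optionally wrapped in a prefix and suffix. A short usage sketch; the keyword list is arbitrary:

    import re
    from pygments.regexopt import regex_opt

    keywords = ('else', 'elif', 'if', 'import', 'in')
    pattern = regex_opt(keywords, prefix=r'\b', suffix=r'\b')
    rex = re.compile(pattern)

    print(pattern)                          # one alternation with shared prefixes
    print(bool(rex.search('import sys')))   # True: whole-word 'import'
    print(bool(rex.search('inline')))       # False: 'in' is not a whole word here
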
diff --git a/tests/test_rtf_formatter.py b/tests/test_rtf_formatter.py
new file mode 100644
index 00000000..25784743
--- /dev/null
+++ b/tests/test_rtf_formatter.py
@@ -0,0 +1,109 @@
+# -*- coding: utf-8 -*-
+"""
+ Pygments RTF formatter tests
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+from string_asserts import StringTests
+
+from pygments.util import StringIO
+from pygments.formatters import RtfFormatter
+from pygments.lexers.special import TextLexer
+
+class RtfFormatterTest(StringTests, unittest.TestCase):
+ foot = (r'\par' '\n' r'}')
+
+ def _escape(self, string):
+ return(string.replace("\n", r"\n"))
+
+ def _build_message(self, *args, **kwargs):
+ string = kwargs.get('string', None)
+ t = self._escape(kwargs.get('t', ''))
+ expected = self._escape(kwargs.get('expected', ''))
+ result = self._escape(kwargs.get('result', ''))
+
+ if string is None:
+ string = (u"The expected output of '{t}'\n"
+ u"\t\tShould be '{expected}'\n"
+ u"\t\tActually outputs '{result}'\n"
+ u"\t(WARNING: Partial Output of Result!)")
+
+ end = -(len(self._escape(self.foot)))
+ start = end-len(expected)
+
+ return string.format(t=t,
+ result = result[start:end],
+ expected = expected)
+
+ def format_rtf(self, t):
+ tokensource = list(TextLexer().get_tokens(t))
+ fmt = RtfFormatter()
+ buf = StringIO()
+ fmt.format(tokensource, buf)
+ result = buf.getvalue()
+ buf.close()
+ return result
+
+ def test_rtf_header(self):
+ t = u''
+ result = self.format_rtf(t)
+ expected = r'{\rtf1\ansi\uc0'
+ msg = (u"RTF documents are expected to start with '{expected}'\n"
+ u"\t\tStarts intead with '{result}'\n"
+ u"\t(WARNING: Partial Output of Result!)".format(
+ expected = expected,
+ result = result[:len(expected)]))
+ self.assertStartsWith(result, expected, msg)
+
+ def test_rtf_footer(self):
+ t = u''
+ result = self.format_rtf(t)
+ expected = self.foot
+ msg = (u"RTF documents are expected to end with '{expected}'\n"
+ u"\t\tEnds intead with '{result}'\n"
+ u"\t(WARNING: Partial Output of Result!)".format(
+ expected = self._escape(expected),
+ result = self._escape(result[-len(expected):])))
+ self.assertEndsWith(result, expected, msg)
+
+ def test_ascii_characters(self):
+ t = u'a b c d ~'
+ result = self.format_rtf(t)
+ expected = (r'a b c d ~')
+ if not result.endswith(self.foot):
+ return(unittest.skip('RTF Footer incorrect'))
+ msg = self._build_message(t=t, result=result, expected=expected)
+ self.assertEndsWith(result, expected+self.foot, msg)
+
+ def test_escape_characters(self):
+ t = u'\ {{'
+ result = self.format_rtf(t)
+ expected = (r'\\ \{\{')
+ if not result.endswith(self.foot):
+ return(unittest.skip('RTF Footer incorrect'))
+ msg = self._build_message(t=t, result=result, expected=expected)
+ self.assertEndsWith(result, expected+self.foot, msg)
+
+ def test_single_characters(self):
+ t = u'â € ¤ каждой'
+ result = self.format_rtf(t)
+ expected = (r'{\u226} {\u8364} {\u164} '
+ r'{\u1082}{\u1072}{\u1078}{\u1076}{\u1086}{\u1081}')
+ if not result.endswith(self.foot):
+ return(unittest.skip('RTF Footer incorrect'))
+ msg = self._build_message(t=t, result=result, expected=expected)
+ self.assertEndsWith(result, expected+self.foot, msg)
+
+ def test_double_characters(self):
+ t = u'က 힣 ↕ ↕︎ 鼖'
+ result = self.format_rtf(t)
+ expected = (r'{\u4096} {\u55203} {\u8597} '
+ r'{\u8597}{\u65038} {\u55422}{\u56859}')
+ if not result.endswith(self.foot):
+ return(unittest.skip('RTF Footer incorrect'))
+ msg = self._build_message(t=t, result=result, expected=expected)
+ self.assertEndsWith(result, expected+self.foot, msg)
diff --git a/tests/test_ruby.py b/tests/test_ruby.py
new file mode 100644
index 00000000..ab210bad
--- /dev/null
+++ b/tests/test_ruby.py
@@ -0,0 +1,145 @@
+# -*- coding: utf-8 -*-
+"""
+ Basic RubyLexer Test
+ ~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.token import Operator, Number, Text, Token
+from pygments.lexers import RubyLexer
+
+
+class RubyTest(unittest.TestCase):
+
+ def setUp(self):
+ self.lexer = RubyLexer()
+ self.maxDiff = None
+
+ def testRangeSyntax1(self):
+ fragment = u'1..3\n'
+ tokens = [
+ (Number.Integer, u'1'),
+ (Operator, u'..'),
+ (Number.Integer, u'3'),
+ (Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testRangeSyntax2(self):
+ fragment = u'1...3\n'
+ tokens = [
+ (Number.Integer, u'1'),
+ (Operator, u'...'),
+ (Number.Integer, u'3'),
+ (Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testRangeSyntax3(self):
+ fragment = u'1 .. 3\n'
+ tokens = [
+ (Number.Integer, u'1'),
+ (Text, u' '),
+ (Operator, u'..'),
+ (Text, u' '),
+ (Number.Integer, u'3'),
+ (Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testInterpolationNestedCurly(self):
+ fragment = (
+ u'"A#{ (3..5).group_by { |x| x/2}.map '
+ u'do |k,v| "#{k}" end.join }" + "Z"\n')
+
+ tokens = [
+ (Token.Literal.String.Double, u'"'),
+ (Token.Literal.String.Double, u'A'),
+ (Token.Literal.String.Interpol, u'#{'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'('),
+ (Token.Literal.Number.Integer, u'3'),
+ (Token.Operator, u'..'),
+ (Token.Literal.Number.Integer, u'5'),
+ (Token.Punctuation, u')'),
+ (Token.Operator, u'.'),
+ (Token.Name, u'group_by'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Interpol, u'{'),
+ (Token.Text, u' '),
+ (Token.Operator, u'|'),
+ (Token.Name, u'x'),
+ (Token.Operator, u'|'),
+ (Token.Text, u' '),
+ (Token.Name, u'x'),
+ (Token.Operator, u'/'),
+ (Token.Literal.Number.Integer, u'2'),
+ (Token.Literal.String.Interpol, u'}'),
+ (Token.Operator, u'.'),
+ (Token.Name, u'map'),
+ (Token.Text, u' '),
+ (Token.Keyword, u'do'),
+ (Token.Text, u' '),
+ (Token.Operator, u'|'),
+ (Token.Name, u'k'),
+ (Token.Punctuation, u','),
+ (Token.Name, u'v'),
+ (Token.Operator, u'|'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Double, u'"'),
+ (Token.Literal.String.Interpol, u'#{'),
+ (Token.Name, u'k'),
+ (Token.Literal.String.Interpol, u'}'),
+ (Token.Literal.String.Double, u'"'),
+ (Token.Text, u' '),
+ (Token.Keyword, u'end'),
+ (Token.Operator, u'.'),
+ (Token.Name, u'join'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Interpol, u'}'),
+ (Token.Literal.String.Double, u'"'),
+ (Token.Text, u' '),
+ (Token.Operator, u'+'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Double, u'"'),
+ (Token.Literal.String.Double, u'Z'),
+ (Token.Literal.String.Double, u'"'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testOperatorMethods(self):
+ fragment = u'x.==4\n'
+ tokens = [
+ (Token.Name, u'x'),
+ (Token.Operator, u'.'),
+ (Token.Name.Operator, u'=='),
+ (Token.Literal.Number.Integer, u'4'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testEscapedBracestring(self):
+ fragment = u'str.gsub(%r{\\\\\\\\}, "/")\n'
+ tokens = [
+ (Token.Name, u'str'),
+ (Token.Operator, u'.'),
+ (Token.Name, u'gsub'),
+ (Token.Punctuation, u'('),
+ (Token.Literal.String.Regex, u'%r{'),
+ (Token.Literal.String.Regex, u'\\\\'),
+ (Token.Literal.String.Regex, u'\\\\'),
+ (Token.Literal.String.Regex, u'}'),
+ (Token.Punctuation, u','),
+ (Token.Text, u' '),
+ (Token.Literal.String.Double, u'"'),
+ (Token.Literal.String.Double, u'/'),
+ (Token.Literal.String.Double, u'"'),
+ (Token.Punctuation, u')'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
diff --git a/tests/test_shell.py b/tests/test_shell.py
new file mode 100644
index 00000000..fd5009b0
--- /dev/null
+++ b/tests/test_shell.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+"""
+ Basic Shell Tests
+ ~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.token import Token
+from pygments.lexers import BashLexer
+
+
+class BashTest(unittest.TestCase):
+
+ def setUp(self):
+ self.lexer = BashLexer()
+ self.maxDiff = None
+
+ def testCurlyNoEscapeAndQuotes(self):
+ fragment = u'echo "${a//["b"]/}"\n'
+ tokens = [
+ (Token.Name.Builtin, u'echo'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Double, u'"'),
+ (Token.String.Interpol, u'${'),
+ (Token.Name.Variable, u'a'),
+ (Token.Punctuation, u'//['),
+ (Token.Literal.String.Double, u'"b"'),
+ (Token.Punctuation, u']/'),
+ (Token.String.Interpol, u'}'),
+ (Token.Literal.String.Double, u'"'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testCurlyWithEscape(self):
+ fragment = u'echo ${a//[\\"]/}\n'
+ tokens = [
+ (Token.Name.Builtin, u'echo'),
+ (Token.Text, u' '),
+ (Token.String.Interpol, u'${'),
+ (Token.Name.Variable, u'a'),
+ (Token.Punctuation, u'//['),
+ (Token.Literal.String.Escape, u'\\"'),
+ (Token.Punctuation, u']/'),
+ (Token.String.Interpol, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testParsedSingle(self):
+ fragment = u"a=$'abc\\''\n"
+ tokens = [
+ (Token.Name.Variable, u'a'),
+ (Token.Operator, u'='),
+ (Token.Literal.String.Single, u"$'abc\\''"),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
diff --git a/tests/test_smarty.py b/tests/test_smarty.py
new file mode 100644
index 00000000..450e4e6b
--- /dev/null
+++ b/tests/test_smarty.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+"""
+ Basic SmartyLexer Test
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.token import Operator, Number, Text, Token
+from pygments.lexers import SmartyLexer
+
+
+class SmartyTest(unittest.TestCase):
+
+ def setUp(self):
+ self.lexer = SmartyLexer()
+
+ def testNestedCurly(self):
+ fragment = u'{templateFunction param={anotherFunction} param2=$something}\n'
+ tokens = [
+ (Token.Comment.Preproc, u'{'),
+ (Token.Name.Function, u'templateFunction'),
+ (Token.Text, u' '),
+ (Token.Name.Attribute, u'param'),
+ (Token.Operator, u'='),
+ (Token.Comment.Preproc, u'{'),
+ (Token.Name.Attribute, u'anotherFunction'),
+ (Token.Comment.Preproc, u'}'),
+ (Token.Text, u' '),
+ (Token.Name.Attribute, u'param2'),
+ (Token.Operator, u'='),
+ (Token.Name.Variable, u'$something'),
+ (Token.Comment.Preproc, u'}'),
+ (Token.Other, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
diff --git a/tests/test_string_asserts.py b/tests/test_string_asserts.py
new file mode 100644
index 00000000..ba7b37fa
--- /dev/null
+++ b/tests/test_string_asserts.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+"""
+ Pygments string assert utility tests
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+from string_asserts import StringTests
+
+class TestStringTests(StringTests, unittest.TestCase):
+
+ def test_startswith_correct(self):
+ self.assertStartsWith("AAA", "A")
+
+ # @unittest.expectedFailure not supported by nose
+ def test_startswith_incorrect(self):
+ self.assertRaises(AssertionError, self.assertStartsWith, "AAA", "B")
+
+ # @unittest.expectedFailure not supported by nose
+ def test_startswith_short(self):
+ self.assertRaises(AssertionError, self.assertStartsWith, "A", "AA")
+
+ def test_endswith_correct(self):
+ self.assertEndsWith("AAA", "A")
+
+ # @unittest.expectedFailure not supported by nose
+ def test_endswith_incorrect(self):
+ self.assertRaises(AssertionError, self.assertEndsWith, "AAA", "B")
+
+ # @unittest.expectedFailure not supported by nose
+ def test_endswith_short(self):
+ self.assertRaises(AssertionError, self.assertEndsWith, "A", "AA")
diff --git a/tests/test_textfmts.py b/tests/test_textfmts.py
new file mode 100644
index 00000000..d355ab68
--- /dev/null
+++ b/tests/test_textfmts.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+"""
+ Basic Tests for textfmts
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.token import Operator, Number, Text, Token
+from pygments.lexers.textfmts import HttpLexer
+
+
+class HttpLexerTest(unittest.TestCase):
+
+ def setUp(self):
+ self.lexer = HttpLexer()
+ self.maxDiff = None
+
+ def testApplicationXml(self):
+ fragment = u'GET / HTTP/1.0\nContent-Type: application/xml\n\n<foo>\n'
+ tokens = [
+ (Token.Name.Tag, u'<foo'),
+ (Token.Name.Tag, u'>'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(
+ tokens, list(self.lexer.get_tokens(fragment))[-len(tokens):])
+
+ def testApplicationCalendarXml(self):
+ fragment = u'GET / HTTP/1.0\nContent-Type: application/calendar+xml\n\n<foo>\n'
+ tokens = [
+ (Token.Name.Tag, u'<foo'),
+ (Token.Name.Tag, u'>'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(
+ tokens, list(self.lexer.get_tokens(fragment))[-len(tokens):])
+
diff --git a/tests/test_token.py b/tests/test_token.py
index 6a5b00b7..c96bd9ef 100644
--- a/tests/test_token.py
+++ b/tests/test_token.py
@@ -3,7 +3,7 @@
Test suite for the token module
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -36,11 +36,11 @@ class TokenTest(unittest.TestCase):
stp = token.STANDARD_TYPES.copy()
stp[token.Token] = '---' # Token and Text do conflict, that is okay
t = {}
- for k, v in stp.iteritems():
+ for k, v in stp.items():
t.setdefault(v, []).append(k)
if len(t) == len(stp):
return # Okay
- for k, v in t.iteritems():
+ for k, v in t.items():
if len(v) > 1:
self.fail("%r has more than one key: %r" % (k, v))
diff --git a/tests/test_unistring.py b/tests/test_unistring.py
new file mode 100644
index 00000000..a414347c
--- /dev/null
+++ b/tests/test_unistring.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+"""
+ Test suite for the unistring module
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+import unittest
+import random
+
+from pygments import unistring as uni
+from pygments.util import unichr
+
+
+class UnistringTest(unittest.TestCase):
+ def test_cats_exist_and_compilable(self):
+ for cat in uni.cats:
+ s = getattr(uni, cat)
+ if s == '': # Probably Cs on Jython
+ continue
+ print("%s %r" % (cat, s))
+ re.compile('[%s]' % s)
+
+ def _cats_that_match(self, c):
+ matching_cats = []
+ for cat in uni.cats:
+ s = getattr(uni, cat)
+ if s == '': # Probably Cs on Jython
+ continue
+ if re.compile('[%s]' % s).match(c):
+ matching_cats.append(cat)
+ return matching_cats
+
+ def test_spot_check_types(self):
+ # Each char should match one, and precisely one, category
+ random.seed(0)
+ for i in range(1000):
+ o = random.randint(0, 65535)
+ c = unichr(o)
+ if o > 0xd800 and o <= 0xdfff and not uni.Cs:
+ continue # Bah, Jython.
+ print(hex(o))
+ cats = self._cats_that_match(c)
+ self.assertEqual(len(cats), 1,
+ "%d (%s): %s" % (o, c, cats))
diff --git a/tests/test_using_api.py b/tests/test_using_api.py
index bb89d1e2..16d865e6 100644
--- a/tests/test_using_api.py
+++ b/tests/test_using_api.py
@@ -3,7 +3,7 @@
Pygments tests for using()
~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/tests/test_util.py b/tests/test_util.py
index dbbc66ce..720b384a 100644
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -3,19 +3,19 @@
Test suite for the util module
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import unittest
-from pygments import util
+from pygments import util, console
class FakeLexer(object):
def analyse(text):
- return float(text)
+ return text
analyse = util.make_analysator(analyse)
@@ -40,6 +40,10 @@ class UtilTest(unittest.TestCase):
equals(util.get_list_opt({}, 'a', '1 2'), ['1', '2'])
raises(util.OptionError, util.get_list_opt, {}, 'a', 1)
+ equals(util.get_choice_opt({}, 'a', ['foo', 'bar'], 'bar'), 'bar')
+ equals(util.get_choice_opt({}, 'a', ['foo', 'bar'], 'Bar', True), 'bar')
+ raises(util.OptionError, util.get_choice_opt, {}, 'a',
+ ['foo', 'bar'], 'baz')
def test_docstring_headline(self):
def f1():
@@ -55,9 +59,12 @@ class UtilTest(unittest.TestCase):
other text
"""
+ def f3():
+ pass
- self.assertEqual(util.docstring_headline(f1), "docstring headline")
- self.assertEqual(util.docstring_headline(f2), "docstring headline")
+ self.assertEqual(util.docstring_headline(f1), 'docstring headline')
+ self.assertEqual(util.docstring_headline(f2), 'docstring headline')
+ self.assertEqual(util.docstring_headline(f3), '')
def test_analysator_returns_float(self):
# If an analysator wrapped by make_analysator returns a floating point
@@ -88,10 +95,10 @@ class UtilTest(unittest.TestCase):
def test_analysator_type_error(self):
# When converting the analysator's return value to a float a
# TypeError may occur. If that happens 0.0 is returned instead.
- self.assertEqual(FakeLexer.analyse(None), 0.0)
+ self.assertEqual(FakeLexer.analyse('xxx'), 0.0)
def test_shebang_matches(self):
- self.assertTrue(util.shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?'))
+ self.assertTrue(util.shebang_matches('#!/usr/bin/env python\n', r'python(2\.\d)?'))
self.assertTrue(util.shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?'))
self.assertTrue(util.shebang_matches('#!/usr/bin/startsomethingwith python',
r'python(2\.\d)?'))
@@ -106,7 +113,7 @@ class UtilTest(unittest.TestCase):
def test_doctype_matches(self):
self.assertTrue(util.doctype_matches(
- '<!DOCTYPE html PUBLIC "a"> <html>', 'html.*'))
+ '<!DOCTYPE html> <html>', 'html.*'))
self.assertFalse(util.doctype_matches(
'<?xml ?> <DOCTYPE html PUBLIC "a"> <html>', 'html.*'))
self.assertTrue(util.html_doctype_matches(
@@ -123,7 +130,7 @@ class UtilTest(unittest.TestCase):
r = re.compile(util.unirange(0x10000, 0x20000))
m = r.match(first_non_bmp)
self.assertTrue(m)
- self.assertEquals(m.end(), len(first_non_bmp))
+ self.assertEqual(m.end(), len(first_non_bmp))
self.assertFalse(r.match(u'\uffff'))
self.assertFalse(r.match(u'xxx'))
# Tests that end is inclusive
@@ -132,4 +139,75 @@ class UtilTest(unittest.TestCase):
# build
m = r.match(first_non_bmp * 2)
self.assertTrue(m)
- self.assertEquals(m.end(), len(first_non_bmp) * 2)
+ self.assertEqual(m.end(), len(first_non_bmp) * 2)
+
+ def test_format_lines(self):
+ lst = ['cat', 'dog']
+ output = util.format_lines('var', lst)
+ d = {}
+ exec(output, d)
+ self.assertTrue(isinstance(d['var'], tuple))
+ self.assertEqual(('cat', 'dog'), d['var'])
+
+ def test_duplicates_removed_seq_types(self):
+ # tuple
+ x = util.duplicates_removed(('a', 'a', 'b'))
+ self.assertEqual(['a', 'b'], x)
+ # list
+ x = util.duplicates_removed(['a', 'a', 'b'])
+ self.assertEqual(['a', 'b'], x)
+ # iterator
+ x = util.duplicates_removed(iter(('a', 'a', 'b')))
+ self.assertEqual(['a', 'b'], x)
+
+ def test_duplicates_removed_nonconsecutive(self):
+ # keeps first
+ x = util.duplicates_removed(('a', 'b', 'a'))
+ self.assertEqual(['a', 'b'], x)
+
+ def test_guess_decode(self):
+ # UTF-8 should be decoded as UTF-8
+ s = util.guess_decode(u'\xff'.encode('utf-8'))
+ self.assertEqual(s, (u'\xff', 'utf-8'))
+
+ # otherwise, it could be latin1 or the locale encoding...
+ import locale
+ s = util.guess_decode(b'\xff')
+ self.assertTrue(s[1] in ('latin1', locale.getpreferredencoding()))
+
+ def test_guess_decode_from_terminal(self):
+ class Term:
+ encoding = 'utf-7'
+
+ s = util.guess_decode_from_terminal(u'\xff'.encode('utf-7'), Term)
+ self.assertEqual(s, (u'\xff', 'utf-7'))
+
+ s = util.guess_decode_from_terminal(u'\xff'.encode('utf-8'), Term)
+ self.assertEqual(s, (u'\xff', 'utf-8'))
+
+ def test_add_metaclass(self):
+ class Meta(type):
+ pass
+
+ @util.add_metaclass(Meta)
+ class Cls:
+ pass
+
+ self.assertEqual(type(Cls), Meta)
+
+
+class ConsoleTest(unittest.TestCase):
+
+ def test_ansiformat(self):
+ f = console.ansiformat
+ c = console.codes
+ all_attrs = f('+*_blue_*+', 'text')
+ self.assertTrue(c['blue'] in all_attrs and c['blink'] in all_attrs
+ and c['bold'] in all_attrs and c['underline'] in all_attrs
+ and c['reset'] in all_attrs)
+ self.assertRaises(KeyError, f, '*mauve*', 'text')
+
+ def test_functions(self):
+ self.assertEqual(console.reset_color(), console.codes['reset'])
+ self.assertEqual(console.colorize('blue', 'text'),
+ console.codes['blue'] + 'text' + console.codes['reset'])
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 00000000..8a33f99c
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,7 @@
+[tox]
+envlist = py26, py27, py33, py34
+[testenv]
+deps =
+ nose
+ coverage
+commands = python -d tests/run.py {posargs}